
gitlab.com/gitlab-org/gitlab-foss.git
author    GitLab Bot <gitlab-bot@gitlab.com>  2021-05-07 15:10:27 +0300
committer GitLab Bot <gitlab-bot@gitlab.com>  2021-05-07 15:10:27 +0300
commit    53f456b167f19877d663ee6ed510673cebee0f91 (patch)
tree      fcc0bb52b79c195bf0eda100cc5d7e7a16dc0c0b
parent    e8a31d8dc2afd673ca50d74d26edab0a0fec83ca (diff)
Add latest changes from gitlab-org/gitlab@master
-rw-r--r--  GITALY_SERVER_VERSION | 2
-rw-r--r--  app/assets/javascripts/content_editor/extensions/blockquote.js | 5
-rw-r--r--  app/assets/javascripts/content_editor/extensions/bold.js | 5
-rw-r--r--  app/assets/javascripts/content_editor/extensions/bullet_list.js | 5
-rw-r--r--  app/assets/javascripts/content_editor/extensions/code.js | 5
-rw-r--r--  app/assets/javascripts/content_editor/extensions/code_block_highlight.js | 6
-rw-r--r--  app/assets/javascripts/content_editor/extensions/document.js | 3
-rw-r--r--  app/assets/javascripts/content_editor/extensions/dropcursor.js | 3
-rw-r--r--  app/assets/javascripts/content_editor/extensions/gapcursor.js | 3
-rw-r--r--  app/assets/javascripts/content_editor/extensions/hard_break.js | 5
-rw-r--r--  app/assets/javascripts/content_editor/extensions/heading.js | 5
-rw-r--r--  app/assets/javascripts/content_editor/extensions/history.js | 3
-rw-r--r--  app/assets/javascripts/content_editor/extensions/horizontal_rule.js | 5
-rw-r--r--  app/assets/javascripts/content_editor/extensions/image.js | 9
-rw-r--r--  app/assets/javascripts/content_editor/extensions/italic.js | 4
-rw-r--r--  app/assets/javascripts/content_editor/extensions/link.js | 5
-rw-r--r--  app/assets/javascripts/content_editor/extensions/list_item.js | 5
-rw-r--r--  app/assets/javascripts/content_editor/extensions/ordered_list.js | 5
-rw-r--r--  app/assets/javascripts/content_editor/extensions/paragraph.js | 5
-rw-r--r--  app/assets/javascripts/content_editor/extensions/text.js | 5
-rw-r--r--  app/assets/javascripts/content_editor/services/build_serializer_config.js | 22
-rw-r--r--  app/assets/javascripts/content_editor/services/create_content_editor.js | 97
-rw-r--r--  app/assets/javascripts/content_editor/services/markdown_serializer.js | 111
-rw-r--r--  app/assets/javascripts/import_entities/import_groups/components/import_table.vue | 78
-rw-r--r--  app/assets/javascripts/import_entities/import_groups/components/import_table_row.vue | 38
-rw-r--r--  app/assets/javascripts/import_entities/import_groups/graphql/client_factory.js | 96
-rw-r--r--  app/assets/javascripts/import_entities/import_groups/graphql/fragments/bulk_import_source_group_item.fragment.graphql | 4
-rw-r--r--  app/assets/javascripts/import_entities/import_groups/graphql/mutations/add_validation_error.mutation.graphql | 9
-rw-r--r--  app/assets/javascripts/import_entities/import_groups/graphql/mutations/import_group.mutation.graphql | 9
-rw-r--r--  app/assets/javascripts/import_entities/import_groups/graphql/mutations/import_groups.mutation.graphql | 9
-rw-r--r--  app/assets/javascripts/import_entities/import_groups/graphql/mutations/remove_validation_error.mutation.graphql | 9
-rw-r--r--  app/assets/javascripts/import_entities/import_groups/graphql/services/source_groups_manager.js | 46
-rw-r--r--  app/assets/javascripts/import_entities/import_groups/graphql/typedefs.graphql | 18
-rw-r--r--  app/assets/javascripts/issue_show/components/form.vue | 4
-rw-r--r--  app/assets/javascripts/issues_list/components/issues_list_app.vue | 5
-rw-r--r--  app/assets/javascripts/issues_list/constants.js | 129
-rw-r--r--  app/assets/javascripts/issues_list/utils.js | 23
-rw-r--r--  app/assets/javascripts/pipelines/components/pipelines_list/pipelines_artifacts.vue | 89
-rw-r--r--  app/assets/javascripts/vue_merge_request_widget/components/mr_widget_pipeline.vue | 9
-rw-r--r--  app/assets/javascripts/vue_merge_request_widget/index.js | 4
-rw-r--r--  app/assets/javascripts/vue_shared/components/filtered_search_bar/constants.js | 23
-rw-r--r--  app/assets/javascripts/vue_shared/components/filtered_search_bar/tokens/emoji_token.vue | 4
-rw-r--r--  app/assets/javascripts/vue_shared/components/filtered_search_bar/tokens/weight_token.vue | 4
-rw-r--r--  app/assets/javascripts/vue_shared/components/user_select/user_select.vue | 10
-rw-r--r--  app/graphql/types/repository/blob_type.rb | 5
-rw-r--r--  app/helpers/projects_helper.rb | 24
-rw-r--r--  app/models/namespace/traversal_hierarchy.rb | 22
-rw-r--r--  app/models/namespaces/traversal/linear.rb | 18
-rw-r--r--  app/presenters/blob_presenter.rb | 6
-rw-r--r--  app/presenters/snippet_blob_presenter.rb | 6
-rw-r--r--  app/serializers/test_case_entity.rb | 2
-rw-r--r--  app/services/system_notes/time_tracking_service.rb | 4
-rw-r--r--  app/views/import/bulk_imports/status.html.haml | 3
-rw-r--r--  app/views/layouts/nav/sidebar/_project_menus.html.haml | 58
-rw-r--r--  app/views/projects/merge_requests/_widget.html.haml | 4
-rw-r--r--  changelogs/unreleased/323195-add-blob-plain-data-attribute.yml | 5
-rw-r--r--  changelogs/unreleased/lm-remove-multiple-cache-ff.yml | 5
-rw-r--r--  changelogs/unreleased/mobsf-remove-service.yml | 5
-rw-r--r--  changelogs/unreleased/retain-timelog-spent_at-time.yml | 5
-rw-r--r--  changelogs/unreleased/sh-lazy-load-pipeline-mr-widget-artifacts.yml | 5
-rw-r--r--  changelogs/unreleased/sh-remove-junit-pipeline-screenshots-view-feature-flag.yml | 5
-rw-r--r--  changelogs/unreleased/suppress-every-non-nullable-graphql-error-assignee-widget.yml | 6
-rw-r--r--  changelogs/unreleased/xanf-bulk-import-all-on-the-page.yml | 5
-rw-r--r--  config/feature_flags/development/junit_pipeline_screenshots_view.yml | 8
-rw-r--r--  config/feature_flags/development/multiple_cache_per_job.yml | 8
-rw-r--r--  config/metrics/license/20210201124933_uuid.yml | 1
-rw-r--r--  config/metrics/schema.json | 4
-rw-r--r--  doc/api/graphql/reference/index.md | 1
-rw-r--r--  doc/ci/unit_test_reports.md | 23
-rw-r--r--  doc/ci/yaml/README.md | 25
-rw-r--r--  doc/development/database/index.md | 2
-rw-r--r--  doc/development/database/pagination_guidelines.md | 315
-rw-r--r--  doc/development/database/pagination_performance_guidelines.md | 325
-rw-r--r--  doc/development/elasticsearch.md | 2
-rw-r--r--  doc/development/graphql_guide/pagination.md | 4
-rw-r--r--  doc/development/i18n/externalization.md | 42
-rw-r--r--  doc/development/img/offset_pagination_ui_v13_11.jpg | bin 0 -> 5030 bytes
-rw-r--r--  doc/development/img/project_issues_pagination_v13_11.jpg | bin 0 -> 51436 bytes
-rw-r--r--  doc/development/merge_request_performance_guidelines.md | 2
-rw-r--r--  lib/gitlab/ci/build/cache.rb | 31
-rw-r--r--  lib/gitlab/ci/config/entry/cache.rb | 110
-rw-r--r--  lib/gitlab/ci/config/entry/caches.rb | 40
-rw-r--r--  lib/gitlab/ci/config/entry/default.rb | 2
-rw-r--r--  lib/gitlab/ci/config/entry/job.rb | 2
-rw-r--r--  lib/gitlab/ci/config/entry/root.rb | 2
-rw-r--r--  lib/gitlab/ci/features.rb | 4
-rw-r--r--  lib/gitlab/ci/templates/Security/SAST.gitlab-ci.yml | 12
-rw-r--r--  lib/gitlab/quick_actions/spend_time_and_date_separator.rb | 2
-rw-r--r--  lib/gitlab/subscription_portal.rb | 5
-rw-r--r--  lib/gitlab/usage/metric_definition.rb | 4
-rw-r--r--  lib/gitlab/usage/metrics/instrumentations/base_metric.rb | 19
-rw-r--r--  lib/gitlab/usage/metrics/instrumentations/database_metric.rb | 68
-rw-r--r--  lib/gitlab/usage/metrics/instrumentations/generic_metric.rb | 32
-rw-r--r--  lib/gitlab/usage/metrics/instrumentations/redis_hll_metric.rb | 45
-rw-r--r--  lib/gitlab/usage/metrics/instrumentations/uuid_metric.rb | 15
-rw-r--r--  lib/gitlab/usage/metrics/key_path_processor.rb | 27
-rw-r--r--  lib/gitlab/usage_data_metrics.rb | 28
-rw-r--r--  lib/sidebars/projects/menus/settings_menu.rb | 145
-rw-r--r--  lib/sidebars/projects/panel.rb | 1
-rw-r--r--  lib/tasks/gitlab/usage_data.rake | 5
-rw-r--r--  locale/gitlab.pot | 15
-rw-r--r--  qa/qa/page/project/sub_menus/settings.rb | 26
-rw-r--r--  rubocop/rubocop-code_reuse.yml | 1
-rw-r--r--  spec/controllers/projects/pipelines_controller_spec.rb | 49
-rw-r--r--  spec/features/projects/settings/operations_settings_spec.rb | 11
-rw-r--r--  spec/frontend/content_editor/components/toolbar_button_spec.js | 16
-rw-r--r--  spec/frontend/content_editor/services/build_serializer_config_spec.js | 38
-rw-r--r--  spec/frontend/content_editor/services/create_content_editor_spec.js | 17
-rw-r--r--  spec/frontend/content_editor/test_utils.js | 34
-rw-r--r--  spec/frontend/import_entities/import_groups/components/import_table_row_spec.js | 17
-rw-r--r--  spec/frontend/import_entities/import_groups/components/import_table_spec.js | 69
-rw-r--r--  spec/frontend/import_entities/import_groups/graphql/client_factory_spec.js | 61
-rw-r--r--  spec/frontend/import_entities/import_groups/graphql/fixtures.js | 1
-rw-r--r--  spec/frontend/import_entities/import_groups/graphql/services/source_groups_manager_spec.js | 29
-rw-r--r--  spec/frontend/issues_list/mock_data.js | 28
-rw-r--r--  spec/frontend/issues_list/utils_spec.js | 25
-rw-r--r--  spec/frontend/pipelines/pipelines_artifacts_spec.js | 110
-rw-r--r--  spec/frontend/vue_shared/components/user_select_spec.js | 48
-rw-r--r--  spec/graphql/types/repository/blob_type_spec.rb | 3
-rw-r--r--  spec/lib/gitlab/ci/build/cache_spec.rb | 98
-rw-r--r--  spec/lib/gitlab/ci/config/entry/cache_spec.rb | 352
-rw-r--r--  spec/lib/gitlab/ci/config/entry/caches_spec.rb | 70
-rw-r--r--  spec/lib/gitlab/ci/config/entry/job_spec.rb | 36
-rw-r--r--  spec/lib/gitlab/ci/config/entry/root_spec.rb | 127
-rw-r--r--  spec/lib/gitlab/ci/pipeline/seed/build/cache_spec.rb | 247
-rw-r--r--  spec/lib/gitlab/ci/pipeline/seed/build_spec.rb | 85
-rw-r--r--  spec/lib/gitlab/ci/yaml_processor_spec.rb | 149
-rw-r--r--  spec/lib/gitlab/quick_actions/spend_time_and_date_separator_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/subscription_portal_spec.rb | 37
-rw-r--r--  spec/lib/gitlab/usage/metric_definition_spec.rb | 9
-rw-r--r--  spec/lib/gitlab/usage/metrics/instrumentations/uuid_metric_spec.rb | 7
-rw-r--r--  spec/lib/gitlab/usage/metrics/key_path_processor_spec.rb | 23
-rw-r--r--  spec/lib/gitlab/usage_data_metrics_spec.rb | 25
-rw-r--r--  spec/lib/sidebars/projects/menus/settings_menu_spec.rb | 167
-rw-r--r--  spec/models/group_spec.rb | 88
-rw-r--r--  spec/models/namespace/traversal_hierarchy_spec.rb | 30
-rw-r--r--  spec/presenters/blob_presenter_spec.rb | 39
-rw-r--r--  spec/presenters/snippet_blob_presenter_spec.rb | 39
-rw-r--r--  spec/serializers/test_case_entity_spec.rb | 44
-rw-r--r--  spec/services/notes/quick_actions_service_spec.rb | 24
-rw-r--r--  spec/services/quick_actions/interpret_service_spec.rb | 28
-rw-r--r--  spec/support/gitlab/usage/metrics_instrumentation_shared_examples.rb | 13
-rw-r--r--  spec/support/helpers/query_recorder.rb | 4
-rw-r--r--  spec/support/helpers/reload_helpers.rb | 2
-rw-r--r--  spec/support/shared_examples/row_lock_shared_examples.rb | 27
-rw-r--r--  spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb | 156
146 files changed, 3316 insertions, 1711 deletions
diff --git a/GITALY_SERVER_VERSION b/GITALY_SERVER_VERSION
index 8c25be0f02f..24687be7f9b 100644
--- a/GITALY_SERVER_VERSION
+++ b/GITALY_SERVER_VERSION
@@ -1 +1 @@
-07aa359a7724540bbfa4868407b9a2c9b45bf139
+2982990541d6980bf5035987996e976042c8ccc6
diff --git a/app/assets/javascripts/content_editor/extensions/blockquote.js b/app/assets/javascripts/content_editor/extensions/blockquote.js
new file mode 100644
index 00000000000..a4297b4550c
--- /dev/null
+++ b/app/assets/javascripts/content_editor/extensions/blockquote.js
@@ -0,0 +1,5 @@
+import { Blockquote } from '@tiptap/extension-blockquote';
+import { defaultMarkdownSerializer } from 'prosemirror-markdown/src/to_markdown';
+
+export const tiptapExtension = Blockquote;
+export const serializer = defaultMarkdownSerializer.nodes.blockquote;
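Each of these new extension modules (blockquote above, and its siblings below) follows the same two-export contract: tiptapExtension is the tiptap node or mark, and serializer, when present, is a prosemirror-markdown node or mark spec. A minimal sketch of a consumer relying on that shape (the import path is illustrative):

import * as Blockquote from '~/content_editor/extensions/blockquote';

// tiptap extensions expose their registry name and kind statically,
// which is what build_serializer_config.js destructures later in this MR.
const { name, type } = Blockquote.tiptapExtension; // 'blockquote', 'node'
const markdownSpec = Blockquote.serializer; // plugs into prosemirror-markdown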
diff --git a/app/assets/javascripts/content_editor/extensions/bold.js b/app/assets/javascripts/content_editor/extensions/bold.js
new file mode 100644
index 00000000000..e90e7b59da0
--- /dev/null
+++ b/app/assets/javascripts/content_editor/extensions/bold.js
@@ -0,0 +1,5 @@
+import { Bold } from '@tiptap/extension-bold';
+import { defaultMarkdownSerializer } from 'prosemirror-markdown/src/to_markdown';
+
+export const tiptapExtension = Bold;
+export const serializer = defaultMarkdownSerializer.marks.strong;
diff --git a/app/assets/javascripts/content_editor/extensions/bullet_list.js b/app/assets/javascripts/content_editor/extensions/bullet_list.js
new file mode 100644
index 00000000000..178b798e2d4
--- /dev/null
+++ b/app/assets/javascripts/content_editor/extensions/bullet_list.js
@@ -0,0 +1,5 @@
+import { BulletList } from '@tiptap/extension-bullet-list';
+import { defaultMarkdownSerializer } from 'prosemirror-markdown/src/to_markdown';
+
+export const tiptapExtension = BulletList;
+export const serializer = defaultMarkdownSerializer.nodes.bullet_list;
diff --git a/app/assets/javascripts/content_editor/extensions/code.js b/app/assets/javascripts/content_editor/extensions/code.js
new file mode 100644
index 00000000000..8be50dc39c5
--- /dev/null
+++ b/app/assets/javascripts/content_editor/extensions/code.js
@@ -0,0 +1,5 @@
+import { Code } from '@tiptap/extension-code';
+import { defaultMarkdownSerializer } from 'prosemirror-markdown/src/to_markdown';
+
+export const tiptapExtension = Code;
+export const serializer = defaultMarkdownSerializer.marks.code;
diff --git a/app/assets/javascripts/content_editor/extensions/code_block_highlight.js b/app/assets/javascripts/content_editor/extensions/code_block_highlight.js
index f5d97b9201f..ce8bd57c7e3 100644
--- a/app/assets/javascripts/content_editor/extensions/code_block_highlight.js
+++ b/app/assets/javascripts/content_editor/extensions/code_block_highlight.js
@@ -1,8 +1,9 @@
import { CodeBlockLowlight } from '@tiptap/extension-code-block-lowlight';
+import { defaultMarkdownSerializer } from 'prosemirror-markdown/src/to_markdown';
const extractLanguage = (element) => element.firstElementChild?.getAttribute('lang');
-export default CodeBlockLowlight.extend({
+const ExtendedCodeBlockLowlight = CodeBlockLowlight.extend({
addAttributes() {
return {
...this.parent(),
@@ -21,3 +22,6 @@ export default CodeBlockLowlight.extend({
};
},
});
+
+export const tiptapExtension = ExtendedCodeBlockLowlight;
+export const serializer = defaultMarkdownSerializer.nodes.code_block;
diff --git a/app/assets/javascripts/content_editor/extensions/document.js b/app/assets/javascripts/content_editor/extensions/document.js
new file mode 100644
index 00000000000..99aa8d6235a
--- /dev/null
+++ b/app/assets/javascripts/content_editor/extensions/document.js
@@ -0,0 +1,3 @@
+import Document from '@tiptap/extension-document';
+
+export const tiptapExtension = Document;
diff --git a/app/assets/javascripts/content_editor/extensions/dropcursor.js b/app/assets/javascripts/content_editor/extensions/dropcursor.js
new file mode 100644
index 00000000000..44c378ac7db
--- /dev/null
+++ b/app/assets/javascripts/content_editor/extensions/dropcursor.js
@@ -0,0 +1,3 @@
+import Dropcursor from '@tiptap/extension-dropcursor';
+
+export const tiptapExtension = Dropcursor;
diff --git a/app/assets/javascripts/content_editor/extensions/gapcursor.js b/app/assets/javascripts/content_editor/extensions/gapcursor.js
new file mode 100644
index 00000000000..2db862e4580
--- /dev/null
+++ b/app/assets/javascripts/content_editor/extensions/gapcursor.js
@@ -0,0 +1,3 @@
+import Gapcursor from '@tiptap/extension-gapcursor';
+
+export const tiptapExtension = Gapcursor;
diff --git a/app/assets/javascripts/content_editor/extensions/hard_break.js b/app/assets/javascripts/content_editor/extensions/hard_break.js
new file mode 100644
index 00000000000..dc1ba431151
--- /dev/null
+++ b/app/assets/javascripts/content_editor/extensions/hard_break.js
@@ -0,0 +1,5 @@
+import { HardBreak } from '@tiptap/extension-hard-break';
+import { defaultMarkdownSerializer } from 'prosemirror-markdown/src/to_markdown';
+
+export const tiptapExtension = HardBreak;
+export const serializer = defaultMarkdownSerializer.nodes.hard_break;
diff --git a/app/assets/javascripts/content_editor/extensions/heading.js b/app/assets/javascripts/content_editor/extensions/heading.js
new file mode 100644
index 00000000000..f69869d1e09
--- /dev/null
+++ b/app/assets/javascripts/content_editor/extensions/heading.js
@@ -0,0 +1,5 @@
+import { Heading } from '@tiptap/extension-heading';
+import { defaultMarkdownSerializer } from 'prosemirror-markdown/src/to_markdown';
+
+export const tiptapExtension = Heading;
+export const serializer = defaultMarkdownSerializer.nodes.heading;
diff --git a/app/assets/javascripts/content_editor/extensions/history.js b/app/assets/javascripts/content_editor/extensions/history.js
new file mode 100644
index 00000000000..554d797d30a
--- /dev/null
+++ b/app/assets/javascripts/content_editor/extensions/history.js
@@ -0,0 +1,3 @@
+import History from '@tiptap/extension-history';
+
+export const tiptapExtension = History;
diff --git a/app/assets/javascripts/content_editor/extensions/horizontal_rule.js b/app/assets/javascripts/content_editor/extensions/horizontal_rule.js
new file mode 100644
index 00000000000..dcc59476518
--- /dev/null
+++ b/app/assets/javascripts/content_editor/extensions/horizontal_rule.js
@@ -0,0 +1,5 @@
+import { HorizontalRule } from '@tiptap/extension-horizontal-rule';
+import { defaultMarkdownSerializer } from 'prosemirror-markdown/src/to_markdown';
+
+export const tiptapExtension = HorizontalRule;
+export const serializer = defaultMarkdownSerializer.nodes.horizontal_rule;
diff --git a/app/assets/javascripts/content_editor/extensions/image.js b/app/assets/javascripts/content_editor/extensions/image.js
new file mode 100644
index 00000000000..4f0109fd751
--- /dev/null
+++ b/app/assets/javascripts/content_editor/extensions/image.js
@@ -0,0 +1,9 @@
+import { Image } from '@tiptap/extension-image';
+import { defaultMarkdownSerializer } from 'prosemirror-markdown/src/to_markdown';
+
+const ExtendedImage = Image.extend({
+ defaultOptions: { inline: true },
+});
+
+export const tiptapExtension = ExtendedImage;
+export const serializer = defaultMarkdownSerializer.nodes.image;
diff --git a/app/assets/javascripts/content_editor/extensions/italic.js b/app/assets/javascripts/content_editor/extensions/italic.js
new file mode 100644
index 00000000000..b8a7c4aba3e
--- /dev/null
+++ b/app/assets/javascripts/content_editor/extensions/italic.js
@@ -0,0 +1,4 @@
+import { Italic } from '@tiptap/extension-italic';
+
+export const tiptapExtension = Italic;
+export const serializer = { open: '_', close: '_', mixable: true, expelEnclosingWhitespace: true };
diff --git a/app/assets/javascripts/content_editor/extensions/link.js b/app/assets/javascripts/content_editor/extensions/link.js
new file mode 100644
index 00000000000..9a2fa7a5c98
--- /dev/null
+++ b/app/assets/javascripts/content_editor/extensions/link.js
@@ -0,0 +1,5 @@
+import { Link } from '@tiptap/extension-link';
+import { defaultMarkdownSerializer } from 'prosemirror-markdown/src/to_markdown';
+
+export const tiptapExtension = Link;
+export const serializer = defaultMarkdownSerializer.marks.link;
diff --git a/app/assets/javascripts/content_editor/extensions/list_item.js b/app/assets/javascripts/content_editor/extensions/list_item.js
new file mode 100644
index 00000000000..86da98f6df7
--- /dev/null
+++ b/app/assets/javascripts/content_editor/extensions/list_item.js
@@ -0,0 +1,5 @@
+import { ListItem } from '@tiptap/extension-list-item';
+import { defaultMarkdownSerializer } from 'prosemirror-markdown/src/to_markdown';
+
+export const tiptapExtension = ListItem;
+export const serializer = defaultMarkdownSerializer.nodes.list_item;
diff --git a/app/assets/javascripts/content_editor/extensions/ordered_list.js b/app/assets/javascripts/content_editor/extensions/ordered_list.js
new file mode 100644
index 00000000000..d980ab8bf10
--- /dev/null
+++ b/app/assets/javascripts/content_editor/extensions/ordered_list.js
@@ -0,0 +1,5 @@
+import { OrderedList } from '@tiptap/extension-ordered-list';
+import { defaultMarkdownSerializer } from 'prosemirror-markdown/src/to_markdown';
+
+export const tiptapExtension = OrderedList;
+export const serializer = defaultMarkdownSerializer.nodes.ordered_list;
diff --git a/app/assets/javascripts/content_editor/extensions/paragraph.js b/app/assets/javascripts/content_editor/extensions/paragraph.js
new file mode 100644
index 00000000000..6c9f204b8ac
--- /dev/null
+++ b/app/assets/javascripts/content_editor/extensions/paragraph.js
@@ -0,0 +1,5 @@
+import { Paragraph } from '@tiptap/extension-paragraph';
+import { defaultMarkdownSerializer } from 'prosemirror-markdown/src/to_markdown';
+
+export const tiptapExtension = Paragraph;
+export const serializer = defaultMarkdownSerializer.nodes.paragraph;
diff --git a/app/assets/javascripts/content_editor/extensions/text.js b/app/assets/javascripts/content_editor/extensions/text.js
new file mode 100644
index 00000000000..0d76aa1f1a7
--- /dev/null
+++ b/app/assets/javascripts/content_editor/extensions/text.js
@@ -0,0 +1,5 @@
+import { Text } from '@tiptap/extension-text';
+import { defaultMarkdownSerializer } from 'prosemirror-markdown/src/to_markdown';
+
+export const tiptapExtension = Text;
+export const serializer = defaultMarkdownSerializer.nodes.text;
diff --git a/app/assets/javascripts/content_editor/services/build_serializer_config.js b/app/assets/javascripts/content_editor/services/build_serializer_config.js
new file mode 100644
index 00000000000..75e2b0f9eba
--- /dev/null
+++ b/app/assets/javascripts/content_editor/services/build_serializer_config.js
@@ -0,0 +1,22 @@
+const buildSerializerConfig = (extensions = []) =>
+ extensions
+ .filter(({ serializer }) => serializer)
+ .reduce(
+ (serializers, { serializer, tiptapExtension: { name, type } }) => {
+ const collection = `${type}s`;
+
+ return {
+ ...serializers,
+ [collection]: {
+ ...serializers[collection],
+ [name]: serializer,
+ },
+ };
+ },
+ {
+ nodes: {},
+ marks: {},
+ },
+ );
+
+export default buildSerializerConfig;
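A hedged example of what buildSerializerConfig returns for one node module and one mark module (the shapes mirror the extensions added above):

import * as Blockquote from '../extensions/blockquote'; // tiptapExtension.type === 'node'
import * as Bold from '../extensions/bold'; // tiptapExtension.type === 'mark'
import buildSerializerConfig from './build_serializer_config';

const config = buildSerializerConfig([Blockquote, Bold]);
// => {
//      nodes: { blockquote: Blockquote.serializer },
//      marks: { bold: Bold.serializer },
//    }
// Modules without a serializer export (document, history, dropcursor, ...)
// are filtered out before the reduce.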
diff --git a/app/assets/javascripts/content_editor/services/create_content_editor.js b/app/assets/javascripts/content_editor/services/create_content_editor.js
index 59ab30218b0..dbb0814e6c6 100644
--- a/app/assets/javascripts/content_editor/services/create_content_editor.js
+++ b/app/assets/javascripts/content_editor/services/create_content_editor.js
@@ -1,53 +1,57 @@
-import Blockquote from '@tiptap/extension-blockquote';
-import Bold from '@tiptap/extension-bold';
-import BulletList from '@tiptap/extension-bullet-list';
-import Code from '@tiptap/extension-code';
-import Document from '@tiptap/extension-document';
-import Dropcursor from '@tiptap/extension-dropcursor';
-import Gapcursor from '@tiptap/extension-gapcursor';
-import HardBreak from '@tiptap/extension-hard-break';
-import Heading from '@tiptap/extension-heading';
-import History from '@tiptap/extension-history';
-import HorizontalRule from '@tiptap/extension-horizontal-rule';
-import Image from '@tiptap/extension-image';
-import Italic from '@tiptap/extension-italic';
-import Link from '@tiptap/extension-link';
-import ListItem from '@tiptap/extension-list-item';
-import OrderedList from '@tiptap/extension-ordered-list';
-import Paragraph from '@tiptap/extension-paragraph';
-import Text from '@tiptap/extension-text';
import { Editor } from '@tiptap/vue-2';
import { isFunction } from 'lodash';
-
import { PROVIDE_SERIALIZER_OR_RENDERER_ERROR } from '../constants';
-import CodeBlockHighlight from '../extensions/code_block_highlight';
+import * as Blockquote from '../extensions/blockquote';
+import * as Bold from '../extensions/bold';
+import * as BulletList from '../extensions/bullet_list';
+import * as Code from '../extensions/code';
+import * as CodeBlockHighlight from '../extensions/code_block_highlight';
+import * as Document from '../extensions/document';
+import * as Dropcursor from '../extensions/dropcursor';
+import * as Gapcursor from '../extensions/gapcursor';
+import * as HardBreak from '../extensions/hard_break';
+import * as Heading from '../extensions/heading';
+import * as History from '../extensions/history';
+import * as HorizontalRule from '../extensions/horizontal_rule';
+import * as Image from '../extensions/image';
+import * as Italic from '../extensions/italic';
+import * as Link from '../extensions/link';
+import * as ListItem from '../extensions/list_item';
+import * as OrderedList from '../extensions/ordered_list';
+import * as Paragraph from '../extensions/paragraph';
+import * as Text from '../extensions/text';
+import buildSerializerConfig from './build_serializer_config';
import { ContentEditor } from './content_editor';
import createMarkdownSerializer from './markdown_serializer';
-const createTiptapEditor = ({ extensions = [], options } = {}) =>
+const builtInContentEditorExtensions = [
+ Blockquote,
+ Bold,
+ BulletList,
+ Code,
+ CodeBlockHighlight,
+ Document,
+ Dropcursor,
+ Gapcursor,
+ HardBreak,
+ Heading,
+ History,
+ HorizontalRule,
+ Image,
+ Italic,
+ Link,
+ ListItem,
+ OrderedList,
+ Paragraph,
+ Text,
+];
+
+const collectTiptapExtensions = (extensions = []) =>
+ extensions.map(({ tiptapExtension }) => tiptapExtension);
+
+const createTiptapEditor = ({ extensions = [], ...options } = {}) =>
new Editor({
- extensions: [
- Dropcursor,
- Gapcursor,
- History,
- Document,
- Text,
- Paragraph,
- Bold,
- Italic,
- Code,
- Link,
- Heading,
- HardBreak,
- Blockquote,
- HorizontalRule,
- BulletList,
- OrderedList,
- ListItem,
- Image.configure({ inline: true }),
- CodeBlockHighlight,
- ...extensions,
- ],
+ extensions: [...extensions],
editorProps: {
attributes: {
class: 'gl-outline-0!',
@@ -61,8 +65,11 @@ export const createContentEditor = ({ renderMarkdown, extensions = [], tiptapOpt
throw new Error(PROVIDE_SERIALIZER_OR_RENDERER_ERROR);
}
- const tiptapEditor = createTiptapEditor({ extensions, options: tiptapOptions });
- const serializer = createMarkdownSerializer({ render: renderMarkdown });
+ const allExtensions = [...builtInContentEditorExtensions, ...extensions];
+ const tiptapExtensions = collectTiptapExtensions(allExtensions);
+ const tiptapEditor = createTiptapEditor({ extensions: tiptapExtensions, ...tiptapOptions });
+ const serializerConfig = buildSerializerConfig(allExtensions);
+ const serializer = createMarkdownSerializer({ render: renderMarkdown, serializerConfig });
return new ContentEditor({ tiptapEditor, serializer });
};
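With the built-in list factored out, a caller only has to supply modules matching the same shape; a usage sketch where MyExtension, mySerializer, and renderToHtml are illustrative stand-ins, not part of this change:

import { createContentEditor } from '~/content_editor/services/create_content_editor';

const contentEditor = createContentEditor({
  renderMarkdown: (markdown) => renderToHtml(markdown), // required, or the factory throws
  extensions: [{ tiptapExtension: MyExtension, serializer: mySerializer }],
  tiptapOptions: { autofocus: true }, // spread into the tiptap Editor constructor
});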
diff --git a/app/assets/javascripts/content_editor/services/markdown_serializer.js b/app/assets/javascripts/content_editor/services/markdown_serializer.js
index ab8f6ce7eb5..f121cc9affd 100644
--- a/app/assets/javascripts/content_editor/services/markdown_serializer.js
+++ b/app/assets/javascripts/content_editor/services/markdown_serializer.js
@@ -1,7 +1,4 @@
-import {
- MarkdownSerializer as ProseMirrorMarkdownSerializer,
- defaultMarkdownSerializer,
-} from 'prosemirror-markdown/src/to_markdown';
+import { MarkdownSerializer as ProseMirrorMarkdownSerializer } from 'prosemirror-markdown/src/to_markdown';
import { DOMParser as ProseMirrorDOMParser } from 'prosemirror-model';
const wrapHtmlPayload = (payload) => `<div>${payload}</div>`;
@@ -18,66 +15,46 @@ const wrapHtmlPayload = (payload) => `<div>${payload}</div>`;
* that parses the Markdown and converts it into HTML.
* @returns a markdown serializer
*/
-const create = ({ render = () => null }) => {
- return {
- /**
- * Converts a Markdown string into a ProseMirror JSONDocument based
- * on a ProseMirror schema.
- * @param {ProseMirror.Schema} params.schema A ProseMirror schema that defines
- * the types of content supported in the document
- * @param {String} params.content An arbitrary markdown string
- * @returns A ProseMirror JSONDocument
- */
- deserialize: async ({ schema, content }) => {
- const html = await render(content);
-
- if (!html) {
- return null;
- }
-
- const parser = new DOMParser();
- const {
- body: { firstElementChild },
- } = parser.parseFromString(wrapHtmlPayload(html), 'text/html');
- const state = ProseMirrorDOMParser.fromSchema(schema).parse(firstElementChild);
-
- return state.toJSON();
- },
-
- /**
- * Converts a ProseMirror JSONDocument based
- * on a ProseMirror schema into Markdown
- * @param {ProseMirror.Schema} params.schema A ProseMirror schema that defines
- * the types of content supported in the document
- * @param {String} params.content A ProseMirror JSONDocument
- * @returns A Markdown string
- */
- serialize: ({ schema, content }) => {
- const document = schema.nodeFromJSON(content);
- const { nodes, marks } = defaultMarkdownSerializer;
-
- const serializer = new ProseMirrorMarkdownSerializer(
- {
- ...defaultMarkdownSerializer.nodes,
- horizontalRule: nodes.horizontal_rule,
- bulletList: nodes.bullet_list,
- listItem: nodes.list_item,
- orderedList: nodes.ordered_list,
- codeBlock: nodes.code_block,
- hardBreak: nodes.hard_break,
- },
- {
- ...defaultMarkdownSerializer.marks,
- bold: marks.strong,
- italic: { open: '_', close: '_', mixable: true, expelEnclosingWhitespace: true },
- },
- );
-
- return serializer.serialize(document, {
- tightLists: true,
- });
- },
- };
-};
-
-export default create;
+export default ({ render = () => null, serializerConfig }) => ({
+ /**
+ * Converts a Markdown string into a ProseMirror JSONDocument based
+ * on a ProseMirror schema.
+ * @param {ProseMirror.Schema} params.schema A ProseMirror schema that defines
+ * the types of content supported in the document
+ * @param {String} params.content An arbitrary markdown string
+ * @returns A ProseMirror JSONDocument
+ */
+ deserialize: async ({ schema, content }) => {
+ const html = await render(content);
+
+ if (!html) {
+ return null;
+ }
+
+ const parser = new DOMParser();
+ const {
+ body: { firstElementChild },
+ } = parser.parseFromString(wrapHtmlPayload(html), 'text/html');
+ const state = ProseMirrorDOMParser.fromSchema(schema).parse(firstElementChild);
+
+ return state.toJSON();
+ },
+
+ /**
+ * Converts a ProseMirror JSONDocument based
+ * on a ProseMirror schema into Markdown
+ * @param {ProseMirror.Schema} params.schema A ProseMirror schema that defines
+ * the types of content supported in the document
+ * @param {String} params.content A ProseMirror JSONDocument
+ * @returns A Markdown string
+ */
+ serialize: ({ schema, content }) => {
+ const proseMirrorDocument = schema.nodeFromJSON(content);
+ const { nodes, marks } = serializerConfig;
+ const serializer = new ProseMirrorMarkdownSerializer(nodes, marks);
+
+ return serializer.serialize(proseMirrorDocument, {
+ tightLists: true,
+ });
+ },
+});
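A sketch of the resulting round trip, assuming schema, renderMarkdown, and serializerConfig come from the caller as wired up in create_content_editor.js:

import createMarkdownSerializer from './markdown_serializer';

const serializer = createMarkdownSerializer({ render: renderMarkdown, serializerConfig });

// Markdown -> ProseMirror JSON, going through the backend renderer:
const doc = await serializer.deserialize({ schema, content: '> a quote' });

// ProseMirror JSON -> Markdown, using only the injected nodes/marks config:
const markdown = serializer.serialize({ schema, content: doc }); // '> a quote'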
diff --git a/app/assets/javascripts/import_entities/import_groups/components/import_table.vue b/app/assets/javascripts/import_entities/import_groups/components/import_table.vue
index f337520b0db..79b3e689442 100644
--- a/app/assets/javascripts/import_entities/import_groups/components/import_table.vue
+++ b/app/assets/javascripts/import_entities/import_groups/components/import_table.vue
@@ -1,5 +1,6 @@
<script>
import {
+ GlButton,
GlEmptyState,
GlDropdown,
GlDropdownItem,
@@ -8,10 +9,13 @@ import {
GlLoadingIcon,
GlSearchBoxByClick,
GlSprintf,
+ GlSafeHtmlDirective as SafeHtml,
+ GlTooltip,
} from '@gitlab/ui';
-import { s__, __ } from '~/locale';
+import { s__, __, n__ } from '~/locale';
import PaginationLinks from '~/vue_shared/components/pagination_links.vue';
-import importGroupMutation from '../graphql/mutations/import_group.mutation.graphql';
+import { STATUSES } from '../../constants';
+import importGroupsMutation from '../graphql/mutations/import_groups.mutation.graphql';
import setNewNameMutation from '../graphql/mutations/set_new_name.mutation.graphql';
import setTargetNamespaceMutation from '../graphql/mutations/set_target_namespace.mutation.graphql';
import availableNamespacesQuery from '../graphql/queries/available_namespaces.query.graphql';
@@ -23,6 +27,7 @@ const DEFAULT_PAGE_SIZE = PAGE_SIZES[0];
export default {
components: {
+ GlButton,
GlEmptyState,
GlDropdown,
GlDropdownItem,
@@ -31,9 +36,13 @@ export default {
GlLoadingIcon,
GlSearchBoxByClick,
GlSprintf,
+ GlTooltip,
ImportTableRow,
PaginationLinks,
},
+ directives: {
+ SafeHtml,
+ },
props: {
sourceUrl: {
@@ -65,12 +74,28 @@ export default {
},
computed: {
+ groups() {
+ return this.bulkImportSourceGroups?.nodes ?? [];
+ },
+
+ hasGroupsWithValidationError() {
+ return this.groups.some((g) => g.validation_errors.length);
+ },
+
+ availableGroupsForImport() {
+ return this.groups.filter((g) => g.progress.status === STATUSES.NONE);
+ },
+
+ isImportAllButtonDisabled() {
+ return this.hasGroupsWithValidationError || this.availableGroupsForImport.length === 0;
+ },
+
humanizedTotal() {
return this.paginationInfo.total >= 1000 ? __('1000+') : this.paginationInfo.total;
},
hasGroups() {
- return this.bulkImportSourceGroups?.nodes?.length > 0;
+ return this.groups.length > 0;
},
hasEmptyFilter() {
@@ -105,6 +130,10 @@ export default {
},
methods: {
+ groupsCount(count) {
+ return n__('%d group', '%d groups', count);
+ },
+
setPage(page) {
this.page = page;
},
@@ -123,24 +152,57 @@ export default {
});
},
- importGroup(sourceGroupId) {
+ importGroups(sourceGroupIds) {
this.$apollo.mutate({
- mutation: importGroupMutation,
- variables: { sourceGroupId },
+ mutation: importGroupsMutation,
+ variables: { sourceGroupIds },
});
},
+ importAllGroups() {
+ this.importGroups(this.availableGroupsForImport.map((g) => g.id));
+ },
+
setPageSize(size) {
this.perPage = size;
},
},
+ gitlabLogo: window.gon.gitlab_logo,
PAGE_SIZES,
};
</script>
<template>
<div>
+ <h1
+ class="gl-my-0 gl-py-4 gl-font-size-h1 gl-border-solid gl-border-gray-200 gl-border-0 gl-border-b-1 gl-display-flex"
+ >
+ <img :src="$options.gitlabLogo" class="gl-w-6 gl-h-6 gl-mb-2 gl-display-inline gl-mr-2" />
+ {{ s__('BulkImport|Import groups from GitLab') }}
+ <div ref="importAllButtonWrapper" class="gl-ml-auto">
+ <gl-button
+ v-if="!$apollo.loading && hasGroups"
+ :disabled="isImportAllButtonDisabled"
+ variant="confirm"
+ @click="importAllGroups"
+ >
+ <gl-sprintf :message="s__('BulkImport|Import %{groups}')">
+ <template #groups>
+ {{ groupsCount(availableGroupsForImport.length) }}
+ </template>
+ </gl-sprintf>
+ </gl-button>
+ </div>
+ <gl-tooltip v-if="isImportAllButtonDisabled" :target="() => $refs.importAllButtonWrapper">
+ <template v-if="hasGroupsWithValidationError">
+ {{ s__('BulkImport|One or more groups has validation errors') }}
+ </template>
+ <template v-else>
+ {{ s__('BulkImport|No groups on this page are available for import') }}
+ </template>
+ </gl-tooltip>
+ </h1>
<div
class="gl-py-5 gl-border-solid gl-border-gray-200 gl-border-0 gl-border-b-1 gl-display-flex"
>
@@ -153,7 +215,7 @@ export default {
<strong>{{ paginationInfo.end }}</strong>
</template>
<template #total>
- <strong>{{ n__('%d group', '%d groups', paginationInfo.total) }}</strong>
+ <strong>{{ groupsCount(paginationInfo.total) }}</strong>
</template>
<template #filter>
<strong>{{ filter }}</strong>
@@ -196,7 +258,7 @@ export default {
:group-path-regex="groupPathRegex"
@update-target-namespace="updateTargetNamespace(group.id, $event)"
@update-new-name="updateNewName(group.id, $event)"
- @import-group="importGroup(group.id)"
+ @import-group="importGroups([group.id])"
/>
</template>
</tbody>
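Both entry points now converge on one Apollo code path; a condensed sketch of the difference (ids are illustrative):

// Per-row button: a single id.
this.importGroups([group.id]);
// Header button: every group on the page whose progress.status is STATUSES.NONE.
this.importGroups(this.availableGroupsForImport.map((g) => g.id));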
diff --git a/app/assets/javascripts/import_entities/import_groups/components/import_table_row.vue b/app/assets/javascripts/import_entities/import_groups/components/import_table_row.vue
index 1d36b370457..51333f810a6 100644
--- a/app/assets/javascripts/import_entities/import_groups/components/import_table_row.vue
+++ b/app/assets/javascripts/import_entities/import_groups/components/import_table_row.vue
@@ -10,8 +10,11 @@ import {
GlFormInput,
} from '@gitlab/ui';
import { joinPaths } from '~/lib/utils/url_utility';
+import { s__ } from '~/locale';
import ImportStatus from '../../components/import_status.vue';
import { STATUSES } from '../../constants';
+import addValidationErrorMutation from '../graphql/mutations/add_validation_error.mutation.graphql';
+import removeValidationErrorMutation from '../graphql/mutations/remove_validation_error.mutation.graphql';
import groupQuery from '../graphql/queries/group.query.graphql';
const DEBOUNCE_INTERVAL = 300;
@@ -52,6 +55,27 @@ export default {
fullPath: this.fullPath,
};
},
+ update({ existingGroup }) {
+ const variables = {
+ field: 'new_name',
+ sourceGroupId: this.group.id,
+ };
+
+ if (!existingGroup) {
+ this.$apollo.mutate({
+ mutation: removeValidationErrorMutation,
+ variables,
+ });
+ } else {
+ this.$apollo.mutate({
+ mutation: addValidationErrorMutation,
+ variables: {
+ ...variables,
+ message: s__('BulkImport|Name already exists.'),
+ },
+ });
+ }
+ },
skip() {
return !this.isNameValid || this.isAlreadyImported;
},
@@ -63,8 +87,12 @@ export default {
return this.group.import_target;
},
+ invalidNameValidationMessage() {
+ return this.group.validation_errors.find(({ field }) => field === 'new_name')?.message;
+ },
+
isInvalid() {
- return Boolean(!this.isNameValid || this.existingGroup);
+ return Boolean(!this.isNameValid || this.invalidNameValidationMessage);
},
isNameValid() {
@@ -157,21 +185,21 @@ export default {
<template v-if="!isNameValid">
{{ __('Please choose a group URL with no special characters.') }}
</template>
- <template v-else-if="existingGroup">
- {{ s__('BulkImport|Name already exists.') }}
+ <template v-else-if="invalidNameValidationMessage">
+ {{ invalidNameValidationMessage }}
</template>
</p>
</div>
</div>
</td>
<td class="gl-p-4 gl-white-space-nowrap">
- <import-status :status="group.progress.status" />
+ <import-status :status="group.progress.status" class="gl-mt-2" />
</td>
<td class="gl-p-4">
<gl-button
v-if="!isAlreadyImported"
:disabled="isInvalid"
- variant="success"
+ variant="confirm"
category="secondary"
@click="$emit('import-group')"
>{{ __('Import') }}</gl-button
diff --git a/app/assets/javascripts/import_entities/import_groups/graphql/client_factory.js b/app/assets/javascripts/import_entities/import_groups/graphql/client_factory.js
index facefe316eb..2cde3781a6a 100644
--- a/app/assets/javascripts/import_entities/import_groups/graphql/client_factory.js
+++ b/app/assets/javascripts/import_entities/import_groups/graphql/client_factory.js
@@ -20,6 +20,7 @@ export const clientTypenames = {
BulkImportPageInfo: 'ClientBulkImportPageInfo',
BulkImportTarget: 'ClientBulkImportTarget',
BulkImportProgress: 'ClientBulkImportProgress',
+ BulkImportValidationError: 'ClientBulkImportValidationError',
};
function makeGroup(data) {
@@ -106,6 +107,7 @@ export function createResolvers({ endpoints, sourceUrl, GroupsManager = SourceGr
return makeGroup({
...group,
+ validation_errors: [],
progress: {
id: jobId ?? localProgressId(group.id),
status: cachedImportState?.status ?? STATUSES.NONE,
@@ -152,7 +154,7 @@ export function createResolvers({ endpoints, sourceUrl, GroupsManager = SourceGr
async setImportProgress(_, { sourceGroupId, status, jobId }) {
if (jobId) {
- groupsManager.saveImportState(jobId, { status });
+ groupsManager.updateImportProgress(jobId, status);
}
return makeGroup({
@@ -165,7 +167,7 @@ export function createResolvers({ endpoints, sourceUrl, GroupsManager = SourceGr
},
async updateImportStatus(_, { id, status }) {
- groupsManager.saveImportState(id, { status });
+ groupsManager.updateImportProgress(id, status);
return {
__typename: clientTypenames.BulkImportProgress,
@@ -174,39 +176,81 @@ export function createResolvers({ endpoints, sourceUrl, GroupsManager = SourceGr
};
},
- async importGroup(_, { sourceGroupId }, { client }) {
+ async addValidationError(_, { sourceGroupId, field, message }, { client }) {
const {
- data: { bulkImportSourceGroup: group },
+ data: {
+ bulkImportSourceGroup: { validation_errors: validationErrors, ...group },
+ },
} = await client.query({
query: bulkImportSourceGroupQuery,
variables: { id: sourceGroupId },
});
- const GROUP_BEING_SCHEDULED = makeGroup({
- id: sourceGroupId,
- progress: {
- id: localProgressId(sourceGroupId),
- status: STATUSES.SCHEDULING,
+ return {
+ ...group,
+ validation_errors: [
+ ...validationErrors.filter(({ field: f }) => f !== field),
+ {
+ __typename: clientTypenames.BulkImportValidationError,
+ field,
+ message,
+ },
+ ],
+ };
+ },
+
+ async removeValidationError(_, { sourceGroupId, field }, { client }) {
+ const {
+ data: {
+ bulkImportSourceGroup: { validation_errors: validationErrors, ...group },
},
+ } = await client.query({
+ query: bulkImportSourceGroupQuery,
+ variables: { id: sourceGroupId },
});
+ return {
+ ...group,
+ validation_errors: validationErrors.filter(({ field: f }) => f !== field),
+ };
+ },
+
+ async importGroups(_, { sourceGroupIds }, { client }) {
+ const groups = await Promise.all(
+ sourceGroupIds.map((id) =>
+ client
+ .query({
+ query: bulkImportSourceGroupQuery,
+ variables: { id },
+ })
+ .then(({ data }) => data.bulkImportSourceGroup),
+ ),
+ );
+
+ const GROUPS_BEING_SCHEDULED = sourceGroupIds.map((sourceGroupId) =>
+ makeGroup({
+ id: sourceGroupId,
+ progress: {
+ id: localProgressId(sourceGroupId),
+ status: STATUSES.SCHEDULING,
+ },
+ }),
+ );
+
const defaultErrorMessage = s__('BulkImport|Importing the group failed');
axios
.post(endpoints.createBulkImport, {
- bulk_import: [
- {
- source_type: 'group_entity',
- source_full_path: group.full_path,
- destination_namespace: group.import_target.target_namespace,
- destination_name: group.import_target.new_name,
- },
- ],
+ bulk_import: groups.map((group) => ({
+ source_type: 'group_entity',
+ source_full_path: group.full_path,
+ destination_namespace: group.import_target.target_namespace,
+ destination_name: group.import_target.new_name,
+ })),
})
.then(({ data: { id: jobId } }) => {
- groupsManager.saveImportState(jobId, {
- id: group.id,
- importTarget: group.import_target,
+ groupsManager.createImportState(jobId, {
status: STATUSES.CREATED,
+ groups,
});
return { status: STATUSES.CREATED, jobId };
@@ -217,14 +261,16 @@ export function createResolvers({ endpoints, sourceUrl, GroupsManager = SourceGr
return { status: STATUSES.NONE };
})
.then((newStatus) =>
- client.mutate({
- mutation: setImportProgressMutation,
- variables: { sourceGroupId, ...newStatus },
- }),
+ sourceGroupIds.forEach((sourceGroupId) =>
+ client.mutate({
+ mutation: setImportProgressMutation,
+ variables: { sourceGroupId, ...newStatus },
+ }),
+ ),
)
.catch(() => createFlash({ message: defaultErrorMessage }));
- return GROUP_BEING_SCHEDULED;
+ return GROUPS_BEING_SCHEDULED;
},
},
};
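Under the reworked resolver, a single POST to endpoints.createBulkImport now covers every selected group instead of one request per group; an illustrative payload for two groups (paths and namespaces are made up):

axios.post(endpoints.createBulkImport, {
  bulk_import: [
    // one entry per selected group; previously this array always had length 1
    { source_type: 'group_entity', source_full_path: 'group-a',
      destination_namespace: 'ns', destination_name: 'group-a' },
    { source_type: 'group_entity', source_full_path: 'group-b',
      destination_namespace: 'ns', destination_name: 'group-b' },
  ],
});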
diff --git a/app/assets/javascripts/import_entities/import_groups/graphql/fragments/bulk_import_source_group_item.fragment.graphql b/app/assets/javascripts/import_entities/import_groups/graphql/fragments/bulk_import_source_group_item.fragment.graphql
index ee3add7966c..47675cd1bd0 100644
--- a/app/assets/javascripts/import_entities/import_groups/graphql/fragments/bulk_import_source_group_item.fragment.graphql
+++ b/app/assets/javascripts/import_entities/import_groups/graphql/fragments/bulk_import_source_group_item.fragment.graphql
@@ -12,4 +12,8 @@ fragment BulkImportSourceGroupItem on ClientBulkImportSourceGroup {
target_namespace
new_name
}
+ validation_errors {
+ field
+ message
+ }
}
diff --git a/app/assets/javascripts/import_entities/import_groups/graphql/mutations/add_validation_error.mutation.graphql b/app/assets/javascripts/import_entities/import_groups/graphql/mutations/add_validation_error.mutation.graphql
new file mode 100644
index 00000000000..d95c460c046
--- /dev/null
+++ b/app/assets/javascripts/import_entities/import_groups/graphql/mutations/add_validation_error.mutation.graphql
@@ -0,0 +1,9 @@
+mutation addValidationError($sourceGroupId: String!, $field: String!, $message: String!) {
+ addValidationError(sourceGroupId: $sourceGroupId, field: $field, message: $message) @client {
+ id
+ validation_errors {
+ field
+ message
+ }
+ }
+}
diff --git a/app/assets/javascripts/import_entities/import_groups/graphql/mutations/import_group.mutation.graphql b/app/assets/javascripts/import_entities/import_groups/graphql/mutations/import_group.mutation.graphql
deleted file mode 100644
index 41d32a1d639..00000000000
--- a/app/assets/javascripts/import_entities/import_groups/graphql/mutations/import_group.mutation.graphql
+++ /dev/null
@@ -1,9 +0,0 @@
-mutation importGroup($sourceGroupId: String!) {
- importGroup(sourceGroupId: $sourceGroupId) @client {
- id
- progress {
- id
- status
- }
- }
-}
diff --git a/app/assets/javascripts/import_entities/import_groups/graphql/mutations/import_groups.mutation.graphql b/app/assets/javascripts/import_entities/import_groups/graphql/mutations/import_groups.mutation.graphql
new file mode 100644
index 00000000000..d8e46329e38
--- /dev/null
+++ b/app/assets/javascripts/import_entities/import_groups/graphql/mutations/import_groups.mutation.graphql
@@ -0,0 +1,9 @@
+mutation importGroups($sourceGroupIds: [String!]!) {
+ importGroups(sourceGroupIds: $sourceGroupIds) @client {
+ id
+ progress {
+ id
+ status
+ }
+ }
+}
diff --git a/app/assets/javascripts/import_entities/import_groups/graphql/mutations/remove_validation_error.mutation.graphql b/app/assets/javascripts/import_entities/import_groups/graphql/mutations/remove_validation_error.mutation.graphql
new file mode 100644
index 00000000000..940bf4dfaac
--- /dev/null
+++ b/app/assets/javascripts/import_entities/import_groups/graphql/mutations/remove_validation_error.mutation.graphql
@@ -0,0 +1,9 @@
+mutation removeValidationError($sourceGroupId: String!, $field: String!) {
+ removeValidationError(sourceGroupId: $sourceGroupId, field: $field) @client {
+ id
+ validation_errors {
+ field
+ message
+ }
+ }
+}
diff --git a/app/assets/javascripts/import_entities/import_groups/graphql/services/source_groups_manager.js b/app/assets/javascripts/import_entities/import_groups/graphql/services/source_groups_manager.js
index 536f96529d7..97dbdbf518a 100644
--- a/app/assets/javascripts/import_entities/import_groups/graphql/services/source_groups_manager.js
+++ b/app/assets/javascripts/import_entities/import_groups/graphql/services/source_groups_manager.js
@@ -13,25 +13,42 @@ export class SourceGroupsManager {
loadImportStatesFromStorage() {
try {
- return JSON.parse(this.storage.getItem(KEY)) ?? {};
+ return Object.fromEntries(
+ Object.entries(JSON.parse(this.storage.getItem(KEY)) ?? {}).map(([jobId, config]) => {
+ // new format of storage
+ if (config.groups) {
+ return [jobId, config];
+ }
+
+ return [
+ jobId,
+ {
+ status: config.status,
+ groups: [{ id: config.id, importTarget: config.importTarget }],
+ },
+ ];
+ }),
+ );
} catch {
return {};
}
}
- saveImportState(importId, group) {
- const key = this.getStorageKey(importId);
- const oldState = this.importStates[key] ?? {};
+ createImportState(importId, jobConfig) {
+ this.importStates[this.getStorageKey(importId)] = {
+ status: jobConfig.status,
+ groups: jobConfig.groups.map((g) => ({ importTarget: g.import_target, id: g.id })),
+ };
+ this.saveImportStatesToStorage();
+ }
- if (!oldState.id && !group.id) {
+ updateImportProgress(importId, status) {
+ const currentState = this.importStates[this.getStorageKey(importId)];
+ if (!currentState) {
return;
}
- this.importStates[key] = {
- ...oldState,
- ...group,
- status: group.status,
- };
+ currentState.status = status;
this.saveImportStatesToStorage();
}
@@ -39,10 +56,15 @@ export class SourceGroupsManager {
const PREFIX = this.getStorageKey('');
const [jobId, importState] =
Object.entries(this.importStates).find(
- ([key, group]) => key.startsWith(PREFIX) && group.id === groupId,
+ ([key, state]) => key.startsWith(PREFIX) && state.groups.some((g) => g.id === groupId),
) ?? [];
- return { jobId, importState };
+ if (!jobId) {
+ return null;
+ }
+
+ const group = importState.groups.find((g) => g.id === groupId);
+ return { jobId, importState: { ...group, status: importState.status } };
}
getStorageKey(importId) {
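The migration in loadImportStatesFromStorage normalizes two persisted shapes; an illustrative before/after for a single entry (the ids are made up):

// Legacy value written by the removed saveImportState:
const legacy = { id: 7, importTarget: { new_name: 'group' }, status: 'created' };
// Equivalent value produced by the migration, and written directly by createImportState:
const migrated = { status: 'created', groups: [{ id: 7, importTarget: { new_name: 'group' } }] };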
diff --git a/app/assets/javascripts/import_entities/import_groups/graphql/typedefs.graphql b/app/assets/javascripts/import_entities/import_groups/graphql/typedefs.graphql
index ba042d33893..c830aaa75e6 100644
--- a/app/assets/javascripts/import_entities/import_groups/graphql/typedefs.graphql
+++ b/app/assets/javascripts/import_entities/import_groups/graphql/typedefs.graphql
@@ -18,6 +18,11 @@ type ClientBulkImportProgress {
status: String!
}
+type ClientBulkImportValidationError {
+ field: String!
+ message: String!
+}
+
type ClientBulkImportSourceGroup {
id: ID!
web_url: String!
@@ -25,6 +30,7 @@ type ClientBulkImportSourceGroup {
full_name: String!
progress: ClientBulkImportProgress!
import_target: ClientBulkImportTarget!
+ validation_errors: [ClientBulkImportValidationError!]!
}
type ClientBulkImportPageInfo {
@@ -45,9 +51,15 @@ extend type Query {
}
extend type Mutation {
- setNewName(newName: String, sourceGroupId: ID!): ClientTargetNamespace!
- setTargetNamespace(targetNamespace: String, sourceGroupId: ID!): ClientTargetNamespace!
- importGroup(id: ID!): ClientBulkImportSourceGroup!
+ setNewName(newName: String, sourceGroupId: ID!): ClientBulkImportSourceGroup!
+ setTargetNamespace(targetNamespace: String, sourceGroupId: ID!): ClientBulkImportSourceGroup!
+ importGroups(sourceGroupIds: [ID!]!): [ClientBulkImportSourceGroup!]!
setImportProgress(id: ID, status: String!): ClientBulkImportSourceGroup!
updateImportProgress(id: ID, status: String!): ClientBulkImportProgress
+ addValidationError(
+ sourceGroupId: ID!
+ field: String!
+ message: String!
+ ): ClientBulkImportSourceGroup!
+ removeValidationError(sourceGroupId: ID!, field: String!): ClientBulkImportSourceGroup!
}
diff --git a/app/assets/javascripts/issue_show/components/form.vue b/app/assets/javascripts/issue_show/components/form.vue
index 7ea8ef6dfcd..b37a911a669 100644
--- a/app/assets/javascripts/issue_show/components/form.vue
+++ b/app/assets/javascripts/issue_show/components/form.vue
@@ -168,8 +168,8 @@ export default {
v-if="showOutdatedDescriptionWarning"
class="gl-mb-5"
variant="warning"
- primary-button-text="__('Keep')"
- secondary-button-text="__('Discard')"
+ :primary-button-text="__('Keep')"
+ :secondary-button-text="__('Discard')"
:dismissible="false"
@primaryAction="keepAutosave"
@secondaryAction="discardAutosave"
diff --git a/app/assets/javascripts/issues_list/components/issues_list_app.vue b/app/assets/javascripts/issues_list/components/issues_list_app.vue
index 372c15be931..142438bec21 100644
--- a/app/assets/javascripts/issues_list/components/issues_list_app.vue
+++ b/app/assets/javascripts/issues_list/components/issues_list_app.vue
@@ -34,6 +34,7 @@ import {
import axios from '~/lib/utils/axios_utils';
import { convertObjectPropsToCamelCase, getParameterByName } from '~/lib/utils/common_utils';
import { __ } from '~/locale';
+import { DEFAULT_NONE_ANY } from '~/vue_shared/components/filtered_search_bar/constants';
import AuthorToken from '~/vue_shared/components/filtered_search_bar/tokens/author_token.vue';
import EmojiToken from '~/vue_shared/components/filtered_search_bar/tokens/emoji_token.vue';
import IterationToken from '~/vue_shared/components/filtered_search_bar/tokens/iteration_token.vue';
@@ -186,7 +187,7 @@ export default {
token: AuthorToken,
dataType: 'user',
unique: true,
- defaultAuthors: [],
+ defaultAuthors: DEFAULT_NONE_ANY,
fetchAuthors: this.fetchUsers,
},
{
@@ -213,7 +214,6 @@ export default {
token: EmojiToken,
unique: true,
operators: [{ value: '=', description: __('is') }],
- defaultEmojis: [],
fetchEmojis: this.fetchEmojis,
},
{
@@ -237,7 +237,6 @@ export default {
icon: 'iteration',
token: IterationToken,
unique: true,
- defaultIterations: [],
fetchIterations: this.fetchIterations,
});
}
diff --git a/app/assets/javascripts/issues_list/constants.js b/app/assets/javascripts/issues_list/constants.js
index ee515079668..3b01d0df523 100644
--- a/app/assets/javascripts/issues_list/constants.js
+++ b/app/assets/javascripts/issues_list/constants.js
@@ -1,4 +1,9 @@
import { __, s__ } from '~/locale';
+import {
+ FILTER_ANY,
+ FILTER_CURRENT,
+ FILTER_NONE,
+} from '~/vue_shared/components/filtered_search_bar/constants';
// Maps sort order as it appears in the URL query to API `order_by` and `sort` params.
const PRIORITY = 'priority';
@@ -194,81 +199,149 @@ export const FILTERED_SEARCH_TERM = 'filtered-search-term';
export const OPERATOR_IS = '=';
export const OPERATOR_IS_NOT = '!=';
+export const NORMAL_FILTER = 'normalFilter';
+export const SPECIAL_FILTER = 'specialFilter';
+export const SPECIAL_FILTER_VALUES = [FILTER_NONE, FILTER_ANY, FILTER_CURRENT];
+
export const filters = {
author_username: {
apiParam: {
- [OPERATOR_IS]: 'author_username',
- [OPERATOR_IS_NOT]: 'not[author_username]',
+ [OPERATOR_IS]: {
+ [NORMAL_FILTER]: 'author_username',
+ },
+ [OPERATOR_IS_NOT]: {
+ [NORMAL_FILTER]: 'not[author_username]',
+ },
},
urlParam: {
- [OPERATOR_IS]: 'author_username',
- [OPERATOR_IS_NOT]: 'not[author_username]',
+ [OPERATOR_IS]: {
+ [NORMAL_FILTER]: 'author_username',
+ },
+ [OPERATOR_IS_NOT]: {
+ [NORMAL_FILTER]: 'not[author_username]',
+ },
},
},
assignee_username: {
apiParam: {
- [OPERATOR_IS]: 'assignee_username',
- [OPERATOR_IS_NOT]: 'not[assignee_username]',
+ [OPERATOR_IS]: {
+ [NORMAL_FILTER]: 'assignee_username',
+ [SPECIAL_FILTER]: 'assignee_id',
+ },
+ [OPERATOR_IS_NOT]: {
+ [NORMAL_FILTER]: 'not[assignee_username]',
+ },
},
urlParam: {
- [OPERATOR_IS]: 'assignee_username[]',
- [OPERATOR_IS_NOT]: 'not[assignee_username][]',
+ [OPERATOR_IS]: {
+ [NORMAL_FILTER]: 'assignee_username[]',
+ [SPECIAL_FILTER]: 'assignee_id',
+ },
+ [OPERATOR_IS_NOT]: {
+ [NORMAL_FILTER]: 'not[assignee_username][]',
+ },
},
},
milestone: {
apiParam: {
- [OPERATOR_IS]: 'milestone',
- [OPERATOR_IS_NOT]: 'not[milestone]',
+ [OPERATOR_IS]: {
+ [NORMAL_FILTER]: 'milestone',
+ },
+ [OPERATOR_IS_NOT]: {
+ [NORMAL_FILTER]: 'not[milestone]',
+ },
},
urlParam: {
- [OPERATOR_IS]: 'milestone_title',
- [OPERATOR_IS_NOT]: 'not[milestone_title]',
+ [OPERATOR_IS]: {
+ [NORMAL_FILTER]: 'milestone_title',
+ },
+ [OPERATOR_IS_NOT]: {
+ [NORMAL_FILTER]: 'not[milestone_title]',
+ },
},
},
labels: {
apiParam: {
- [OPERATOR_IS]: 'labels',
- [OPERATOR_IS_NOT]: 'not[labels]',
+ [OPERATOR_IS]: {
+ [NORMAL_FILTER]: 'labels',
+ },
+ [OPERATOR_IS_NOT]: {
+ [NORMAL_FILTER]: 'not[labels]',
+ },
},
urlParam: {
- [OPERATOR_IS]: 'label_name[]',
- [OPERATOR_IS_NOT]: 'not[label_name][]',
+ [OPERATOR_IS]: {
+ [NORMAL_FILTER]: 'label_name[]',
+ },
+ [OPERATOR_IS_NOT]: {
+ [NORMAL_FILTER]: 'not[label_name][]',
+ },
},
},
my_reaction_emoji: {
apiParam: {
- [OPERATOR_IS]: 'my_reaction_emoji',
+ [OPERATOR_IS]: {
+ [NORMAL_FILTER]: 'my_reaction_emoji',
+ [SPECIAL_FILTER]: 'my_reaction_emoji',
+ },
},
urlParam: {
- [OPERATOR_IS]: 'my_reaction_emoji',
+ [OPERATOR_IS]: {
+ [NORMAL_FILTER]: 'my_reaction_emoji',
+ [SPECIAL_FILTER]: 'my_reaction_emoji',
+ },
},
},
confidential: {
apiParam: {
- [OPERATOR_IS]: 'confidential',
+ [OPERATOR_IS]: {
+ [NORMAL_FILTER]: 'confidential',
+ },
},
urlParam: {
- [OPERATOR_IS]: 'confidential',
+ [OPERATOR_IS]: {
+ [NORMAL_FILTER]: 'confidential',
+ },
},
},
iteration: {
apiParam: {
- [OPERATOR_IS]: 'iteration_title',
- [OPERATOR_IS_NOT]: 'not[iteration_title]',
+ [OPERATOR_IS]: {
+ [NORMAL_FILTER]: 'iteration_title',
+ [SPECIAL_FILTER]: 'iteration_id',
+ },
+ [OPERATOR_IS_NOT]: {
+ [NORMAL_FILTER]: 'not[iteration_title]',
+ },
},
urlParam: {
- [OPERATOR_IS]: 'iteration_title',
- [OPERATOR_IS_NOT]: 'not[iteration_title]',
+ [OPERATOR_IS]: {
+ [NORMAL_FILTER]: 'iteration_title',
+ [SPECIAL_FILTER]: 'iteration_id',
+ },
+ [OPERATOR_IS_NOT]: {
+ [NORMAL_FILTER]: 'not[iteration_title]',
+ },
},
},
weight: {
apiParam: {
- [OPERATOR_IS]: 'weight',
- [OPERATOR_IS_NOT]: 'not[weight]',
+ [OPERATOR_IS]: {
+ [NORMAL_FILTER]: 'weight',
+ [SPECIAL_FILTER]: 'weight',
+ },
+ [OPERATOR_IS_NOT]: {
+ [NORMAL_FILTER]: 'not[weight]',
+ },
},
urlParam: {
- [OPERATOR_IS]: 'weight',
- [OPERATOR_IS_NOT]: 'not[weight]',
+ [OPERATOR_IS]: {
+ [NORMAL_FILTER]: 'weight',
+ [SPECIAL_FILTER]: 'weight',
+ },
+ [OPERATOR_IS_NOT]: {
+ [NORMAL_FILTER]: 'not[weight]',
+ },
},
},
};
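The extra nesting lets a single token resolve to a different backend parameter depending on its value; a sketch of the lookup using the constants defined above:

// Normal values keep the old parameter; special values (None/Any/Current) switch to the id form.
filters.assignee_username.apiParam[OPERATOR_IS][NORMAL_FILTER];  // 'assignee_username'
filters.assignee_username.apiParam[OPERATOR_IS][SPECIAL_FILTER]; // 'assignee_id'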
diff --git a/app/assets/javascripts/issues_list/utils.js b/app/assets/javascripts/issues_list/utils.js
index 30d122e7bbe..f39f34894aa 100644
--- a/app/assets/javascripts/issues_list/utils.js
+++ b/app/assets/javascripts/issues_list/utils.js
@@ -11,12 +11,15 @@ import {
LABEL_PRIORITY_DESC,
MILESTONE_DUE_ASC,
MILESTONE_DUE_DESC,
+ NORMAL_FILTER,
POPULARITY_ASC,
POPULARITY_DESC,
PRIORITY_ASC,
PRIORITY_DESC,
RELATIVE_POSITION_ASC,
sortParams,
+ SPECIAL_FILTER,
+ SPECIAL_FILTER_VALUES,
UPDATED_ASC,
UPDATED_DESC,
WEIGHT_ASC,
@@ -124,13 +127,18 @@ export const getSortOptions = (hasIssueWeightsFeature, hasBlockedIssuesFeature)
const tokenTypes = Object.keys(filters);
-const urlParamKeys = tokenTypes.flatMap((key) => Object.values(filters[key].urlParam));
+const getUrlParams = (tokenType) =>
+ Object.values(filters[tokenType].urlParam).flatMap((filterObj) => Object.values(filterObj));
+
+const urlParamKeys = tokenTypes.flatMap(getUrlParams);
const getTokenTypeFromUrlParamKey = (urlParamKey) =>
- tokenTypes.find((key) => Object.values(filters[key].urlParam).includes(urlParamKey));
+ tokenTypes.find((tokenType) => getUrlParams(tokenType).includes(urlParamKey));
const getOperatorFromUrlParamKey = (tokenType, urlParamKey) =>
- Object.entries(filters[tokenType].urlParam).find(([, urlParam]) => urlParam === urlParamKey)[0];
+ Object.entries(filters[tokenType].urlParam).find(([, filterObj]) =>
+ Object.values(filterObj).includes(urlParamKey),
+ )[0];
const convertToFilteredTokens = (locationSearch) =>
Array.from(new URLSearchParams(locationSearch).entries())
@@ -164,11 +172,15 @@ export const getFilterTokens = (locationSearch) => {
return filterTokens.concat(searchTokens);
};
+const getFilterType = (data) =>
+ SPECIAL_FILTER_VALUES.includes(data) ? SPECIAL_FILTER : NORMAL_FILTER;
+
export const convertToApiParams = (filterTokens) =>
filterTokens
.filter((token) => token.type !== FILTERED_SEARCH_TERM)
.reduce((acc, token) => {
- const apiParam = filters[token.type].apiParam[token.value.operator];
+ const filterType = getFilterType(token.value.data);
+ const apiParam = filters[token.type].apiParam[token.value.operator][filterType];
return Object.assign(acc, {
[apiParam]: acc[apiParam] ? `${acc[apiParam]},${token.value.data}` : token.value.data,
});
@@ -178,7 +190,8 @@ export const convertToUrlParams = (filterTokens) =>
filterTokens
.filter((token) => token.type !== FILTERED_SEARCH_TERM)
.reduce((acc, token) => {
- const urlParam = filters[token.type].urlParam[token.value.operator];
+ const filterType = getFilterType(token.value.data);
+ const urlParam = filters[token.type].urlParam[token.value.operator]?.[filterType];
return Object.assign(acc, {
[urlParam]: acc[urlParam] ? acc[urlParam].concat(token.value.data) : [token.value.data],
});
diff --git a/app/assets/javascripts/pipelines/components/pipelines_list/pipelines_artifacts.vue b/app/assets/javascripts/pipelines/components/pipelines_list/pipelines_artifacts.vue
index 9c3990f82df..147fff52101 100644
--- a/app/assets/javascripts/pipelines/components/pipelines_list/pipelines_artifacts.vue
+++ b/app/assets/javascripts/pipelines/components/pipelines_list/pipelines_artifacts.vue
@@ -1,40 +1,107 @@
<script>
-import { GlDropdown, GlDropdownItem, GlSprintf, GlTooltipDirective } from '@gitlab/ui';
-import { __ } from '~/locale';
+import {
+ GlAlert,
+ GlDropdown,
+ GlDropdownItem,
+ GlLoadingIcon,
+ GlSprintf,
+ GlTooltipDirective,
+} from '@gitlab/ui';
+import axios from '~/lib/utils/axios_utils';
+import { __, s__ } from '~/locale';
+
+export const i18n = {
+ artifacts: __('Artifacts'),
+ downloadArtifact: __('Download %{name} artifact'),
+ artifactSectionHeader: __('Download artifacts'),
+ artifactsFetchErrorMessage: s__('Pipelines|Could not load artifacts.'),
+ noArtifacts: s__('Pipelines|No artifacts available'),
+};
export default {
+ i18n,
directives: {
GlTooltip: GlTooltipDirective,
},
components: {
+ GlAlert,
GlDropdown,
GlDropdownItem,
+ GlLoadingIcon,
GlSprintf,
},
- translations: {
- artifacts: __('Artifacts'),
- downloadArtifact: __('Download %{name} artifact'),
+ inject: {
+ artifactsEndpoint: {
+ default: '',
+ },
+ artifactsEndpointPlaceholder: {
+ default: '',
+ },
},
props: {
- artifacts: {
- type: Array,
+ pipelineId: {
+ type: Number,
required: true,
},
},
+ data() {
+ return {
+ artifacts: [],
+ hasError: false,
+ isLoading: false,
+ };
+ },
+ computed: {
+ hasArtifacts() {
+ return Boolean(this.artifacts.length);
+ },
+ },
+ methods: {
+ fetchArtifacts() {
+ this.isLoading = true;
+ // Replace the placeholder with the ID of the pipeline we are viewing
+ const endpoint = this.artifactsEndpoint.replace(
+ this.artifactsEndpointPlaceholder,
+ this.pipelineId,
+ );
+ return axios
+ .get(endpoint)
+ .then(({ data }) => {
+ this.artifacts = data.artifacts;
+ })
+ .catch(() => {
+ this.hasError = true;
+ })
+ .finally(() => {
+ this.isLoading = false;
+ });
+ },
+ },
};
</script>
<template>
<gl-dropdown
v-gl-tooltip
class="build-artifacts js-pipeline-dropdown-download"
- :title="$options.translations.artifacts"
- :text="$options.translations.artifacts"
- :aria-label="$options.translations.artifacts"
+ :title="$options.i18n.artifacts"
+ :text="$options.i18n.artifacts"
+ :aria-label="$options.i18n.artifacts"
icon="download"
right
lazy
text-sr-only
+ @show.once="fetchArtifacts"
>
+ <gl-alert v-if="hasError" variant="danger" :dismissible="false">
+ {{ $options.i18n.artifactsFetchErrorMessage }}
+ </gl-alert>
+
+ <gl-loading-icon v-if="isLoading" />
+
+ <gl-alert v-else-if="!hasArtifacts" variant="info" :dismissible="false">
+ {{ $options.i18n.noArtifacts }}
+ </gl-alert>
+
<gl-dropdown-item
v-for="(artifact, i) in artifacts"
:key="i"
@@ -42,7 +109,7 @@ export default {
rel="nofollow"
download
>
- <gl-sprintf :message="$options.translations.downloadArtifact">
+ <gl-sprintf :message="$options.i18n.downloadArtifact">
<template #name>{{ artifact.name }}</template>
</gl-sprintf>
</gl-dropdown-item>
diff --git a/app/assets/javascripts/vue_merge_request_widget/components/mr_widget_pipeline.vue b/app/assets/javascripts/vue_merge_request_widget/components/mr_widget_pipeline.vue
index 1248a891ed9..fa46b4b1364 100644
--- a/app/assets/javascripts/vue_merge_request_widget/components/mr_widget_pipeline.vue
+++ b/app/assets/javascripts/vue_merge_request_widget/components/mr_widget_pipeline.vue
@@ -107,9 +107,6 @@ export default {
hasCommitInfo() {
return this.pipeline.commit && Object.keys(this.pipeline.commit).length > 0;
},
- hasArtifacts() {
- return this.pipeline?.details?.artifacts?.length > 0;
- },
isMergeRequestPipeline() {
return Boolean(this.pipeline.flags && this.pipeline.flags.merge_request_pipeline);
},
@@ -288,11 +285,7 @@ export default {
/>
</span>
<linked-pipelines-mini-list v-if="triggered.length" :triggered="triggered" />
- <pipeline-artifacts
- v-if="hasArtifacts"
- :artifacts="pipeline.details.artifacts"
- class="gl-ml-3"
- />
+ <pipeline-artifacts :pipeline-id="pipeline.id" class="gl-ml-3" />
</span>
</div>
</div>
diff --git a/app/assets/javascripts/vue_merge_request_widget/index.js b/app/assets/javascripts/vue_merge_request_widget/index.js
index c1c491f6fe0..3a3a1329483 100644
--- a/app/assets/javascripts/vue_merge_request_widget/index.js
+++ b/app/assets/javascripts/vue_merge_request_widget/index.js
@@ -32,6 +32,10 @@ export default () => {
const vm = new Vue({
el: '#js-vue-mr-widget',
+ provide: {
+ artifactsEndpoint: gl.mrWidgetData.artifacts_endpoint,
+ artifactsEndpointPlaceholder: gl.mrWidgetData.artifacts_endpoint_placeholder,
+ },
...MrWidgetOptions,
apolloProvider,
});
diff --git a/app/assets/javascripts/vue_shared/components/filtered_search_bar/constants.js b/app/assets/javascripts/vue_shared/components/filtered_search_bar/constants.js
index e2868879425..519b461c015 100644
--- a/app/assets/javascripts/vue_shared/components/filtered_search_bar/constants.js
+++ b/app/assets/javascripts/vue_shared/components/filtered_search_bar/constants.js
@@ -3,21 +3,24 @@ import { __ } from '~/locale';
export const DEBOUNCE_DELAY = 200;
-const DEFAULT_LABEL_NO_LABEL = { value: 'No label', text: __('No label') };
-export const DEFAULT_LABEL_NONE = { value: 'None', text: __('None') };
-export const DEFAULT_LABEL_ANY = { value: 'Any', text: __('Any') };
-export const DEFAULT_LABEL_CURRENT = { value: 'Current', text: __('Current') };
+export const FILTER_NONE = 'None';
+export const FILTER_ANY = 'Any';
+export const FILTER_CURRENT = 'Current';
-export const DEFAULT_ITERATIONS = [DEFAULT_LABEL_NONE, DEFAULT_LABEL_ANY, DEFAULT_LABEL_CURRENT];
+export const DEFAULT_LABEL_NONE = { value: FILTER_NONE, text: __(FILTER_NONE) };
+export const DEFAULT_LABEL_ANY = { value: FILTER_ANY, text: __(FILTER_ANY) };
+export const DEFAULT_NONE_ANY = [DEFAULT_LABEL_NONE, DEFAULT_LABEL_ANY];
-export const DEFAULT_LABELS = [DEFAULT_LABEL_NO_LABEL];
+export const DEFAULT_ITERATIONS = DEFAULT_NONE_ANY.concat([
+ { value: FILTER_CURRENT, text: __(FILTER_CURRENT) },
+]);
-export const DEFAULT_MILESTONES = [
- DEFAULT_LABEL_NONE,
- DEFAULT_LABEL_ANY,
+export const DEFAULT_LABELS = [{ value: 'No label', text: __('No label') }];
+
+export const DEFAULT_MILESTONES = DEFAULT_NONE_ANY.concat([
{ value: 'Upcoming', text: __('Upcoming') },
{ value: 'Started', text: __('Started') },
-];
+]);
export const SortDirection = {
descending: 'descending',
diff --git a/app/assets/javascripts/vue_shared/components/filtered_search_bar/tokens/emoji_token.vue b/app/assets/javascripts/vue_shared/components/filtered_search_bar/tokens/emoji_token.vue
index 269e29a6dff..f2f4787d80b 100644
--- a/app/assets/javascripts/vue_shared/components/filtered_search_bar/tokens/emoji_token.vue
+++ b/app/assets/javascripts/vue_shared/components/filtered_search_bar/tokens/emoji_token.vue
@@ -10,7 +10,7 @@ import { debounce } from 'lodash';
import { deprecatedCreateFlash as createFlash } from '~/flash';
import { __ } from '~/locale';
-import { DEFAULT_LABEL_NONE, DEFAULT_LABEL_ANY, DEBOUNCE_DELAY } from '../constants';
+import { DEBOUNCE_DELAY, DEFAULT_NONE_ANY } from '../constants';
import { stripQuotes } from '../filtered_search_utils';
export default {
@@ -33,7 +33,7 @@ export default {
data() {
return {
emojis: this.config.initialEmojis || [],
- defaultEmojis: this.config.defaultEmojis || [DEFAULT_LABEL_NONE, DEFAULT_LABEL_ANY],
+ defaultEmojis: this.config.defaultEmojis || DEFAULT_NONE_ANY,
loading: true,
};
},
diff --git a/app/assets/javascripts/vue_shared/components/filtered_search_bar/tokens/weight_token.vue b/app/assets/javascripts/vue_shared/components/filtered_search_bar/tokens/weight_token.vue
index cfad79b9afa..72116f0e991 100644
--- a/app/assets/javascripts/vue_shared/components/filtered_search_bar/tokens/weight_token.vue
+++ b/app/assets/javascripts/vue_shared/components/filtered_search_bar/tokens/weight_token.vue
@@ -1,6 +1,6 @@
<script>
import { GlDropdownDivider, GlFilteredSearchSuggestion, GlFilteredSearchToken } from '@gitlab/ui';
-import { DEFAULT_LABEL_ANY, DEFAULT_LABEL_NONE } from '../constants';
+import { DEFAULT_NONE_ANY } from '../constants';
export default {
baseWeights: ['0', '1', '2', '3', '4', '5'],
@@ -22,7 +22,7 @@ export default {
data() {
return {
weights: this.$options.baseWeights,
- defaultWeights: this.config.defaultWeights || [DEFAULT_LABEL_NONE, DEFAULT_LABEL_ANY],
+ defaultWeights: this.config.defaultWeights || DEFAULT_NONE_ANY,
};
},
methods: {
diff --git a/app/assets/javascripts/vue_shared/components/user_select/user_select.vue b/app/assets/javascripts/vue_shared/components/user_select/user_select.vue
index c570ea09da3..3116d2fbf32 100644
--- a/app/assets/javascripts/vue_shared/components/user_select/user_select.vue
+++ b/app/assets/javascripts/vue_shared/components/user_select/user_select.vue
@@ -108,10 +108,12 @@ export default {
error({ graphQLErrors }) {
// TODO This error suppression is temporary (BE fix required)
// https://gitlab.com/gitlab-org/gitlab/-/issues/329750
- if (
- graphQLErrors.length === 1 &&
- graphQLErrors[0]?.message === 'Cannot return null for non-nullable field GroupMember.user'
- ) {
+ const isNullError = ({ message }) => {
+ return message === 'Cannot return null for non-nullable field GroupMember.user';
+ };
+
+ if (graphQLErrors?.length > 0 && graphQLErrors.every(isNullError)) {
+ // only null-related errors exist, suppress them.
// eslint-disable-next-line no-console
console.error(
"Suppressing the error 'Cannot return null for non-nullable field GroupMember.user'. Please see https://gitlab.com/gitlab-org/gitlab/-/issues/329750",
diff --git a/app/graphql/types/repository/blob_type.rb b/app/graphql/types/repository/blob_type.rb
index 508389715f2..004bceea154 100644
--- a/app/graphql/types/repository/blob_type.rb
+++ b/app/graphql/types/repository/blob_type.rb
@@ -67,6 +67,11 @@ module Types
description: 'Blob content rich viewer.',
null: true
+ field :plain_data, GraphQL::STRING_TYPE,
+ description: 'Blob plain highlighted data.',
+ null: true,
+ calls_gitaly: true
+
def raw_text_blob
object.data unless object.binary?
end
diff --git a/app/helpers/projects_helper.rb b/app/helpers/projects_helper.rb
index 2f496b99bcf..45130dfc76d 100644
--- a/app/helpers/projects_helper.rb
+++ b/app/helpers/projects_helper.rb
@@ -285,10 +285,6 @@ module ProjectsHelper
!disabled && !compact_mode
end
- def settings_operations_available?
- !@project.archived? && can?(current_user, :admin_operations, @project)
- end
-
def error_tracking_setting_project_json
setting = @project.error_tracking_setting
@@ -666,26 +662,6 @@ module ProjectsHelper
"#{request.path}?#{options.to_param}"
end
- def sidebar_settings_paths
- %w[
- projects#edit
- integrations#show
- services#edit
- hooks#index
- hooks#edit
- access_tokens#index
- hook_logs#show
- repository#show
- ci_cd#show
- operations#show
- badges#index
- pages#show
- packages_and_registries#show
- projects/runners#show
- projects/runners#edit
- ]
- end
-
def sidebar_operations_paths
%w[
environments
diff --git a/app/models/namespace/traversal_hierarchy.rb b/app/models/namespace/traversal_hierarchy.rb
index adb31c19a4c..093b7dae246 100644
--- a/app/models/namespace/traversal_hierarchy.rb
+++ b/app/models/namespace/traversal_hierarchy.rb
@@ -34,20 +34,23 @@ class Namespace
sql = """
UPDATE namespaces
SET traversal_ids = cte.traversal_ids
- FROM (#{recursive_traversal_ids(lock: true)}) as cte
+ FROM (#{recursive_traversal_ids}) as cte
WHERE namespaces.id = cte.id
AND namespaces.traversal_ids <> cte.traversal_ids
"""
- Namespace.connection.exec_query(sql)
+ Namespace.transaction do
+ @root.lock!
+ Namespace.connection.exec_query(sql)
+ end
rescue ActiveRecord::Deadlocked
db_deadlock_counter.increment(source: 'Namespace#sync_traversal_ids!')
raise
end
# Identify all incorrect traversal_ids in the current namespace hierarchy.
- def incorrect_traversal_ids(lock: false)
+ def incorrect_traversal_ids
Namespace
- .joins("INNER JOIN (#{recursive_traversal_ids(lock: lock)}) as cte ON namespaces.id = cte.id")
+ .joins("INNER JOIN (#{recursive_traversal_ids}) as cte ON namespaces.id = cte.id")
.where('namespaces.traversal_ids <> cte.traversal_ids')
end
@@ -58,13 +61,10 @@ class Namespace
#
# Note that the traversal_ids represent a calculated traversal path for the
# namespace and not the value stored within the traversal_ids attribute.
- #
- # Optionally locked with FOR UPDATE to ensure isolation between concurrent
- # updates of the heirarchy.
- def recursive_traversal_ids(lock: false)
+ def recursive_traversal_ids
root_id = Integer(@root.id)
- sql = <<~SQL
+ <<~SQL
WITH RECURSIVE cte(id, traversal_ids, cycle) AS (
VALUES(#{root_id}, ARRAY[#{root_id}], false)
UNION ALL
@@ -74,10 +74,6 @@ class Namespace
)
SELECT id, traversal_ids FROM cte
SQL
-
- sql += ' FOR UPDATE' if lock
-
- sql
end
# This is essentially Namespace#root_ancestor which will soon be rewritten
diff --git a/app/models/namespaces/traversal/linear.rb b/app/models/namespaces/traversal/linear.rb
index be43b2e04f7..a1711bc5ee0 100644
--- a/app/models/namespaces/traversal/linear.rb
+++ b/app/models/namespaces/traversal/linear.rb
@@ -41,6 +41,7 @@ module Namespaces
UnboundedSearch = Class.new(StandardError)
included do
+ before_update :lock_both_roots, if: -> { sync_traversal_ids? && parent_id_changed? }
after_create :sync_traversal_ids, if: -> { sync_traversal_ids? }
after_update :sync_traversal_ids, if: -> { sync_traversal_ids? && saved_change_to_parent_id? }
@@ -90,6 +91,23 @@ module Namespaces
Namespace::TraversalHierarchy.for_namespace(root_ancestor).sync_traversal_ids!
end
+ # Lock the root of the hierarchy we just left, and lock the root of the hierarchy
+ # we just joined. In most cases the two hierarchies will be the same.
+ def lock_both_roots
+ parent_ids = [
+ parent_id_was || self.id,
+ parent_id || self.id
+ ].compact
+
+ roots = Gitlab::ObjectHierarchy
+ .new(Namespace.where(id: parent_ids))
+ .base_and_ancestors
+ .reorder(nil)
+ .where(parent_id: nil)
+
+ Namespace.lock.select(:id).where(id: roots).order(id: :asc).load
+ end
+
# Make sure we drop the STI `type = 'Group'` condition for better performance.
# Logically equivalent so long as hierarchies remain homogeneous.
def without_sti_condition
diff --git a/app/presenters/blob_presenter.rb b/app/presenters/blob_presenter.rb
index 000fc8214aa..cfe2bba8aa5 100644
--- a/app/presenters/blob_presenter.rb
+++ b/app/presenters/blob_presenter.rb
@@ -14,6 +14,12 @@ class BlobPresenter < Gitlab::View::Presenter::Delegated
)
end
+ def plain_data
+ return if blob.binary?
+
+ highlight(plain: false)
+ end
+
def web_url
url_helpers.project_blob_url(project, ref_qualified_path)
end
diff --git a/app/presenters/snippet_blob_presenter.rb b/app/presenters/snippet_blob_presenter.rb
index 597ef6ebc39..e9c710e4a0f 100644
--- a/app/presenters/snippet_blob_presenter.rb
+++ b/app/presenters/snippet_blob_presenter.rb
@@ -9,12 +9,6 @@ class SnippetBlobPresenter < BlobPresenter
render_rich_partial
end
- def plain_data
- return if blob.binary?
-
- highlight(plain: false)
- end
-
def raw_path
snippet_blob_raw_route(only_path: true)
end
diff --git a/app/serializers/test_case_entity.rb b/app/serializers/test_case_entity.rb
index 299160cd1bf..0e64b843fd3 100644
--- a/app/serializers/test_case_entity.rb
+++ b/app/serializers/test_case_entity.rb
@@ -20,6 +20,6 @@ class TestCaseEntity < Grape::Entity
alias_method :test_case, :object
def can_read_screenshots?
- Feature.enabled?(:junit_pipeline_screenshots_view, options[:project]) && test_case.has_attachment?
+ test_case.has_attachment?
end
end
diff --git a/app/services/system_notes/time_tracking_service.rb b/app/services/system_notes/time_tracking_service.rb
index 650e40680b1..a804a06fe4c 100644
--- a/app/services/system_notes/time_tracking_service.rb
+++ b/app/services/system_notes/time_tracking_service.rb
@@ -62,12 +62,12 @@ module SystemNotes
if time_spent == :reset
body = "removed time spent"
else
- spent_at = noteable.spent_at
+ spent_at = noteable.spent_at&.to_date
parsed_time = Gitlab::TimeTrackingFormatter.output(time_spent.abs)
action = time_spent > 0 ? 'added' : 'subtracted'
text_parts = ["#{action} #{parsed_time} of time spent"]
- text_parts << "at #{spent_at}" if spent_at
+ text_parts << "at #{spent_at}" if spent_at && spent_at != DateTime.current.to_date
body = text_parts.join(' ')
end
diff --git a/app/views/import/bulk_imports/status.html.haml b/app/views/import/bulk_imports/status.html.haml
index 917d88af75a..cd90c76ed10 100644
--- a/app/views/import/bulk_imports/status.html.haml
+++ b/app/views/import/bulk_imports/status.html.haml
@@ -2,9 +2,6 @@
- add_page_specific_style 'page_bundles/import'
- breadcrumb_title _('Import groups')
-%h1.gl-my-0.gl-py-4.gl-font-size-h1.gl-border-solid.gl-border-gray-200.gl-border-0.gl-border-b-1
- = s_('BulkImport|Import groups from GitLab')
-
#import-groups-mount-element{ data: { status_path: status_import_bulk_imports_path(format: :json),
available_namespaces_path: import_available_namespaces_path(format: :json),
create_bulk_import_path: import_bulk_imports_path(format: :json),
diff --git a/app/views/layouts/nav/sidebar/_project_menus.html.haml b/app/views/layouts/nav/sidebar/_project_menus.html.haml
index 683d9b401cc..719f3a3de33 100644
--- a/app/views/layouts/nav/sidebar/_project_menus.html.haml
+++ b/app/views/layouts/nav/sidebar/_project_menus.html.haml
@@ -1,61 +1,3 @@
-- if project_nav_tab? :settings
- = nav_link(path: sidebar_settings_paths) do
- = link_to edit_project_path(@project) do
- .nav-icon-container
- = sprite_icon('settings')
- %span.nav-item-name.qa-settings-item#js-onboarding-settings-link
- = _('Settings')
-
- %ul.sidebar-sub-level-items
- - can_edit = can?(current_user, :admin_project, @project)
- - if can_edit
- = nav_link(path: sidebar_settings_paths, html_options: { class: "fly-out-top-item" } ) do
- = link_to edit_project_path(@project) do
- %strong.fly-out-top-item-name
- = _('Settings')
- %li.divider.fly-out-top-item
- = nav_link(path: %w[projects#edit]) do
- = link_to edit_project_path(@project), title: _('General'), class: 'qa-general-settings-link' do
- %span
- = _('General')
- - if can_edit
- = nav_link(controller: [:integrations, :services]) do
- = link_to project_settings_integrations_path(@project), title: _('Integrations'), data: { qa_selector: 'integrations_settings_link' } do
- %span
- = _('Integrations')
- = nav_link(controller: [:hooks, :hook_logs]) do
- = link_to project_hooks_path(@project), title: _('Webhooks'), data: { qa_selector: 'webhooks_settings_link' } do
- %span
- = _('Webhooks')
- - if can?(current_user, :read_resource_access_tokens, @project)
- = nav_link(controller: [:access_tokens]) do
- = link_to project_settings_access_tokens_path(@project), title: _('Access Tokens'), data: { qa_selector: 'access_tokens_settings_link' } do
- %span
- = _('Access Tokens')
- = nav_link(controller: :repository) do
- = link_to project_settings_repository_path(@project), title: _('Repository') do
- %span
- = _('Repository')
- - if !@project.archived? && @project.feature_available?(:builds, current_user)
- = nav_link(controller: [:ci_cd, 'projects/runners']) do
- = link_to project_settings_ci_cd_path(@project), title: _('CI/CD') do
- %span
- = _('CI/CD')
- - if settings_operations_available?
- = nav_link(controller: [:operations]) do
- = link_to project_settings_operations_path(@project), title: _('Operations'), data: { qa_selector: 'operations_settings_link' } do
- = _('Operations')
- - if @project.pages_available?
- = nav_link(controller: :pages) do
- = link_to project_pages_path(@project), title: _('Pages') do
- %span
- = _('Pages')
- - if settings_packages_and_registries_enabled?(@project)
- = nav_link(controller: :packages_and_registries) do
- = link_to project_settings_packages_and_registries_path(@project), title: _('Packages & Registries'), data: { qa_selector: 'project_package_settings_link' } do
- %span
- = _('Packages & Registries')
-
-# Shortcut to Project > Activity
%li.hidden
= link_to activity_project_path(@project), title: _('Activity'), class: 'shortcuts-project-activity' do
diff --git a/app/views/projects/merge_requests/_widget.html.haml b/app/views/projects/merge_requests/_widget.html.haml
index 5e63e140888..606442d71a9 100644
--- a/app/views/projects/merge_requests/_widget.html.haml
+++ b/app/views/projects/merge_requests/_widget.html.haml
@@ -1,8 +1,12 @@
+- artifacts_endpoint_placeholder = ':pipeline_artifacts_id'
+
= javascript_tag do
:plain
window.gl = window.gl || {};
window.gl.mrWidgetData = #{serialize_issuable(@merge_request, serializer: 'widget', issues_links: true)}
+ window.gl.mrWidgetData.artifacts_endpoint = '#{downloadable_artifacts_project_pipeline_path(@project, artifacts_endpoint_placeholder, format: :json)}';
+ window.gl.mrWidgetData.artifacts_endpoint_placeholder = '#{artifacts_endpoint_placeholder}';
window.gl.mrWidgetData.squash_before_merge_help_path = '#{help_page_path("user/project/merge_requests/squash_and_merge")}';
window.gl.mrWidgetData.ci_troubleshooting_docs_path = '#{help_page_path('ci/troubleshooting.md')}';
window.gl.mrWidgetData.mr_troubleshooting_docs_path = '#{help_page_path('user/project/merge_requests/reviews/index.md', anchor: 'troubleshooting')}';
diff --git a/changelogs/unreleased/323195-add-blob-plain-data-attribute.yml b/changelogs/unreleased/323195-add-blob-plain-data-attribute.yml
new file mode 100644
index 00000000000..01bb0ca5f6e
--- /dev/null
+++ b/changelogs/unreleased/323195-add-blob-plain-data-attribute.yml
@@ -0,0 +1,5 @@
+---
+title: Expose blob plain data in GraphQL
+merge_request: 61016
+author:
+type: added
diff --git a/changelogs/unreleased/lm-remove-multiple-cache-ff.yml b/changelogs/unreleased/lm-remove-multiple-cache-ff.yml
new file mode 100644
index 00000000000..c4416688333
--- /dev/null
+++ b/changelogs/unreleased/lm-remove-multiple-cache-ff.yml
@@ -0,0 +1,5 @@
+---
+title: Removes multiple_cache_per_job feature flag and associated code
+merge_request:
+author: Laura Montemayor
+type: removed
diff --git a/changelogs/unreleased/mobsf-remove-service.yml b/changelogs/unreleased/mobsf-remove-service.yml
new file mode 100644
index 00000000000..c07a9db9178
--- /dev/null
+++ b/changelogs/unreleased/mobsf-remove-service.yml
@@ -0,0 +1,5 @@
+---
+title: Remove mobsf service for mobsf SAST job
+merge_request: 60770
+author:
+type: changed
diff --git a/changelogs/unreleased/retain-timelog-spent_at-time.yml b/changelogs/unreleased/retain-timelog-spent_at-time.yml
new file mode 100644
index 00000000000..0bf76fa0663
--- /dev/null
+++ b/changelogs/unreleased/retain-timelog-spent_at-time.yml
@@ -0,0 +1,5 @@
+---
+title: Retain timelog spent_at time
+merge_request: 60191
+author: Lee Tickett @leetickett
+type: changed
diff --git a/changelogs/unreleased/sh-lazy-load-pipeline-mr-widget-artifacts.yml b/changelogs/unreleased/sh-lazy-load-pipeline-mr-widget-artifacts.yml
new file mode 100644
index 00000000000..54f934f054f
--- /dev/null
+++ b/changelogs/unreleased/sh-lazy-load-pipeline-mr-widget-artifacts.yml
@@ -0,0 +1,5 @@
+---
+title: Lazy load artifacts dropdown in pipelines merge request widget
+merge_request: 61055
+author:
+type: added
diff --git a/changelogs/unreleased/sh-remove-junit-pipeline-screenshots-view-feature-flag.yml b/changelogs/unreleased/sh-remove-junit-pipeline-screenshots-view-feature-flag.yml
new file mode 100644
index 00000000000..8ae4f650a98
--- /dev/null
+++ b/changelogs/unreleased/sh-remove-junit-pipeline-screenshots-view-feature-flag.yml
@@ -0,0 +1,5 @@
+---
+title: Show unit report attachments in the pipeline test report
+merge_request: 61075
+author:
+type: added
diff --git a/changelogs/unreleased/suppress-every-non-nullable-graphql-error-assignee-widget.yml b/changelogs/unreleased/suppress-every-non-nullable-graphql-error-assignee-widget.yml
new file mode 100644
index 00000000000..83483b9abc2
--- /dev/null
+++ b/changelogs/unreleased/suppress-every-non-nullable-graphql-error-assignee-widget.yml
@@ -0,0 +1,6 @@
+---
+title: Suppress all non-nullable field errors for assignee widget graphql queries
+ to remove assignee fetching error messages in boards
+merge_request: 61091
+author:
+type: fixed
diff --git a/changelogs/unreleased/xanf-bulk-import-all-on-the-page.yml b/changelogs/unreleased/xanf-bulk-import-all-on-the-page.yml
new file mode 100644
index 00000000000..78c223ff187
--- /dev/null
+++ b/changelogs/unreleased/xanf-bulk-import-all-on-the-page.yml
@@ -0,0 +1,5 @@
+---
+title: Implement bulk import for all groups on the page
+merge_request: 61097
+author:
+type: added
diff --git a/config/feature_flags/development/junit_pipeline_screenshots_view.yml b/config/feature_flags/development/junit_pipeline_screenshots_view.yml
deleted file mode 100644
index eae38966064..00000000000
--- a/config/feature_flags/development/junit_pipeline_screenshots_view.yml
+++ /dev/null
@@ -1,8 +0,0 @@
----
-name: junit_pipeline_screenshots_view
-introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/31029
-rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/202114
-milestone: '13.0'
-type: development
-group: group::verify testing
-default_enabled: false
diff --git a/config/feature_flags/development/multiple_cache_per_job.yml b/config/feature_flags/development/multiple_cache_per_job.yml
deleted file mode 100644
index f65354cce2c..00000000000
--- a/config/feature_flags/development/multiple_cache_per_job.yml
+++ /dev/null
@@ -1,8 +0,0 @@
----
-name: multiple_cache_per_job
-introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/53410
-rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/321877
-milestone: '13.10'
-type: development
-group: group::pipeline authoring
-default_enabled: true
diff --git a/config/metrics/license/20210201124933_uuid.yml b/config/metrics/license/20210201124933_uuid.yml
index 9146bf1e070..d2e6edec884 100644
--- a/config/metrics/license/20210201124933_uuid.yml
+++ b/config/metrics/license/20210201124933_uuid.yml
@@ -11,6 +11,7 @@ milestone: "9.1"
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/1521
time_frame: none
data_source: database
+instrumentation_class: 'Gitlab::Usage::Metrics::Instrumentations::UuidMetric'
distribution:
- ee
- ce
diff --git a/config/metrics/schema.json b/config/metrics/schema.json
index a5d408c36fd..e9e3ac5ef40 100644
--- a/config/metrics/schema.json
+++ b/config/metrics/schema.json
@@ -51,6 +51,10 @@
"type": "string",
"enum": ["database", "redis", "redis_hll", "prometheus", "ruby"]
},
+ "instrumentation_class": {
+ "type": "string",
+ "pattern": "^(Gitlab::Usage::Metrics::Instrumentations::)(([A-Z][a-z]+)+::)*(([A-Z][a-z]+)+)$"
+ },
"distribution": {
"type": "array",
"items": {
diff --git a/doc/api/graphql/reference/index.md b/doc/api/graphql/reference/index.md
index 28f91c7139e..d9d6c6d5fe7 100644
--- a/doc/api/graphql/reference/index.md
+++ b/doc/api/graphql/reference/index.md
@@ -11790,6 +11790,7 @@ Returns [`Tree`](#tree).
| <a id="repositoryblobname"></a>`name` | [`String`](#string) | Blob name. |
| <a id="repositorybloboid"></a>`oid` | [`String!`](#string) | OID of the blob. |
| <a id="repositoryblobpath"></a>`path` | [`String!`](#string) | Path of the blob. |
+| <a id="repositoryblobplaindata"></a>`plainData` | [`String`](#string) | Blob plain highlighted data. |
| <a id="repositoryblobrawblob"></a>`rawBlob` | [`String`](#string) | The raw content of the blob. |
| <a id="repositoryblobrawpath"></a>`rawPath` | [`String`](#string) | Web path to download the raw blob. |
| <a id="repositoryblobrawsize"></a>`rawSize` | [`Int`](#int) | Size (in bytes) of the blob, or the blob target if stored externally. |
diff --git a/doc/ci/unit_test_reports.md b/doc/ci/unit_test_reports.md
index 8980d32cc99..c71d455670c 100644
--- a/doc/ci/unit_test_reports.md
+++ b/doc/ci/unit_test_reports.md
@@ -333,32 +333,17 @@ If parsing JUnit report XML results in an error, an indicator is shown next to t
## Viewing JUnit screenshots on GitLab
-> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/202114) in GitLab 13.0.
-> - It's deployed behind a feature flag, disabled by default.
-> - To use it in GitLab self-managed instances, ask a GitLab administrator to [enable it](#enabling-the-junit-screenshots-feature). **(FREE SELF)**
-
-WARNING:
-This feature might not be available to you. Check the **version history** note above for details.
-
-When [this issue](https://gitlab.com/gitlab-org/gitlab/-/issues/6061) is complete, the attached file is visible on the pipeline details page.
+> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/202114) in GitLab 13.0 behind the `:junit_pipeline_screenshots_view` feature flag, disabled by default.
+> - The feature flag was removed and the feature was [made generally available](https://gitlab.com/gitlab-org/gitlab/-/issues/216979) in GitLab 13.12.
If JUnit report format XML files contain an `attachment` tag, GitLab parses the attachment.
-Upload your screenshots as [artifacts](yaml/README.md#artifactsreportsjunit) to GitLab. The `attachment` tag **must** contain the absolute path to the screenshots you uploaded.
-
```xml
<testcase time="1.00" name="Test">
<system-out>[[ATTACHMENT|/absolute/path/to/some/file]]</system-out>
</testcase>
```
-### Enabling the JUnit screenshots feature **(FREE SELF)**
-
-This feature comes with the `:junit_pipeline_screenshots_view` feature flag disabled by default.
-
-To enable this feature, ask a GitLab administrator with [Rails console access](../administration/feature_flags.md#how-to-enable-and-disable-features-behind-flags) to run the
-following command:
+Upload your screenshots as [artifacts](yaml/README.md#artifactsreportsjunit) to GitLab. The `attachment` tag **must** contain the absolute path to the screenshots you uploaded.
-```ruby
-Feature.enable(:junit_pipeline_screenshots_view)
-```
+A link to the test case attachment appears in the test case details in [the pipeline test report](#viewing-unit-test-reports-on-gitlab).
diff --git a/doc/ci/yaml/README.md b/doc/ci/yaml/README.md
index 23cdce8187b..858e738e750 100644
--- a/doc/ci/yaml/README.md
+++ b/doc/ci/yaml/README.md
@@ -2970,11 +2970,7 @@ You can specify a [fallback cache key](#fallback-cache-key) to use if the specif
##### Multiple caches
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/32814) in GitLab 13.10.
-> - [Deployed behind a feature flag](../../user/feature_flags.md), disabled by default.
-> - [Enabled by default](https://gitlab.com/gitlab-org/gitlab/-/issues/321877) in GitLab 13.11.
-> - Enabled on GitLab.com.
-> - Recommended for production use.
-> - For GitLab self-managed instances, GitLab administrators can opt to [disable it](#enable-or-disable-multiple-caches). **(FREE SELF)**
+> - [Feature flag removed](https://gitlab.com/gitlab-org/gitlab/-/issues/321877) in GitLab 13.12.
You can have a maximum of four caches:
@@ -3001,25 +2997,6 @@ test-job:
If multiple caches are combined with a [Fallback cache key](#fallback-cache-key),
the fallback is fetched multiple times if multiple caches are not found.
-##### Enable or disable multiple caches **(FREE SELF)**
-
-The multiple caches feature is under development but ready for production use.
-It is deployed behind a feature flag that is **enabled by default**.
-[GitLab administrators with access to the GitLab Rails console](../../administration/feature_flags.md)
-can opt to disable it.
-
-To enable it:
-
-```ruby
-Feature.enable(:multiple_cache_per_job)
-```
-
-To disable it:
-
-```ruby
-Feature.disable(:multiple_cache_per_job)
-```
-
#### Fallback cache key
> [Introduced](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/1534) in GitLab Runner 13.4.
diff --git a/doc/development/database/index.md b/doc/development/database/index.md
index 01f6753e7a0..b61a71ffb8e 100644
--- a/doc/development/database/index.md
+++ b/doc/development/database/index.md
@@ -60,6 +60,8 @@ info: To determine the technical writer assigned to the Stage/Group associated w
- [Updating multiple values](setting_multiple_values.md)
- [Constraints naming conventions](constraint_naming_convention.md)
- [Query performance guidelines](../query_performance.md)
+- [Pagination guidelines](pagination_guidelines.md)
+ - [Pagination performance guidelines](pagination_performance_guidelines.md)
## Case studies
diff --git a/doc/development/database/pagination_guidelines.md b/doc/development/database/pagination_guidelines.md
new file mode 100644
index 00000000000..aa3915cd4b6
--- /dev/null
+++ b/doc/development/database/pagination_guidelines.md
@@ -0,0 +1,315 @@
+---
+stage: Enablement
+group: Database
+info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments
+---
+
+# Pagination guidelines
+
+This document gives an overview of the current capabilities and provides best practices for paginating over data in GitLab, and in particular for PostgreSQL.
+
+## Why do we need pagination?
+
+Pagination is a popular technique to avoid loading too much data in one web request. This usually happens when we render a list of records. A common scenario is visualizing parent-child (has-many) relations on the UI.
+
+Example: listing issues within a project
+
+As the number of issues grows within the project, the list gets longer. To render the list, the backend does the following:
+
+1. Loads the records from the database, usually in a particular order.
+1. Serializes the records in Ruby: builds Ruby (ActiveRecord) objects and then builds a JSON or HTML string.
+1. Sends the response back to the browser.
+1. The browser renders the content.
+
+We have two options for rendering the content:
+
+- HTML: backend deals with the rendering (HAML template).
+- JSON: the client (client-side JavaScript) transforms the payload into HTML.
+
+Rendering long lists can significantly affect both the frontend and backend performance:
+
+- The database needs to read a lot of data from the disk.
+- The result of the query (records) is eventually transformed into Ruby objects, which increases memory allocation.
+- Large responses take more time to send over the wire to the user's browser.
+- Rendering long lists might freeze the browser (bad user experience).
+
+With pagination, the data is split into equal pieces (pages). On the first visit, the user receives only a limited number of items (page size). The user can see more items by paginating forward, which results in a new HTTP request and a new database query.
+
+![Project issues page with pagination](../img/project_issues_pagination_v13_11.jpg)
+
+## General guidelines for paginating
+
+### Pick the right approach
+
+Let the database handle the pagination, filtering, and data retrieval. Implementing in-memory pagination on the backend (`paginate_array` from kaminari) or on the frontend (JavaScript) might work for a few hundred records. If application limits are not defined, things can get out of control quickly.
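+
+As a rough illustration, here is a minimal sketch (assuming a hypothetical `Issue` model in a Rails app with kaminari) of the difference between the two approaches:
+
+```ruby
+# In-memory pagination: loads ALL matching records into Ruby first,
+# then slices out one page. Memory use grows with the table size.
+issues = Kaminari.paginate_array(Issue.where(project_id: 1).to_a).page(1).per(20)
+
+# Database pagination: LIMIT/OFFSET is pushed down to PostgreSQL,
+# so only one page of records is read and materialized.
+issues = Issue.where(project_id: 1).order(:id).page(1).per(20)
+```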
+
+### Reduce complexity
+
+When we list records on the page we often provide additional filters and different sort options. This can complicate things on the backend side significantly.
+
+For the MVC version, consider the following:
+
+- Reduce the number of sort options to the minimum.
+- Reduce the number of filters (dropdown, search bar) to the minimum.
+
+To make sorting and pagination efficient, for each sort option we need at least two database indexes (ascending, descending order). If we add filter options (by state or by author), we might need more indexes to maintain good performance. Note that indexes are not free: they can significantly affect `UPDATE` query timings.
+
+It's not possible to make all filter and sort combinations performant, so we should optimize for the most common usage patterns.
+
+### Prepare for scaling
+
+Offset-based pagination is the easiest way to paginate over records; however, it does not scale well for large tables. As a long-term solution, keyset pagination is preferred. The tooling around keyset pagination is not as mature as for offset pagination, so currently it's easier to start with offset pagination and then switch to keyset pagination.
+
+To avoid losing functionality and to maintain backward compatibility when switching pagination methods, it's advised to consider the following in the design phase:
+
+- Avoid presenting total counts; prefer limit counts (see the sketch after this list).
+  - Example: count a maximum of 1001 records, then show "1000+" on the UI if the count is 1001, and the actual number otherwise.
+  - See the [badge counters approach](../merge_request_performance_guidelines.md#badge-counters) for more information.
+- Avoid using page numbers; use next and previous page buttons.
+ - Keyset pagination doesn't support page numbers.
+- For APIs, advise consumers against building next-page URLs by hand.
+  - Promote the usage of the [`Link` header](../../api/README.md#pagination-link-header), where the URLs for the next and previous page are provided by the backend.
+  - This way, changing the URL structure is possible without breaking backward compatibility.
+
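+As a sketch, a limit count in Rails could look like this (the `Issue` scope and the `1001` threshold follow the example above):
+
+```ruby
+# COUNT(*) runs over a subquery capped at 1001 rows, so the database
+# never counts beyond the threshold.
+count = Issue.where(project_id: 1).limit(1001).count
+
+badge_text = count > 1000 ? '1000+' : count.to_s
+```
+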
+NOTE:
+Infinite scroll can use keyset pagination without affecting the user experience since there are no exposed page numbers.
+
+## Options for pagination
+
+### Offset pagination
+
+The most common way to paginate lists is using offset-based pagination (UI and REST API). It's backed by the popular [kaminari](https://github.com/kaminari/kaminari) Ruby gem, which provides convenient helper methods to implement pagination on ActiveRecord queries.
+
+Offset-based pagination leverages the `LIMIT` and `OFFSET` SQL clauses to take a specific slice from the table.
+
+Example database query when looking for the 2nd page of the issues within our project:
+
+```sql
+SELECT issues.* FROM issues WHERE project_id = 1 ORDER BY id LIMIT 20 OFFSET 20
+```
+
+1. Move an imaginary pointer over the table rows and skip 20 rows.
+1. Take the next 20 rows.
+
+Notice that the query also orders the rows by the primary key (`id`). When paginating data, specifying the order is very important. Without it, the returned rows are non-deterministic and can confuse the end-user.
+
+#### Page numbers
+
+Example pagination bar:
+
+![Page selector rendered by kaminari](../img/offset_pagination_ui_v13_11.jpg)
+
+The kaminari gem renders a nice pagination bar on the UI with page numbers and, optionally, quick shortcuts: the next, previous, first, and last page buttons. To render these buttons, kaminari needs to know the number of rows, and for that, a count query is executed.
+
+```sql
+SELECT COUNT(*) FROM issues WHERE project_id = 1
+```
+
+#### Performance
+
+##### Index coverage
+
+To achieve good performance, the `ORDER BY` clause needs to be covered by an index.
+
+Assuming that we have the following index:
+
+```sql
+CREATE INDEX index_on_issues_project_id ON issues (project_id);
+```
+
+Let's try to request the first page:
+
+```sql
+SELECT issues.* FROM issues WHERE project_id = 1 ORDER BY id LIMIT 20;
+```
+
+We can produce the same query in Rails:
+
+```ruby
+Issue.where(project_id: 1).page(1).per(20)
+```
+
+The SQL query returns a maximum of 20 rows. However, this doesn't mean that the database only reads 20 rows from the disk to produce the result.
+
+This is what will happen:
+
+1. The database will try to plan the execution in the most efficient way possible based on the table statistics and the available indexes.
+1. The planner knows that we have an index covering the `project_id` column.
+1. The database will read all rows using the index on `project_id`.
+1. The rows at this point are not sorted, so the database will need to sort the rows.
+1. The database returns the first 20 rows.
+
+If the project has 10_000 rows, the database reads 10_000 rows and sorts them in memory (or on disk). This is not going to scale well in the long term.
+
+To fix this we need the following index:
+
+```sql
+CREATE INDEX index_on_issues_project_id ON issues (project_id, id);
+```
+
+By making the `id` column part of the index, the previous query reads at most 20 rows. The query performs well regardless of the number of issues within a project. So with this change, we've also improved the initial page load (when the user loads the issue page).
+
+NOTE:
+Here we're leveraging the ordered property of the b-tree database index. Values in the index are sorted, so reading 20 rows does not require further sorting.
+
+#### Limitations
+
+##### `COUNT(*)` on a large dataset
+
+Kaminari by default executes a count query to determine the number of pages for rendering the page links. Count queries can be quite expensive for a large table; in an unfortunate scenario, the queries simply time out.
+
+To work around this, we can run kaminari without invoking the count SQL query.
+
+```ruby
+Issue.where(project_id: 1).page(1).per(20).without_count
+```
+
+In this case, the count query will not be executed and the pagination will no longer render the page numbers. We'll see only the next and previous links.
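+
+Assuming the same `Issue` query, a sketch of what remains available without the count; in this mode, kaminari determines the page state by peeking at one extra row instead of counting:
+
+```ruby
+issues = Issue.where(project_id: 1).page(2).per(20).without_count
+
+issues.last_page?    # works without a count query
+issues.out_of_range? # true when the requested page has no records
+```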
+
+##### `OFFSET` on a large dataset
+
+When we paginate over a large dataset, we might notice that the response time gets slower and slower. This is due to the `OFFSET` clause, which seeks through the rows and skips N rows.
+
+From the user's point of view, this might not always be noticeable. As the user paginates forward, the previous rows might still be in the database's buffer cache. If the user shares the link with someone else and it's opened after a few minutes or hours, the response time might be significantly higher, or the request might even time out.
+
+When requesting a large page number, the database needs to read `PAGE * PAGE_SIZE` rows. This makes offset pagination **unsuitable for large database tables**.
+
+Example: listing users on the Admin page
+
+Listing users with a very simple SQL query:
+
+```sql
+SELECT "users".* FROM "users" ORDER BY "users"."id" DESC LIMIT 20 OFFSET 0
+```
+
+The query execution plan shows that this query is efficient; the database read only 20 rows (`rows=20`):
+
+```plaintext
+ Limit (cost=0.43..3.19 rows=20 width=1309) (actual time=0.098..2.093 rows=20 loops=1)
+ Buffers: shared hit=103
+ -> Index Scan Backward using users_pkey on users (cost=0.43..X rows=X width=1309) (actual time=0.097..2.087 rows=20 loops=1)
+ Buffers: shared hit=103
+ Planning Time: 0.333 ms
+ Execution Time: 2.145 ms
+(6 rows)
+```
+
+See [Understanding EXPLAIN plans](../understanding_explain_plans.md) for more information about reading execution plans.
+
+Let's visit the 50_000th page:
+
+```sql
+SELECT "users".* FROM "users" ORDER BY "users"."id" DESC LIMIT 20 OFFSET 999980;
+```
+
+The plan shows that the database reads 1_000_000 rows to return 20 rows, with a very high execution time (5.5 seconds):
+
+```plaintext
+Limit (cost=137878.89..137881.65 rows=20 width=1309) (actual time=5523.588..5523.667 rows=20 loops=1)
+ Buffers: shared hit=1007901 read=14774 written=609
+ I/O Timings: read=420.591 write=57.344
+ -> Index Scan Backward using users_pkey on users (cost=0.43..X rows=X width=1309) (actual time=0.060..5459.353 rows=1000000 loops=1)
+ Buffers: shared hit=1007901 read=14774 written=609
+ I/O Timings: read=420.591 write=57.344
+ Planning Time: 0.821 ms
+ Execution Time: 5523.745 ms
+(8 rows)
+```
+
+We can argue that a normal user will not visit these pages; however, API users could easily navigate to very high page numbers (scraping, collecting data).
+
+### Keyset pagination
+
+Keyset pagination addresses the performance concerns of "skipping" previous rows when requesting a large page; however, it's not a drop-in replacement for offset-based pagination. Keyset pagination is currently used only in the [GraphQL API](../graphql_guide/pagination.md).
+
+Consider the following `issues` table:
+
+|`id`|`project_id`|
+|-|-|
+|1|1|
+|2|1|
+|3|2|
+|4|1|
+|5|1|
+|6|2|
+|7|2|
+|8|1|
+|9|1|
+|10|2|
+
+Let's paginate over the whole table ordered by the primary key (`id`). The query for the first page is the same as the offset pagination query; for simplicity, we use 5 as the page size:
+
+```sql
+SELECT "issues".* FROM "issues" ORDER BY "issues"."id" ASC LIMIT 5
+```
+
+Notice that we didn't add the `OFFSET` clause.
+
+To get to the next page, we need to extract values that are part of the `ORDER BY` clause from the last row. In this case, we just need the `id`, which is 5. Now we construct the query for the next page:
+
+```sql
+SELECT "issues".* FROM "issues" WHERE "issues"."id" > 5 ORDER BY "issues"."id" ASC LIMIT 5
+```
+
+Looking at the query execution plan, we can see that this query read only 5 rows (offset-based pagination would read 10 rows):
+
+```plaintext
+ Limit (cost=0.56..2.08 rows=5 width=1301) (actual time=0.093..0.137 rows=5 loops=1)
+ -> Index Scan using issues_pkey on issues (cost=0.56..X rows=X width=1301) (actual time=0.092..0.136 rows=5 loops=1)
+ Index Cond: (id > 5)
+ Planning Time: 7.710 ms
+ Execution Time: 0.224 ms
+(5 rows)
+```
+
+#### Limitations
+
+##### No page numbers
+
+Offset pagination provides an easy way to request a specific page. We can simply edit the URL and modify the `page=` URL parameter. Keyset pagination cannot provide page numbers because the paging logic might depend on different columns.
+
+In the previous example, the column is the `id`, so we might see something like this in the URL:
+
+```plaintext
+id_after=5
+```
+
+In GraphQL, the parameters are serialized to JSON and then encoded:
+
+```plaintext
+eyJpZCI6Ijk0NzMzNTk0IiwidXBkYXRlZF9hdCI6IjIwMjEtMDQtMDkgMDg6NTA6MDUuODA1ODg0MDAwIFVUQyJ9
+```
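+
+For illustration only, such a cursor can be produced by Base64-encoding a JSON object containing the `ORDER BY` values of the last row on the page. The following is a sketch of the idea, not necessarily the exact encoding GitLab uses:
+
+```ruby
+require 'base64'
+require 'json'
+
+# ORDER BY values taken from the last row of the current page.
+cursor = { id: '94733594', updated_at: '2021-04-09 08:50:05.805884000 UTC' }
+
+encoded = Base64.urlsafe_encode64(cursor.to_json)
+decoded = JSON.parse(Base64.urlsafe_decode64(encoded))
+```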
+
+NOTE:
+Pagination parameters will be visible to the user, so we need to be careful about which columns we order by.
+
+Keyset pagination can only provide the next, previous, first, and last pages.
+
+##### Complexity
+
+Building queries when we order by a single column is very easy; however, things get more complex if a tie-breaker or multi-column ordering is used. The complexity increases further if the columns are nullable.
+
+Example: ordering by `id` and `created_at` where `created_at` is nullable; the query for the second page:
+
+```sql
+SELECT "issues".*
+FROM "issues"
+WHERE (("issues"."id" > 99
+ AND "issues"."created_at" = '2021-02-16 11:26:17.408466')
+ OR ("issues"."created_at" > '2021-02-16 11:26:17.408466')
+ OR ("issues"."created_at" IS NULL))
+ORDER BY "issues"."created_at" DESC NULLS LAST, "issues"."id" DESC
+LIMIT 20
+```
+
+##### Tooling
+
+Using keyset pagination outside of GraphQL is not straightforward. We have the low-level building blocks for keyset pagination database queries; however, their usage in application code is not yet streamlined.
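+
+For a single-column ordering, a hand-rolled version in application code is still small. A sketch, equivalent to the SQL queries shown earlier; the hard part is generalizing it to multi-column and nullable orderings:
+
+```ruby
+PAGE_SIZE = 20
+
+# First page: plain ORDER BY ... LIMIT.
+page = Issue.order(:id).limit(PAGE_SIZE).to_a
+
+# Next page: filter on the ORDER BY value of the last row we saw.
+cursor_id = page.last.id
+next_page = Issue.where('id > ?', cursor_id).order(:id).limit(PAGE_SIZE).to_a
+```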
+
+#### Performance
+
+Keyset pagination provides stable performance regardless of how many pages we move forward. To achieve this performance, the paginated query needs an index that covers all the columns in the `ORDER BY` clause, similar to offset pagination.
+
+### General performance guidelines
+
+See the [pagination general performance guidelines page](pagination_performance_guidelines.md).
diff --git a/doc/development/database/pagination_performance_guidelines.md b/doc/development/database/pagination_performance_guidelines.md
new file mode 100644
index 00000000000..ade1e853027
--- /dev/null
+++ b/doc/development/database/pagination_performance_guidelines.md
@@ -0,0 +1,325 @@
+---
+stage: Enablement
+group: Database
+info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments
+---
+
+# Pagination performance guidelines
+
+This document gives a few ideas for improving pagination (sorting) performance. These apply to both [offset](pagination_guidelines.md#offset-pagination) and [keyset](pagination_guidelines.md#keyset-pagination) pagination.
+
+## Tie-breaker column
+
+When ordering records, it's advised to order by a distinct (unique) combination of columns. Consider the following example:
+
+|`id`|`created_at`|
+|-|-|
+|1|2021-01-04 14:13:43|
+|2|2021-01-05 19:03:12|
+|3|2021-01-05 19:03:12|
+
+If we order by `created_at`, rows 2 and 3 share the same value, so their relative order would likely depend on how the records are located on the disk.
+
+Using a tie-breaker column is advised when the data is exposed via a well-defined interface and is consumed
+by an automated process, such as an API. Without the tie-breaker column, the order of the rows could change
+(for example, when the data is re-imported), which could cause problems that are hard to debug, such as:
+
+- An integration comparing the rows to determine changes breaks.
+- E-tag cache values change, which requires a complete re-download.
+
+The ambiguous query looks like this:
+
+```sql
+SELECT issues.* FROM issues ORDER BY created_at;
+```
+
+We can fix this by adding a second column to `ORDER BY`:
+
+```sql
+SELECT issues.* FROM issues ORDER BY created_at, id;
+```
+
+This change makes the order distinct, so we have "stable" sorting.
+
+NOTE:
+To make the query efficient, we need an index covering both columns: `(created_at, id)`. The order of the columns in the index **should match** the order of the columns in the `ORDER BY` clause.
+
+## Ordering by joined table column
+
+Oftentimes, we want to order the data by a column on a joined database table. The following example orders `issues` records by the `first_mentioned_in_commit_at` metric column:
+
+```sql
+SELECT issues.* FROM issues
+INNER JOIN issue_metrics on issue_metrics.issue_id=issues.id
+WHERE issues.project_id = 2
+ORDER BY issue_metrics.first_mentioned_in_commit_at DESC, issues.id DESC
+LIMIT 20
+OFFSET 0
+```
+
+With PostgreSQL version 11, the planner first looks up all issues matching the `project_id` filter and then joins all `issue_metrics` rows. The ordering of rows happens in memory. If the joined relation is always present (1:1 relationship), the database reads `N * 2` rows, where `N` is the number of rows matching the `project_id` filter.
+
+For performance reasons, we should avoid mixing columns from different tables when specifying the `ORDER BY` clause.
+
+In this particular case, there is no simple way (like index creation) to improve the query. We might think that changing the `issues.id` column to `issue_metrics.issue_id` would help; however, this would likely make the query perform worse because it might force the database to process all rows in the `issue_metrics` table.
+
+One idea to address this problem is denormalization. Adding the `project_id` column to the `issue_metrics` table will make the filtering and sorting efficient:
+
+```sql
+SELECT issues.* FROM issues
+INNER JOIN issue_metrics on issue_metrics.issue_id=issues.id
+WHERE issue_metrics.project_id = 2
+ORDER BY issue_metrics.first_mentioned_in_commit_at DESC, issue_metrics.issue_id DESC
+LIMIT 20
+OFFSET 0
+```
+
+NOTE:
+The query requires an index on the `issue_metrics` table with the following column configuration: `(project_id, first_mentioned_in_commit_at DESC, issue_id DESC)`.
+
+## Filtering
+
+### By project
+
+Filtering by a project is a very common use case since we have many features on the project level. Examples: merge requests, issues, boards, iterations.
+
+These features will have a filter on `project_id` in their base query. Loading issues for a project:
+
+```ruby
+project = Project.find(5)
+
+# order by internal id
+issues = project.issues.order(:iid).page(1).per(20)
+```
+
+To make the base query efficient, there is usually a database index covering the `project_id` column. This significantly reduces the number of rows the database needs to scan. Without the index, the database would read the whole `issues` table (full table scan).
+
+Since `project_id` is a foreign key, we might have the following index available:
+
+```sql
+"index_issues_on_project_id" btree (project_id)
+```
+
+GitLab 13.11 has the following index definition on the `issues` table:
+
+```sql
+"index_issues_on_project_id_and_iid" UNIQUE, btree (project_id, iid)
+```
+
+This index fully covers the database query and the pagination.
+
+### By group
+
+Unfortunately, there is no efficient way to sort and paginate on the group level. The database query execution time increases with the number of records in the group.
+
+Things get worse when "group level" actually means the group and its subgroups. To load the first page, the database needs to look up the group hierarchy, find all projects, and then look up all issues.
+
+The main reason behind the inefficient queries on the group level is the way our database schema is designed; our core domain models are associated with a project, and projects are associated with groups. This doesn't mean that the database structure is bad; it's just in a well-normalized form that is not optimized for efficient group-level queries. We might need to look into denormalization in the long term.
+
+Example: List issues in a group
+
+```ruby
+group = Group.find(9970)
+
+Issue.where(project_id: group.projects).order(:iid).page(1).per(20)
+```
+
+The generated SQL query:
+
+```sql
+SELECT "issues".*
+FROM "issues"
+WHERE "issues"."project_id" IN
+ (SELECT "projects"."id"
+ FROM "projects"
+ WHERE "projects"."namespace_id" = 5)
+ORDER BY "issues"."iid" ASC
+LIMIT 20
+OFFSET 0
+```
+
+The execution plan shows that we read significantly more rows than requested (20), and the rows are sorted in memory:
+
+```plaintext
+ Limit (cost=10716.87..10716.92 rows=20 width=1300) (actual time=1472.305..1472.308 rows=20 loops=1)
+ -> Sort (cost=10716.87..10717.03 rows=61 width=1300) (actual time=1472.303..1472.305 rows=20 loops=1)
+ Sort Key: issues.iid
+ Sort Method: top-N heapsort Memory: 41kB
+ -> Nested Loop (cost=1.00..10715.25 rows=61 width=1300) (actual time=0.215..1331.647 rows=177267 loops=1)
+ -> Index Only Scan using index_projects_on_namespace_id_and_id on projects (cost=0.44..3.77 rows=19 width=4) (actual time=0.077..1.057 rows=270 loops=1)
+ Index Cond: (namespace_id = 9970)
+ Heap Fetches: 25
+ -> Index Scan using index_issues_on_project_id_and_iid on issues (cost=0.56..559.28 rows=448 width=1300) (actual time=0.101..4.781 rows=657 loops=270)
+ Index Cond: (project_id = projects.id)
+ Planning Time: 12.281 ms
+ Execution Time: 1472.391 ms
+(12 rows)
+```
+
+#### Columns in the same database table
+
+Filtering by columns located in the same database table can be improved with an index. If we want to support filtering by the `state_id` column, we can add the following index:
+
+```sql
+"index_issues_on_project_id_and_state_id_and_iid" UNIQUE, btree (project_id, state_id, iid)
+```
+
+Example query in Rails:
+
+```ruby
+project = Project.find(5)
+
+# order by internal id
+issues = project.issues.opened.order(:iid).page(1).per(20)
+```
+
+SQL query:
+
+```sql
+SELECT "issues".*
+FROM "issues"
+WHERE
+ "issues"."project_id" = 5
+ AND ("issues"."state_id" IN (1))
+ORDER BY "issues"."iid" ASC
+LIMIT 20
+OFFSET 0
+```
+
+Keep in mind that the index above will not support the following project-level query:
+
+```sql
+SELECT "issues".*
+FROM "issues"
+WHERE "issues"."project_id" = 5
+ORDER BY "issues"."iid" ASC
+LIMIT 20
+OFFSET 0
+```
+
+#### Special case: confidential flag
+
+In the `issues` table, we have a boolean field (`confidential`) that marks an issue confidential. This makes the issue invisible (filtered out) to non-member users.
+
+Example SQL query:
+
+```sql
+SELECT "issues".*
+FROM "issues"
+WHERE "issues"."project_id" = 5
+AND "issues"."confidential" = FALSE
+ORDER BY "issues"."iid" ASC
+LIMIT 20
+OFFSET 0
+```
+
+We might be tempted to add an index on `project_id`, `confidential`, and `iid` to improve the database query; however, in this case it's probably unnecessary. Based on the data distribution in the table, confidential issues are rare. Filtering them out does not make the database query significantly slower. The database might read a few extra rows, and the performance difference might not even be visible to the end user.
+
+On the other hand, if we implemented a special filter that shows only confidential issues, we would surely need the index. Finding 20 confidential issues might require the database to scan hundreds of rows, or in the worst case, all issues in the project.
+
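+If we did need such an index, a partial index is one option. Here is a minimal sketch (the index name is hypothetical; this is not an existing GitLab index):
+
+```sql
+CREATE INDEX index_issues_on_project_id_and_iid_confidential_only
+ON issues (project_id, iid)
+WHERE confidential = TRUE;
+```
+
+Because such a partial index contains only confidential issues, it would stay small even on a large `issues` table.
+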
+NOTE:
+Be aware of the data distribution and the table access patterns (how features work) when introducing a new database index. Sampling production data might be necessary to make the right decision.
+
+#### Columns in a different database table
+
+Example: filtering issues in a project by an assignee
+
+```ruby
+project = Project.find(5)
+
+project
+ .issues
+ .joins(:issue_assignees)
+ .where(issue_assignees: { user_id: 10 })
+ .order(:iid)
+ .page(1)
+ .per(20)
+```
+
+```sql
+SELECT "issues".*
+FROM "issues"
+INNER JOIN "issue_assignees" ON "issue_assignees"."issue_id" = "issues"."id"
+WHERE "issues"."project_id" = 5
+ AND "issue_assignees"."user_id" = 10
+ORDER BY "issues"."iid" ASC
+LIMIT 20
+OFFSET 0
+```
+
+Example (oversimplified) database execution plan:
+
+1. The database parses the SQL query and detects the `JOIN`.
+1. The database splits the query into two subqueries.
+ - `SELECT "issue_assignees".* FROM "issue_assignees" WHERE "issue_assignees"."user_id" = 10`
+ - `SELECT "issues".* FROM "issues" WHERE "issues"."project_id" = 5`
+1. The database estimates the number of rows and the costs to run these queries.
+1. The database executes the cheapest query first.
+1. Using the result of the first query, the database loads the rows from the other table (from the other subquery) using the `JOIN` column and filters the rows further.
+
+In this particular example, the `issue_assignees` query would likely be executed first.
+
+Running the query in production for the GitLab project produces the following execution plan:
+
+```plaintext
+ Limit (cost=411.20..411.21 rows=1 width=1300) (actual time=24.071..24.077 rows=20 loops=1)
+ -> Sort (cost=411.20..411.21 rows=1 width=1300) (actual time=24.070..24.073 rows=20 loops=1)
+ Sort Key: issues.iid
+ Sort Method: top-N heapsort Memory: 91kB
+ -> Nested Loop (cost=1.00..411.19 rows=1 width=1300) (actual time=0.826..23.705 rows=190 loops=1)
+ -> Index Scan using index_issue_assignees_on_user_id on issue_assignees (cost=0.44..81.37 rows=92 width=4) (actual time=0.741..13.202 rows=215 loops=1)
+ Index Cond: (user_id = 4156052)
+ -> Index Scan using issues_pkey on issues (cost=0.56..3.58 rows=1 width=1300) (actual time=0.048..0.048 rows=1 loops=215)
+ Index Cond: (id = issue_assignees.issue_id)
+ Filter: (project_id = 278964)
+ Rows Removed by Filter: 0
+ Planning Time: 1.141 ms
+ Execution Time: 24.170 ms
+(13 rows)
+```
+
+The query looks up the `issue_assignees` rows first, filtered by `user_id` (`user_id = 4156052`), and finds 215 rows. Using those 215 rows, the database looks up the 215 associated issue rows by the primary key. Notice that the filter on the `project_id` column is not backed by an index.
+
+In most cases, we are lucky that the joined relation does not return too many rows, so we end up with a relatively efficient database query that accesses a low number of rows. As the database grows, these queries might start to behave differently. Let's say the number of `issue_assignees` records for a particular user is very high (millions); this join query will not perform well, and it will likely time out.
+
+A similar problem arises with a double join, where the filter is applied in the second `JOIN` query. Example: `Issue -> LabelLink -> Label(name=bug)`.
+
+There is no easy way to fix these problems. Denormalization of the data could help significantly; however, it also has negative effects (data duplication and the need to keep the data up to date).
+
+Ideas for improving the `issue_assignees` filter:
+
+- Add a `project_id` column to the `issue_assignees` table so that, when joining, the extra `project_id` filter reduces the rows further. The sorting will likely still happen in memory:
+
+ ```sql
+ SELECT "issues".*
+ FROM "issues"
+ INNER JOIN "issue_assignees" ON "issue_assignees"."issue_id" = "issues"."id"
+ WHERE "issues"."project_id" = 5
+ AND "issue_assignees"."user_id" = 10
+ AND "issue_assignees"."project_id" = 5
+ ORDER BY "issues"."iid" ASC
+ LIMIT 20
+ OFFSET 0
+ ```
+
+- Add the `iid` column to the `issue_assignees` table. Notice that in the query below, the `ORDER BY` column comes from `issue_assignees` and the `project_id` filter is gone from the `issues` table:
+
+ ```sql
+ SELECT "issues".*
+ FROM "issues"
+ INNER JOIN "issue_assignees" ON "issue_assignees"."issue_id" = "issues"."id"
+ WHERE "issue_assignees"."user_id" = 10
+ AND "issue_assignees"."project_id" = 5
+ ORDER BY "issue_assignees"."iid" ASC
+ LIMIT 20
+ OFFSET 0
+ ```
+
+The query now performs well for any number of `issue_assignees` records; however, we pay a very high price for it:
+
+- Two columns are duplicated, which increases the database size.
+- We need to keep the two columns in sync.
+- We need more indexes on the `issue_assignees` table to support the query.
+- The new database query is very specific to the assignee search and needs complex backend code to build it.
+  - If the user filters by assignee, the code must order by a different column, remove the `project_id` filter from the `issues` table, and so on.
+
+NOTE:
+Currently, we're not doing this kind of denormalization at GitLab.
diff --git a/doc/development/elasticsearch.md b/doc/development/elasticsearch.md
index b2f5c11a28b..3e466512c79 100644
--- a/doc/development/elasticsearch.md
+++ b/doc/development/elasticsearch.md
@@ -285,7 +285,7 @@ defer it to another release if there is risk of important data loss.
Follow these best practices for best results:
- When working in batches, keep the batch size under 9,000 documents
- and `throttle_delay` over 3 minutes. The bulk indexer is set to run
+  and set `throttle_delay` to at least 3 minutes. The bulk indexer is set to run
every 1 minute and process a batch of 10,000 documents. These limits
allow the bulk indexer time to process records before another migration
batch is attempted.
diff --git a/doc/development/graphql_guide/pagination.md b/doc/development/graphql_guide/pagination.md
index 55ff7942418..5db9238faed 100644
--- a/doc/development/graphql_guide/pagination.md
+++ b/doc/development/graphql_guide/pagination.md
@@ -12,6 +12,10 @@ GitLab uses two primary types of pagination: **offset** and **keyset**
(sometimes called cursor-based) pagination.
The GraphQL API mainly uses keyset pagination, falling back to offset pagination when needed.
+### Performance considerations
+
+See the [general pagination guidelines section](../database/pagination_guidelines.md) for more information.
+
### Offset pagination
This is the traditional, page-by-page pagination, that is most common,
diff --git a/doc/development/i18n/externalization.md b/doc/development/i18n/externalization.md
index a9eb2f4ca9a..f3d09903108 100644
--- a/doc/development/i18n/externalization.md
+++ b/doc/development/i18n/externalization.md
@@ -491,6 +491,48 @@ To avoid this error, use the applicable HTML entity code (`&lt;` or `&gt;`) inst
// => 'In < 1 hour'
```
+### Numbers
+
+Different locales may use different number formats. To support localization of numbers, we use `formatNumber`,
+which leverages [`toLocaleString()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/toLocaleString).
+
+`formatNumber` formats numbers as strings using the current user locale by default.
+
+- In JavaScript
+
+```javascript
+import { formatNumber } from '~/locale';
+
+// Assuming "User Preferences > Language" is set to "English":
+
+const tenThousand = formatNumber(10000); // "10,000" (uses comma as the thousands separator in the English locale)
+const fiftyPercent = formatNumber(0.5, { style: 'percent' }); // "50%" (other options are passed through to toLocaleString)
+```
+
+- In Vue templates
+
+```html
+<script>
+import { formatNumber } from '~/locale';
+
+export default {
+ //...
+ methods: {
+ // ...
+ formatNumber,
+ },
+}
+</script>
+<template>
+<div class="my-number">
+ {{ formatNumber(10000) }} <!-- 10,000 -->
+</div>
+<div class="my-percent">
+ {{ formatNumber(0.5, { style: 'percent' }) }} <!-- 50% -->
+</div>
+</template>
+```
+
### Dates / times
- In JavaScript:
diff --git a/doc/development/img/offset_pagination_ui_v13_11.jpg b/doc/development/img/offset_pagination_ui_v13_11.jpg
new file mode 100644
index 00000000000..d17acc20dcb
--- /dev/null
+++ b/doc/development/img/offset_pagination_ui_v13_11.jpg
Binary files differ
diff --git a/doc/development/img/project_issues_pagination_v13_11.jpg b/doc/development/img/project_issues_pagination_v13_11.jpg
new file mode 100644
index 00000000000..3f3c268cd16
--- /dev/null
+++ b/doc/development/img/project_issues_pagination_v13_11.jpg
Binary files differ
diff --git a/doc/development/merge_request_performance_guidelines.md b/doc/development/merge_request_performance_guidelines.md
index 6ce372ebc0d..543ca809f45 100644
--- a/doc/development/merge_request_performance_guidelines.md
+++ b/doc/development/merge_request_performance_guidelines.md
@@ -426,6 +426,8 @@ Take into consideration the following when choosing a pagination strategy:
The database has to sort and iterate all previous items, and this operation usually
can result in substantial load put on database.
+You can find useful tips related to pagination in the [pagination guidelines](database/pagination_guidelines.md).
+
## Badge counters
Counters should always be truncated. It means that we don't want to present
diff --git a/lib/gitlab/ci/build/cache.rb b/lib/gitlab/ci/build/cache.rb
index 4fcb5168847..375e6b4a96f 100644
--- a/lib/gitlab/ci/build/cache.rb
+++ b/lib/gitlab/ci/build/cache.rb
@@ -7,39 +7,22 @@ module Gitlab
include ::Gitlab::Utils::StrongMemoize
def initialize(cache, pipeline)
- if multiple_cache_per_job?
- cache = Array.wrap(cache)
- @cache = cache.map do |cache|
- Gitlab::Ci::Pipeline::Seed::Build::Cache
- .new(pipeline, cache)
- end
- else
- @cache = Gitlab::Ci::Pipeline::Seed::Build::Cache
- .new(pipeline, cache)
+ cache = Array.wrap(cache)
+ @cache = cache.map do |cache|
+ Gitlab::Ci::Pipeline::Seed::Build::Cache
+ .new(pipeline, cache)
end
end
def cache_attributes
strong_memoize(:cache_attributes) do
- if multiple_cache_per_job?
- if @cache.empty?
- {}
- else
- { options: { cache: @cache.map(&:attributes) } }
- end
+ if @cache.empty?
+ {}
else
- @cache.build_attributes
+ { options: { cache: @cache.map(&:attributes) } }
end
end
end
-
- private
-
- def multiple_cache_per_job?
- strong_memoize(:multiple_cache_per_job) do
- ::Gitlab::Ci::Features.multiple_cache_per_job?
- end
- end
end
end
end
diff --git a/lib/gitlab/ci/config/entry/cache.rb b/lib/gitlab/ci/config/entry/cache.rb
index f9688c500d2..ab79add688b 100644
--- a/lib/gitlab/ci/config/entry/cache.rb
+++ b/lib/gitlab/ci/config/entry/cache.rb
@@ -4,88 +4,52 @@ module Gitlab
module Ci
class Config
module Entry
- ##
- # Entry that represents a cache configuration
- #
- class Cache < ::Gitlab::Config::Entry::Simplifiable
- strategy :Caches, if: -> (config) { Feature.enabled?(:multiple_cache_per_job, default_enabled: :yaml) }
- strategy :Cache, if: -> (config) { Feature.disabled?(:multiple_cache_per_job, default_enabled: :yaml) }
-
- class Caches < ::Gitlab::Config::Entry::ComposableArray
- include ::Gitlab::Config::Entry::Validatable
-
- MULTIPLE_CACHE_LIMIT = 4
-
- validations do
- validate do
- unless config.is_a?(Hash) || config.is_a?(Array)
- errors.add(:config, 'can only be a Hash or an Array')
- end
-
- if config.is_a?(Array) && config.count > MULTIPLE_CACHE_LIMIT
- errors.add(:config, "no more than #{MULTIPLE_CACHE_LIMIT} caches can be created")
- end
- end
- end
-
- def initialize(*args)
- super
-
- @key = nil
- end
-
- def composable_class
- Entry::Cache::Cache
+ class Cache < ::Gitlab::Config::Entry::Node
+ include ::Gitlab::Config::Entry::Configurable
+ include ::Gitlab::Config::Entry::Validatable
+ include ::Gitlab::Config::Entry::Attributable
+
+ ALLOWED_KEYS = %i[key untracked paths when policy].freeze
+ ALLOWED_POLICY = %w[pull-push push pull].freeze
+ DEFAULT_POLICY = 'pull-push'
+ ALLOWED_WHEN = %w[on_success on_failure always].freeze
+ DEFAULT_WHEN = 'on_success'
+
+ validations do
+ validates :config, type: Hash, allowed_keys: ALLOWED_KEYS
+ validates :policy,
+ inclusion: { in: ALLOWED_POLICY, message: 'should be pull-push, push, or pull' },
+ allow_blank: true
+
+ with_options allow_nil: true do
+ validates :when,
+ inclusion: {
+ in: ALLOWED_WHEN,
+ message: 'should be on_success, on_failure or always'
+ }
end
end
- class Cache < ::Gitlab::Config::Entry::Node
- include ::Gitlab::Config::Entry::Configurable
- include ::Gitlab::Config::Entry::Validatable
- include ::Gitlab::Config::Entry::Attributable
-
- ALLOWED_KEYS = %i[key untracked paths when policy].freeze
- ALLOWED_POLICY = %w[pull-push push pull].freeze
- DEFAULT_POLICY = 'pull-push'
- ALLOWED_WHEN = %w[on_success on_failure always].freeze
- DEFAULT_WHEN = 'on_success'
+ entry :key, Entry::Key,
+ description: 'Cache key used to define a cache affinity.'
- validations do
- validates :config, type: Hash, allowed_keys: ALLOWED_KEYS
- validates :policy,
- inclusion: { in: ALLOWED_POLICY, message: 'should be pull-push, push, or pull' },
- allow_blank: true
-
- with_options allow_nil: true do
- validates :when,
- inclusion: {
- in: ALLOWED_WHEN,
- message: 'should be on_success, on_failure or always'
- }
- end
- end
+ entry :untracked, ::Gitlab::Config::Entry::Boolean,
+ description: 'Cache all untracked files.'
- entry :key, Entry::Key,
- description: 'Cache key used to define a cache affinity.'
+ entry :paths, Entry::Paths,
+ description: 'Specify which paths should be cached across builds.'
- entry :untracked, ::Gitlab::Config::Entry::Boolean,
- description: 'Cache all untracked files.'
+ attributes :policy, :when
- entry :paths, Entry::Paths,
- description: 'Specify which paths should be cached across builds.'
+ def value
+ result = super
- attributes :policy, :when
+ result[:key] = key_value
+ result[:policy] = policy || DEFAULT_POLICY
+ # Use self.when to avoid conflict with reserved word
+ result[:when] = self.when || DEFAULT_WHEN
- def value
- result = super
-
- result[:key] = key_value
- result[:policy] = policy || DEFAULT_POLICY
- # Use self.when to avoid conflict with reserved word
- result[:when] = self.when || DEFAULT_WHEN
-
- result
- end
+ result
end
class UnknownStrategy < ::Gitlab::Config::Entry::Node
diff --git a/lib/gitlab/ci/config/entry/caches.rb b/lib/gitlab/ci/config/entry/caches.rb
new file mode 100644
index 00000000000..75240599c9c
--- /dev/null
+++ b/lib/gitlab/ci/config/entry/caches.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Ci
+ class Config
+ module Entry
+ ##
+ # Entry that represents caches configuration
+ #
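+        # For example (illustrative YAML, not taken from this commit), a job
+        # can define up to MULTIPLE_CACHE_LIMIT caches:
+        #
+        #   cache:
+        #     - key: gems
+        #       paths:
+        #         - vendor/ruby
+        #     - key: node
+        #       paths:
+        #         - node_modules
+        #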
+ class Caches < ::Gitlab::Config::Entry::ComposableArray
+ include ::Gitlab::Config::Entry::Validatable
+
+ MULTIPLE_CACHE_LIMIT = 4
+
+ validations do
+ validate do
+ unless config.is_a?(Hash) || config.is_a?(Array)
+ errors.add(:config, 'can only be a Hash or an Array')
+ end
+
+ if config.is_a?(Array) && config.count > MULTIPLE_CACHE_LIMIT
+ errors.add(:config, "no more than #{MULTIPLE_CACHE_LIMIT} caches can be created")
+ end
+ end
+ end
+
+ def initialize(*args)
+ super
+
+ @key = nil
+ end
+
+ def composable_class
+ Entry::Cache
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/ci/config/entry/default.rb b/lib/gitlab/ci/config/entry/default.rb
index ab493ff7d78..eaaf9f69102 100644
--- a/lib/gitlab/ci/config/entry/default.rb
+++ b/lib/gitlab/ci/config/entry/default.rb
@@ -37,7 +37,7 @@ module Gitlab
description: 'Script that will be executed after each job.',
inherit: true
- entry :cache, Entry::Cache,
+ entry :cache, Entry::Caches,
description: 'Configure caching between build jobs.',
inherit: true
diff --git a/lib/gitlab/ci/config/entry/job.rb b/lib/gitlab/ci/config/entry/job.rb
index a20b802be58..d76cab5d7c0 100644
--- a/lib/gitlab/ci/config/entry/job.rb
+++ b/lib/gitlab/ci/config/entry/job.rb
@@ -64,7 +64,7 @@ module Gitlab
description: 'Commands that will be executed when finishing job.',
inherit: true
- entry :cache, Entry::Cache,
+ entry :cache, Entry::Caches,
description: 'Cache definition for this job.',
inherit: true
diff --git a/lib/gitlab/ci/config/entry/root.rb b/lib/gitlab/ci/config/entry/root.rb
index 54ef84b965a..e6290ef2479 100644
--- a/lib/gitlab/ci/config/entry/root.rb
+++ b/lib/gitlab/ci/config/entry/root.rb
@@ -61,7 +61,7 @@ module Gitlab
description: 'Deprecated: stages for this pipeline.',
reserved: true
- entry :cache, Entry::Cache,
+ entry :cache, Entry::Caches,
description: 'Configure caching between build jobs.',
reserved: true
diff --git a/lib/gitlab/ci/features.rb b/lib/gitlab/ci/features.rb
index 64eaffa1e82..162dbc4bcc7 100644
--- a/lib/gitlab/ci/features.rb
+++ b/lib/gitlab/ci/features.rb
@@ -56,8 +56,8 @@ module Gitlab
::Feature.enabled?(:codequality_mr_diff, project, default_enabled: false)
end
- def self.multiple_cache_per_job?
- ::Feature.enabled?(:multiple_cache_per_job, default_enabled: :yaml)
+ def self.gldropdown_tags_enabled?
+ ::Feature.enabled?(:gldropdown_tags, default_enabled: :yaml)
end
end
end
diff --git a/lib/gitlab/ci/templates/Security/SAST.gitlab-ci.yml b/lib/gitlab/ci/templates/Security/SAST.gitlab-ci.yml
index b6f98d5034e..f768bcae6d3 100644
--- a/lib/gitlab/ci/templates/Security/SAST.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Security/SAST.gitlab-ci.yml
@@ -157,11 +157,6 @@ gosec-sast:
mobsf-android-sast:
extends: .sast-analyzer
- services:
- # this version must match with analyzer version mentioned in: https://gitlab.com/gitlab-org/security-products/analyzers/mobsf/-/blob/master/Dockerfile
- # Unfortunately, we need to keep track of mobsf version in 2 different places for now.
- - name: opensecurity/mobile-security-framework-mobsf:v3.4.0
- alias: mobsf
image:
name: "$SAST_ANALYZER_IMAGE"
variables:
@@ -169,7 +164,6 @@ mobsf-android-sast:
# override the analyzer image with a custom value. This may be subject to change or
# breakage across GitLab releases.
SAST_ANALYZER_IMAGE: "$SECURE_ANALYZERS_PREFIX/mobsf:$SAST_ANALYZER_IMAGE_TAG"
- MOBSF_API_KEY: key
rules:
- if: $SAST_DISABLED
when: never
@@ -183,11 +177,6 @@ mobsf-android-sast:
mobsf-ios-sast:
extends: .sast-analyzer
- services:
- # this version must match with analyzer version mentioned in: https://gitlab.com/gitlab-org/security-products/analyzers/mobsf/-/blob/master/Dockerfile
- # Unfortunately, we need to keep track of mobsf version in 2 different places for now.
- - name: opensecurity/mobile-security-framework-mobsf:v3.4.0
- alias: mobsf
image:
name: "$SAST_ANALYZER_IMAGE"
variables:
@@ -195,7 +184,6 @@ mobsf-ios-sast:
# override the analyzer image with a custom value. This may be subject to change or
# breakage across GitLab releases.
SAST_ANALYZER_IMAGE: "$SECURE_ANALYZERS_PREFIX/mobsf:$SAST_ANALYZER_IMAGE_TAG"
- MOBSF_API_KEY: key
rules:
- if: $SAST_DISABLED
when: never
diff --git a/lib/gitlab/quick_actions/spend_time_and_date_separator.rb b/lib/gitlab/quick_actions/spend_time_and_date_separator.rb
index 4a62e83e8e9..03b2a1086bb 100644
--- a/lib/gitlab/quick_actions/spend_time_and_date_separator.rb
+++ b/lib/gitlab/quick_actions/spend_time_and_date_separator.rb
@@ -19,7 +19,7 @@ module Gitlab
def execute
return if @spend_arg.blank?
- return [get_time, DateTime.now.to_date] unless date_present?
+ return [get_time, DateTime.current] unless date_present?
return unless valid_date?
[get_time, get_date]
diff --git a/lib/gitlab/subscription_portal.rb b/lib/gitlab/subscription_portal.rb
index 1ba1d70a13c..ab2e1404cd2 100644
--- a/lib/gitlab/subscription_portal.rb
+++ b/lib/gitlab/subscription_portal.rb
@@ -9,8 +9,13 @@ module Gitlab
def self.subscriptions_url
ENV.fetch('CUSTOMER_PORTAL_URL', default_subscriptions_url)
end
+
+ def self.payment_form_url
+ "#{self.subscriptions_url}/payment_forms/cc_validation"
+ end
end
end
Gitlab::SubscriptionPortal.prepend_mod
Gitlab::SubscriptionPortal::SUBSCRIPTIONS_URL = Gitlab::SubscriptionPortal.subscriptions_url.freeze
+Gitlab::SubscriptionPortal::PAYMENT_FORM_URL = Gitlab::SubscriptionPortal.payment_form_url.freeze
diff --git a/lib/gitlab/usage/metric_definition.rb b/lib/gitlab/usage/metric_definition.rb
index 9d047b8a255..c0cf4a4db4b 100644
--- a/lib/gitlab/usage/metric_definition.rb
+++ b/lib/gitlab/usage/metric_definition.rb
@@ -65,6 +65,10 @@ module Gitlab
@definitions ||= load_all!
end
+ def all
+ @all ||= definitions.map { |_key_path, definition| definition }
+ end
+
def schemer
@schemer ||= ::JSONSchemer.schema(Pathname.new(METRIC_SCHEMA_PATH))
end
diff --git a/lib/gitlab/usage/metrics/instrumentations/base_metric.rb b/lib/gitlab/usage/metrics/instrumentations/base_metric.rb
new file mode 100644
index 00000000000..29b44f2bd0a
--- /dev/null
+++ b/lib/gitlab/usage/metrics/instrumentations/base_metric.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Usage
+ module Metrics
+ module Instrumentations
+ class BaseMetric
+ include Gitlab::Utils::UsageData
+
+ attr_reader :time_frame
+
+ def initialize(time_frame:)
+ @time_frame = time_frame
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/usage/metrics/instrumentations/database_metric.rb b/lib/gitlab/usage/metrics/instrumentations/database_metric.rb
new file mode 100644
index 00000000000..f83f90dea03
--- /dev/null
+++ b/lib/gitlab/usage/metrics/instrumentations/database_metric.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Usage
+ module Metrics
+ module Instrumentations
+ class DatabaseMetric < BaseMetric
+ # Usage Example
+ #
+ # class CountUsersCreatingIssuesMetric < DatabaseMetric
+ # operation :distinct_count, column: :author_id
+ #
+ # relation do |database_time_constraints|
+ # ::Issue.where(database_time_constraints)
+ # end
+ # end
+ class << self
+ def start(&block)
+ @metric_start = block
+ end
+
+ def finish(&block)
+ @metric_finish = block
+ end
+
+ def relation(&block)
+ @metric_relation = block
+ end
+
+ def operation(symbol, column: nil)
+ @metric_operation = symbol
+ @column = column
+ end
+
+ attr_reader :metric_operation, :metric_relation, :metric_start, :metric_finish, :column
+ end
+
+ def value
+ method(self.class.metric_operation)
+ .call(relation,
+ self.class.column,
+ start: self.class.metric_start&.call,
+ finish: self.class.metric_finish&.call)
+ end
+
+ def relation
+ self.class.metric_relation.call.where(time_constraints)
+ end
+
+ private
+
+ def time_constraints
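+            # The '28d' frame below spans 28 days (from 30 days ago to 2 days
+            # ago); the 2-day offset presumably excludes the most recent,
+            # still-changing data (an assumption, not stated in this commit).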
+ case time_frame
+ when '28d'
+ { created_at: 30.days.ago..2.days.ago }
+ when 'all'
+ {}
+ when 'none'
+ nil
+ else
+ raise "Unknown time frame: #{time_frame} for DatabaseMetric"
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/usage/metrics/instrumentations/generic_metric.rb b/lib/gitlab/usage/metrics/instrumentations/generic_metric.rb
new file mode 100644
index 00000000000..7c97cc37d17
--- /dev/null
+++ b/lib/gitlab/usage/metrics/instrumentations/generic_metric.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Usage
+ module Metrics
+ module Instrumentations
+ class GenericMetric < BaseMetric
+ # Usage example
+ #
+ # class UuidMetric < GenericMetric
+ # value do
+ # Gitlab::CurrentSettings.uuid
+ # end
+ # end
+ class << self
+ def value(&block)
+ @metric_value = block
+ end
+
+ attr_reader :metric_value
+ end
+
+ def value
+ alt_usage_data do
+ self.class.metric_value.call
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/usage/metrics/instrumentations/redis_hll_metric.rb b/lib/gitlab/usage/metrics/instrumentations/redis_hll_metric.rb
new file mode 100644
index 00000000000..ed0ddb1cbbe
--- /dev/null
+++ b/lib/gitlab/usage/metrics/instrumentations/redis_hll_metric.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Usage
+ module Metrics
+ module Instrumentations
+ class RedisHLLMetric < BaseMetric
+ # Usage example
+ #
+ # class CountUsersVisitingAnalyticsValuestreamMetric < RedisHLLMetric
+ # event_names :g_analytics_valuestream
+ # end
+ class << self
+ def event_names(events = nil)
+              @metric_events = events
+ end
+
+ attr_reader :metric_events
+ end
+
+ def value
+ redis_usage_data do
+ event_params = time_constraints.merge(event_names: self.class.metric_events)
+
+ Gitlab::UsageDataCounters::HLLRedisCounter.unique_events(**event_params)
+ end
+ end
+
+ private
+
+ def time_constraints
+ case time_frame
+ when '28d'
+ { start_date: 4.weeks.ago.to_date, end_date: Date.current }
+ when '7d'
+ { start_date: 7.days.ago.to_date, end_date: Date.current }
+ else
+ raise "Unknown time frame: #{time_frame} for TimeConstraint"
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/usage/metrics/instrumentations/uuid_metric.rb b/lib/gitlab/usage/metrics/instrumentations/uuid_metric.rb
new file mode 100644
index 00000000000..58547b5383a
--- /dev/null
+++ b/lib/gitlab/usage/metrics/instrumentations/uuid_metric.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Usage
+ module Metrics
+ module Instrumentations
+ class UuidMetric < GenericMetric
+ value do
+ Gitlab::CurrentSettings.uuid
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/usage/metrics/key_path_processor.rb b/lib/gitlab/usage/metrics/key_path_processor.rb
new file mode 100644
index 00000000000..dbe574d5838
--- /dev/null
+++ b/lib/gitlab/usage/metrics/key_path_processor.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Usage
+ module Metrics
+ class KeyPathProcessor
+ class << self
+ def process(key_path, value)
+ unflatten(key_path.split('.'), value)
+ end
+
+ private
+
+ def unflatten(keys, value)
+ loop do
+ value = { keys.pop.to_sym => value }
+
+ break if keys.blank?
+ end
+
+ value
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/usage_data_metrics.rb b/lib/gitlab/usage_data_metrics.rb
new file mode 100644
index 00000000000..4f162c28f1c
--- /dev/null
+++ b/lib/gitlab/usage_data_metrics.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+module Gitlab
+ class UsageDataMetrics
+ class << self
+ # Build the Usage Ping JSON payload from metrics YAML definitions which have instrumentation class set
+ def uncached_data
+ ::Gitlab::Usage::MetricDefinition.all.map do |definition|
+ instrumentation_class = definition.attributes[:instrumentation_class]
+
+ if instrumentation_class.present?
+ metric_value = instrumentation_class.constantize.new(time_frame: definition.attributes[:time_frame]).value
+
+ metric_payload(definition.key_path, metric_value)
+ else
+ {}
+ end
+ end.reduce({}, :deep_merge)
+ end
+
+ private
+
+ def metric_payload(key_path, value)
+ ::Gitlab::Usage::Metrics::KeyPathProcessor.process(key_path, value)
+ end
+ end
+ end
+end
diff --git a/lib/sidebars/projects/menus/settings_menu.rb b/lib/sidebars/projects/menus/settings_menu.rb
new file mode 100644
index 00000000000..8aec769d8d5
--- /dev/null
+++ b/lib/sidebars/projects/menus/settings_menu.rb
@@ -0,0 +1,145 @@
+# frozen_string_literal: true
+
+module Sidebars
+ module Projects
+ module Menus
+ class SettingsMenu < ::Sidebars::Menu
+ override :configure_menu_items
+ def configure_menu_items
+ return false unless can?(context.current_user, :admin_project, context.project)
+
+ add_item(general_menu_item)
+ add_item(integrations_menu_item)
+ add_item(webhooks_menu_item)
+ add_item(access_tokens_menu_item)
+ add_item(repository_menu_item)
+ add_item(ci_cd_menu_item)
+ add_item(operations_menu_item)
+ add_item(pages_menu_item)
+ add_item(packages_and_registries_menu_item)
+
+ true
+ end
+
+ override :link
+ def link
+ edit_project_path(context.project)
+ end
+
+ override :title
+ def title
+ _('Settings')
+ end
+
+ override :title_html_options
+ def title_html_options
+ {
+ id: 'js-onboarding-settings-link'
+ }
+ end
+
+ override :sprite_icon
+ def sprite_icon
+ 'settings'
+ end
+
+ private
+
+ def general_menu_item
+ ::Sidebars::MenuItem.new(
+ title: _('General'),
+ link: edit_project_path(context.project),
+ active_routes: { path: 'projects#edit' },
+ item_id: :general
+ )
+ end
+
+ def integrations_menu_item
+ ::Sidebars::MenuItem.new(
+ title: _('Integrations'),
+ link: project_settings_integrations_path(context.project),
+ active_routes: { path: %w[integrations#show services#edit] },
+ item_id: :integrations
+ )
+ end
+
+ def webhooks_menu_item
+ ::Sidebars::MenuItem.new(
+ title: _('Webhooks'),
+ link: project_hooks_path(context.project),
+ active_routes: { path: %w[hooks#index hooks#edit hook_logs#show] },
+ item_id: :webhooks
+ )
+ end
+
+ def access_tokens_menu_item
+ return unless can?(context.current_user, :read_resource_access_tokens, context.project)
+
+ ::Sidebars::MenuItem.new(
+ title: _('Access Tokens'),
+ link: project_settings_access_tokens_path(context.project),
+ active_routes: { path: 'access_tokens#index' },
+ item_id: :access_tokens
+ )
+ end
+
+ def repository_menu_item
+ ::Sidebars::MenuItem.new(
+ title: _('Repository'),
+ link: project_settings_repository_path(context.project),
+ active_routes: { path: 'repository#show' },
+ item_id: :repository
+ )
+ end
+
+ def ci_cd_menu_item
+ return if context.project.archived?
+ return unless context.project.feature_available?(:builds, context.current_user)
+
+ ::Sidebars::MenuItem.new(
+ title: _('CI/CD'),
+ link: project_settings_ci_cd_path(context.project),
+ active_routes: { path: 'ci_cd#show' },
+ item_id: :ci_cd
+ )
+ end
+
+ def operations_menu_item
+ return if context.project.archived?
+ return unless can?(context.current_user, :admin_operations, context.project)
+
+ ::Sidebars::MenuItem.new(
+ title: _('Operations'),
+ link: project_settings_operations_path(context.project),
+ active_routes: { path: 'operations#show' },
+ item_id: :operations
+ )
+ end
+
+ def pages_menu_item
+ return unless context.project.pages_available?
+
+ ::Sidebars::MenuItem.new(
+ title: _('Pages'),
+ link: project_pages_path(context.project),
+ active_routes: { path: 'pages#show' },
+ item_id: :pages
+ )
+ end
+
+ def packages_and_registries_menu_item
+ return unless Gitlab.config.registry.enabled
+ return if Feature.disabled?(:sidebar_refactor, context.current_user)
+ return unless can?(context.current_user, :destroy_container_image, context.project)
+
+ ::Sidebars::MenuItem.new(
+ title: _('Packages & Registries'),
+ link: project_settings_packages_and_registries_path(context.project),
+ active_routes: { path: 'packages_and_registries#index' },
+ item_id: :packages_and_registries
+ )
+ end
+ end
+ end
+ end
+end
diff --git a/lib/sidebars/projects/panel.rb b/lib/sidebars/projects/panel.rb
index bc37eb262b8..3be2a2b2b71 100644
--- a/lib/sidebars/projects/panel.rb
+++ b/lib/sidebars/projects/panel.rb
@@ -24,6 +24,7 @@ module Sidebars
add_menu(Sidebars::Projects::Menus::ExternalWikiMenu.new(context))
add_menu(Sidebars::Projects::Menus::SnippetsMenu.new(context))
add_menu(Sidebars::Projects::Menus::MembersMenu.new(context))
+ add_menu(Sidebars::Projects::Menus::SettingsMenu.new(context))
end
override :render_raw_menus_partial
diff --git a/lib/tasks/gitlab/usage_data.rake b/lib/tasks/gitlab/usage_data.rake
index 95072444fcf..0ad50c0fa53 100644
--- a/lib/tasks/gitlab/usage_data.rake
+++ b/lib/tasks/gitlab/usage_data.rake
@@ -29,5 +29,10 @@ namespace :gitlab do
items = Gitlab::Usage::MetricDefinition.definitions
Gitlab::Usage::Docs::Renderer.new(items).write
end
+
+ desc 'GitLab | UsageDataMetrics | Generate usage ping from metrics definition YAML files in JSON'
+ task generate_from_yaml: :environment do
+ puts Gitlab::Json.pretty_generate(Gitlab::UsageDataMetrics.uncached_data)
+ end
end
end
diff --git a/locale/gitlab.pot b/locale/gitlab.pot
index eec9886323d..244f4d94d84 100644
--- a/locale/gitlab.pot
+++ b/locale/gitlab.pot
@@ -5460,6 +5460,9 @@ msgstr ""
msgid "BulkImport|From source group"
msgstr ""
+msgid "BulkImport|Import %{groups}"
+msgstr ""
+
msgid "BulkImport|Import failed: Destination cannot be a subgroup of the source group. Change the destination and try again."
msgstr ""
@@ -5472,9 +5475,15 @@ msgstr ""
msgid "BulkImport|Name already exists."
msgstr ""
+msgid "BulkImport|No groups on this page are available for import"
+msgstr ""
+
msgid "BulkImport|No parent"
msgstr ""
+msgid "BulkImport|One or more groups has validation errors"
+msgstr ""
+
msgid "BulkImport|Showing %{start}-%{end} of %{total}"
msgstr ""
@@ -18750,6 +18759,9 @@ msgstr ""
msgid "KEY"
msgstr ""
+msgid "Keep"
+msgstr ""
+
msgid "Keep artifacts from most recent successful jobs"
msgstr ""
@@ -23875,6 +23887,9 @@ msgstr ""
msgid "Pipelines|More Information"
msgstr ""
+msgid "Pipelines|No artifacts available"
+msgstr ""
+
msgid "Pipelines|No triggers have been created yet. Add one using the form above."
msgstr ""
diff --git a/qa/qa/page/project/sub_menus/settings.rb b/qa/qa/page/project/sub_menus/settings.rb
index 531c4686345..80f62c8efde 100644
--- a/qa/qa/page/project/sub_menus/settings.rb
+++ b/qa/qa/page/project/sub_menus/settings.rb
@@ -12,21 +12,13 @@ module QA
base.class_eval do
include QA::Page::Project::SubMenus::Common
-
- view 'app/views/layouts/nav/sidebar/_project_menus.html.haml' do
- element :settings_item
- element :general_settings_link
- element :integrations_settings_link
- element :operations_settings_link
- element :access_tokens_settings_link
- end
end
end
def go_to_ci_cd_settings
hover_settings do
within_submenu do
- click_link('CI/CD')
+ click_element(:sidebar_menu_item_link, menu_item: 'CI/CD')
end
end
end
@@ -34,7 +26,7 @@ module QA
def go_to_repository_settings
hover_settings do
within_submenu do
- click_link('Repository')
+ click_element(:sidebar_menu_item_link, menu_item: 'Repository')
end
end
end
@@ -42,21 +34,21 @@ module QA
def go_to_general_settings
hover_settings do
within_submenu do
- click_element :general_settings_link
+ click_element(:sidebar_menu_item_link, menu_item: 'General')
end
end
end
def click_settings
within_sidebar do
- click_on 'Settings'
+ click_element(:sidebar_menu_link, menu_item: 'Settings')
end
end
def go_to_integrations_settings
hover_settings do
within_submenu do
- click_element :integrations_settings_link
+ click_element(:sidebar_menu_item_link, menu_item: 'Integrations')
end
end
end
@@ -64,7 +56,7 @@ module QA
def go_to_operations_settings
hover_settings do
within_submenu do
- click_element :operations_settings_link
+ click_element(:sidebar_menu_item_link, menu_item: 'Operations')
end
end
end
@@ -72,7 +64,7 @@ module QA
def go_to_access_token_settings
hover_settings do
within_submenu do
- click_element :access_tokens_settings_link
+ click_element(:sidebar_menu_item_link, menu_item: 'Access Tokens')
end
end
end
@@ -81,8 +73,8 @@ module QA
def hover_settings
within_sidebar do
- scroll_to_element(:settings_item)
- find_element(:settings_item).hover
+ scroll_to_element(:sidebar_menu_link, menu_item: 'Settings')
+ find_element(:sidebar_menu_link, menu_item: 'Settings').hover
yield
end
diff --git a/rubocop/rubocop-code_reuse.yml b/rubocop/rubocop-code_reuse.yml
index 64e51c859f4..a3b75117621 100644
--- a/rubocop/rubocop-code_reuse.yml
+++ b/rubocop/rubocop-code_reuse.yml
@@ -31,6 +31,7 @@ CodeReuse/ActiveRecord:
- lib/gitlab/import_export/**/*.rb
- lib/gitlab/project_authorizations.rb
- lib/gitlab/sql/**/*.rb
+ - lib/gitlab/usage/metrics/instrumentations/**/*.rb
- lib/system_check/**/*.rb
- qa/**/*.rb
- rubocop/**/*.rb
diff --git a/spec/controllers/projects/pipelines_controller_spec.rb b/spec/controllers/projects/pipelines_controller_spec.rb
index 2cbc85232b4..fc9c67e9e97 100644
--- a/spec/controllers/projects/pipelines_controller_spec.rb
+++ b/spec/controllers/projects/pipelines_controller_spec.rb
@@ -976,49 +976,26 @@ RSpec.describe Projects::PipelinesController do
end
end
- context 'when junit_pipeline_screenshots_view is enabled' do
- before do
- stub_feature_flags(junit_pipeline_screenshots_view: project)
- end
-
- context 'when test_report contains attachment and scope is with_attachment as a URL param' do
- let(:pipeline) { create(:ci_pipeline, :with_test_reports_attachment, project: project) }
-
- it 'returns a test reports with attachment' do
- get_test_report_json(scope: 'with_attachment')
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response["test_suites"]).to be_present
- expect(json_response["test_suites"].first["test_cases"].first).to include("attachment_url")
- end
- end
-
- context 'when test_report does not contain attachment and scope is with_attachment as a URL param' do
- let(:pipeline) { create(:ci_pipeline, :with_test_reports, project: project) }
+ context 'when test_report contains attachment and scope is with_attachment as a URL param' do
+ let(:pipeline) { create(:ci_pipeline, :with_test_reports_attachment, project: project) }
- it 'returns a test reports with empty values' do
- get_test_report_json(scope: 'with_attachment')
+    it 'returns test reports with attachment' do
+ get_test_report_json(scope: 'with_attachment')
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response["test_suites"]).to be_empty
- end
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response["test_suites"]).to be_present
+ expect(json_response["test_suites"].first["test_cases"].first).to include("attachment_url")
end
end
- context 'when junit_pipeline_screenshots_view is disabled' do
- before do
- stub_feature_flags(junit_pipeline_screenshots_view: false)
- end
+ context 'when test_report does not contain attachment and scope is with_attachment as a URL param' do
+ let(:pipeline) { create(:ci_pipeline, :with_test_reports, project: project) }
- context 'when test_report contains attachment and scope is with_attachment as a URL param' do
- let(:pipeline) { create(:ci_pipeline, :with_test_reports_attachment, project: project) }
+    it 'returns test reports with empty values' do
+ get_test_report_json(scope: 'with_attachment')
- it 'returns a test reports without attachment_url' do
- get_test_report_json(scope: 'with_attachment')
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response["test_suites"].first["test_cases"].first).not_to include("attachment_url")
- end
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response["test_suites"]).to be_empty
end
end
diff --git a/spec/features/projects/settings/operations_settings_spec.rb b/spec/features/projects/settings/operations_settings_spec.rb
index ca976997142..5fd716d3202 100644
--- a/spec/features/projects/settings/operations_settings_spec.rb
+++ b/spec/features/projects/settings/operations_settings_spec.rb
@@ -3,21 +3,20 @@
require 'spec_helper'
RSpec.describe 'Projects > Settings > For a forked project', :js do
- let(:user) { create(:user) }
- let(:project) { create(:project, :repository, create_templates: :issue) }
- let(:role) { :maintainer }
+ let_it_be(:project) { create(:project, :repository, create_templates: :issue) }
+
+  let(:user) { project.owner }
before do
sign_in(user)
- project.add_role(user, role)
end
describe 'Sidebar > Operations' do
- it 'renders the settings link in the sidebar' do
+ it 'renders the menu in the sidebar' do
visit project_path(project)
wait_for_requests
- expect(page).to have_selector('a[title="Operations"]', visible: false)
+ expect(page).to have_selector('.sidebar-sub-level-items a[aria-label="Operations"]', text: 'Operations', visible: false)
end
end
diff --git a/spec/frontend/content_editor/components/toolbar_button_spec.js b/spec/frontend/content_editor/components/toolbar_button_spec.js
index c57b277eb60..42fab2a5616 100644
--- a/spec/frontend/content_editor/components/toolbar_button_spec.js
+++ b/spec/frontend/content_editor/components/toolbar_button_spec.js
@@ -16,13 +16,15 @@ describe('content_editor/components/toolbar_button', () => {
toggleFooSpy = jest.fn();
tiptapEditor = createContentEditor({
extensions: [
- Extension.create({
- addCommands() {
- return {
- toggleFoo: () => toggleFooSpy,
- };
- },
- }),
+ {
+ tiptapExtension: Extension.create({
+ addCommands() {
+ return {
+ toggleFoo: () => toggleFooSpy,
+ };
+ },
+ }),
+ },
],
renderMarkdown: () => true,
}).tiptapEditor;
diff --git a/spec/frontend/content_editor/services/build_serializer_config_spec.js b/spec/frontend/content_editor/services/build_serializer_config_spec.js
new file mode 100644
index 00000000000..532e0493830
--- /dev/null
+++ b/spec/frontend/content_editor/services/build_serializer_config_spec.js
@@ -0,0 +1,38 @@
+import * as Blockquote from '~/content_editor/extensions/blockquote';
+import * as Bold from '~/content_editor/extensions/bold';
+import * as Dropcursor from '~/content_editor/extensions/dropcursor';
+import * as Paragraph from '~/content_editor/extensions/paragraph';
+
+import buildSerializerConfig from '~/content_editor/services/build_serializer_config';
+
+describe('content_editor/services/build_serializer_config', () => {
+ describe('given one or more content editor extensions', () => {
+ it('creates a serializer config that collects all extension serializers by type', () => {
+ const extensions = [Bold, Blockquote, Paragraph];
+ const serializerConfig = buildSerializerConfig(extensions);
+
+ extensions.forEach(({ tiptapExtension, serializer }) => {
+ const { name, type } = tiptapExtension;
+ expect(serializerConfig[`${type}s`][name]).toBe(serializer);
+ });
+ });
+ });
+
+ describe('given an extension without serializer', () => {
+ it('does not include the extension in the serializer config', () => {
+ const serializerConfig = buildSerializerConfig([Dropcursor]);
+
+ expect(serializerConfig.marks[Dropcursor.tiptapExtension.name]).toBe(undefined);
+ expect(serializerConfig.nodes[Dropcursor.tiptapExtension.name]).toBe(undefined);
+ });
+ });
+
+ describe('given no extensions', () => {
+ it('creates an empty serializer config', () => {
+ expect(buildSerializerConfig()).toStrictEqual({
+ marks: {},
+ nodes: {},
+ });
+ });
+ });
+});
diff --git a/spec/frontend/content_editor/services/create_content_editor_spec.js b/spec/frontend/content_editor/services/create_content_editor_spec.js
index 012e8b66885..59b2fab6d54 100644
--- a/spec/frontend/content_editor/services/create_content_editor_spec.js
+++ b/spec/frontend/content_editor/services/create_content_editor_spec.js
@@ -1,5 +1,6 @@
import { PROVIDE_SERIALIZER_OR_RENDERER_ERROR } from '~/content_editor/constants';
import { createContentEditor } from '~/content_editor/services/create_content_editor';
+import { createTestContentEditorExtension } from '../test_utils';
describe('content_editor/services/create_editor', () => {
let renderMarkdown;
@@ -28,6 +29,22 @@ describe('content_editor/services/create_editor', () => {
expect(renderMarkdown).toHaveBeenCalledWith(serializedContent);
});
+ it('allows providing external content editor extensions', async () => {
+ const labelReference = 'this is a ~group::editor';
+
+ renderMarkdown.mockReturnValueOnce(
+ '<p>this is a <span data-reference="label" data-label-name="group::editor">group::editor</span></p>',
+ );
+ editor = createContentEditor({
+ renderMarkdown,
+ extensions: [createTestContentEditorExtension()],
+ });
+
+ await editor.setSerializedContent(labelReference);
+
+ expect(editor.getSerializedContent()).toBe(labelReference);
+ });
+
it('throws an error when a renderMarkdown fn is not provided', () => {
expect(() => createContentEditor()).toThrow(PROVIDE_SERIALIZER_OR_RENDERER_ERROR);
});
diff --git a/spec/frontend/content_editor/test_utils.js b/spec/frontend/content_editor/test_utils.js
new file mode 100644
index 00000000000..a92ceb6d058
--- /dev/null
+++ b/spec/frontend/content_editor/test_utils.js
@@ -0,0 +1,34 @@
+import { Node } from '@tiptap/core';
+
+export const createTestContentEditorExtension = () => ({
+ tiptapExtension: Node.create({
+ name: 'label',
+ priority: 101,
+ inline: true,
+ group: 'inline',
+ addAttributes() {
+ return {
+ labelName: {
+ default: null,
+ parseHTML: (element) => {
+ return { labelName: element.dataset.labelName };
+ },
+ },
+ };
+ },
+ parseHTML() {
+ return [
+ {
+ tag: 'span[data-reference="label"]',
+ },
+ ];
+ },
+ renderHTML({ HTMLAttributes }) {
+ return ['span', HTMLAttributes, 0];
+ },
+ }),
+ serializer: (state, node) => {
+ state.write(`~${node.attrs.labelName}`);
+ state.closeBlock(node);
+ },
+});
diff --git a/spec/frontend/import_entities/import_groups/components/import_table_row_spec.js b/spec/frontend/import_entities/import_groups/components/import_table_row_spec.js
index 3080d7ebdb8..0c69cfb3bc5 100644
--- a/spec/frontend/import_entities/import_groups/components/import_table_row_spec.js
+++ b/spec/frontend/import_entities/import_groups/components/import_table_row_spec.js
@@ -19,6 +19,7 @@ const getFakeGroup = (status) => ({
new_name: 'group1',
},
id: 1,
+ validation_errors: [],
progress: { status },
});
@@ -187,21 +188,25 @@ describe('import table row', () => {
expect(wrapper.text()).toContain('Please choose a group URL with no special characters.');
});
- it('Reports invalid group name if group already exists', async () => {
+ it('Reports invalid group name if relevant validation error exists', async () => {
+ const FAKE_ERROR_MESSAGE = 'fake error';
+
createComponent({
group: {
...getFakeGroup(STATUSES.NONE),
- import_target: {
- target_namespace: EXISTING_GROUP_TARGET_NAMESPACE,
- new_name: EXISTING_GROUP_PATH,
- },
+ validation_errors: [
+ {
+ field: 'new_name',
+ message: FAKE_ERROR_MESSAGE,
+ },
+ ],
},
});
jest.runOnlyPendingTimers();
await nextTick();
- expect(wrapper.text()).toContain('Name already exists.');
+ expect(wrapper.text()).toContain(FAKE_ERROR_MESSAGE);
});
});
});
diff --git a/spec/frontend/import_entities/import_groups/components/import_table_spec.js b/spec/frontend/import_entities/import_groups/components/import_table_spec.js
index 496c5cda7c7..99ef6d9a7fb 100644
--- a/spec/frontend/import_entities/import_groups/components/import_table_spec.js
+++ b/spec/frontend/import_entities/import_groups/components/import_table_spec.js
@@ -1,4 +1,5 @@
import {
+ GlButton,
GlEmptyState,
GlLoadingIcon,
GlSearchBoxByClick,
@@ -14,7 +15,7 @@ import waitForPromises from 'helpers/wait_for_promises';
import { STATUSES } from '~/import_entities/constants';
import ImportTable from '~/import_entities/import_groups/components/import_table.vue';
import ImportTableRow from '~/import_entities/import_groups/components/import_table_row.vue';
-import importGroupMutation from '~/import_entities/import_groups/graphql/mutations/import_group.mutation.graphql';
+import importGroupsMutation from '~/import_entities/import_groups/graphql/mutations/import_groups.mutation.graphql';
import setNewNameMutation from '~/import_entities/import_groups/graphql/mutations/set_new_name.mutation.graphql';
import setTargetNamespaceMutation from '~/import_entities/import_groups/graphql/mutations/set_target_namespace.mutation.graphql';
import PaginationLinks from '~/vue_shared/components/pagination_links.vue';
@@ -40,6 +41,7 @@ describe('import table', () => {
];
const FAKE_PAGE_INFO = { page: 1, perPage: 20, total: 40, totalPages: 2 };
+ const findImportAllButton = () => wrapper.find('h1').find(GlButton);
const findPaginationDropdown = () => wrapper.findComponent(GlDropdown);
const findPaginationDropdownText = () => findPaginationDropdown().find({ ref: 'text' }).text();
@@ -72,7 +74,6 @@ describe('import table', () => {
afterEach(() => {
wrapper.destroy();
- wrapper = null;
});
it('renders loading icon while performing request', async () => {
@@ -141,7 +142,7 @@ describe('import table', () => {
event | payload | mutation | variables
${'update-target-namespace'} | ${'new-namespace'} | ${setTargetNamespaceMutation} | ${{ sourceGroupId: FAKE_GROUP.id, targetNamespace: 'new-namespace' }}
${'update-new-name'} | ${'new-name'} | ${setNewNameMutation} | ${{ sourceGroupId: FAKE_GROUP.id, newName: 'new-name' }}
- ${'import-group'} | ${undefined} | ${importGroupMutation} | ${{ sourceGroupId: FAKE_GROUP.id }}
+ ${'import-group'} | ${undefined} | ${importGroupsMutation} | ${{ sourceGroupIds: [FAKE_GROUP.id] }}
`('correctly maps $event to mutation', async ({ event, payload, mutation, variables }) => {
jest.spyOn(apolloProvider.defaultClient, 'mutate');
wrapper.find(ImportTableRow).vm.$emit(event, payload);
@@ -277,4 +278,66 @@ describe('import table', () => {
);
});
});
+
+ describe('import all button', () => {
+    it('does not exist when no groups are available', () => {
+ createComponent({
+ bulkImportSourceGroups: () => new Promise(() => {}),
+ });
+
+ expect(findImportAllButton().exists()).toBe(false);
+ });
+
+ it('exists when groups are available for import', async () => {
+ createComponent({
+ bulkImportSourceGroups: () => ({
+ nodes: FAKE_GROUPS,
+ pageInfo: FAKE_PAGE_INFO,
+ }),
+ });
+ await waitForPromises();
+
+ expect(findImportAllButton().exists()).toBe(true);
+ });
+
+ it('counts only not-imported groups', async () => {
+ const NEW_GROUPS = [
+ generateFakeEntry({ id: 1, status: STATUSES.NONE }),
+ generateFakeEntry({ id: 2, status: STATUSES.NONE }),
+ generateFakeEntry({ id: 3, status: STATUSES.FINISHED }),
+ ];
+
+ createComponent({
+ bulkImportSourceGroups: () => ({
+ nodes: NEW_GROUPS,
+ pageInfo: FAKE_PAGE_INFO,
+ }),
+ });
+ await waitForPromises();
+
+ expect(findImportAllButton().text()).toMatchInterpolatedText('Import 2 groups');
+ });
+
+ it('disables button when any group has validation errors', async () => {
+ const NEW_GROUPS = [
+ generateFakeEntry({ id: 1, status: STATUSES.NONE }),
+ generateFakeEntry({
+ id: 2,
+ status: STATUSES.NONE,
+ validation_errors: [{ field: 'new_name', message: 'test validation error' }],
+ }),
+ generateFakeEntry({ id: 3, status: STATUSES.FINISHED }),
+ ];
+
+ createComponent({
+ bulkImportSourceGroups: () => ({
+ nodes: NEW_GROUPS,
+ pageInfo: FAKE_PAGE_INFO,
+ }),
+ });
+ await waitForPromises();
+
+ expect(findImportAllButton().props().disabled).toBe(true);
+ });
+ });
});
diff --git a/spec/frontend/import_entities/import_groups/graphql/client_factory_spec.js b/spec/frontend/import_entities/import_groups/graphql/client_factory_spec.js
index 0d0679c74e5..ef83c9ebbc4 100644
--- a/spec/frontend/import_entities/import_groups/graphql/client_factory_spec.js
+++ b/spec/frontend/import_entities/import_groups/graphql/client_factory_spec.js
@@ -8,7 +8,9 @@ import {
clientTypenames,
createResolvers,
} from '~/import_entities/import_groups/graphql/client_factory';
-import importGroupMutation from '~/import_entities/import_groups/graphql/mutations/import_group.mutation.graphql';
+import addValidationErrorMutation from '~/import_entities/import_groups/graphql/mutations/add_validation_error.mutation.graphql';
+import importGroupsMutation from '~/import_entities/import_groups/graphql/mutations/import_groups.mutation.graphql';
+import removeValidationErrorMutation from '~/import_entities/import_groups/graphql/mutations/remove_validation_error.mutation.graphql';
import setImportProgressMutation from '~/import_entities/import_groups/graphql/mutations/set_import_progress.mutation.graphql';
import setNewNameMutation from '~/import_entities/import_groups/graphql/mutations/set_new_name.mutation.graphql';
import setTargetNamespaceMutation from '~/import_entities/import_groups/graphql/mutations/set_target_namespace.mutation.graphql';
@@ -240,6 +242,7 @@ describe('Bulk import resolvers', () => {
target_namespace: 'root',
new_name: 'group1',
},
+ validation_errors: [],
},
],
pageInfo: {
@@ -294,8 +297,8 @@ describe('Bulk import resolvers', () => {
axiosMockAdapter.onPost(FAKE_ENDPOINTS.createBulkImport).reply(() => new Promise(() => {}));
client.mutate({
- mutation: importGroupMutation,
- variables: { sourceGroupId: GROUP_ID },
+ mutation: importGroupsMutation,
+ variables: { sourceGroupIds: [GROUP_ID] },
});
await waitForPromises();
@@ -325,8 +328,8 @@ describe('Bulk import resolvers', () => {
it('sets import status to CREATED when request completes', async () => {
axiosMockAdapter.onPost(FAKE_ENDPOINTS.createBulkImport).reply(httpStatus.OK, { id: 1 });
await client.mutate({
- mutation: importGroupMutation,
- variables: { sourceGroupId: GROUP_ID },
+ mutation: importGroupsMutation,
+ variables: { sourceGroupIds: [GROUP_ID] },
});
await waitForPromises();
@@ -340,8 +343,8 @@ describe('Bulk import resolvers', () => {
client
.mutate({
- mutation: importGroupMutation,
- variables: { sourceGroupId: GROUP_ID },
+          mutation: importGroupsMutation,
+ variables: { sourceGroupIds: [GROUP_ID] },
})
.catch(() => {});
await waitForPromises();
@@ -357,8 +360,8 @@ describe('Bulk import resolvers', () => {
client
.mutate({
- mutation: importGroupMutation,
- variables: { sourceGroupId: GROUP_ID },
+ mutation: importGroupsMutation,
+ variables: { sourceGroupIds: [GROUP_ID] },
})
.catch(() => {});
await waitForPromises();
@@ -375,8 +378,8 @@ describe('Bulk import resolvers', () => {
client
.mutate({
- mutation: importGroupMutation,
- variables: { sourceGroupId: GROUP_ID },
+ mutation: importGroupsMutation,
+ variables: { sourceGroupIds: [GROUP_ID] },
})
.catch(() => {});
await waitForPromises();
@@ -418,5 +421,41 @@ describe('Bulk import resolvers', () => {
status: NEW_STATUS,
});
});
+
+ it('addValidationError adds error to group', async () => {
+ const FAKE_FIELD = 'some-field';
+ const FAKE_MESSAGE = 'some-message';
+ const {
+ data: {
+ addValidationError: { validation_errors: validationErrors },
+ },
+ } = await client.mutate({
+ mutation: addValidationErrorMutation,
+ variables: { sourceGroupId: GROUP_ID, field: FAKE_FIELD, message: FAKE_MESSAGE },
+ });
+
+ expect(validationErrors).toMatchObject([{ field: FAKE_FIELD, message: FAKE_MESSAGE }]);
+ });
+
+ it('removeValidationError removes error from group', async () => {
+ const FAKE_FIELD = 'some-field';
+ const FAKE_MESSAGE = 'some-message';
+
+ await client.mutate({
+ mutation: addValidationErrorMutation,
+ variables: { sourceGroupId: GROUP_ID, field: FAKE_FIELD, message: FAKE_MESSAGE },
+ });
+
+ const {
+ data: {
+ removeValidationError: { validation_errors: validationErrors },
+ },
+ } = await client.mutate({
+ mutation: removeValidationErrorMutation,
+ variables: { sourceGroupId: GROUP_ID, field: FAKE_FIELD },
+ });
+
+ expect(validationErrors).toMatchObject([]);
+ });
});
});
diff --git a/spec/frontend/import_entities/import_groups/graphql/fixtures.js b/spec/frontend/import_entities/import_groups/graphql/fixtures.js
index b046e04fa28..6f66066b312 100644
--- a/spec/frontend/import_entities/import_groups/graphql/fixtures.js
+++ b/spec/frontend/import_entities/import_groups/graphql/fixtures.js
@@ -14,6 +14,7 @@ export const generateFakeEntry = ({ id, status, ...rest }) => ({
id: `test-${id}`,
status,
},
+ validation_errors: [],
...rest,
});
diff --git a/spec/frontend/import_entities/import_groups/graphql/services/source_groups_manager_spec.js b/spec/frontend/import_entities/import_groups/graphql/services/source_groups_manager_spec.js
index e9c214b8d3b..bae715edac0 100644
--- a/spec/frontend/import_entities/import_groups/graphql/services/source_groups_manager_spec.js
+++ b/spec/frontend/import_entities/import_groups/graphql/services/source_groups_manager_spec.js
@@ -22,33 +22,42 @@ describe('SourceGroupsManager', () => {
const IMPORT_ID = 1;
const IMPORT_TARGET = { destination_name: 'demo', destination_namespace: 'foo' };
const STATUS = 'FAKE_STATUS';
- const FAKE_GROUP = { id: 1, importTarget: IMPORT_TARGET, status: STATUS };
+ const FAKE_GROUP = { id: 1, import_target: IMPORT_TARGET, status: STATUS };
it('loads state from storage on creation', () => {
expect(storage.getItem).toHaveBeenCalledWith(KEY);
});
- it('saves to storage when saveImportState is called', () => {
- manager.saveImportState(IMPORT_ID, FAKE_GROUP);
+ it('saves to storage when createImportState is called', () => {
+    const FAKE_STATUS = 'fake';
+ manager.createImportState(IMPORT_ID, { status: FAKE_STATUS, groups: [FAKE_GROUP] });
const storedObject = JSON.parse(storage.setItem.mock.calls[0][1]);
expect(Object.values(storedObject)[0]).toStrictEqual({
- id: FAKE_GROUP.id,
- importTarget: IMPORT_TARGET,
- status: STATUS,
+ status: FAKE_STATUS,
+ groups: [
+ {
+ id: FAKE_GROUP.id,
+ importTarget: IMPORT_TARGET,
+ },
+ ],
});
});
it('updates storage when previous state is available', () => {
const CHANGED_STATUS = 'changed';
- manager.saveImportState(IMPORT_ID, FAKE_GROUP);
+ manager.createImportState(IMPORT_ID, { status: STATUS, groups: [FAKE_GROUP] });
- manager.saveImportState(IMPORT_ID, { status: CHANGED_STATUS });
+ manager.updateImportProgress(IMPORT_ID, CHANGED_STATUS);
const storedObject = JSON.parse(storage.setItem.mock.calls[1][1]);
expect(Object.values(storedObject)[0]).toStrictEqual({
- id: FAKE_GROUP.id,
- importTarget: IMPORT_TARGET,
status: CHANGED_STATUS,
+ groups: [
+ {
+ id: FAKE_GROUP.id,
+ importTarget: IMPORT_TARGET,
+ },
+ ],
});
});
});
diff --git a/spec/frontend/issues_list/mock_data.js b/spec/frontend/issues_list/mock_data.js
index f75c3d8bcb9..eede806c42f 100644
--- a/spec/frontend/issues_list/mock_data.js
+++ b/spec/frontend/issues_list/mock_data.js
@@ -20,6 +20,13 @@ export const locationSearch = [
'not[weight]=3',
].join('&');
+export const locationSearchWithSpecialValues = [
+ 'assignee_id=None',
+ 'my_reaction_emoji=None',
+ 'iteration_id=Current',
+ 'weight=None',
+].join('&');
+
export const filteredTokens = [
{ type: 'author_username', value: { data: 'homer', operator: OPERATOR_IS } },
{ type: 'author_username', value: { data: 'marge', operator: OPERATOR_IS_NOT } },
@@ -41,6 +48,13 @@ export const filteredTokens = [
{ type: 'filtered-search-term', value: { data: 'issues' } },
];
+export const filteredTokensWithSpecialValues = [
+ { type: 'assignee_username', value: { data: 'None', operator: OPERATOR_IS } },
+ { type: 'my_reaction_emoji', value: { data: 'None', operator: OPERATOR_IS } },
+ { type: 'iteration', value: { data: 'Current', operator: OPERATOR_IS } },
+ { type: 'weight', value: { data: 'None', operator: OPERATOR_IS } },
+];
+
export const apiParams = {
author_username: 'homer',
'not[author_username]': 'marge',
@@ -58,6 +72,13 @@ export const apiParams = {
'not[weight]': '3',
};
+export const apiParamsWithSpecialValues = {
+ assignee_id: 'None',
+ my_reaction_emoji: 'None',
+ iteration_id: 'Current',
+ weight: 'None',
+};
+
export const urlParams = {
author_username: ['homer'],
'not[author_username]': ['marge'],
@@ -74,3 +95,10 @@ export const urlParams = {
weight: ['1'],
'not[weight]': ['3'],
};
+
+export const urlParamsWithSpecialValues = {
+ assignee_id: ['None'],
+ my_reaction_emoji: ['None'],
+ iteration_id: ['Current'],
+ weight: ['None'],
+};
diff --git a/spec/frontend/issues_list/utils_spec.js b/spec/frontend/issues_list/utils_spec.js
index 00b387263ea..282080a1417 100644
--- a/spec/frontend/issues_list/utils_spec.js
+++ b/spec/frontend/issues_list/utils_spec.js
@@ -1,4 +1,13 @@
-import { apiParams, filteredTokens, locationSearch, urlParams } from 'jest/issues_list/mock_data';
+import {
+ apiParams,
+ apiParamsWithSpecialValues,
+ filteredTokens,
+ filteredTokensWithSpecialValues,
+ locationSearch,
+ locationSearchWithSpecialValues,
+ urlParams,
+ urlParamsWithSpecialValues,
+} from 'jest/issues_list/mock_data';
import { sortParams } from '~/issues_list/constants';
import {
convertToApiParams,
@@ -53,18 +62,32 @@ describe('getFilterTokens', () => {
it('returns filtered tokens given "window.location.search"', () => {
expect(getFilterTokens(locationSearch)).toEqual(filteredTokens);
});
+
+ it('returns filtered tokens given "window.location.search" with special values', () => {
+ expect(getFilterTokens(locationSearchWithSpecialValues)).toEqual(
+ filteredTokensWithSpecialValues,
+ );
+ });
});
describe('convertToApiParams', () => {
it('returns api params given filtered tokens', () => {
expect(convertToApiParams(filteredTokens)).toEqual(apiParams);
});
+
+ it('returns api params given filtered tokens with special values', () => {
+ expect(convertToApiParams(filteredTokensWithSpecialValues)).toEqual(apiParamsWithSpecialValues);
+ });
});
describe('convertToUrlParams', () => {
it('returns url params given filtered tokens', () => {
expect(convertToUrlParams(filteredTokens)).toEqual(urlParams);
});
+
+ it('returns url params given filtered tokens with special values', () => {
+ expect(convertToUrlParams(filteredTokensWithSpecialValues)).toEqual(urlParamsWithSpecialValues);
+ });
});
describe('convertToSearchQuery', () => {
diff --git a/spec/frontend/pipelines/pipelines_artifacts_spec.js b/spec/frontend/pipelines/pipelines_artifacts_spec.js
index d4a2db08d97..336255768d7 100644
--- a/spec/frontend/pipelines/pipelines_artifacts_spec.js
+++ b/spec/frontend/pipelines/pipelines_artifacts_spec.js
@@ -1,23 +1,43 @@
-import { GlDropdown, GlDropdownItem, GlSprintf } from '@gitlab/ui';
+import { GlAlert, GlDropdown, GlDropdownItem, GlLoadingIcon, GlSprintf } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
-import PipelineArtifacts from '~/pipelines/components/pipelines_list/pipelines_artifacts.vue';
+import MockAdapter from 'axios-mock-adapter';
+import waitForPromises from 'helpers/wait_for_promises';
+import axios from '~/lib/utils/axios_utils';
+import PipelineArtifacts, {
+ i18n,
+} from '~/pipelines/components/pipelines_list/pipelines_artifacts.vue';
describe('Pipelines Artifacts dropdown', () => {
let wrapper;
+ let mockAxios;
- const createComponent = () => {
+ const artifacts = [
+ {
+ name: 'job my-artifact',
+ path: '/download/path',
+ },
+ {
+ name: 'job-2 my-artifact-2',
+ path: '/download/path-two',
+ },
+ ];
+ const artifactsEndpointPlaceholder = ':pipeline_artifacts_id';
+ const artifactsEndpoint = `endpoint/${artifactsEndpointPlaceholder}/artifacts.json`;
+ const pipelineId = 108;
+
+ const createComponent = ({ mockData = {} } = {}) => {
wrapper = shallowMount(PipelineArtifacts, {
+ provide: {
+ artifactsEndpoint,
+ artifactsEndpointPlaceholder,
+ },
propsData: {
- artifacts: [
- {
- name: 'job my-artifact',
- path: '/download/path',
- },
- {
- name: 'job-2 my-artifact-2',
- path: '/download/path-two',
- },
- ],
+ pipelineId,
+ },
+ data() {
+ return {
+ ...mockData,
+ };
},
stubs: {
GlSprintf,
@@ -25,11 +45,14 @@ describe('Pipelines Artifacts dropdown', () => {
});
};
+ const findAlert = () => wrapper.findComponent(GlAlert);
+ const findDropdown = () => wrapper.findComponent(GlDropdown);
+ const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
const findFirstGlDropdownItem = () => wrapper.find(GlDropdownItem);
const findAllGlDropdownItems = () => wrapper.find(GlDropdown).findAll(GlDropdownItem);
beforeEach(() => {
- createComponent();
+ mockAxios = new MockAdapter(axios);
});
afterEach(() => {
@@ -37,13 +60,66 @@ describe('Pipelines Artifacts dropdown', () => {
wrapper = null;
});
+ it('should render the dropdown', () => {
+ createComponent();
+
+ expect(findDropdown().exists()).toBe(true);
+ });
+
+ it('should fetch artifacts on dropdown click', async () => {
+ const endpoint = artifactsEndpoint.replace(artifactsEndpointPlaceholder, pipelineId);
+ mockAxios.onGet(endpoint).replyOnce(200, { artifacts });
+ createComponent();
+ findDropdown().vm.$emit('show');
+ await waitForPromises();
+
+ expect(mockAxios.history.get).toHaveLength(1);
+ expect(wrapper.vm.artifacts).toEqual(artifacts);
+ });
+
it('should render a dropdown with all the provided artifacts', () => {
- expect(findAllGlDropdownItems()).toHaveLength(2);
+ createComponent({ mockData: { artifacts } });
+
+ expect(findAllGlDropdownItems()).toHaveLength(artifacts.length);
});
it('should render a link with the provided path', () => {
- expect(findFirstGlDropdownItem().attributes('href')).toBe('/download/path');
+ createComponent({ mockData: { artifacts } });
- expect(findFirstGlDropdownItem().text()).toBe('Download job my-artifact artifact');
+ expect(findFirstGlDropdownItem().attributes('href')).toBe(artifacts[0].path);
+
+ expect(findFirstGlDropdownItem().text()).toBe(`Download ${artifacts[0].name} artifact`);
+ });
+
+ describe('with a failing request', () => {
+ it('should render an error message', async () => {
+ const endpoint = artifactsEndpoint.replace(artifactsEndpointPlaceholder, pipelineId);
+ mockAxios.onGet(endpoint).replyOnce(500);
+ createComponent();
+ findDropdown().vm.$emit('show');
+ await waitForPromises();
+
+ const error = findAlert();
+ expect(error.exists()).toBe(true);
+ expect(error.text()).toBe(i18n.artifactsFetchErrorMessage);
+ });
+ });
+
+ describe('with no artifacts received', () => {
+ it('should render an empty alert message', () => {
+ createComponent({ mockData: { artifacts: [] } });
+
+ const emptyAlert = findAlert();
+ expect(emptyAlert.exists()).toBe(true);
+ expect(emptyAlert.text()).toBe(i18n.noArtifacts);
+ });
+ });
+
+ describe('when artifacts are loading', () => {
+ it('should show loading icon', () => {
+ createComponent({ mockData: { isLoading: true } });
+
+ expect(findLoadingIcon().exists()).toBe(true);
+ });
});
});
diff --git a/spec/frontend/vue_shared/components/user_select_spec.js b/spec/frontend/vue_shared/components/user_select_spec.js
index 4258d415402..5a609568220 100644
--- a/spec/frontend/vue_shared/components/user_select_spec.js
+++ b/spec/frontend/vue_shared/components/user_select_spec.js
@@ -95,14 +95,14 @@ describe('User select dropdown', () => {
createComponent({ participantsQueryHandler: mockError });
await waitForPromises();
- expect(wrapper.emitted('error')).toBeTruthy();
+ expect(wrapper.emitted('error')).toEqual([[], []]);
});
it('emits an `error` event if search query was rejected', async () => {
createComponent({ searchQueryHandler: mockError });
await waitForSearch();
- expect(wrapper.emitted('error')).toBeTruthy();
+ expect(wrapper.emitted('error')).toEqual([[], []]);
});
it('renders current user if they are not in participants or assignees', async () => {
@@ -264,4 +264,48 @@ describe('User select dropdown', () => {
expect(findEmptySearchResults().exists()).toBe(true);
});
});
+
+ // TODO Remove this test after the following issue is resolved in the backend
+ // https://gitlab.com/gitlab-org/gitlab/-/issues/329750
+ describe('temporary error suppression', () => {
+ beforeEach(() => {
+ jest.spyOn(console, 'error').mockImplementation();
+ });
+
+ const nullError = { message: 'Cannot return null for non-nullable field GroupMember.user' };
+
+ it.each`
+ mockErrors
+ ${[nullError]}
+ ${[nullError, nullError]}
+ `('does not emit errors', async ({ mockErrors }) => {
+ createComponent({
+ searchQueryHandler: jest.fn().mockResolvedValue({
+ errors: mockErrors,
+ }),
+ });
+ await waitForSearch();
+
+ expect(wrapper.emitted()).toEqual({});
+ // eslint-disable-next-line no-console
+ expect(console.error).toHaveBeenCalled();
+ });
+
+ it.each`
+ mockErrors
+ ${[{ message: 'serious error' }]}
+ ${[nullError, { message: 'serious error' }]}
+ `('emits error when non-null-related errors are included', async ({ mockErrors }) => {
+ createComponent({
+ searchQueryHandler: jest.fn().mockResolvedValue({
+ errors: mockErrors,
+ }),
+ });
+ await waitForSearch();
+
+ expect(wrapper.emitted('error')).toEqual([[]]);
+ // eslint-disable-next-line no-console
+ expect(console.error).not.toHaveBeenCalled();
+ });
+ });
});
diff --git a/spec/graphql/types/repository/blob_type_spec.rb b/spec/graphql/types/repository/blob_type_spec.rb
index 8accee90fa3..c588f8230de 100644
--- a/spec/graphql/types/repository/blob_type_spec.rb
+++ b/spec/graphql/types/repository/blob_type_spec.rb
@@ -24,7 +24,8 @@ RSpec.describe Types::Repository::BlobType do
:raw_path,
:replace_path,
:simple_viewer,
- :rich_viewer
+ :rich_viewer,
+ :plain_data
)
end
end
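
The new plain_data GraphQL field asserted here is backed by the BlobPresenter#plain_data examples at the end of this patch. For reference, a minimal sketch of how such a field might be declared on Types::Repository::BlobType; the return type, nullability, and description below are assumptions, not part of this patch:

    # Hypothetical declaration; the resolver would delegate to the presenter.
    field :plain_data, GraphQL::Types::String,
          null: true,
          calls_gitaly: true,
          description: 'Blob content rendered as plain text with syntax highlighting.'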
diff --git a/spec/lib/gitlab/ci/build/cache_spec.rb b/spec/lib/gitlab/ci/build/cache_spec.rb
index 9188045988b..7477aedb994 100644
--- a/spec/lib/gitlab/ci/build/cache_spec.rb
+++ b/spec/lib/gitlab/ci/build/cache_spec.rb
@@ -4,11 +4,23 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Build::Cache do
describe '.initialize' do
- context 'when the multiple cache feature flag is disabled' do
- before do
- stub_feature_flags(multiple_cache_per_job: false)
+ context 'when the cache is an array' do
+ it 'instantiates an array of cache seeds' do
+ cache_config = [{ key: 'key-a' }, { key: 'key-b' }]
+ pipeline = double(::Ci::Pipeline)
+ cache_seed_a = double(Gitlab::Ci::Pipeline::Seed::Build::Cache)
+ cache_seed_b = double(Gitlab::Ci::Pipeline::Seed::Build::Cache)
+ allow(Gitlab::Ci::Pipeline::Seed::Build::Cache).to receive(:new).and_return(cache_seed_a, cache_seed_b)
+
+ cache = described_class.new(cache_config, pipeline)
+
+ expect(Gitlab::Ci::Pipeline::Seed::Build::Cache).to have_received(:new).with(pipeline, { key: 'key-a' })
+ expect(Gitlab::Ci::Pipeline::Seed::Build::Cache).to have_received(:new).with(pipeline, { key: 'key-b' })
+ expect(cache.instance_variable_get(:@cache)).to eq([cache_seed_a, cache_seed_b])
end
+ end
+ context 'when the cache is a hash' do
it 'instantiates a cache seed' do
cache_config = { key: 'key-a' }
pipeline = double(::Ci::Pipeline)
@@ -18,87 +30,35 @@ RSpec.describe Gitlab::Ci::Build::Cache do
cache = described_class.new(cache_config, pipeline)
expect(Gitlab::Ci::Pipeline::Seed::Build::Cache).to have_received(:new).with(pipeline, cache_config)
- expect(cache.instance_variable_get(:@cache)).to eq(cache_seed)
- end
- end
-
- context 'when the multiple cache feature flag is enabled' do
- context 'when the cache is an array' do
- it 'instantiates an array of cache seeds' do
- cache_config = [{ key: 'key-a' }, { key: 'key-b' }]
- pipeline = double(::Ci::Pipeline)
- cache_seed_a = double(Gitlab::Ci::Pipeline::Seed::Build::Cache)
- cache_seed_b = double(Gitlab::Ci::Pipeline::Seed::Build::Cache)
- allow(Gitlab::Ci::Pipeline::Seed::Build::Cache).to receive(:new).and_return(cache_seed_a, cache_seed_b)
-
- cache = described_class.new(cache_config, pipeline)
-
- expect(Gitlab::Ci::Pipeline::Seed::Build::Cache).to have_received(:new).with(pipeline, { key: 'key-a' })
- expect(Gitlab::Ci::Pipeline::Seed::Build::Cache).to have_received(:new).with(pipeline, { key: 'key-b' })
- expect(cache.instance_variable_get(:@cache)).to eq([cache_seed_a, cache_seed_b])
- end
- end
-
- context 'when the cache is a hash' do
- it 'instantiates a cache seed' do
- cache_config = { key: 'key-a' }
- pipeline = double(::Ci::Pipeline)
- cache_seed = double(Gitlab::Ci::Pipeline::Seed::Build::Cache)
- allow(Gitlab::Ci::Pipeline::Seed::Build::Cache).to receive(:new).and_return(cache_seed)
-
- cache = described_class.new(cache_config, pipeline)
-
- expect(Gitlab::Ci::Pipeline::Seed::Build::Cache).to have_received(:new).with(pipeline, cache_config)
- expect(cache.instance_variable_get(:@cache)).to eq([cache_seed])
- end
+ expect(cache.instance_variable_get(:@cache)).to eq([cache_seed])
end
end
end
describe '#cache_attributes' do
- context 'when the multiple cache feature flag is disabled' do
- before do
- stub_feature_flags(multiple_cache_per_job: false)
- end
-
- it "returns the cache seed's build attributes" do
- cache_config = { key: 'key-a' }
+ context 'when there are no caches' do
+ it 'returns an empty hash' do
+ cache_config = []
pipeline = double(::Ci::Pipeline)
cache = described_class.new(cache_config, pipeline)
attributes = cache.cache_attributes
- expect(attributes).to eq({
- options: { cache: { key: 'key-a' } }
- })
+ expect(attributes).to eq({})
end
end
- context 'when the multiple cache feature flag is enabled' do
- context 'when there are no caches' do
- it 'returns an empty hash' do
- cache_config = []
- pipeline = double(::Ci::Pipeline)
- cache = described_class.new(cache_config, pipeline)
-
- attributes = cache.cache_attributes
-
- expect(attributes).to eq({})
- end
- end
-
- context 'when there are caches' do
- it 'returns the structured attributes for the caches' do
- cache_config = [{ key: 'key-a' }, { key: 'key-b' }]
- pipeline = double(::Ci::Pipeline)
- cache = described_class.new(cache_config, pipeline)
+ context 'when there are caches' do
+ it 'returns the structured attributes for the caches' do
+ cache_config = [{ key: 'key-a' }, { key: 'key-b' }]
+ pipeline = double(::Ci::Pipeline)
+ cache = described_class.new(cache_config, pipeline)
- attributes = cache.cache_attributes
+ attributes = cache.cache_attributes
- expect(attributes).to eq({
- options: { cache: cache_config }
- })
- end
+ expect(attributes).to eq({
+ options: { cache: cache_config }
+ })
end
end
end
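
Taken together, these examples pin down the reworked Cache behavior: configuration is always normalized to an array of pipeline seeds, and cache_attributes returns an empty hash when no caches are configured. A sketch consistent with the spec, assuming each seed exposes an attributes method; note Array.wrap (ActiveSupport) is used because, unlike Kernel#Array, it wraps a Hash as a single element:

    module Gitlab
      module Ci
        module Build
          class Cache
            def initialize(cache_config, pipeline)
              # Hash and Array configs converge on an array of seeds.
              @cache = Array.wrap(cache_config).map do |cache|
                Gitlab::Ci::Pipeline::Seed::Build::Cache.new(pipeline, cache)
              end
            end

            def cache_attributes
              return {} if @cache.empty?

              { options: { cache: @cache.map(&:attributes) } }
            end
          end
        end
      end
    end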
diff --git a/spec/lib/gitlab/ci/config/entry/cache_spec.rb b/spec/lib/gitlab/ci/config/entry/cache_spec.rb
index cec1c97085b..247f4b63910 100644
--- a/spec/lib/gitlab/ci/config/entry/cache_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/cache_spec.rb
@@ -7,295 +7,227 @@ RSpec.describe Gitlab::Ci::Config::Entry::Cache do
subject(:entry) { described_class.new(config) }
- context 'with multiple caches' do
+ describe 'validations' do
before do
entry.compose!
end
- describe '#valid?' do
- context 'with an empty hash as cache' do
- let(:config) { {} }
-
- it 'is valid' do
- expect(entry).to be_valid
- end
- end
-
- context 'when configuration is valid with a single cache' do
- let(:config) { { key: 'key', paths: ["logs/"], untracked: true } }
-
- it 'is valid' do
- expect(entry).to be_valid
+ context 'when entry config value is correct' do
+ let(:policy) { nil }
+ let(:key) { 'some key' }
+ let(:when_config) { nil }
+
+ let(:config) do
+ {
+ key: key,
+ untracked: true,
+ paths: ['some/path/']
+ }.tap do |config|
+ config[:policy] = policy if policy
+ config[:when] = when_config if when_config
end
end
- context 'when configuration is valid with multiple caches' do
- let(:config) do
- [
- { key: 'key', paths: ["logs/"], untracked: true },
- { key: 'key2', paths: ["logs/"], untracked: true },
- { key: 'key3', paths: ["logs/"], untracked: true }
- ]
+ describe '#value' do
+ shared_examples 'hash key value' do
+ it 'returns hash value' do
+ expect(entry.value).to eq(key: key, untracked: true, paths: ['some/path/'], policy: 'pull-push', when: 'on_success')
+ end
end
- it 'is valid' do
- expect(entry).to be_valid
- end
- end
+ it_behaves_like 'hash key value'
- context 'when configuration is not a Hash or Array' do
- let(:config) { 'invalid' }
+ context 'with files' do
+ let(:key) { { files: %w[a-file other-file] } }
- it 'is invalid' do
- expect(entry).not_to be_valid
+ it_behaves_like 'hash key value'
end
- end
- context 'when entry values contain more than four caches' do
- let(:config) do
- [
- { key: 'key', paths: ["logs/"], untracked: true },
- { key: 'key2', paths: ["logs/"], untracked: true },
- { key: 'key3', paths: ["logs/"], untracked: true },
- { key: 'key4', paths: ["logs/"], untracked: true },
- { key: 'key5', paths: ["logs/"], untracked: true }
- ]
- end
+ context 'with files and prefix' do
+ let(:key) { { files: %w[a-file other-file], prefix: 'prefix-value' } }
- it 'is invalid' do
- expect(entry.errors).to eq(["caches config no more than 4 caches can be created"])
- expect(entry).not_to be_valid
+ it_behaves_like 'hash key value'
end
- end
- end
- end
- context 'with a single cache' do
- before do
- stub_feature_flags(multiple_cache_per_job: false)
- end
- describe 'validations' do
- before do
- entry.compose!
- end
-
- context 'when entry config value is correct' do
- let(:policy) { nil }
- let(:key) { 'some key' }
- let(:when_config) { nil }
+ context 'with prefix' do
+ let(:key) { { prefix: 'prefix-value' } }
- let(:config) do
- {
- key: key,
- untracked: true,
- paths: ['some/path/']
- }.tap do |config|
- config[:policy] = policy if policy
- config[:when] = when_config if when_config
+ it 'key is nil' do
+ expect(entry.value).to match(a_hash_including(key: nil))
end
end
- describe '#value' do
- shared_examples 'hash key value' do
- it 'returns hash value' do
- expect(entry.value).to eq(key: key, untracked: true, paths: ['some/path/'], policy: 'pull-push', when: 'on_success')
- end
- end
-
- it_behaves_like 'hash key value'
-
- context 'with files' do
- let(:key) { { files: %w[a-file other-file] } }
-
- it_behaves_like 'hash key value'
- end
-
- context 'with files and prefix' do
- let(:key) { { files: %w[a-file other-file], prefix: 'prefix-value' } }
-
- it_behaves_like 'hash key value'
+ context 'with `policy`' do
+ where(:policy, :result) do
+ 'pull-push' | 'pull-push'
+ 'push' | 'push'
+ 'pull' | 'pull'
+ 'unknown' | 'unknown' # invalid
end
- context 'with prefix' do
- let(:key) { { prefix: 'prefix-value' } }
-
- it 'key is nil' do
- expect(entry.value).to match(a_hash_including(key: nil))
- end
+ with_them do
+ it { expect(entry.value).to include(policy: result) }
end
+ end
- context 'with `policy`' do
- where(:policy, :result) do
- 'pull-push' | 'pull-push'
- 'push' | 'push'
- 'pull' | 'pull'
- 'unknown' | 'unknown' # invalid
- end
-
- with_them do
- it { expect(entry.value).to include(policy: result) }
- end
+ context 'without `policy`' do
+ it 'assigns policy to default' do
+ expect(entry.value).to include(policy: 'pull-push')
end
+ end
- context 'without `policy`' do
- it 'assigns policy to default' do
- expect(entry.value).to include(policy: 'pull-push')
- end
+ context 'with `when`' do
+ where(:when_config, :result) do
+ 'on_success' | 'on_success'
+ 'on_failure' | 'on_failure'
+ 'always' | 'always'
+ 'unknown' | 'unknown' # invalid
end
- context 'with `when`' do
- where(:when_config, :result) do
- 'on_success' | 'on_success'
- 'on_failure' | 'on_failure'
- 'always' | 'always'
- 'unknown' | 'unknown' # invalid
- end
-
- with_them do
- it { expect(entry.value).to include(when: result) }
- end
+ with_them do
+ it { expect(entry.value).to include(when: result) }
end
+ end
- context 'without `when`' do
- it 'assigns when to default' do
- expect(entry.value).to include(when: 'on_success')
- end
+ context 'without `when`' do
+ it 'assigns when to default' do
+ expect(entry.value).to include(when: 'on_success')
end
end
+ end
- describe '#valid?' do
- it { is_expected.to be_valid }
+ describe '#valid?' do
+ it { is_expected.to be_valid }
- context 'with files' do
- let(:key) { { files: %w[a-file other-file] } }
+ context 'with files' do
+ let(:key) { { files: %w[a-file other-file] } }
- it { is_expected.to be_valid }
- end
+ it { is_expected.to be_valid }
end
+ end
- context 'with `policy`' do
- where(:policy, :valid) do
- 'pull-push' | true
- 'push' | true
- 'pull' | true
- 'unknown' | false
- end
+ context 'with `policy`' do
+ where(:policy, :valid) do
+ 'pull-push' | true
+ 'push' | true
+ 'pull' | true
+ 'unknown' | false
+ end
- with_them do
- it 'returns expected validity' do
- expect(entry.valid?).to eq(valid)
- end
+ with_them do
+ it 'returns expected validity' do
+ expect(entry.valid?).to eq(valid)
end
end
+ end
- context 'with `when`' do
- where(:when_config, :valid) do
- 'on_success' | true
- 'on_failure' | true
- 'always' | true
- 'unknown' | false
- end
+ context 'with `when`' do
+ where(:when_config, :valid) do
+ 'on_success' | true
+ 'on_failure' | true
+ 'always' | true
+ 'unknown' | false
+ end
- with_them do
- it 'returns expected validity' do
- expect(entry.valid?).to eq(valid)
- end
+ with_them do
+ it 'returns expected validity' do
+ expect(entry.valid?).to eq(valid)
end
end
+ end
- context 'with key missing' do
- let(:config) do
- { untracked: true,
- paths: ['some/path/'] }
- end
+ context 'with key missing' do
+ let(:config) do
+ { untracked: true,
+ paths: ['some/path/'] }
+ end
- describe '#value' do
- it 'sets key with the default' do
- expect(entry.value[:key])
- .to eq(Gitlab::Ci::Config::Entry::Key.default)
- end
+ describe '#value' do
+ it 'sets key with the default' do
+ expect(entry.value[:key])
+ .to eq(Gitlab::Ci::Config::Entry::Key.default)
end
end
end
+ end
- context 'when entry value is not correct' do
- describe '#errors' do
- subject { entry.errors }
+ context 'when entry value is not correct' do
+ describe '#errors' do
+ subject { entry.errors }
- context 'when is not a hash' do
- let(:config) { 'ls' }
+ context 'when is not a hash' do
+ let(:config) { 'ls' }
- it 'reports errors with config value' do
- is_expected.to include 'cache config should be a hash'
- end
+ it 'reports errors with config value' do
+ is_expected.to include 'cache config should be a hash'
end
+ end
- context 'when policy is unknown' do
- let(:config) { { policy: 'unknown' } }
+ context 'when policy is unknown' do
+ let(:config) { { policy: 'unknown' } }
- it 'reports error' do
- is_expected.to include('cache policy should be pull-push, push, or pull')
- end
+ it 'reports error' do
+ is_expected.to include('cache policy should be pull-push, push, or pull')
end
+ end
- context 'when `when` is unknown' do
- let(:config) { { when: 'unknown' } }
+ context 'when `when` is unknown' do
+ let(:config) { { when: 'unknown' } }
- it 'reports error' do
- is_expected.to include('cache when should be on_success, on_failure or always')
- end
+ it 'reports error' do
+ is_expected.to include('cache when should be on_success, on_failure or always')
end
+ end
- context 'when descendants are invalid' do
- context 'with invalid keys' do
- let(:config) { { key: 1 } }
-
- it 'reports error with descendants' do
- is_expected.to include 'key should be a hash, a string or a symbol'
- end
- end
-
- context 'with empty key' do
- let(:config) { { key: {} } }
+ context 'when descendants are invalid' do
+ context 'with invalid keys' do
+ let(:config) { { key: 1 } }
- it 'reports error with descendants' do
- is_expected.to include 'key config missing required keys: files'
- end
+ it 'reports error with descendants' do
+ is_expected.to include 'key should be a hash, a string or a symbol'
end
+ end
- context 'with invalid files' do
- let(:config) { { key: { files: 'a-file' } } }
+ context 'with empty key' do
+ let(:config) { { key: {} } }
- it 'reports error with descendants' do
- is_expected.to include 'key:files config should be an array of strings'
- end
+ it 'reports error with descendants' do
+ is_expected.to include 'key config missing required keys: files'
end
+ end
- context 'with prefix without files' do
- let(:config) { { key: { prefix: 'a-prefix' } } }
+ context 'with invalid files' do
+ let(:config) { { key: { files: 'a-file' } } }
- it 'reports error with descendants' do
- is_expected.to include 'key config missing required keys: files'
- end
+ it 'reports error with descendants' do
+ is_expected.to include 'key:files config should be an array of strings'
end
+ end
- context 'when there is an unknown key present' do
- let(:config) { { key: { unknown: 'a-file' } } }
+ context 'with prefix without files' do
+ let(:config) { { key: { prefix: 'a-prefix' } } }
- it 'reports error with descendants' do
- is_expected.to include 'key config contains unknown keys: unknown'
- end
+ it 'reports error with descendants' do
+ is_expected.to include 'key config missing required keys: files'
end
end
context 'when there is an unknown key present' do
- let(:config) { { invalid: true } }
+ let(:config) { { key: { unknown: 'a-file' } } }
it 'reports error with descendants' do
- is_expected.to include 'cache config contains unknown keys: invalid'
+ is_expected.to include 'key config contains unknown keys: unknown'
end
end
end
+
+ context 'when there is an unknown key present' do
+ let(:config) { { invalid: true } }
+
+ it 'reports error with descendants' do
+ is_expected.to include 'cache config contains unknown keys: invalid'
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/caches_spec.rb b/spec/lib/gitlab/ci/config/entry/caches_spec.rb
new file mode 100644
index 00000000000..047cef53b96
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/entry/caches_spec.rb
@@ -0,0 +1,70 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Config::Entry::Caches do
+ using RSpec::Parameterized::TableSyntax
+
+ subject(:entry) { described_class.new(config) }
+
+ before do
+ entry.compose!
+ end
+
+ describe '#valid?' do
+ context 'with an empty hash as cache' do
+ let(:config) { {} }
+
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
+
+ context 'when configuration is valid with a single cache' do
+ let(:config) { { key: 'key', paths: ["logs/"], untracked: true } }
+
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
+
+ context 'when configuration is valid with multiple caches' do
+ let(:config) do
+ [
+ { key: 'key', paths: ["logs/"], untracked: true },
+ { key: 'key2', paths: ["logs/"], untracked: true },
+ { key: 'key3', paths: ["logs/"], untracked: true }
+ ]
+ end
+
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
+
+ context 'when configuration is not a Hash or Array' do
+ let(:config) { 'invalid' }
+
+ it 'is invalid' do
+ expect(entry).not_to be_valid
+ end
+ end
+
+ context 'when entry values contain more than four caches' do
+ let(:config) do
+ [
+ { key: 'key', paths: ["logs/"], untracked: true },
+ { key: 'key2', paths: ["logs/"], untracked: true },
+ { key: 'key3', paths: ["logs/"], untracked: true },
+ { key: 'key4', paths: ["logs/"], untracked: true },
+ { key: 'key5', paths: ["logs/"], untracked: true }
+ ]
+ end
+
+ it 'is invalid' do
+ expect(entry.errors).to eq(["caches config no more than 4 caches can be created"])
+ expect(entry).not_to be_valid
+ end
+ end
+ end
+end
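
This new Caches entry takes over the multi-cache validations that were removed from cache_spec.rb above. A hedged sketch of the entry; the ComposableArray base class and validation DSL usage are assumptions modeled on neighbouring entries, while the four-cache limit and error message mirror the spec:

    module Gitlab
      module Ci
        module Config
          module Entry
            class Caches < ::Gitlab::Config::Entry::ComposableArray
              MULTIPLE_CACHES_LIMIT = 4

              validations do
                validate do
                  unless config.is_a?(Hash) || config.is_a?(Array)
                    errors.add(:config, 'should be a hash or an array')
                  end

                  if config.is_a?(Array) && config.size > MULTIPLE_CACHES_LIMIT
                    errors.add(:config, "no more than #{MULTIPLE_CACHES_LIMIT} caches can be created")
                  end
                end
              end

              def composable_class
                Entry::Cache
              end
            end
          end
        end
      end
    end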
diff --git a/spec/lib/gitlab/ci/config/entry/job_spec.rb b/spec/lib/gitlab/ci/config/entry/job_spec.rb
index ffcd029172a..1d23ab0c2c7 100644
--- a/spec/lib/gitlab/ci/config/entry/job_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/job_spec.rb
@@ -556,42 +556,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Job do
end
end
- context 'with multiple_cache_per_job FF disabled' do
- before do
- stub_feature_flags(multiple_cache_per_job: false)
- end
-
- context 'when job config overrides default config' do
- before do
- entry.compose!(deps)
- end
-
- let(:config) do
- { script: 'rspec', image: 'some_image', cache: { key: 'test' } }
- end
-
- it 'overrides default config' do
- expect(entry[:image].value).to eq(name: 'some_image')
- expect(entry[:cache].value).to eq(key: 'test', policy: 'pull-push', when: 'on_success')
- end
- end
-
- context 'when job config does not override default config' do
- before do
- allow(default).to receive('[]').with(:image).and_return(specified)
-
- entry.compose!(deps)
- end
-
- let(:config) { { script: 'ls', cache: { key: 'test' } } }
-
- it 'uses config from default entry' do
- expect(entry[:image].value).to eq 'specified'
- expect(entry[:cache].value).to eq(key: 'test', policy: 'pull-push', when: 'on_success')
- end
- end
- end
-
context 'with workflow rules' do
using RSpec::Parameterized::TableSyntax
diff --git a/spec/lib/gitlab/ci/config/entry/root_spec.rb b/spec/lib/gitlab/ci/config/entry/root_spec.rb
index 041eb748fc9..31e3545e8d8 100644
--- a/spec/lib/gitlab/ci/config/entry/root_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/root_spec.rb
@@ -175,68 +175,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
)
end
end
-
- context 'with multuple_cache_per_job FF disabled' do
- before do
- stub_feature_flags(multiple_cache_per_job: false)
- root.compose!
- end
-
- describe '#jobs_value' do
- it 'returns jobs configuration' do
- expect(root.jobs_value.keys).to eq([:rspec, :spinach, :release])
- expect(root.jobs_value[:rspec]).to eq(
- { name: :rspec,
- script: %w[rspec ls],
- before_script: %w(ls pwd),
- image: { name: 'ruby:2.7' },
- services: [{ name: 'postgres:9.1' }, { name: 'mysql:5.5' }],
- stage: 'test',
- cache: { key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push', when: 'on_success' },
- variables: { 'VAR' => 'root', 'VAR2' => 'val 2' },
- job_variables: {},
- root_variables_inheritance: true,
- ignore: false,
- after_script: ['make clean'],
- only: { refs: %w[branches tags] },
- scheduling_type: :stage }
- )
- expect(root.jobs_value[:spinach]).to eq(
- { name: :spinach,
- before_script: [],
- script: %w[spinach],
- image: { name: 'ruby:2.7' },
- services: [{ name: 'postgres:9.1' }, { name: 'mysql:5.5' }],
- stage: 'test',
- cache: { key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push', when: 'on_success' },
- variables: { 'VAR' => 'root', 'VAR2' => 'val 2' },
- job_variables: {},
- root_variables_inheritance: true,
- ignore: false,
- after_script: ['make clean'],
- only: { refs: %w[branches tags] },
- scheduling_type: :stage }
- )
- expect(root.jobs_value[:release]).to eq(
- { name: :release,
- stage: 'release',
- before_script: [],
- script: ["make changelog | tee release_changelog.txt"],
- release: { name: "Release $CI_TAG_NAME", tag_name: 'v0.06', description: "./release_changelog.txt" },
- image: { name: "ruby:2.7" },
- services: [{ name: "postgres:9.1" }, { name: "mysql:5.5" }],
- cache: { key: "k", untracked: true, paths: ["public/"], policy: "pull-push", when: 'on_success' },
- only: { refs: %w(branches tags) },
- variables: { 'VAR' => 'job', 'VAR2' => 'val 2' },
- job_variables: { 'VAR' => 'job' },
- root_variables_inheritance: true,
- after_script: [],
- ignore: false,
- scheduling_type: :stage }
- )
- end
- end
- end
end
end
@@ -255,56 +193,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
spinach: { before_script: [], variables: { VAR: 'job' }, script: 'spinach' } }
end
- context 'with multiple_cache_per_job FF disabled' do
- context 'when composed' do
- before do
- stub_feature_flags(multiple_cache_per_job: false)
- root.compose!
- end
-
- describe '#errors' do
- it 'has no errors' do
- expect(root.errors).to be_empty
- end
- end
-
- describe '#jobs_value' do
- it 'returns jobs configuration' do
- expect(root.jobs_value).to eq(
- rspec: { name: :rspec,
- script: %w[rspec ls],
- before_script: %w(ls pwd),
- image: { name: 'ruby:2.7' },
- services: [{ name: 'postgres:9.1' }, { name: 'mysql:5.5' }],
- stage: 'test',
- cache: { key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push', when: 'on_success' },
- variables: { 'VAR' => 'root' },
- job_variables: {},
- root_variables_inheritance: true,
- ignore: false,
- after_script: ['make clean'],
- only: { refs: %w[branches tags] },
- scheduling_type: :stage },
- spinach: { name: :spinach,
- before_script: [],
- script: %w[spinach],
- image: { name: 'ruby:2.7' },
- services: [{ name: 'postgres:9.1' }, { name: 'mysql:5.5' }],
- stage: 'test',
- cache: { key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push', when: 'on_success' },
- variables: { 'VAR' => 'job' },
- job_variables: { 'VAR' => 'job' },
- root_variables_inheritance: true,
- ignore: false,
- after_script: ['make clean'],
- only: { refs: %w[branches tags] },
- scheduling_type: :stage }
- )
- end
- end
- end
- end
-
context 'when composed' do
before do
root.compose!
@@ -390,19 +278,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
expect(root.cache_value).to eq([key: 'a', policy: 'pull-push', when: 'on_success'])
end
end
-
- context 'with multiple_cache_per_job FF disabled' do
- before do
- stub_feature_flags(multiple_cache_per_job: false)
- root.compose!
- end
-
- describe '#cache_value' do
- it 'returns correct cache definition' do
- expect(root.cache_value).to eq(key: 'a', policy: 'pull-push', when: 'on_success')
- end
- end
- end
end
context 'when variables resembles script-type job' do
@@ -525,7 +400,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
context 'when entry exists' do
it 'returns correct entry' do
expect(root[:cache])
- .to be_an_instance_of Gitlab::Ci::Config::Entry::Cache
+ .to be_an_instance_of Gitlab::Ci::Config::Entry::Caches
expect(root[:jobs][:rspec][:script].value).to eq ['ls']
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/seed/build/cache_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/build/cache_spec.rb
index 773cb61b946..910c12389c3 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/build/cache_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/build/cache_spec.rb
@@ -9,253 +9,6 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build::Cache do
let(:processor) { described_class.new(pipeline, config) }
- context 'with multiple_cache_per_job ff disabled' do
- before do
- stub_feature_flags(multiple_cache_per_job: false)
- end
-
- describe '#build_attributes' do
- subject { processor.build_attributes }
-
- context 'with cache:key' do
- let(:config) do
- {
- key: 'a-key',
- paths: ['vendor/ruby']
- }
- end
-
- it { is_expected.to include(options: { cache: config }) }
- end
-
- context 'with cache:key as a symbol' do
- let(:config) do
- {
- key: :a_key,
- paths: ['vendor/ruby']
- }
- end
-
- it { is_expected.to include(options: { cache: config.merge(key: "a_key") }) }
- end
-
- context 'with cache:key:files' do
- shared_examples 'default key' do
- let(:config) do
- { key: { files: files } }
- end
-
- it 'uses default key' do
- expected = { options: { cache: { key: 'default' } } }
-
- is_expected.to include(expected)
- end
- end
-
- shared_examples 'version and gemfile files' do
- let(:config) do
- {
- key: {
- files: files
- },
- paths: ['vendor/ruby']
- }
- end
-
- it 'builds a string key' do
- expected = {
- options: {
- cache: {
- key: '703ecc8fef1635427a1f86a8a1a308831c122392',
- paths: ['vendor/ruby']
- }
- }
- }
-
- is_expected.to include(expected)
- end
- end
-
- context 'with existing files' do
- let(:files) { ['VERSION', 'Gemfile.zip'] }
-
- it_behaves_like 'version and gemfile files'
- end
-
- context 'with files starting with ./' do
- let(:files) { ['Gemfile.zip', './VERSION'] }
-
- it_behaves_like 'version and gemfile files'
- end
-
- context 'with files ending with /' do
- let(:files) { ['Gemfile.zip/'] }
-
- it_behaves_like 'default key'
- end
-
- context 'with new line in filenames' do
- let(:files) { ["Gemfile.zip\nVERSION"] }
-
- it_behaves_like 'default key'
- end
-
- context 'with missing files' do
- let(:files) { ['project-gemfile.lock', ''] }
-
- it_behaves_like 'default key'
- end
-
- context 'with directories' do
- shared_examples 'foo/bar directory key' do
- let(:config) do
- {
- key: {
- files: files
- }
- }
- end
-
- it 'builds a string key' do
- expected = {
- options: {
- cache: { key: '74bf43fb1090f161bdd4e265802775dbda2f03d1' }
- }
- }
-
- is_expected.to include(expected)
- end
- end
-
- context 'with directory' do
- let(:files) { ['foo/bar'] }
-
- it_behaves_like 'foo/bar directory key'
- end
-
- context 'with directory ending in slash' do
- let(:files) { ['foo/bar/'] }
-
- it_behaves_like 'foo/bar directory key'
- end
-
- context 'with directories ending in slash star' do
- let(:files) { ['foo/bar/*'] }
-
- it_behaves_like 'foo/bar directory key'
- end
- end
- end
-
- context 'with cache:key:prefix' do
- context 'without files' do
- let(:config) do
- {
- key: {
- prefix: 'a-prefix'
- },
- paths: ['vendor/ruby']
- }
- end
-
- it 'adds prefix to default key' do
- expected = {
- options: {
- cache: {
- key: 'a-prefix-default',
- paths: ['vendor/ruby']
- }
- }
- }
-
- is_expected.to include(expected)
- end
- end
-
- context 'with existing files' do
- let(:config) do
- {
- key: {
- files: ['VERSION', 'Gemfile.zip'],
- prefix: 'a-prefix'
- },
- paths: ['vendor/ruby']
- }
- end
-
- it 'adds prefix key' do
- expected = {
- options: {
- cache: {
- key: 'a-prefix-703ecc8fef1635427a1f86a8a1a308831c122392',
- paths: ['vendor/ruby']
- }
- }
- }
-
- is_expected.to include(expected)
- end
- end
-
- context 'with missing files' do
- let(:config) do
- {
- key: {
- files: ['project-gemfile.lock', ''],
- prefix: 'a-prefix'
- },
- paths: ['vendor/ruby']
- }
- end
-
- it 'adds prefix to default key' do
- expected = {
- options: {
- cache: {
- key: 'a-prefix-default',
- paths: ['vendor/ruby']
- }
- }
- }
-
- is_expected.to include(expected)
- end
- end
- end
-
- context 'with all cache option keys' do
- let(:config) do
- {
- key: 'a-key',
- paths: ['vendor/ruby'],
- untracked: true,
- policy: 'push',
- when: 'on_success'
- }
- end
-
- it { is_expected.to include(options: { cache: config }) }
- end
-
- context 'with unknown cache option keys' do
- let(:config) do
- {
- key: 'a-key',
- unknown_key: true
- }
- end
-
- it { expect { subject }.to raise_error(ArgumentError, /unknown_key/) }
- end
-
- context 'with empty config' do
- let(:config) { {} }
-
- it { is_expected.to include(options: {}) }
- end
- end
- end
-
describe '#attributes' do
subject { processor.attributes }
diff --git a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
index c8401054f99..058fb25807d 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
@@ -90,91 +90,6 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
end
end
- context 'with multiple_cache_per_job FF disabled' do
- before do
- stub_feature_flags(multiple_cache_per_job: false)
- end
-
- context 'with cache:key' do
- let(:attributes) do
- {
- name: 'rspec',
- ref: 'master',
- cache: {
- key: 'a-value'
- }
- }
- end
-
- it { is_expected.to include(options: { cache: { key: 'a-value' } }) }
- end
-
- context 'with cache:key:files' do
- let(:attributes) do
- {
- name: 'rspec',
- ref: 'master',
- cache: {
- key: {
- files: ['VERSION']
- }
- }
- }
- end
-
- it 'includes cache options' do
- cache_options = {
- options: {
- cache: { key: 'f155568ad0933d8358f66b846133614f76dd0ca4' }
- }
- }
-
- is_expected.to include(cache_options)
- end
- end
-
- context 'with cache:key:prefix' do
- let(:attributes) do
- {
- name: 'rspec',
- ref: 'master',
- cache: {
- key: {
- prefix: 'something'
- }
- }
- }
- end
-
- it { is_expected.to include(options: { cache: { key: 'something-default' } }) }
- end
-
- context 'with cache:key:files and prefix' do
- let(:attributes) do
- {
- name: 'rspec',
- ref: 'master',
- cache: {
- key: {
- files: ['VERSION'],
- prefix: 'something'
- }
- }
- }
- end
-
- it 'includes cache options' do
- cache_options = {
- options: {
- cache: { key: 'something-f155568ad0933d8358f66b846133614f76dd0ca4' }
- }
- }
-
- is_expected.to include(cache_options)
- end
- end
- end
-
context 'with cache:key' do
let(:attributes) do
{
diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb
index ad94dfc9160..94ab4819361 100644
--- a/spec/lib/gitlab/ci/yaml_processor_spec.rb
+++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb
@@ -1419,155 +1419,6 @@ module Gitlab
end
end
- context 'with multiple_cache_per_job FF disabled' do
- before do
- stub_feature_flags(multiple_cache_per_job: false)
- end
- describe 'cache' do
- context 'when cache definition has unknown keys' do
- let(:config) do
- YAML.dump(
- { cache: { untracked: true, invalid: 'key' },
- rspec: { script: 'rspec' } })
- end
-
- it_behaves_like 'returns errors', 'cache config contains unknown keys: invalid'
- end
-
- it "returns cache when defined globally" do
- config = YAML.dump({
- cache: { paths: ["logs/", "binaries/"], untracked: true, key: 'key' },
- rspec: {
- script: "rspec"
- }
- })
-
- config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
-
- expect(config_processor.stage_builds_attributes("test").size).to eq(1)
- expect(config_processor.stage_builds_attributes("test").first[:cache]).to eq(
- paths: ["logs/", "binaries/"],
- untracked: true,
- key: 'key',
- policy: 'pull-push',
- when: 'on_success'
- )
- end
-
- it "returns cache when defined in default context" do
- config = YAML.dump(
- {
- default: {
- cache: { paths: ["logs/", "binaries/"], untracked: true, key: { files: ['file'] } }
- },
- rspec: {
- script: "rspec"
- }
- })
-
- config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
-
- expect(config_processor.stage_builds_attributes("test").size).to eq(1)
- expect(config_processor.stage_builds_attributes("test").first[:cache]).to eq(
- paths: ["logs/", "binaries/"],
- untracked: true,
- key: { files: ['file'] },
- policy: 'pull-push',
- when: 'on_success'
- )
- end
-
- it 'returns cache key when defined in a job' do
- config = YAML.dump({
- rspec: {
- cache: { paths: ['logs/', 'binaries/'], untracked: true, key: 'key' },
- script: 'rspec'
- }
- })
-
- config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
-
- expect(config_processor.stage_builds_attributes('test').size).to eq(1)
- expect(config_processor.stage_builds_attributes('test').first[:cache]).to eq(
- paths: ['logs/', 'binaries/'],
- untracked: true,
- key: 'key',
- policy: 'pull-push',
- when: 'on_success'
- )
- end
-
- it 'returns cache files' do
- config = YAML.dump(
- rspec: {
- cache: {
- paths: ['logs/', 'binaries/'],
- untracked: true,
- key: { files: ['file'] }
- },
- script: 'rspec'
- }
- )
-
- config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
-
- expect(config_processor.stage_builds_attributes('test').size).to eq(1)
- expect(config_processor.stage_builds_attributes('test').first[:cache]).to eq(
- paths: ['logs/', 'binaries/'],
- untracked: true,
- key: { files: ['file'] },
- policy: 'pull-push',
- when: 'on_success'
- )
- end
-
- it 'returns cache files with prefix' do
- config = YAML.dump(
- rspec: {
- cache: {
- paths: ['logs/', 'binaries/'],
- untracked: true,
- key: { files: ['file'], prefix: 'prefix' }
- },
- script: 'rspec'
- }
- )
-
- config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
-
- expect(config_processor.stage_builds_attributes('test').size).to eq(1)
- expect(config_processor.stage_builds_attributes('test').first[:cache]).to eq(
- paths: ['logs/', 'binaries/'],
- untracked: true,
- key: { files: ['file'], prefix: 'prefix' },
- policy: 'pull-push',
- when: 'on_success'
- )
- end
-
- it "overwrite cache when defined for a job and globally" do
- config = YAML.dump({
- cache: { paths: ["logs/", "binaries/"], untracked: true, key: 'global' },
- rspec: {
- script: "rspec",
- cache: { paths: ["test/"], untracked: false, key: 'local' }
- }
- })
-
- config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
-
- expect(config_processor.stage_builds_attributes("test").size).to eq(1)
- expect(config_processor.stage_builds_attributes("test").first[:cache]).to eq(
- paths: ["test/"],
- untracked: false,
- key: 'local',
- policy: 'pull-push',
- when: 'on_success'
- )
- end
- end
- end
-
describe 'cache' do
context 'when cache definition has unknown keys' do
let(:config) do
diff --git a/spec/lib/gitlab/quick_actions/spend_time_and_date_separator_spec.rb b/spec/lib/gitlab/quick_actions/spend_time_and_date_separator_spec.rb
index 0b012bfd970..7e28649e634 100644
--- a/spec/lib/gitlab/quick_actions/spend_time_and_date_separator_spec.rb
+++ b/spec/lib/gitlab/quick_actions/spend_time_and_date_separator_spec.rb
@@ -13,7 +13,9 @@ RSpec.describe Gitlab::QuickActions::SpendTimeAndDateSeparator do
shared_examples 'arg line with valid parameters' do
it 'return time and date array' do
- expect(subject.new(valid_arg).execute).to eq(expected_response)
+ freeze_time do
+ expect(subject.new(valid_arg).execute).to eq(expected_response)
+ end
end
end
@@ -53,7 +55,7 @@ RSpec.describe Gitlab::QuickActions::SpendTimeAndDateSeparator do
it_behaves_like 'arg line with valid parameters' do
let(:valid_arg) { '2m 3m 5m 1h' }
let(:time) { Gitlab::TimeTrackingFormatter.parse(valid_arg) }
- let(:date) { DateTime.now.to_date }
+ let(:date) { DateTime.current }
let(:expected_response) { [time, date] }
end
end
diff --git a/spec/lib/gitlab/subscription_portal_spec.rb b/spec/lib/gitlab/subscription_portal_spec.rb
index ad1affdac0b..7f058086656 100644
--- a/spec/lib/gitlab/subscription_portal_spec.rb
+++ b/spec/lib/gitlab/subscription_portal_spec.rb
@@ -40,5 +40,42 @@ RSpec.describe ::Gitlab::SubscriptionPortal do
end
end
end
+
+ describe '.payment_form_url' do
+ subject { described_class.payment_form_url }
+
+ context 'on non-test and non-dev environments' do
+ before do
+ allow(Rails).to receive_message_chain(:env, :test?).and_return(false)
+ allow(Rails).to receive_message_chain(:env, :development?).and_return(false)
+ end
+
+ it 'returns URL to production payment form' do
+ is_expected.to eq('https://customers.gitlab.com/payment_forms/cc_validation')
+ end
+ end
+
+ context 'on dev environment' do
+ before do
+ allow(Rails).to receive_message_chain(:env, :test?).and_return(false)
+ allow(Rails).to receive_message_chain(:env, :development?).and_return(true)
+ end
+
+ it 'returns URL to staging payment form' do
+ is_expected.to eq('https://customers.stg.gitlab.com/payment_forms/cc_validation')
+ end
+ end
+
+ context 'on test environment' do
+ before do
+ allow(Rails).to receive_message_chain(:env, :test?).and_return(true)
+ allow(Rails).to receive_message_chain(:env, :development?).and_return(false)
+ end
+
+ it 'returns URL to staging payment form' do
+ is_expected.to eq('https://customers.stg.gitlab.com/payment_forms/cc_validation')
+ end
+ end
+ end
end
end
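
The three contexts fully determine the routing logic: staging for test and development environments, production otherwise. A sketch derived directly from the assertions (placement of the method on the module is an assumption):

    def self.payment_form_url
      # Test and development environments hit the staging Customers instance.
      if Rails.env.test? || Rails.env.development?
        'https://customers.stg.gitlab.com/payment_forms/cc_validation'
      else
        'https://customers.gitlab.com/payment_forms/cc_validation'
      end
    end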
diff --git a/spec/lib/gitlab/usage/metric_definition_spec.rb b/spec/lib/gitlab/usage/metric_definition_spec.rb
index e99d720058a..c352e5bb36f 100644
--- a/spec/lib/gitlab/usage/metric_definition_spec.rb
+++ b/spec/lib/gitlab/usage/metric_definition_spec.rb
@@ -25,6 +25,12 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
let(:definition) { described_class.new(path, attributes) }
let(:yaml_content) { attributes.deep_stringify_keys.to_yaml }
+ around do |example|
+ described_class.instance_variable_set(:@definitions, nil)
+ example.run
+ described_class.instance_variable_set(:@definitions, nil)
+ end
+
def write_metric(metric, path, content)
path = File.join(metric, path)
dir = File.dirname(path)
@@ -62,6 +68,9 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
:distribution | 'test'
:tier | %w(test ee)
:name | 'count_<adjective_describing>_boards'
+
+ :instrumentation_class | 'Gitlab::Usage::Metrics::Instrumentations::Metric_Class'
+ :instrumentation_class | 'Gitlab::Usage::Metrics::MetricClass'
end
with_them do
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/uuid_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/uuid_metric_spec.rb
new file mode 100644
index 00000000000..212dd3dc851
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/uuid_metric_spec.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::UuidMetric do
+ it_behaves_like 'a correct instrumented metric value', { time_frame: 'none' }, Gitlab::CurrentSettings.uuid
+end
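
A plausible shape for the instrumentation class exercised by this shared example; the GenericMetric base class and its value DSL are assumptions inferred from the time_frame argument:

    module Gitlab
      module Usage
        module Metrics
          module Instrumentations
            class UuidMetric < GenericMetric
              # Reports the instance UUID; time_frame 'none' marks it as a
              # point-in-time value rather than a windowed count.
              value do
                Gitlab::CurrentSettings.uuid
              end
            end
          end
        end
      end
    end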
diff --git a/spec/lib/gitlab/usage/metrics/key_path_processor_spec.rb b/spec/lib/gitlab/usage/metrics/key_path_processor_spec.rb
new file mode 100644
index 00000000000..91c27825cce
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/key_path_processor_spec.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::KeyPathProcessor do
+ describe '#unflatten_default_path' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:key_path, :value, :expected_hash) do
+ 'uuid' | nil | { uuid: nil }
+ 'uuid' | '1111' | { uuid: '1111' }
+ 'counts.issues' | nil | { counts: { issues: nil } }
+ 'counts.issues' | 100 | { counts: { issues: 100 } }
+ 'usage_activity_by_stage.verify.ci_builds' | 100 | { usage_activity_by_stage: { verify: { ci_builds: 100 } } }
+ end
+
+ with_them do
+ subject { described_class.process(key_path, value) }
+
+ it { is_expected.to eq(expected_hash) }
+ end
+ end
+end
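
The table fully pins down process: split the dotted key path and fold it from the right into nested symbol-keyed hashes. A minimal sketch:

    module Gitlab
      module Usage
        module Metrics
          class KeyPathProcessor
            def self.process(key_path, value)
              # 'counts.issues' with 100 becomes { counts: { issues: 100 } }.
              key_path.split('.').reverse.inject(value) do |acc, key|
                { key.to_sym => acc }
              end
            end
          end
        end
      end
    end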
diff --git a/spec/lib/gitlab/usage_data_metrics_spec.rb b/spec/lib/gitlab/usage_data_metrics_spec.rb
new file mode 100644
index 00000000000..1cb43c2886c
--- /dev/null
+++ b/spec/lib/gitlab/usage_data_metrics_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::UsageDataMetrics do
+ describe '.uncached_data' do
+ subject { described_class.uncached_data }
+
+ around do |example|
+ described_class.instance_variable_set(:@definitions, nil)
+ example.run
+ described_class.instance_variable_set(:@definitions, nil)
+ end
+
+ before do
+ allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false)
+ end
+
+ context 'with instrumentation_class' do
+ it 'includes top level keys' do
+ expect(subject).to include(:uuid)
+ end
+ end
+ end
+end
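
A hedged sketch of how uncached_data could assemble the payload from metric definitions, which would explain both the :uuid expectation here and the KeyPathProcessor spec above; apart from KeyPathProcessor.process, every name in this sketch is an assumption:

    def self.uncached_data
      ::Gitlab::Usage::MetricDefinition.all.filter_map do |definition|
        instrumentation = definition.attributes[:instrumentation_class]
        next unless instrumentation

        # Instantiate the metric class, compute its value, and nest it
        # under the definition's dotted key path.
        value = instrumentation.constantize.new(definition.attributes).value
        Gitlab::Usage::Metrics::KeyPathProcessor.process(definition.key_path, value)
      end.reduce({}, :deep_merge)
    end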
diff --git a/spec/lib/sidebars/projects/menus/settings_menu_spec.rb b/spec/lib/sidebars/projects/menus/settings_menu_spec.rb
new file mode 100644
index 00000000000..85de46d1583
--- /dev/null
+++ b/spec/lib/sidebars/projects/menus/settings_menu_spec.rb
@@ -0,0 +1,167 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Sidebars::Projects::Menus::SettingsMenu do
+ let(:project) { build(:project) }
+ let(:user) { project.owner }
+ let(:context) { Sidebars::Projects::Context.new(current_user: user, container: project) }
+
+ subject { described_class.new(context) }
+
+ describe '#render?' do
+ it 'returns false when menu does not have any menu items' do
+ allow(subject).to receive(:has_items?).and_return(false)
+
+ expect(subject.render?).to be false
+ end
+ end
+
+ describe 'Menu items' do
+ subject { described_class.new(context).items.index { |e| e.item_id == item_id } }
+
+ shared_examples 'access rights checks' do
+ specify { is_expected.not_to be_nil }
+
+ describe 'when the user does not have access' do
+ let(:user) { nil }
+
+ specify { is_expected.to be_nil }
+ end
+ end
+
+ describe 'General' do
+ let(:item_id) { :general }
+
+ it_behaves_like 'access rights checks'
+ end
+
+ describe 'Integrations' do
+ let(:item_id) { :integrations }
+
+ it_behaves_like 'access rights checks'
+ end
+
+ describe 'Webhooks' do
+ let(:item_id) { :webhooks }
+
+ it_behaves_like 'access rights checks'
+ end
+
+ describe 'Access Tokens' do
+ let(:item_id) { :access_tokens }
+
+ it_behaves_like 'access rights checks'
+ end
+
+ describe 'Repository' do
+ let(:item_id) { :repository }
+
+ it_behaves_like 'access rights checks'
+ end
+
+ describe 'CI/CD' do
+ let(:item_id) { :ci_cd }
+
+ describe 'when project is archived' do
+ before do
+ allow(project).to receive(:archived?).and_return(true)
+ end
+
+ specify { is_expected.to be_nil }
+ end
+
+ describe 'when project is not archived' do
+ specify { is_expected.not_to be_nil }
+
+ describe 'when the user does not have access' do
+ let(:user) { nil }
+
+ specify { is_expected.to be_nil }
+ end
+ end
+ end
+
+ describe 'Operations' do
+ let(:item_id) { :operations }
+
+ describe 'when project is archived' do
+ before do
+ allow(project).to receive(:archived?).and_return(true)
+ end
+
+ specify { is_expected.to be_nil }
+ end
+
+ describe 'when project is not archived' do
+ specify { is_expected.not_to be_nil }
+
+ describe 'when the user does not have access' do
+ let(:user) { nil }
+
+ specify { is_expected.to be_nil }
+ end
+ end
+ end
+
+ describe 'Pages' do
+ let(:item_id) { :pages }
+
+ before do
+ allow(project).to receive(:pages_available?).and_return(pages_enabled)
+ end
+
+ describe 'when pages are enabled' do
+ let(:pages_enabled) { true }
+
+ specify { is_expected.not_to be_nil }
+
+ describe 'when the user does not have access' do
+ let(:user) { nil }
+
+ specify { is_expected.to be_nil }
+ end
+ end
+
+ describe 'when pages are not enabled' do
+ let(:pages_enabled) { false }
+
+ specify { is_expected.to be_nil }
+ end
+ end
+
+ describe 'Packages & Registries' do
+ let(:item_id) { :packages_and_registries }
+
+ before do
+ stub_container_registry_config(enabled: container_enabled)
+ end
+
+ describe 'when config registry setting is disabled' do
+ let(:container_enabled) { false }
+
+ specify { is_expected.to be_nil }
+ end
+
+ describe 'when config registry setting is enabled' do
+ let(:container_enabled) { true }
+
+ specify { is_expected.not_to be_nil }
+
+ context 'when feature flag :sidebar_refactor is disabled' do
+ before do
+ stub_feature_flags(sidebar_refactor: false)
+ end
+
+ specify { is_expected.to be_nil }
+ end
+
+ describe 'when the user does not have access' do
+ let(:user) { nil }
+
+ specify { is_expected.to be_nil }
+ end
+ end
+ end
+ end
+end
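
The first example assumes rendering is purely item-driven; a sketch of the assumed guard on the menu class:

    def render?
      # The settings menu only renders when at least one item survives
      # the per-item permission checks exercised above.
      has_items?
    end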
diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb
index 7d3b178a4a8..7334165c41a 100644
--- a/spec/models/group_spec.rb
+++ b/spec/models/group_spec.rb
@@ -395,18 +395,94 @@ RSpec.describe Group do
end
end
- context 'assigning a new parent' do
- let!(:old_parent) { create(:group) }
- let!(:new_parent) { create(:group) }
+ context 'when assigning a new parent' do
let!(:group) { create(:group, parent: old_parent) }
+ let(:recorded_queries) { ActiveRecord::QueryRecorder.new }
+
+ subject do
+ recorded_queries.record do
+ group.update(parent: new_parent)
+ end
+ end
before do
- group.update(parent: new_parent)
+ subject
reload_models(old_parent, new_parent, group)
end
- it 'updates traversal_ids' do
- expect(group.traversal_ids).to eq [new_parent.id, group.id]
+ context 'within the same hierarchy' do
+ let!(:root) { create(:group).reload }
+ let!(:old_parent) { create(:group, parent: root) }
+ let!(:new_parent) { create(:group, parent: root) }
+
+ it 'updates traversal_ids' do
+ expect(group.traversal_ids).to eq [root.id, new_parent.id, group.id]
+ end
+
+ it_behaves_like 'hierarchy with traversal_ids'
+ it_behaves_like 'locked row' do
+ let(:row) { root }
+ end
+ end
+
+ context 'to another hierarchy' do
+ let!(:old_parent) { create(:group) }
+ let!(:new_parent) { create(:group) }
+ let!(:group) { create(:group, parent: old_parent) }
+
+ it 'updates traversal_ids' do
+ expect(group.traversal_ids).to eq [new_parent.id, group.id]
+ end
+
+ it_behaves_like 'locked rows' do
+ let(:rows) { [old_parent, new_parent] }
+ end
+
+ context 'old hierarchy' do
+ let(:root) { old_parent.root_ancestor }
+
+ it_behaves_like 'hierarchy with traversal_ids'
+ end
+
+ context 'new hierarchy' do
+ let(:root) { new_parent.root_ancestor }
+
+ it_behaves_like 'hierarchy with traversal_ids'
+ end
+ end
+
+ context 'from being a root ancestor' do
+ let!(:old_parent) { nil }
+ let!(:new_parent) { create(:group) }
+
+ it 'updates traversal_ids' do
+ expect(group.traversal_ids).to eq [new_parent.id, group.id]
+ end
+
+ it_behaves_like 'locked rows' do
+ let(:rows) { [group, new_parent] }
+ end
+
+ it_behaves_like 'hierarchy with traversal_ids' do
+ let(:root) { new_parent }
+ end
+ end
+
+ context 'to being a root ancestor' do
+ let!(:old_parent) { create(:group) }
+ let!(:new_parent) { nil }
+
+ it 'updates traversal_ids' do
+ expect(group.traversal_ids).to eq [group.id]
+ end
+
+ it_behaves_like 'locked rows' do
+ let(:rows) { [old_parent, group] }
+ end
+
+ it_behaves_like 'hierarchy with traversal_ids' do
+ let(:root) { group }
+ end
end
end
diff --git a/spec/models/namespace/traversal_hierarchy_spec.rb b/spec/models/namespace/traversal_hierarchy_spec.rb
index baefc161691..2cd66f42458 100644
--- a/spec/models/namespace/traversal_hierarchy_spec.rb
+++ b/spec/models/namespace/traversal_hierarchy_spec.rb
@@ -43,16 +43,6 @@ RSpec.describe Namespace::TraversalHierarchy, type: :model do
end
end
- shared_examples 'locked update query' do
- it 'locks query with FOR UPDATE' do
- qr = ActiveRecord::QueryRecorder.new do
- subject
- end
- expect(qr.count).to eq 1
- expect(qr.log.first).to match /FOR UPDATE/
- end
- end
-
describe '#incorrect_traversal_ids' do
let!(:hierarchy) { described_class.new(root) }
@@ -63,12 +53,6 @@ RSpec.describe Namespace::TraversalHierarchy, type: :model do
end
it { is_expected.to match_array Namespace.all }
-
- context 'when lock is true' do
- subject { hierarchy.incorrect_traversal_ids(lock: true).load }
-
- it_behaves_like 'locked update query'
- end
end
describe '#sync_traversal_ids!' do
@@ -79,14 +63,18 @@ RSpec.describe Namespace::TraversalHierarchy, type: :model do
it { expect(hierarchy.incorrect_traversal_ids).to be_empty }
it_behaves_like 'hierarchy with traversal_ids'
- it_behaves_like 'locked update query'
+ it_behaves_like 'locked row' do
+ let(:recorded_queries) { ActiveRecord::QueryRecorder.new }
+ let(:row) { root }
- context 'when deadlocked' do
before do
- connection_double = double(:connection)
+ recorded_queries.record { subject }
+ end
+ end
- allow(Namespace).to receive(:connection).and_return(connection_double)
- allow(connection_double).to receive(:exec_query) { raise ActiveRecord::Deadlocked }
+ context 'when deadlocked' do
+ before do
+ allow(root).to receive(:lock!) { raise ActiveRecord::Deadlocked }
end
it { expect { subject }.to raise_error(ActiveRecord::Deadlocked) }
diff --git a/spec/presenters/blob_presenter_spec.rb b/spec/presenters/blob_presenter_spec.rb
index d6acc20396f..1fdd31b1f92 100644
--- a/spec/presenters/blob_presenter_spec.rb
+++ b/spec/presenters/blob_presenter_spec.rb
@@ -83,4 +83,43 @@ RSpec.describe BlobPresenter do
end
end
end
+
+ describe '#plain_data' do
+ let(:blob) { repository.blob_at('HEAD', file) }
+
+ subject { described_class.new(blob).plain_data }
+
+ context 'when blob is binary' do
+ let(:file) { 'files/images/logo-black.png' }
+
+ it 'returns nil' do
+ expect(subject).to be_nil
+ end
+ end
+
+ context 'when blob is markup' do
+ let(:file) { 'README.md' }
+
+ it 'returns plain content' do
+ expect(subject).to include('<span id="LC1" class="line" lang="markdown">')
+ end
+ end
+
+ context 'when blob has syntax highlighting' do
+ let(:file) { 'files/ruby/regex.rb' }
+
+ it 'returns highlighted syntax content' do
+ expect(subject)
+ .to include '<span id="LC1" class="line" lang="ruby"><span class="k">module</span> <span class="nn">Gitlab</span>'
+ end
+ end
+
+ context 'when blob has plain data' do
+ let(:file) { 'LICENSE' }
+
+ it 'returns plain text highlighted content' do
+ expect(subject).to include('<span id="LC1" class="line" lang="plaintext">The MIT License (MIT)</span>')
+ end
+ end
+ end
end
diff --git a/spec/presenters/snippet_blob_presenter_spec.rb b/spec/presenters/snippet_blob_presenter_spec.rb
index 83fe37effc0..42eca6b5a49 100644
--- a/spec/presenters/snippet_blob_presenter_spec.rb
+++ b/spec/presenters/snippet_blob_presenter_spec.rb
@@ -80,45 +80,6 @@ RSpec.describe SnippetBlobPresenter do
end
end
- describe '#plain_data' do
- let(:blob) { blob_at(file) }
-
- subject { described_class.new(blob).plain_data }
-
- context 'when blob is binary' do
- let(:file) { 'files/images/logo-black.png' }
-
- it 'returns nil' do
- expect(subject).to be_nil
- end
- end
-
- context 'when blob is markup' do
- let(:file) { 'README.md' }
-
- it 'returns plain content' do
- expect(subject).to include('<span id="LC1" class="line" lang="markdown">')
- end
- end
-
- context 'when blob has syntax' do
- let(:file) { 'files/ruby/regex.rb' }
-
- it 'returns highlighted syntax content' do
- expect(subject)
- .to include '<span id="LC1" class="line" lang="ruby"><span class="k">module</span> <span class="nn">Gitlab</span>'
- end
- end
-
- context 'when blob has plain data' do
- let(:file) { 'LICENSE' }
-
- it 'returns plain text highlighted content' do
- expect(subject).to include('<span id="LC1" class="line" lang="plaintext">The MIT License (MIT)</span>')
- end
- end
- end
-
describe 'route helpers' do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
diff --git a/spec/serializers/test_case_entity_spec.rb b/spec/serializers/test_case_entity_spec.rb
index e2b0f722f41..cdeefd2fec5 100644
--- a/spec/serializers/test_case_entity_spec.rb
+++ b/spec/serializers/test_case_entity_spec.rb
@@ -41,47 +41,19 @@ RSpec.describe TestCaseEntity do
end
end
- context 'when feature is enabled' do
- before do
- stub_feature_flags(junit_pipeline_screenshots_view: true)
- end
-
- context 'when attachment is present' do
- let(:test_case) { build(:report_test_case, :failed_with_attachment, job: job) }
-
- it 'returns the attachment_url' do
- expect(subject).to include(:attachment_url)
- end
- end
-
- context 'when attachment is not present' do
- let(:test_case) { build(:report_test_case, job: job) }
+ context 'when attachment is present' do
+ let(:test_case) { build(:report_test_case, :failed_with_attachment, job: job) }
- it 'returns a nil attachment_url' do
- expect(subject[:attachment_url]).to be_nil
- end
+ it 'returns the attachment_url' do
+ expect(subject).to include(:attachment_url)
end
end
- context 'when feature is disabled' do
- before do
- stub_feature_flags(junit_pipeline_screenshots_view: false)
- end
-
- context 'when attachment is present' do
- let(:test_case) { build(:report_test_case, :failed_with_attachment, job: job) }
-
- it 'returns no attachment_url' do
- expect(subject).not_to include(:attachment_url)
- end
- end
-
- context 'when attachment is not present' do
- let(:test_case) { build(:report_test_case, job: job) }
+ context 'when attachment is not present' do
+ let(:test_case) { build(:report_test_case, job: job) }
- it 'returns no attachment_url' do
- expect(subject).not_to include(:attachment_url)
- end
+ it 'returns a nil attachment_url' do
+ expect(subject[:attachment_url]).to be_nil
end
end
end
diff --git a/spec/services/notes/quick_actions_service_spec.rb b/spec/services/notes/quick_actions_service_spec.rb
index c098500b78a..bdce049bd3d 100644
--- a/spec/services/notes/quick_actions_service_spec.rb
+++ b/spec/services/notes/quick_actions_service_spec.rb
@@ -103,6 +103,30 @@ RSpec.describe Notes::QuickActionsService do
expect(Timelog.last.note_id).to eq(note.id)
end
end
+
+ describe 'the added system note' do
+ context 'when not specifying a date' do
+ let(:note_text) { "/spend 1h" }
+
+ it 'does not include the date' do
+ _, update_params = service.execute(note)
+ service.apply_updates(update_params, note)
+
+ expect(Note.last.note).to eq('added 1h of time spent')
+ end
+ end
+
+ context 'when specifying a date' do
+ let(:note_text) { "/spend 1h 2020-01-01" }
+
+ it 'includes the date' do
+ _, update_params = service.execute(note)
+ service.apply_updates(update_params, note)
+
+ expect(Note.last.note).to eq('added 1h of time spent at 2020-01-01')
+ end
+ end
+ end
end
end
diff --git a/spec/services/quick_actions/interpret_service_spec.rb b/spec/services/quick_actions/interpret_service_spec.rb
index 9df238c6dac..f3ad69bae13 100644
--- a/spec/services/quick_actions/interpret_service_spec.rb
+++ b/spec/services/quick_actions/interpret_service_spec.rb
@@ -360,25 +360,29 @@ RSpec.describe QuickActions::InterpretService do
shared_examples 'spend command' do
it 'populates spend_time: 3600 if content contains /spend 1h' do
- _, updates, _ = service.execute(content, issuable)
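+ # freeze_time because spent_at is now asserted as a full DateTime, not just a date.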
+ freeze_time do
+ _, updates, _ = service.execute(content, issuable)
- expect(updates).to eq(spend_time: {
- duration: 3600,
- user_id: developer.id,
- spent_at: DateTime.current.to_date
- })
+ expect(updates).to eq(spend_time: {
+ duration: 3600,
+ user_id: developer.id,
+ spent_at: DateTime.current
+ })
+ end
end
end
shared_examples 'spend command with negative time' do
it 'populates spend_time: -7200 if content contains -120m' do
- _, updates, _ = service.execute(content, issuable)
+ freeze_time do
+ _, updates, _ = service.execute(content, issuable)
- expect(updates).to eq(spend_time: {
- duration: -7200,
- user_id: developer.id,
- spent_at: DateTime.current.to_date
- })
+ expect(updates).to eq(spend_time: {
+ duration: -7200,
+ user_id: developer.id,
+ spent_at: DateTime.current
+ })
+ end
end
it 'returns the spend_time message including the formatted duration and verb' do
diff --git a/spec/support/gitlab/usage/metrics_instrumentation_shared_examples.rb b/spec/support/gitlab/usage/metrics_instrumentation_shared_examples.rb
new file mode 100644
index 00000000000..c9ff566e94c
--- /dev/null
+++ b/spec/support/gitlab/usage/metrics_instrumentation_shared_examples.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
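+# Usage (the time frame and expected value here are illustrative):
+#
+#   it_behaves_like 'a correct instrumented metric value', { time_frame: '28d' }, 42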
+RSpec.shared_examples 'a correct instrumented metric value' do |options, expected_value|
+ let(:time_frame) { options[:time_frame] }
+
+ before do
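+ # Presumably stubbed because database batch counters refuse to run inside an open transaction, and specs run in one.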
+ allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false)
+ end
+
+ it 'has the correct value' do
+ expect(described_class.new(time_frame: time_frame).value).to eq(expected_value)
+ end
+end
diff --git a/spec/support/helpers/query_recorder.rb b/spec/support/helpers/query_recorder.rb
index 2d880c7a8fe..05afbc336da 100644
--- a/spec/support/helpers/query_recorder.rb
+++ b/spec/support/helpers/query_recorder.rb
@@ -13,6 +13,10 @@ module ActiveRecord
@skip_cached = skip_cached
@query_recorder_debug = ENV['QUERY_RECORDER_DEBUG'] || query_recorder_debug
@log_file = log_file
+ record(&block) if block_given?
+ end
+
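+ # Recording can also be deferred, e.g.:
+ #   recorder = ActiveRecord::QueryRecorder.new
+ #   recorder.record { group.update(parent: new_parent) }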
+ def record(&block)
# force replacement of bind parameters to give tests the ability to check for ids
ActiveRecord::Base.connection.unprepared_statement do
ActiveSupport::Notifications.subscribed(method(:callback), 'sql.active_record', &block)
diff --git a/spec/support/helpers/reload_helpers.rb b/spec/support/helpers/reload_helpers.rb
index 60811e4604f..368ebaaba8a 100644
--- a/spec/support/helpers/reload_helpers.rb
+++ b/spec/support/helpers/reload_helpers.rb
@@ -2,7 +2,7 @@
module ReloadHelpers
def reload_models(*models)
- models.map(&:reload)
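+ # compact so callers can pass optional models (e.g. a nil parent group).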
+ models.compact.map(&:reload)
end
def subject_and_reload(*models)
diff --git a/spec/support/shared_examples/row_lock_shared_examples.rb b/spec/support/shared_examples/row_lock_shared_examples.rb
new file mode 100644
index 00000000000..5e003172215
--- /dev/null
+++ b/spec/support/shared_examples/row_lock_shared_examples.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+# Ensure that a SQL statement locking the row(s) was issued, and that it ran
+# inside a transaction. Be careful! Matching raw SQL like this is not
+# foolproof, but it is better than nothing.
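+#
+# Usage:
+#
+#   it_behaves_like 'locked row' do
+#     let(:recorded_queries) { ActiveRecord::QueryRecorder.new }
+#     let(:row) { root }
+#   end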
+
+RSpec.shared_examples 'locked row' do
+ it "has locked row" do
+ table_name = row.class.table_name
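+ # /m lets '.' span newlines, as the logged SQL may be formatted across lines.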
+ ids_regex = /SELECT.+FROM.+"#{table_name}".+"#{table_name}"."id" = #{row.id}.+FOR UPDATE/m
+
+ expect(recorded_queries.log).to include a_string_matching 'SAVEPOINT'
+ expect(recorded_queries.log).to include a_string_matching ids_regex
+ end
+end
+
+RSpec.shared_examples 'locked rows' do
+ it "has locked rows" do
+ table_name = rows.first.class.table_name
+
+ row_ids = rows.map(&:id).join(', ')
+ ids_regex = /SELECT.+FROM.+"#{table_name}".+"#{table_name}"."id" IN \(#{row_ids}\).+FOR UPDATE/m
+
+ expect(recorded_queries.log).to include a_string_matching 'SAVEPOINT'
+ expect(recorded_queries.log).to include a_string_matching ids_regex
+ end
+end
diff --git a/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb b/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb
index 6433aea6cd8..9087b694d63 100644
--- a/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb
+++ b/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb
@@ -903,56 +903,168 @@ RSpec.describe 'layouts/nav/sidebar/_project' do
end
end
- describe 'operations settings tab' do
- describe 'archive projects' do
- before do
- project.update!(archived: project_archived)
+ describe 'Settings' do
+ describe 'General' do
+ it 'has a link to the General settings' do
+ render
+
+ expect(rendered).to have_link('General', href: edit_project_path(project))
+ end
+ end
+
+ describe 'Integrations' do
+ it 'has a link to the Integrations settings' do
+ render
+
+ expect(rendered).to have_link('Integrations', href: project_settings_integrations_path(project))
+ end
+ end
+
+ describe 'Webhooks' do
+ it 'has a link to the Webhooks settings' do
+ render
+
+ expect(rendered).to have_link('Webhooks', href: project_hooks_path(project))
+ end
+ end
+
+ describe 'Access Tokens' do
+ context 'self-managed instance' do
+ before do
+ allow(Gitlab).to receive(:com?).and_return(false)
+ end
+
+ it 'has a link to the Access Tokens settings' do
+ render
+
+ expect(rendered).to have_link('Access Tokens', href: project_settings_access_tokens_path(project))
+ end
+ end
+
+ context 'gitlab.com' do
+ before do
+ allow(Gitlab).to receive(:com?).and_return(true)
+ end
+
+ it 'has a link to the Access Tokens settings' do
+ render
+
+ expect(rendered).to have_link('Access Tokens', href: project_settings_access_tokens_path(project))
+ end
+ end
+ end
+
+ describe 'Repository' do
+ it 'has a link to the Repository settings' do
+ render
+
+ expect(rendered).to have_link('Repository', href: project_settings_repository_path(project))
end
+ end
+ describe 'CI/CD' do
context 'when project is archived' do
- let(:project_archived) { true }
+ before do
+ project.update!(archived: true)
+ end
- it 'does not show the operations settings tab' do
+ it 'does not have a link to the CI/CD settings' do
render
- expect(rendered).not_to have_link('Operations', href: project_settings_operations_path(project))
+ expect(rendered).not_to have_link('CI/CD', href: project_settings_ci_cd_path(project))
+ end
+ end
+
+ context 'when project is not archived' do
+ it 'has a link to the CI/CD settings' do
+ render
+
+ expect(rendered).to have_link('CI/CD', href: project_settings_ci_cd_path(project))
end
end
+ end
+
+ describe 'Operations' do
+ context 'when project is archived' do
+ before do
+ project.update!(archived: true)
+ end
- context 'when project is active' do
- let(:project_archived) { false }
+ it 'does not have a link to the Operations settings' do
+ render
+
+ expect(rendered).not_to have_link('Operations', href: project_settings_operations_path(project))
+ end
+ end
- it 'shows the operations settings tab' do
+ context 'when project is not archived' do
+ it 'has a link to the Operations settings' do
render
expect(rendered).to have_link('Operations', href: project_settings_operations_path(project))
end
end
end
- end
- describe 'project access tokens' do
- context 'self-managed instance' do
+ describe 'Pages' do
before do
- allow(Gitlab).to receive(:com?).and_return(false)
+ stub_config(pages: { enabled: pages_enabled })
end
- it 'displays "Access Tokens" nav item' do
- render
+ context 'when pages are enabled' do
+ let(:pages_enabled) { true }
+
+ it 'has a link to the Pages settings' do
+ render
+
+ expect(rendered).to have_link('Pages', href: project_pages_path(project))
+ end
+ end
+
+ context 'when pages are not enabled' do
+ let(:pages_enabled) { false }
- expect(rendered).to have_link('Access Tokens', href: project_settings_access_tokens_path(project))
+ it 'does not have a link to the Pages settings' do
+ render
+
+ expect(rendered).not_to have_link('Pages', href: project_pages_path(project))
+ end
end
end
- context 'gitlab.com' do
+ describe 'Packages & Registries' do
before do
- allow(Gitlab).to receive(:com?).and_return(true)
+ stub_container_registry_config(enabled: registry_enabled)
end
- it 'displays "Access Tokens" nav item' do
- render
+ context 'when registry is enabled' do
+ let(:registry_enabled) { true }
+
+ it 'has a link to the Packages & Registries settings' do
+ render
+
+ expect(rendered).to have_link('Packages & Registries', href: project_settings_packages_and_registries_path(project))
+ end
+
+ context 'when feature flag :sidebar_refactor is disabled' do
+ it 'does not have a link to the Packages & Registries settings' do
+ stub_feature_flags(sidebar_refactor: false)
+
+ render
+
+ expect(rendered).not_to have_link('Packages & Registries', href: project_settings_packages_and_registries_path(project))
+ end
+ end
+ end
- expect(rendered).to have_link('Access Tokens', href: project_settings_access_tokens_path(project))
+ context 'when registry is not enabled' do
+ let(:registry_enabled) { false }
+
+ it 'does not have a link to the Packages & Registries settings' do
+ render
+
+ expect(rendered).not_to have_link('Packages & Registries', href: project_settings_packages_and_registries_path(project))
+ end
end
end
end