gitlab.com/gitlab-org/gitlab-foss.git
author     GitLab Bot <gitlab-bot@gitlab.com>  2022-03-15 15:07:44 +0300
committer  GitLab Bot <gitlab-bot@gitlab.com>  2022-03-15 15:07:44 +0300
commit     6a9ab27963fc1479fe7c78581b942c8dcce322e5 (patch)
tree       8d32f4f66efde1b426658a74d0276e5250091ab7
parent     389d5aa505a916b0506b7b73dcc3be342d724976 (diff)

Add latest changes from gitlab-org/gitlab@master
-rw-r--r--  app/assets/javascripts/google_cloud/components/home.vue | 8
-rw-r--r--  app/assets/javascripts/google_cloud/components/revoke_oauth.vue | 38
-rw-r--r--  app/assets/javascripts/graphql_shared/queries/users_search_with_mr_permissions.graphql | 24
-rw-r--r--  app/assets/javascripts/header_search/store/getters.js | 12
-rw-r--r--  app/assets/javascripts/pipeline_editor/components/drawer/pipeline_editor_drawer.vue | 22
-rw-r--r--  app/assets/javascripts/pipeline_editor/components/pipeline_editor_tabs.vue | 8
-rw-r--r--  app/assets/javascripts/sidebar/components/assignees/assignee_avatar.vue | 6
-rw-r--r--  app/assets/javascripts/sidebar/components/assignees/assignee_avatar_link.vue | 4
-rw-r--r--  app/assets/javascripts/sidebar/components/assignees/collapsed_assignee_list.vue | 6
-rw-r--r--  app/assets/javascripts/sidebar/components/assignees/issuable_assignees.vue | 4
-rw-r--r--  app/assets/javascripts/sidebar/components/assignees/sidebar_assignees_widget.vue | 18
-rw-r--r--  app/assets/javascripts/sidebar/components/assignees/sidebar_invite_members.vue | 2
-rw-r--r--  app/assets/javascripts/sidebar/components/assignees/sidebar_participant.vue | 28
-rw-r--r--  app/assets/javascripts/sidebar/components/incidents/constants.js | 25
-rw-r--r--  app/assets/javascripts/sidebar/components/incidents/escalation_status.vue | 61
-rw-r--r--  app/assets/javascripts/sidebar/components/incidents/sidebar_escalation_status.vue | 135
-rw-r--r--  app/assets/javascripts/sidebar/components/incidents/utils.js | 5
-rw-r--r--  app/assets/javascripts/sidebar/constants.js | 19
-rw-r--r--  app/assets/javascripts/sidebar/mount_sidebar.js | 49
-rw-r--r--  app/assets/javascripts/sidebar/queries/escalation_status.query.graphql | 9
-rw-r--r--  app/assets/javascripts/sidebar/queries/update_escalation_status.mutation.graphql | 10
-rw-r--r--  app/assets/javascripts/vue_shared/components/sidebar/queries/get_mr_assignees.query.graphql | 6
-rw-r--r--  app/assets/javascripts/vue_shared/components/sidebar/queries/update_mr_assignees.mutation.graphql | 13
-rw-r--r--  app/assets/javascripts/vue_shared/components/user_select/user_select.vue | 126
-rw-r--r--  app/assets/stylesheets/pages/issuable.scss | 7
-rw-r--r--  app/controllers/projects/ci/pipeline_editor_controller.rb | 8
-rw-r--r--  app/controllers/projects/google_cloud/gcp_regions_controller.rb | 7
-rw-r--r--  app/controllers/projects/google_cloud/revoke_oauth_controller.rb | 23
-rw-r--r--  app/controllers/projects/google_cloud/service_accounts_controller.rb | 7
-rw-r--r--  app/controllers/projects/google_cloud_controller.rb | 16
-rw-r--r--  app/controllers/projects/incidents_controller.rb | 2
-rw-r--r--  app/controllers/projects/merge_requests_controller.rb | 1
-rw-r--r--  app/helpers/listbox_helper.rb | 13
-rw-r--r--  app/models/project_team.rb | 32
-rw-r--r--  app/models/wiki.rb | 13
-rw-r--r--  app/policies/group_policy.rb | 2
-rw-r--r--  app/policies/project_policy.rb | 2
-rw-r--r--  app/services/ci/runners/register_runner_service.rb | 2
-rw-r--r--  app/services/issuable_links/create_service.rb | 41
-rw-r--r--  app/services/issue_links/create_service.rb | 27
-rw-r--r--  app/services/merge_requests/approval_service.rb | 5
-rw-r--r--  app/services/system_note_service.rb | 4
-rw-r--r--  app/services/system_notes/issuables_service.rb | 5
-rw-r--r--  app/views/admin/application_settings/ci_cd.html.haml | 2
-rw-r--r--  app/views/clusters/clusters/_cluster_list.html.haml | 10
-rw-r--r--  app/views/clusters/clusters/index.html.haml | 2
-rw-r--r--  app/views/projects/merge_requests/creations/_new_compare.html.haml | 6
-rw-r--r--  app/views/projects/settings/_general.html.haml | 2
-rw-r--r--  app/views/shared/issuable/_sidebar.html.haml | 2
-rw-r--r--  config/feature_flags/experiment/pipeline_editor_walkthrough.yml | 8
-rw-r--r--  config/initializers/7_prometheus_metrics.rb | 38
-rw-r--r--  config/initializers/sidekiq.rb | 4
-rw-r--r--  config/routes/project.rb | 1
-rw-r--r--  doc/administration/audit_event_streaming.md | 44
-rw-r--r--  doc/administration/integration/plantuml.md | 10
-rw-r--r--  doc/api/project_snippets.md | 2
-rw-r--r--  doc/development/product_qualified_lead_guide/index.md | 55
-rw-r--r--  doc/development/service_ping/review_guidelines.md | 1
-rw-r--r--  doc/install/docker.md | 4
-rw-r--r--  doc/update/plan_your_upgrade.md | 2
-rw-r--r--  lib/gitlab/database/transaction/context.rb | 37
-rw-r--r--  lib/gitlab/database/transaction/observer.rb | 1
-rw-r--r--  lib/gitlab/merge_requests/mergeability/check_result.rb | 4
-rw-r--r--  lib/gitlab/merge_requests/mergeability/results_store.rb | 6
-rw-r--r--  lib/gitlab/process_supervisor.rb | 61
-rw-r--r--  lib/google_api/cloud_platform/client.rb | 8
-rw-r--r--  lib/tasks/ci/build_artifacts.rake | 20
-rw-r--r--  locale/gitlab.pot | 71
-rw-r--r--  metrics_server/dependencies.rb | 1
-rw-r--r--  metrics_server/metrics_server.rb | 21
-rw-r--r--  sidekiq_cluster/cli.rb | 8
-rw-r--r--  sidekiq_cluster/sidekiq_cluster.rb | 3
-rw-r--r--  spec/commands/sidekiq_cluster/cli_spec.rb | 35
-rw-r--r--  spec/controllers/projects/ci/pipeline_editor_controller_spec.rb | 12
-rw-r--r--  spec/features/incidents/incident_details_spec.rb | 37
-rw-r--r--  spec/features/issues/issue_sidebar_spec.rb | 7
-rw-r--r--  spec/features/merge_request/user_edits_assignees_sidebar_spec.rb | 174
-rw-r--r--  spec/features/merge_request/user_sees_suggest_pipeline_spec.rb | 3
-rw-r--r--  spec/features/projects/ci/editor_spec.rb | 6
-rw-r--r--  spec/features/search/user_searches_for_code_spec.rb | 119
-rw-r--r--  spec/frontend/google_cloud/components/app_spec.js | 1
-rw-r--r--  spec/frontend/google_cloud/components/home_spec.js | 1
-rw-r--r--  spec/frontend/google_cloud/components/revoke_oauth_spec.js | 47
-rw-r--r--  spec/frontend/header_search/store/getters_spec.js | 100
-rw-r--r--  spec/frontend/pipeline_editor/components/drawer/pipeline_editor_drawer_spec.js | 27
-rw-r--r--  spec/frontend/pipeline_editor/components/pipeline_editor_tabs_spec.js | 49
-rw-r--r--  spec/frontend/sidebar/components/assignees/sidebar_assignees_widget_spec.js | 13
-rw-r--r--  spec/frontend/sidebar/components/assignees/sidebar_participant_spec.js | 41
-rw-r--r--  spec/frontend/sidebar/components/incidents/escalation_status_spec.js | 52
-rw-r--r--  spec/frontend/sidebar/components/incidents/escalation_utils_spec.js | 18
-rw-r--r--  spec/frontend/sidebar/components/incidents/mock_data.js | 39
-rw-r--r--  spec/frontend/sidebar/components/incidents/sidebar_escalation_status_spec.js | 202
-rw-r--r--  spec/frontend/sidebar/mock_data.js | 29
-rw-r--r--  spec/frontend/vue_shared/components/user_select_spec.js | 61
-rw-r--r--  spec/helpers/listbox_helper_spec.rb | 9
-rw-r--r--  spec/lib/gitlab/database/transaction/context_spec.rb | 20
-rw-r--r--  spec/lib/gitlab/database/transaction/observer_spec.rb | 67
-rw-r--r--  spec/lib/gitlab/merge_requests/mergeability/check_result_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/merge_requests/mergeability/results_store_spec.rb | 18
-rw-r--r--  spec/lib/gitlab/process_supervisor_spec.rb | 107
-rw-r--r--  spec/lib/google_api/cloud_platform/client_spec.rb | 16
-rw-r--r--  spec/metrics_server/metrics_server_spec.rb | 48
-rw-r--r--  spec/requests/projects/google_cloud/revoke_oauth_controller_spec.rb | 86
-rw-r--r--  spec/requests/projects/google_cloud_controller_spec.rb | 32
-rw-r--r--  spec/services/issue_links/create_service_spec.rb | 188
-rw-r--r--  spec/services/system_note_service_spec.rb | 6
-rw-r--r--  spec/services/system_notes/issuables_service_spec.rb | 4
-rw-r--r--  spec/support/matchers/pushed_frontend_feature_flags_matcher.rb | 8
-rw-r--r--  spec/support/shared_examples/features/multiple_assignees_widget_mr_shared_examples.rb | 47
-rw-r--r--  spec/support/shared_examples/models/wiki_shared_examples.rb | 27
-rw-r--r--  spec/support/shared_examples/services/issuable_links/create_links_shared_examples.rb | 133
-rw-r--r--  workhorse/internal/upload/artifacts_store_test.go | 2
-rw-r--r--  workhorse/internal/upload/artifacts_upload_test.go | 4
-rw-r--r--  workhorse/internal/upload/artifacts_uploader.go | 14
-rw-r--r--  workhorse/internal/upload/body_uploader.go | 4
-rw-r--r--  workhorse/internal/upload/body_uploader_test.go | 10
-rw-r--r--  workhorse/internal/upload/destination/destination.go (renamed from workhorse/internal/filestore/file_handler.go) | 46
-rw-r--r--  workhorse/internal/upload/destination/destination_test.go (renamed from workhorse/internal/filestore/file_handler_test.go) | 122
-rw-r--r--  workhorse/internal/upload/destination/filestore/filestore.go | 20
-rw-r--r--  workhorse/internal/upload/destination/filestore/filestore_test.go | 38
-rw-r--r--  workhorse/internal/upload/destination/multi_hash.go (renamed from workhorse/internal/filestore/multi_hash.go) | 2
-rw-r--r--  workhorse/internal/upload/destination/objectstore/gocloud_object.go (renamed from workhorse/internal/objectstore/gocloud_object.go) | 0
-rw-r--r--  workhorse/internal/upload/destination/objectstore/gocloud_object_test.go (renamed from workhorse/internal/objectstore/gocloud_object_test.go) | 4
-rw-r--r--  workhorse/internal/upload/destination/objectstore/multipart.go (renamed from workhorse/internal/objectstore/multipart.go) | 0
-rw-r--r--  workhorse/internal/upload/destination/objectstore/multipart_test.go (renamed from workhorse/internal/objectstore/multipart_test.go) | 4
-rw-r--r--  workhorse/internal/upload/destination/objectstore/object.go (renamed from workhorse/internal/objectstore/object.go) | 0
-rw-r--r--  workhorse/internal/upload/destination/objectstore/object_test.go (renamed from workhorse/internal/objectstore/object_test.go) | 4
-rw-r--r--  workhorse/internal/upload/destination/objectstore/prometheus.go (renamed from workhorse/internal/objectstore/prometheus.go) | 0
-rw-r--r--  workhorse/internal/upload/destination/objectstore/s3_complete_multipart_api.go (renamed from workhorse/internal/objectstore/s3_complete_multipart_api.go) | 0
-rw-r--r--  workhorse/internal/upload/destination/objectstore/s3_object.go (renamed from workhorse/internal/objectstore/s3_object.go) | 0
-rw-r--r--  workhorse/internal/upload/destination/objectstore/s3_object_test.go (renamed from workhorse/internal/objectstore/s3_object_test.go) | 4
-rw-r--r--  workhorse/internal/upload/destination/objectstore/s3_session.go (renamed from workhorse/internal/objectstore/s3_session.go) | 0
-rw-r--r--  workhorse/internal/upload/destination/objectstore/s3_session_test.go (renamed from workhorse/internal/objectstore/s3_session_test.go) | 0
-rw-r--r--  workhorse/internal/upload/destination/objectstore/test/consts.go (renamed from workhorse/internal/objectstore/test/consts.go) | 0
-rw-r--r--  workhorse/internal/upload/destination/objectstore/test/gocloud_stub.go (renamed from workhorse/internal/objectstore/test/gocloud_stub.go) | 0
-rw-r--r--  workhorse/internal/upload/destination/objectstore/test/objectstore_stub.go (renamed from workhorse/internal/objectstore/test/objectstore_stub.go) | 2
-rw-r--r--  workhorse/internal/upload/destination/objectstore/test/objectstore_stub_test.go (renamed from workhorse/internal/objectstore/test/objectstore_stub_test.go) | 0
-rw-r--r--  workhorse/internal/upload/destination/objectstore/test/s3_stub.go (renamed from workhorse/internal/objectstore/test/s3_stub.go) | 0
-rw-r--r--  workhorse/internal/upload/destination/objectstore/upload_strategy.go (renamed from workhorse/internal/objectstore/upload_strategy.go) | 0
-rw-r--r--  workhorse/internal/upload/destination/objectstore/uploader.go (renamed from workhorse/internal/objectstore/uploader.go) | 0
-rw-r--r--  workhorse/internal/upload/destination/reader.go (renamed from workhorse/internal/filestore/reader.go) | 2
-rw-r--r--  workhorse/internal/upload/destination/reader_test.go (renamed from workhorse/internal/filestore/reader_test.go) | 2
-rw-r--r--  workhorse/internal/upload/destination/upload_opts.go (renamed from workhorse/internal/filestore/save_file_opts.go) | 18
-rw-r--r--  workhorse/internal/upload/destination/upload_opts_test.go (renamed from workhorse/internal/filestore/save_file_opts_test.go) | 28
-rw-r--r--  workhorse/internal/upload/lfs_preparer.go | 6
-rw-r--r--  workhorse/internal/upload/object_storage_preparer.go | 6
-rw-r--r--  workhorse/internal/upload/preparer.go | 12
-rw-r--r--  workhorse/internal/upload/rewrite.go | 10
-rw-r--r--  workhorse/internal/upload/saved_file_tracker.go | 4
-rw-r--r--  workhorse/internal/upload/saved_file_tracker_test.go | 6
-rw-r--r--  workhorse/internal/upload/uploads.go | 8
-rw-r--r--  workhorse/internal/upload/uploads_test.go | 2
152 files changed, 2641 insertions(+), 925 deletions(-)
diff --git a/app/assets/javascripts/google_cloud/components/home.vue b/app/assets/javascripts/google_cloud/components/home.vue
index 5f98eaf4a05..e41337e2679 100644
--- a/app/assets/javascripts/google_cloud/components/home.vue
+++ b/app/assets/javascripts/google_cloud/components/home.vue
@@ -1,6 +1,7 @@
<script>
import { GlTabs, GlTab } from '@gitlab/ui';
import DeploymentsServiceTable from './deployments_service_table.vue';
+import RevokeOauth from './revoke_oauth.vue';
import ServiceAccountsList from './service_accounts_list.vue';
import GcpRegionsList from './gcp_regions_list.vue';
@@ -9,6 +10,7 @@ export default {
GlTabs,
GlTab,
DeploymentsServiceTable,
+ RevokeOauth,
ServiceAccountsList,
GcpRegionsList,
},
@@ -41,6 +43,10 @@ export default {
type: Array,
required: true,
},
+ revokeOauthUrl: {
+ type: String,
+ required: true,
+ },
},
};
</script>
@@ -61,6 +67,8 @@ export default {
:create-url="configureGcpRegionsUrl"
:list="gcpRegions"
/>
+ <hr v-if="revokeOauthUrl" />
+ <revoke-oauth v-if="revokeOauthUrl" :url="revokeOauthUrl" />
</gl-tab>
<gl-tab :title="__('Deployments')">
<deployments-service-table
diff --git a/app/assets/javascripts/google_cloud/components/revoke_oauth.vue b/app/assets/javascripts/google_cloud/components/revoke_oauth.vue
new file mode 100644
index 00000000000..07d966894f6
--- /dev/null
+++ b/app/assets/javascripts/google_cloud/components/revoke_oauth.vue
@@ -0,0 +1,38 @@
+<script>
+import { GlButton, GlForm } from '@gitlab/ui';
+import csrf from '~/lib/utils/csrf';
+import { s__ } from '~/locale';
+
+export const GOOGLE_CLOUD_REVOKE_TITLE = s__('GoogleCloud|Revoke authorizations');
+export const GOOGLE_CLOUD_REVOKE_DESCRIPTION = s__(
+ 'GoogleCloud|Revoke authorizations granted to GitLab. This does not invalidate service accounts.',
+);
+
+export default {
+ components: { GlButton, GlForm },
+ csrf,
+ props: {
+ url: {
+ type: String,
+ required: true,
+ },
+ },
+ i18n: {
+ title: GOOGLE_CLOUD_REVOKE_TITLE,
+ description: GOOGLE_CLOUD_REVOKE_DESCRIPTION,
+ },
+};
+</script>
+
+<template>
+ <div class="gl-mx-4">
+ <h2 class="gl-font-size-h2">{{ $options.i18n.title }}</h2>
+ <p>{{ $options.i18n.description }}</p>
+ <gl-form :action="url" method="post">
+ <input :value="$options.csrf.token" type="hidden" name="authenticity_token" />
+ <gl-button category="secondary" variant="warning" type="submit">
+ {{ $options.i18n.title }}
+ </gl-button>
+ </gl-form>
+ </div>
+</template>
diff --git a/app/assets/javascripts/graphql_shared/queries/users_search_with_mr_permissions.graphql b/app/assets/javascripts/graphql_shared/queries/users_search_with_mr_permissions.graphql
new file mode 100644
index 00000000000..2bd016feb19
--- /dev/null
+++ b/app/assets/javascripts/graphql_shared/queries/users_search_with_mr_permissions.graphql
@@ -0,0 +1,24 @@
+#import "../fragments/user.fragment.graphql"
+#import "~/graphql_shared/fragments/user_availability.fragment.graphql"
+
+query projectUsersSearchWithMRPermissions(
+ $search: String!
+ $fullPath: ID!
+ $mergeRequestId: MergeRequestID!
+) {
+ workspace: project(fullPath: $fullPath) {
+ id
+ users: projectMembers(search: $search, relations: [DIRECT, INHERITED, INVITED_GROUPS]) {
+ nodes {
+ id
+ mergeRequestInteraction(id: $mergeRequestId) {
+ canMerge
+ }
+ user {
+ ...User
+ ...UserAvailability
+ }
+ }
+ }
+ }
+}
diff --git a/app/assets/javascripts/header_search/store/getters.js b/app/assets/javascripts/header_search/store/getters.js
index adaacc6ecf0..87dec95153f 100644
--- a/app/assets/javascripts/header_search/store/getters.js
+++ b/app/assets/javascripts/header_search/store/getters.js
@@ -21,6 +21,8 @@ export const searchQuery = (state) => {
group_id: state.searchContext?.group?.id,
scope: state.searchContext?.scope,
snippets: state.searchContext?.for_snippets ? true : null,
+ search_code: state.searchContext?.code_search ? true : null,
+ repository_ref: state.searchContext?.ref,
},
isNil,
);
@@ -98,6 +100,8 @@ export const projectUrl = (state) => {
group_id: state.searchContext?.group?.id,
scope: state.searchContext?.scope,
snippets: state.searchContext?.for_snippets ? true : null,
+ search_code: state.searchContext?.code_search ? true : null,
+ repository_ref: state.searchContext?.ref,
},
isNil,
);
@@ -113,6 +117,8 @@ export const groupUrl = (state) => {
group_id: state.searchContext?.group?.id,
scope: state.searchContext?.scope,
snippets: state.searchContext?.for_snippets ? true : null,
+ search_code: state.searchContext?.code_search ? true : null,
+ repository_ref: state.searchContext?.ref,
},
isNil,
);
@@ -127,6 +133,8 @@ export const allUrl = (state) => {
nav_source: 'navbar',
scope: state.searchContext?.scope,
snippets: state.searchContext?.for_snippets ? true : null,
+ search_code: state.searchContext?.code_search ? true : null,
+ repository_ref: state.searchContext?.ref,
},
isNil,
);
@@ -140,7 +148,7 @@ export const scopedSearchOptions = (state, getters) => {
if (state.searchContext?.project) {
options.push({
html_id: 'scoped-in-project',
- scope: state.searchContext?.project.name,
+ scope: state.searchContext.project?.name || '',
description: MSG_IN_PROJECT,
url: getters.projectUrl,
});
@@ -149,7 +157,7 @@ export const scopedSearchOptions = (state, getters) => {
if (state.searchContext?.group) {
options.push({
html_id: 'scoped-in-group',
- scope: state.searchContext?.group.name,
+ scope: state.searchContext.group?.name || '',
description: MSG_IN_GROUP,
url: getters.groupUrl,
});
diff --git a/app/assets/javascripts/pipeline_editor/components/drawer/pipeline_editor_drawer.vue b/app/assets/javascripts/pipeline_editor/components/drawer/pipeline_editor_drawer.vue
index 7bc096ce2c8..9cb070a5517 100644
--- a/app/assets/javascripts/pipeline_editor/components/drawer/pipeline_editor_drawer.vue
+++ b/app/assets/javascripts/pipeline_editor/components/drawer/pipeline_editor_drawer.vue
@@ -2,7 +2,6 @@
import { GlButton, GlIcon } from '@gitlab/ui';
import { __ } from '~/locale';
import LocalStorageSync from '~/vue_shared/components/local_storage_sync.vue';
-import { experiment } from '~/experimentation/utils';
import { DRAWER_EXPANDED_KEY } from '../../constants';
import FirstPipelineCard from './cards/first_pipeline_card.vue';
import GettingStartedCard from './cards/getting_started_card.vue';
@@ -50,29 +49,8 @@ export default {
},
mounted() {
this.setTopPosition();
- this.setInitialExpandState();
},
methods: {
- setInitialExpandState() {
- let isExpanded;
-
- experiment('pipeline_editor_walkthrough', {
- control: () => {
- isExpanded = true;
- },
- candidate: () => {
- isExpanded = false;
- },
- });
-
- // We check in the local storage and if no value is defined, we want the default
- // to be true. We want to explicitly set it to true here so that the drawer
- // animates to open on load.
- const localValue = localStorage.getItem(this.$options.localDrawerKey);
- if (localValue === null) {
- this.isExpanded = isExpanded;
- }
- },
setTopPosition() {
const navbarEl = document.querySelector('.js-navbar');
diff --git a/app/assets/javascripts/pipeline_editor/components/pipeline_editor_tabs.vue b/app/assets/javascripts/pipeline_editor/components/pipeline_editor_tabs.vue
index c75b1d4bb11..5cff93c884f 100644
--- a/app/assets/javascripts/pipeline_editor/components/pipeline_editor_tabs.vue
+++ b/app/assets/javascripts/pipeline_editor/components/pipeline_editor_tabs.vue
@@ -4,7 +4,6 @@ import { s__ } from '~/locale';
import PipelineGraph from '~/pipelines/components/pipeline_graph/pipeline_graph.vue';
import glFeatureFlagsMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
import { getParameterValues, setUrlParams, updateHistory } from '~/lib/utils/url_utility';
-import GitlabExperiment from '~/experimentation/components/gitlab_experiment.vue';
import {
CREATE_TAB,
EDITOR_APP_STATUS_EMPTY,
@@ -66,7 +65,6 @@ export default {
GlTabs,
PipelineGraph,
TextEditor,
- GitlabExperiment,
WalkthroughPopover,
},
mixins: [glFeatureFlagsMixin()],
@@ -158,11 +156,7 @@ export default {
data-testid="editor-tab"
@click="setCurrentTab($options.tabConstants.CREATE_TAB)"
>
- <gitlab-experiment name="pipeline_editor_walkthrough">
- <template #candidate>
- <walkthrough-popover v-if="isNewCiConfigFile" v-on="$listeners" />
- </template>
- </gitlab-experiment>
+ <walkthrough-popover v-if="isNewCiConfigFile" v-on="$listeners" />
<ci-editor-header />
<text-editor :commit-sha="commitSha" :value="ciFileContent" v-on="$listeners" />
</editor-tab>
diff --git a/app/assets/javascripts/sidebar/components/assignees/assignee_avatar.vue b/app/assets/javascripts/sidebar/components/assignees/assignee_avatar.vue
index da9ff407faf..240e12ee597 100644
--- a/app/assets/javascripts/sidebar/components/assignees/assignee_avatar.vue
+++ b/app/assets/javascripts/sidebar/components/assignees/assignee_avatar.vue
@@ -1,5 +1,6 @@
<script>
import { GlIcon } from '@gitlab/ui';
+import { IssuableType } from '~/issues/constants';
import { __, sprintf } from '~/locale';
export default {
@@ -31,10 +32,11 @@ export default {
);
},
isMergeRequest() {
- return this.issuableType === 'merge_request';
+ return this.issuableType === IssuableType.MergeRequest;
},
hasMergeIcon() {
- return this.isMergeRequest && !this.user.can_merge;
+ const canMerge = this.user.mergeRequestInteraction?.canMerge || this.user.can_merge;
+ return this.isMergeRequest && !canMerge;
},
},
};
diff --git a/app/assets/javascripts/sidebar/components/assignees/assignee_avatar_link.vue b/app/assets/javascripts/sidebar/components/assignees/assignee_avatar_link.vue
index 2a237e7ace0..578c344da02 100644
--- a/app/assets/javascripts/sidebar/components/assignees/assignee_avatar_link.vue
+++ b/app/assets/javascripts/sidebar/components/assignees/assignee_avatar_link.vue
@@ -1,5 +1,6 @@
<script>
import { GlTooltipDirective, GlLink } from '@gitlab/ui';
+import { IssuableType } from '~/issues/constants';
import { __ } from '~/locale';
import { isUserBusy } from '~/set_status_modal/utils';
import AssigneeAvatar from './assignee_avatar.vue';
@@ -71,7 +72,8 @@ export default {
},
computed: {
cannotMerge() {
- return this.issuableType === 'merge_request' && !this.user.can_merge;
+ const canMerge = this.user.mergeRequestInteraction?.canMerge || this.user.can_merge;
+ return this.issuableType === IssuableType.MergeRequest && !canMerge;
},
tooltipTitle() {
const { name = '', availability = '' } = this.user;
diff --git a/app/assets/javascripts/sidebar/components/assignees/collapsed_assignee_list.vue b/app/assets/javascripts/sidebar/components/assignees/collapsed_assignee_list.vue
index 6a74ab83c22..856687c00ae 100644
--- a/app/assets/javascripts/sidebar/components/assignees/collapsed_assignee_list.vue
+++ b/app/assets/javascripts/sidebar/components/assignees/collapsed_assignee_list.vue
@@ -58,7 +58,7 @@ export default {
return this.users.length > 2;
},
allAssigneesCanMerge() {
- return this.users.every((user) => user.can_merge);
+ return this.users.every((user) => user.can_merge || user.mergeRequestInteraction?.canMerge);
},
sidebarAvatarCounter() {
if (this.users.length > DEFAULT_MAX_COUNTER) {
@@ -77,7 +77,9 @@ export default {
return '';
}
- const mergeLength = this.users.filter((u) => u.can_merge).length;
+ const mergeLength = this.users.filter(
+ (u) => u.can_merge || u.mergeRequestInteraction?.canMerge,
+ ).length;
if (mergeLength === this.users.length) {
return '';
diff --git a/app/assets/javascripts/sidebar/components/assignees/issuable_assignees.vue b/app/assets/javascripts/sidebar/components/assignees/issuable_assignees.vue
index a3379784bc1..59a4eb54bbe 100644
--- a/app/assets/javascripts/sidebar/components/assignees/issuable_assignees.vue
+++ b/app/assets/javascripts/sidebar/components/assignees/issuable_assignees.vue
@@ -44,7 +44,7 @@ export default {
<div class="gl-display-flex gl-flex-direction-column issuable-assignees">
<div
v-if="emptyUsers"
- class="gl-display-flex gl-align-items-center gl-text-gray-500 gl-mt-2 hide-collapsed"
+ class="gl-display-flex gl-align-items-center gl-text-gray-500 hide-collapsed"
data-testid="none"
>
<span> {{ __('None') }}</span>
@@ -65,7 +65,7 @@ export default {
v-else
:users="users"
:issuable-type="issuableType"
- class="gl-text-gray-800 gl-mt-2 hide-collapsed"
+ class="gl-text-gray-800 hide-collapsed"
@toggle-attention-requested="toggleAttentionRequested"
/>
</div>
diff --git a/app/assets/javascripts/sidebar/components/assignees/sidebar_assignees_widget.vue b/app/assets/javascripts/sidebar/components/assignees/sidebar_assignees_widget.vue
index 9c031ae64f8..7743004a293 100644
--- a/app/assets/javascripts/sidebar/components/assignees/sidebar_assignees_widget.vue
+++ b/app/assets/javascripts/sidebar/components/assignees/sidebar_assignees_widget.vue
@@ -1,6 +1,5 @@
<script>
import { GlDropdownItem } from '@gitlab/ui';
-import { cloneDeep } from 'lodash';
import Vue from 'vue';
import createFlash from '~/flash';
import { IssuableType } from '~/issues/constants';
@@ -101,7 +100,10 @@ export default {
}
const issuable = data.workspace?.issuable;
if (issuable) {
- this.selected = cloneDeep(issuable.assignees.nodes);
+ this.selected = issuable.assignees.nodes.map((node) => ({
+ ...node,
+ canMerge: node.mergeRequestInteraction?.canMerge || false,
+ }));
}
},
error() {
@@ -141,6 +143,7 @@ export default {
username: gon?.current_username,
name: gon?.current_user_fullname,
avatarUrl: gon?.current_user_avatar_url,
+ canMerge: this.issuable?.userPermissions?.canMerge || false,
};
},
signedIn() {
@@ -206,8 +209,8 @@ export default {
expandWidget() {
this.$refs.toggle.expand();
},
- focusSearch() {
- this.$refs.userSelect.focusSearch();
+ showDropdown() {
+ this.$refs.userSelect.showDropdown();
},
showError() {
createFlash({ message: __('An error occurred while fetching participants.') });
@@ -236,11 +239,11 @@ export default {
:initial-loading="isAssigneesLoading"
:title="assigneeText"
:is-dirty="isDirty"
- @open="focusSearch"
+ @open="showDropdown"
@close="saveAssignees"
>
<template #collapsed>
- <slot name="collapsed" :users="assignees" :on-click="expandWidget"></slot>
+ <slot name="collapsed" :users="assignees"></slot>
<issuable-assignees
:users="assignees"
:issuable-type="issuableType"
@@ -256,12 +259,13 @@ export default {
:text="$options.i18n.assignees"
:header-text="$options.i18n.assignTo"
:iid="iid"
+ :issuable-id="issuableId"
:full-path="fullPath"
:allow-multiple-assignees="allowMultipleAssignees"
:current-user="currentUser"
:issuable-type="issuableType"
:is-editing="edit"
- class="gl-w-full dropdown-menu-user"
+ class="gl-w-full dropdown-menu-user gl-mt-n3"
@toggle="collapseWidget"
@error="showError"
@input="setDirtyState"
diff --git a/app/assets/javascripts/sidebar/components/assignees/sidebar_invite_members.vue b/app/assets/javascripts/sidebar/components/assignees/sidebar_invite_members.vue
index 8ef65ef7308..28bc5afc1a4 100644
--- a/app/assets/javascripts/sidebar/components/assignees/sidebar_invite_members.vue
+++ b/app/assets/javascripts/sidebar/components/assignees/sidebar_invite_members.vue
@@ -30,6 +30,6 @@ export default {
:event="$options.dataTrackEvent"
:label="$options.dataTrackLabel"
:trigger-source="triggerSource"
- classes="gl-display-block gl-pl-6 gl-hover-text-decoration-none gl-hover-text-blue-800!"
+ classes="gl-display-block gl-pl-0 gl-hover-text-decoration-none gl-hover-text-blue-800!"
/>
</template>
diff --git a/app/assets/javascripts/sidebar/components/assignees/sidebar_participant.vue b/app/assets/javascripts/sidebar/components/assignees/sidebar_participant.vue
index e2a38a100b9..19f588b28be 100644
--- a/app/assets/javascripts/sidebar/components/assignees/sidebar_participant.vue
+++ b/app/assets/javascripts/sidebar/components/assignees/sidebar_participant.vue
@@ -1,17 +1,24 @@
<script>
-import { GlAvatarLabeled, GlAvatarLink } from '@gitlab/ui';
+import { GlAvatarLabeled, GlAvatarLink, GlIcon } from '@gitlab/ui';
+import { IssuableType } from '~/issues/constants';
import { s__, sprintf } from '~/locale';
export default {
components: {
GlAvatarLabeled,
GlAvatarLink,
+ GlIcon,
},
props: {
user: {
type: Object,
required: true,
},
+ issuableType: {
+ type: String,
+ required: false,
+ default: IssuableType.Issue,
+ },
},
computed: {
userLabel() {
@@ -22,6 +29,9 @@ export default {
author: this.user.name,
});
},
+ hasCannotMergeIcon() {
+ return this.issuableType === IssuableType.MergeRequest && !this.user.canMerge;
+ },
},
};
</script>
@@ -31,9 +41,19 @@ export default {
<gl-avatar-labeled
:size="32"
:label="userLabel"
- :sub-label="user.username"
+ :sub-label="`@${user.username}`"
:src="user.avatarUrl || user.avatar || user.avatar_url"
- class="gl-align-items-center"
- />
+ class="gl-align-items-center gl-relative"
+ >
+ <template #meta>
+ <gl-icon
+ v-if="hasCannotMergeIcon"
+ name="warning-solid"
+ aria-hidden="true"
+ class="merge-icon"
+ :size="12"
+ />
+ </template>
+ </gl-avatar-labeled>
</gl-avatar-link>
</template>
diff --git a/app/assets/javascripts/sidebar/components/incidents/constants.js b/app/assets/javascripts/sidebar/components/incidents/constants.js
new file mode 100644
index 00000000000..cd05a6099fd
--- /dev/null
+++ b/app/assets/javascripts/sidebar/components/incidents/constants.js
@@ -0,0 +1,25 @@
+import { s__ } from '~/locale';
+
+export const STATUS_TRIGGERED = 'TRIGGERED';
+export const STATUS_ACKNOWLEDGED = 'ACKNOWLEDGED';
+export const STATUS_RESOLVED = 'RESOLVED';
+
+export const STATUS_TRIGGERED_LABEL = s__('IncidentManagement|Triggered');
+export const STATUS_ACKNOWLEDGED_LABEL = s__('IncidentManagement|Acknowledged');
+export const STATUS_RESOLVED_LABEL = s__('IncidentManagement|Resolved');
+
+export const STATUS_LABELS = {
+ [STATUS_TRIGGERED]: STATUS_TRIGGERED_LABEL,
+ [STATUS_ACKNOWLEDGED]: STATUS_ACKNOWLEDGED_LABEL,
+ [STATUS_RESOLVED]: STATUS_RESOLVED_LABEL,
+};
+
+export const i18n = {
+ fetchError: s__(
+ 'IncidentManagement|An error occurred while fetching the incident status. Please reload the page.',
+ ),
+ title: s__('IncidentManagement|Status'),
+ updateError: s__(
+ 'IncidentManagement|An error occurred while updating the incident status. Please reload the page and try again.',
+ ),
+};
diff --git a/app/assets/javascripts/sidebar/components/incidents/escalation_status.vue b/app/assets/javascripts/sidebar/components/incidents/escalation_status.vue
new file mode 100644
index 00000000000..2c32cf89387
--- /dev/null
+++ b/app/assets/javascripts/sidebar/components/incidents/escalation_status.vue
@@ -0,0 +1,61 @@
+<script>
+import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
+import { i18n, STATUS_ACKNOWLEDGED, STATUS_TRIGGERED, STATUS_RESOLVED } from './constants';
+import { getStatusLabel } from './utils';
+
+const STATUS_LIST = [STATUS_TRIGGERED, STATUS_ACKNOWLEDGED, STATUS_RESOLVED];
+
+export default {
+ i18n,
+ STATUS_LIST,
+ components: {
+ GlDropdown,
+ GlDropdownItem,
+ },
+ props: {
+ value: {
+ type: String,
+ required: false,
+ default: null,
+ validator(value) {
+ return [...STATUS_LIST, null].includes(value);
+ },
+ },
+ },
+ computed: {
+ currentStatusLabel() {
+ return this.getStatusLabel(this.value);
+ },
+ },
+ methods: {
+ show() {
+ this.$refs.dropdown.show();
+ },
+ hide() {
+ this.$refs.dropdown.hide();
+ },
+ getStatusLabel,
+ },
+};
+</script>
+
+<template>
+ <gl-dropdown
+ ref="dropdown"
+ block
+ :text="currentStatusLabel"
+ toggle-class="dropdown-menu-toggle gl-mb-2"
+ >
+ <slot name="header"> </slot>
+ <gl-dropdown-item
+ v-for="status in $options.STATUS_LIST"
+ :key="status"
+ data-testid="status-dropdown-item"
+ :is-check-item="true"
+ :is-checked="status === value"
+ @click="$emit('input', status)"
+ >
+ {{ getStatusLabel(status) }}
+ </gl-dropdown-item>
+ </gl-dropdown>
+</template>
diff --git a/app/assets/javascripts/sidebar/components/incidents/sidebar_escalation_status.vue b/app/assets/javascripts/sidebar/components/incidents/sidebar_escalation_status.vue
new file mode 100644
index 00000000000..67ae1e6fcab
--- /dev/null
+++ b/app/assets/javascripts/sidebar/components/incidents/sidebar_escalation_status.vue
@@ -0,0 +1,135 @@
+<script>
+import { GlIcon, GlTooltipDirective } from '@gitlab/ui';
+import { escalationStatusQuery, escalationStatusMutation } from '~/sidebar/constants';
+import { createAlert } from '~/flash';
+import { logError } from '~/lib/logger';
+import EscalationStatus from 'ee_else_ce/sidebar/components/incidents/escalation_status.vue';
+import SidebarEditableItem from '../sidebar_editable_item.vue';
+import { i18n } from './constants';
+import { getStatusLabel } from './utils';
+
+export default {
+ i18n,
+ components: {
+ EscalationStatus,
+ SidebarEditableItem,
+ GlIcon,
+ },
+ directives: {
+ GlTooltip: GlTooltipDirective,
+ },
+ props: {
+ iid: {
+ type: String,
+ required: true,
+ },
+ projectPath: {
+ type: String,
+ required: true,
+ },
+ issuableType: {
+ required: true,
+ type: String,
+ },
+ },
+ data() {
+ return {
+ status: null,
+ isUpdating: false,
+ };
+ },
+ apollo: {
+ status: {
+ query() {
+ return escalationStatusQuery;
+ },
+ variables() {
+ return {
+ fullPath: this.projectPath,
+ iid: this.iid,
+ };
+ },
+ update(data) {
+ return data.workspace?.issuable?.escalationStatus;
+ },
+ error(error) {
+ const message = this.$options.i18n.fetchError;
+ createAlert({ message });
+ logError(message, error);
+ },
+ },
+ },
+ computed: {
+ isLoading() {
+ return this.$apollo.queries.status.loading;
+ },
+ currentStatusLabel() {
+ return getStatusLabel(this.status);
+ },
+ tooltipText() {
+ return `${this.$options.i18n.title}: ${this.currentStatusLabel}`;
+ },
+ },
+ methods: {
+ updateStatus(status) {
+ this.isUpdating = true;
+ this.closeSidebar();
+ return this.$apollo
+ .mutate({
+ mutation: escalationStatusMutation,
+ variables: {
+ status,
+ iid: this.iid,
+ projectPath: this.projectPath,
+ },
+ })
+ .then(({ data: { issueSetEscalationStatus } }) => {
+ this.status = issueSetEscalationStatus.issue.escalationStatus;
+ })
+ .catch((error) => {
+ const message = this.$options.i18n.updateError;
+ createAlert({ message });
+ logError(message, error);
+ })
+ .finally(() => {
+ this.isUpdating = false;
+ });
+ },
+ closeSidebar() {
+ this.close();
+ this.$refs.editable.collapse();
+ },
+ open() {
+ this.$refs.escalationStatus.show();
+ },
+ close() {
+ this.$refs.escalationStatus.hide();
+ },
+ },
+};
+</script>
+
+<template>
+ <sidebar-editable-item
+ ref="editable"
+ :title="$options.i18n.title"
+ :initial-loading="isLoading"
+ :loading="isUpdating"
+ @open="open"
+ @close="close"
+ >
+ <template #default>
+ <escalation-status ref="escalationStatus" :value="status" @input="updateStatus" />
+ </template>
+ <template #collapsed>
+ <div
+ v-gl-tooltip.viewport.left="tooltipText"
+ class="sidebar-collapsed-icon"
+ data-testid="status-icon"
+ >
+ <gl-icon name="status" :size="16" />
+ </div>
+ <span class="hide-collapsed text-secondary">{{ currentStatusLabel }}</span>
+ </template>
+ </sidebar-editable-item>
+</template>
diff --git a/app/assets/javascripts/sidebar/components/incidents/utils.js b/app/assets/javascripts/sidebar/components/incidents/utils.js
new file mode 100644
index 00000000000..59bf1ea466c
--- /dev/null
+++ b/app/assets/javascripts/sidebar/components/incidents/utils.js
@@ -0,0 +1,5 @@
+import { s__ } from '~/locale';
+
+import { STATUS_LABELS } from './constants';
+
+export const getStatusLabel = (status) => STATUS_LABELS[status] ?? s__('IncidentManagement|None');
diff --git a/app/assets/javascripts/sidebar/constants.js b/app/assets/javascripts/sidebar/constants.js
index 0238fb8e8d5..989dc574bc3 100644
--- a/app/assets/javascripts/sidebar/constants.js
+++ b/app/assets/javascripts/sidebar/constants.js
@@ -1,7 +1,8 @@
import { s__, sprintf } from '~/locale';
import updateIssueLabelsMutation from '~/boards/graphql/issue_set_labels.mutation.graphql';
+import userSearchQuery from '~/graphql_shared/queries/users_search.query.graphql';
+import userSearchWithMRPermissionsQuery from '~/graphql_shared/queries/users_search_with_mr_permissions.graphql';
import { IssuableType, WorkspaceType } from '~/issues/constants';
-import { DEFAULT_DEBOUNCE_AND_THROTTLE_MS } from '~/lib/utils/constants';
import epicConfidentialQuery from '~/sidebar/queries/epic_confidential.query.graphql';
import epicDueDateQuery from '~/sidebar/queries/epic_due_date.query.graphql';
import epicParticipantsQuery from '~/sidebar/queries/epic_participants.query.graphql';
@@ -49,12 +50,12 @@ import getMergeRequestParticipants from '~/vue_shared/components/sidebar/queries
import getMrTimelogsQuery from '~/vue_shared/components/sidebar/queries/get_mr_timelogs.query.graphql';
import updateIssueAssigneesMutation from '~/vue_shared/components/sidebar/queries/update_issue_assignees.mutation.graphql';
import updateMergeRequestAssigneesMutation from '~/vue_shared/components/sidebar/queries/update_mr_assignees.mutation.graphql';
+import getEscalationStatusQuery from '~/sidebar/queries/escalation_status.query.graphql';
+import updateEscalationStatusMutation from '~/sidebar/queries/update_escalation_status.mutation.graphql';
import projectIssueMilestoneMutation from './queries/project_issue_milestone.mutation.graphql';
import projectIssueMilestoneQuery from './queries/project_issue_milestone.query.graphql';
import projectMilestonesQuery from './queries/project_milestones.query.graphql';
-export const ASSIGNEES_DEBOUNCE_DELAY = DEFAULT_DEBOUNCE_AND_THROTTLE_MS;
-
export const defaultEpicSort = 'TITLE_ASC';
export const epicIidPattern = /^&(?<iid>\d+)$/;
@@ -91,6 +92,15 @@ export const participantsQueries = {
},
};
+export const userSearchQueries = {
+ [IssuableType.Issue]: {
+ query: userSearchQuery,
+ },
+ [IssuableType.MergeRequest]: {
+ query: userSearchWithMRPermissionsQuery,
+ },
+};
+
export const confidentialityQueries = {
[IssuableType.Issue]: {
query: issueConfidentialQuery,
@@ -305,3 +315,6 @@ export function dropdowni18nText(issuableAttribute, issuableType) {
),
};
}
+
+export const escalationStatusQuery = getEscalationStatusQuery;
+export const escalationStatusMutation = updateEscalationStatusMutation;
diff --git a/app/assets/javascripts/sidebar/mount_sidebar.js b/app/assets/javascripts/sidebar/mount_sidebar.js
index 0fb31ec92ca..2a7d967cb61 100644
--- a/app/assets/javascripts/sidebar/mount_sidebar.js
+++ b/app/assets/javascripts/sidebar/mount_sidebar.js
@@ -10,6 +10,7 @@ import {
isInIssuePage,
isInDesignPage,
isInIncidentPage,
+ isInMRPage,
parseBoolean,
} from '~/lib/utils/common_utils';
import { __ } from '~/locale';
@@ -31,6 +32,7 @@ import { refreshUserMergeRequestCounts } from '~/commons/nav/user_merge_requests
import Translate from '../vue_shared/translate';
import SidebarAssignees from './components/assignees/sidebar_assignees.vue';
import CopyEmailToClipboard from './components/copy_email_to_clipboard.vue';
+import SidebarEscalationStatus from './components/incidents/sidebar_escalation_status.vue';
import IssuableLockForm from './components/lock/issuable_lock_form.vue';
import SidebarReviewers from './components/reviewers/sidebar_reviewers.vue';
import SidebarSeverity from './components/severity/sidebar_severity.vue';
@@ -135,6 +137,8 @@ function mountAssigneesComponent() {
if (!el) return;
const { id, iid, fullPath, editable } = getSidebarOptions();
+ const isIssuablePage = isInIssuePage() || isInIncidentPage() || isInDesignPage();
+ const issuableType = isIssuablePage ? IssuableType.Issue : IssuableType.MergeRequest;
// eslint-disable-next-line no-new
new Vue({
el,
@@ -152,21 +156,16 @@ function mountAssigneesComponent() {
props: {
iid: String(iid),
fullPath,
- issuableType:
- isInIssuePage() || isInIncidentPage() || isInDesignPage()
- ? IssuableType.Issue
- : IssuableType.MergeRequest,
+ issuableType,
issuableId: id,
allowMultipleAssignees: !el.dataset.maxAssignees,
},
scopedSlots: {
- collapsed: ({ users, onClick }) =>
+ collapsed: ({ users }) =>
createElement(CollapsedAssigneeList, {
props: {
users,
- },
- nativeOn: {
- click: onClick,
+ issuableType,
},
}),
},
@@ -568,6 +567,36 @@ function mountSeverityComponent() {
});
}
+function mountEscalationStatusComponent() {
+ const statusContainerEl = document.querySelector('#js-escalation-status');
+
+ if (!statusContainerEl) {
+ return false;
+ }
+
+ const { issuableType } = getSidebarOptions();
+ const { canUpdate, issueIid, projectPath } = statusContainerEl.dataset;
+
+ return new Vue({
+ el: statusContainerEl,
+ apolloProvider,
+ components: {
+ SidebarEscalationStatus,
+ },
+ provide: {
+ canUpdate: parseBoolean(canUpdate),
+ },
+ render: (createElement) =>
+ createElement('sidebar-escalation-status', {
+ props: {
+ iid: issueIid,
+ issuableType,
+ projectPath,
+ },
+ }),
+ });
+}
+
function mountCopyEmailComponent() {
const el = document.getElementById('issuable-copy-email');
@@ -585,7 +614,7 @@ function mountCopyEmailComponent() {
}
const isAssigneesWidgetShown =
- (isInIssuePage() || isInDesignPage()) && gon.features.issueAssigneesWidget;
+ (isInIssuePage() || isInDesignPage() || isInMRPage()) && gon.features.issueAssigneesWidget;
export function mountSidebar(mediator, store) {
initInviteMembersModal();
@@ -619,6 +648,8 @@ export function mountSidebar(mediator, store) {
mountSeverityComponent();
+ mountEscalationStatusComponent();
+
if (window.gon?.features?.mrAttentionRequests) {
eventHub.$on('removeCurrentUserAttentionRequested', () => {
mediator.removeCurrentUserAttentionRequested();
diff --git a/app/assets/javascripts/sidebar/queries/escalation_status.query.graphql b/app/assets/javascripts/sidebar/queries/escalation_status.query.graphql
new file mode 100644
index 00000000000..cb7c5a0fbe7
--- /dev/null
+++ b/app/assets/javascripts/sidebar/queries/escalation_status.query.graphql
@@ -0,0 +1,9 @@
+query escalationStatusQuery($fullPath: ID!, $iid: String) {
+ workspace: project(fullPath: $fullPath) {
+ id
+ issuable: issue(iid: $iid) {
+ id
+ escalationStatus
+ }
+ }
+}
diff --git a/app/assets/javascripts/sidebar/queries/update_escalation_status.mutation.graphql b/app/assets/javascripts/sidebar/queries/update_escalation_status.mutation.graphql
new file mode 100644
index 00000000000..a4aff7968df
--- /dev/null
+++ b/app/assets/javascripts/sidebar/queries/update_escalation_status.mutation.graphql
@@ -0,0 +1,10 @@
+mutation updateEscalationStatus($projectPath: ID!, $status: IssueEscalationStatus!, $iid: String!) {
+ issueSetEscalationStatus(input: { projectPath: $projectPath, status: $status, iid: $iid }) {
+ errors
+ clientMutationId
+ issue {
+ id
+ escalationStatus
+ }
+ }
+}
diff --git a/app/assets/javascripts/vue_shared/components/sidebar/queries/get_mr_assignees.query.graphql b/app/assets/javascripts/vue_shared/components/sidebar/queries/get_mr_assignees.query.graphql
index 81e19e48d75..7127940bb05 100644
--- a/app/assets/javascripts/vue_shared/components/sidebar/queries/get_mr_assignees.query.graphql
+++ b/app/assets/javascripts/vue_shared/components/sidebar/queries/get_mr_assignees.query.graphql
@@ -10,8 +10,14 @@ query getMrAssignees($fullPath: ID!, $iid: String!) {
nodes {
...User
...UserAvailability
+ mergeRequestInteraction {
+ canMerge
+ }
}
}
+ userPermissions {
+ canMerge
+ }
}
}
}
diff --git a/app/assets/javascripts/vue_shared/components/sidebar/queries/update_mr_assignees.mutation.graphql b/app/assets/javascripts/vue_shared/components/sidebar/queries/update_mr_assignees.mutation.graphql
index 77140ea36d8..5fec2ccbdfb 100644
--- a/app/assets/javascripts/vue_shared/components/sidebar/queries/update_mr_assignees.mutation.graphql
+++ b/app/assets/javascripts/vue_shared/components/sidebar/queries/update_mr_assignees.mutation.graphql
@@ -2,21 +2,18 @@
#import "~/graphql_shared/fragments/user_availability.fragment.graphql"
mutation mergeRequestSetAssignees($iid: String!, $assigneeUsernames: [String!]!, $fullPath: ID!) {
- mergeRequestSetAssignees(
+ issuableSetAssignees: mergeRequestSetAssignees(
input: { iid: $iid, assigneeUsernames: $assigneeUsernames, projectPath: $fullPath }
) {
- mergeRequest {
+ issuable: mergeRequest {
id
assignees {
nodes {
...User
...UserAvailability
- }
- }
- participants {
- nodes {
- ...User
- ...UserAvailability
+ mergeRequestInteraction {
+ canMerge
+ }
}
}
}
diff --git a/app/assets/javascripts/vue_shared/components/user_select/user_select.vue b/app/assets/javascripts/vue_shared/components/user_select/user_select.vue
index b85cae0c64f..9df5254155e 100644
--- a/app/assets/javascripts/vue_shared/components/user_select/user_select.vue
+++ b/app/assets/javascripts/vue_shared/components/user_select/user_select.vue
@@ -1,4 +1,5 @@
<script>
+import { debounce } from 'lodash';
import {
GlDropdown,
GlDropdownForm,
@@ -6,11 +7,14 @@ import {
GlDropdownItem,
GlSearchBoxByType,
GlLoadingIcon,
+ GlTooltipDirective,
} from '@gitlab/ui';
-import searchUsers from '~/graphql_shared/queries/users_search.query.graphql';
import { __ } from '~/locale';
import SidebarParticipant from '~/sidebar/components/assignees/sidebar_participant.vue';
-import { ASSIGNEES_DEBOUNCE_DELAY, participantsQueries } from '~/sidebar/constants';
+import { IssuableType } from '~/issues/constants';
+import { DEFAULT_DEBOUNCE_AND_THROTTLE_MS } from '~/lib/utils/constants';
+import { participantsQueries, userSearchQueries } from '~/sidebar/constants';
+import { convertToGraphQLId } from '~/graphql_shared/utils';
export default {
i18n: {
@@ -25,6 +29,9 @@ export default {
SidebarParticipant,
GlLoadingIcon,
},
+ directives: {
+ GlTooltip: GlTooltipDirective,
+ },
props: {
headerText: {
type: String,
@@ -58,13 +65,18 @@ export default {
issuableType: {
type: String,
required: false,
- default: 'issue',
+ default: IssuableType.Issue,
},
isEditing: {
type: Boolean,
required: false,
default: true,
},
+ issuableId: {
+ type: Number,
+ required: false,
+ default: null,
+ },
},
data() {
return {
@@ -89,28 +101,35 @@ export default {
};
},
update(data) {
- return data.workspace?.issuable?.participants.nodes;
+ return data.workspace?.issuable?.participants.nodes.map((node) => ({
+ ...node,
+ canMerge: false,
+ }));
},
error() {
this.$emit('error');
},
},
searchUsers: {
- query: searchUsers,
+ query() {
+ return userSearchQueries[this.issuableType].query;
+ },
variables() {
- return {
- fullPath: this.fullPath,
- search: this.search,
- first: 20,
- };
+ return this.searchUsersVariables;
},
skip() {
return !this.isEditing;
},
update(data) {
- return data.workspace?.users?.nodes.filter((x) => x?.user).map(({ user }) => user) || [];
+ return (
+ data.workspace?.users?.nodes
+ .filter((x) => x?.user)
+ .map((node) => ({
+ ...node.user,
+ canMerge: node.mergeRequestInteraction?.canMerge || false,
+ })) || []
+ );
},
- debounce: ASSIGNEES_DEBOUNCE_DELAY,
error() {
this.$emit('error');
this.isSearching = false;
@@ -121,6 +140,23 @@ export default {
},
},
computed: {
+ isMergeRequest() {
+ return this.issuableType === IssuableType.MergeRequest;
+ },
+ searchUsersVariables() {
+ const variables = {
+ fullPath: this.fullPath,
+ search: this.search,
+ first: 20,
+ };
+ if (!this.isMergeRequest) {
+ return variables;
+ }
+ return {
+ ...variables,
+ mergeRequestId: convertToGraphQLId('MergeRequest', this.issuableId),
+ };
+ },
isLoading() {
return this.$apollo.queries.searchUsers.loading || this.$apollo.queries.participants.loading;
},
@@ -135,8 +171,8 @@ export default {
// TODO this de-duplication is temporary (BE fix required)
// https://gitlab.com/gitlab-org/gitlab/-/issues/327822
- const mergedSearchResults = filteredParticipants
- .concat(this.searchUsers)
+ const mergedSearchResults = this.searchUsers
+ .concat(filteredParticipants)
.reduce(
(acc, current) => (acc.some((user) => current.id === user.id) ? acc : [...acc, current]),
[],
@@ -179,6 +215,7 @@ export default {
return this.selectedFiltered.length === 0;
},
},
+
watch: {
// We need to add this watcher to track the moment when user is alredy typing
// but query is still not started due to debounce
@@ -188,15 +225,21 @@ export default {
}
},
},
+ created() {
+ this.debouncedSearchKeyUpdate = debounce(this.setSearchKey, DEFAULT_DEBOUNCE_AND_THROTTLE_MS);
+ },
methods: {
selectAssignee(user) {
let selected = [...this.value];
if (!this.allowMultipleAssignees) {
selected = [user];
+ this.$emit('input', selected);
+ this.$refs.dropdown.hide();
+ this.$emit('toggle');
} else {
selected.push(user);
+ this.$emit('input', selected);
}
- this.$emit('input', selected);
},
unselect(name) {
const selected = this.value.filter((user) => user.username !== name);
@@ -205,6 +248,9 @@ export default {
focusSearch() {
this.$refs.search.focusInput();
},
+ showDropdown() {
+ this.$refs.dropdown.show();
+ },
showDivider(list) {
return list.length > 0 && this.isSearchEmpty;
},
@@ -216,22 +262,37 @@ export default {
const currentUser = usersCopy.find((user) => user.username === this.currentUser.username);
if (currentUser) {
+ currentUser.canMerge = this.currentUser.canMerge;
const index = usersCopy.indexOf(currentUser);
usersCopy.splice(0, 0, usersCopy.splice(index, 1)[0]);
}
return usersCopy;
},
+ setSearchKey(value) {
+ this.search = value.trim();
+ },
+ tooltipText(user) {
+ if (!this.isMergeRequest) {
+ return '';
+ }
+ return user.canMerge ? '' : __('Cannot merge');
+ },
},
};
</script>
<template>
- <gl-dropdown class="show" :text="text" @toggle="$emit('toggle')">
+ <gl-dropdown ref="dropdown" :text="text" @toggle="$emit('toggle')" @shown="focusSearch">
<template #header>
<p class="gl-font-weight-bold gl-text-center gl-mt-2 gl-mb-4">{{ headerText }}</p>
<gl-dropdown-divider />
- <gl-search-box-by-type ref="search" v-model.trim="search" class="js-dropdown-input-field" />
+ <gl-search-box-by-type
+ ref="search"
+ :value="search"
+ class="js-dropdown-input-field"
+ @input="debouncedSearchKeyUpdate"
+ />
</template>
<gl-dropdown-form class="gl-relative gl-min-h-7">
<gl-loading-icon
@@ -247,7 +308,7 @@ export default {
:is-checked="selectedIsEmpty"
:is-check-centered="true"
data-testid="unassign"
- @click="$emit('input', [])"
+ @click.native.capture.stop="$emit('input', [])"
>
<span :class="selectedIsEmpty ? 'gl-pl-0' : 'gl-pl-6'" class="gl-font-weight-bold">{{
$options.i18n.unassigned
@@ -258,27 +319,44 @@ export default {
<gl-dropdown-item
v-for="item in selectedFiltered"
:key="item.id"
+ v-gl-tooltip.left.viewport
+ :title="tooltipText(item)"
+ boundary="viewport"
is-checked
is-check-centered
data-testid="selected-participant"
- @click.stop="unselect(item.username)"
+ @click.native.capture.stop="unselect(item.username)"
>
- <sidebar-participant :user="item" />
+ <sidebar-participant :user="item" :issuable-type="issuableType" />
</gl-dropdown-item>
<template v-if="showCurrentUser">
<gl-dropdown-divider />
- <gl-dropdown-item data-testid="current-user" @click.stop="selectAssignee(currentUser)">
- <sidebar-participant :user="currentUser" class="gl-pl-6!" />
+ <gl-dropdown-item
+ data-testid="current-user"
+ @click.native.capture.stop="selectAssignee(currentUser)"
+ >
+ <sidebar-participant
+ :user="currentUser"
+ :issuable-type="issuableType"
+ class="gl-pl-6!"
+ />
</gl-dropdown-item>
</template>
<gl-dropdown-divider v-if="showDivider(unselectedFiltered)" />
<gl-dropdown-item
v-for="unselectedUser in unselectedFiltered"
:key="unselectedUser.id"
+ v-gl-tooltip.left.viewport
+ :title="tooltipText(unselectedUser)"
+ boundary="viewport"
data-testid="unselected-participant"
- @click="selectAssignee(unselectedUser)"
+ @click.native.capture.stop="selectAssignee(unselectedUser)"
>
- <sidebar-participant :user="unselectedUser" class="gl-pl-6!" />
+ <sidebar-participant
+ :user="unselectedUser"
+ :issuable-type="issuableType"
+ class="gl-pl-6!"
+ />
</gl-dropdown-item>
<gl-dropdown-item v-if="noUsersFound" data-testid="empty-results" class="gl-pl-6!">
{{ __('No matching results') }}
diff --git a/app/assets/stylesheets/pages/issuable.scss b/app/assets/stylesheets/pages/issuable.scss
index 6a27a0770c0..c00af802c06 100644
--- a/app/assets/stylesheets/pages/issuable.scss
+++ b/app/assets/stylesheets/pages/issuable.scss
@@ -108,12 +108,15 @@
.merge-icon {
color: $orange-400;
position: absolute;
- bottom: 0;
- right: 0;
filter: drop-shadow(0 0 0.5px $white) drop-shadow(0 0 1px $white) drop-shadow(0 0 2px $white);
}
}
+.assignee .merge-icon {
+ top: calc(50% + 0.25rem);
+ left: 1.275rem;
+}
+
.reviewer .merge-icon {
bottom: -3px;
right: -3px;
diff --git a/app/controllers/projects/ci/pipeline_editor_controller.rb b/app/controllers/projects/ci/pipeline_editor_controller.rb
index 6f12e3940dd..8c6e8f0e126 100644
--- a/app/controllers/projects/ci/pipeline_editor_controller.rb
+++ b/app/controllers/projects/ci/pipeline_editor_controller.rb
@@ -2,7 +2,6 @@
class Projects::Ci::PipelineEditorController < Projects::ApplicationController
before_action :check_can_collaborate!
- before_action :setup_walkthrough_experiment, only: :show
before_action do
push_frontend_feature_flag(:schema_linting, @project, default_enabled: :yaml)
end
@@ -19,11 +18,4 @@ class Projects::Ci::PipelineEditorController < Projects::ApplicationController
def check_can_collaborate!
render_404 unless can_collaborate_with_project?(@project)
end
-
- def setup_walkthrough_experiment
- experiment(:pipeline_editor_walkthrough, namespace: @project.namespace, sticky_to: current_user) do |e|
- e.candidate {}
- e.publish_to_database
- end
- end
end
diff --git a/app/controllers/projects/google_cloud/gcp_regions_controller.rb b/app/controllers/projects/google_cloud/gcp_regions_controller.rb
index c0531e5d2f5..beeb91cfd80 100644
--- a/app/controllers/projects/google_cloud/gcp_regions_controller.rb
+++ b/app/controllers/projects/google_cloud/gcp_regions_controller.rb
@@ -12,13 +12,14 @@ class Projects::GoogleCloud::GcpRegionsController < Projects::GoogleCloud::BaseC
branches = BranchesFinder.new(project.repository, params).execute(gitaly_pagination: true)
tags = TagsFinder.new(project.repository, params).execute(gitaly_pagination: true)
refs = (branches + tags).map(&:name)
- @js_data = {
+ js_data = {
screen: 'gcp_regions_form',
availableRegions: AVAILABLE_REGIONS,
refs: refs,
cancelPath: project_google_cloud_index_path(project)
- }.to_json
- track_event('gcp_regions#index', 'form_render', @js_data)
+ }
+ @js_data = js_data.to_json
+ track_event('gcp_regions#index', 'form_render', js_data)
end
def create
diff --git a/app/controllers/projects/google_cloud/revoke_oauth_controller.rb b/app/controllers/projects/google_cloud/revoke_oauth_controller.rb
new file mode 100644
index 00000000000..03d1474707b
--- /dev/null
+++ b/app/controllers/projects/google_cloud/revoke_oauth_controller.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+class Projects::GoogleCloud::RevokeOauthController < Projects::GoogleCloud::BaseController
+ before_action :validate_gcp_token!
+
+ def create
+ google_api_client = GoogleApi::CloudPlatform::Client.new(token_in_session, nil)
+ response = google_api_client.revoke_authorizations
+
+ if response.success?
+ status = 'success'
+ redirect_message = { notice: s_('GoogleCloud|Google OAuth2 token revocation requested') }
+ else
+ status = 'failed'
+ redirect_message = { alert: s_('GoogleCloud|Google OAuth2 token revocation request failed') }
+ end
+
+ session.delete(GoogleApi::CloudPlatform::Client.session_key_for_token)
+ track_event('revoke_oauth#create', 'create', status)
+
+ redirect_to project_google_cloud_index_path(project), redirect_message
+ end
+end
diff --git a/app/controllers/projects/google_cloud/service_accounts_controller.rb b/app/controllers/projects/google_cloud/service_accounts_controller.rb
index 9c4dd35a6f5..5d8b2030d5c 100644
--- a/app/controllers/projects/google_cloud/service_accounts_controller.rb
+++ b/app/controllers/projects/google_cloud/service_accounts_controller.rb
@@ -17,14 +17,15 @@ class Projects::GoogleCloud::ServiceAccountsController < Projects::GoogleCloud::
branches = BranchesFinder.new(project.repository, params).execute(gitaly_pagination: true)
tags = TagsFinder.new(project.repository, params).execute(gitaly_pagination: true)
refs = (branches + tags).map(&:name)
- @js_data = {
+ js_data = {
screen: 'service_accounts_form',
gcpProjects: gcp_projects,
refs: refs,
cancelPath: project_google_cloud_index_path(project)
- }.to_json
+ }
+ @js_data = js_data.to_json
- track_event('service_accounts#index', 'form_success', @js_data)
+ track_event('service_accounts#index', 'form_success', js_data)
end
rescue Google::Apis::ClientError => error
handle_gcp_error('service_accounts#index', error)
diff --git a/app/controllers/projects/google_cloud_controller.rb b/app/controllers/projects/google_cloud_controller.rb
index e52e14a75bc..49bb4bec859 100644
--- a/app/controllers/projects/google_cloud_controller.rb
+++ b/app/controllers/projects/google_cloud_controller.rb
@@ -4,7 +4,7 @@ class Projects::GoogleCloudController < Projects::GoogleCloud::BaseController
GCP_REGION_CI_VAR_KEY = 'GCP_REGION'
def index
- @js_data = {
+ js_data = {
screen: 'home',
serviceAccounts: GoogleCloud::ServiceAccountsService.new(project).find_for_project,
createServiceAccountUrl: project_google_cloud_service_accounts_path(project),
@@ -12,9 +12,11 @@ class Projects::GoogleCloudController < Projects::GoogleCloud::BaseController
enableCloudStorageUrl: project_google_cloud_deployments_cloud_storage_path(project),
emptyIllustrationUrl: ActionController::Base.helpers.image_path('illustrations/pipelines_empty.svg'),
configureGcpRegionsUrl: project_google_cloud_gcp_regions_path(project),
- gcpRegions: gcp_regions
- }.to_json
- track_event('google_cloud#index', 'index', @js_data)
+ gcpRegions: gcp_regions,
+ revokeOauthUrl: revoke_oauth_url
+ }
+ @js_data = js_data.to_json
+ track_event('google_cloud#index', 'index', js_data)
end
private
@@ -23,4 +25,10 @@ class Projects::GoogleCloudController < Projects::GoogleCloud::BaseController
list = ::Ci::VariablesFinder.new(project, { key: GCP_REGION_CI_VAR_KEY }).execute
list.map { |variable| { gcp_region: variable.value, environment: variable.environment_scope } }
end
+
+ def revoke_oauth_url
+ google_token_valid = GoogleApi::CloudPlatform::Client.new(token_in_session, nil)
+ .validate_token(expires_at_in_session)
+ google_token_valid ? project_google_cloud_revoke_oauth_index_path(project) : nil
+ end
end
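
Note: the pattern above (repeated across the Google Cloud controllers in this diff) keeps the raw hash for event tracking and serializes it only for the view. A minimal sketch, using the values shown in the controller above:

```ruby
# Sketch of the refactored pattern (values as in the controller above):
# build the hash once, track the structured value, expose only JSON to the view.
js_data = {
  screen: 'home',
  gcpRegions: gcp_regions,
  revokeOauthUrl: revoke_oauth_url
}

@js_data = js_data.to_json                            # consumed by the frontend
track_event('google_cloud#index', 'index', js_data)   # structured hash, not a JSON string
```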
diff --git a/app/controllers/projects/incidents_controller.rb b/app/controllers/projects/incidents_controller.rb
index 6f67b07c6a0..293581a6744 100644
--- a/app/controllers/projects/incidents_controller.rb
+++ b/app/controllers/projects/incidents_controller.rb
@@ -48,3 +48,5 @@ class Projects::IncidentsController < Projects::ApplicationController
IssueSerializer.new(current_user: current_user, project: incident.project)
end
end
+
+Projects::IncidentsController.prepend_mod
diff --git a/app/controllers/projects/merge_requests_controller.rb b/app/controllers/projects/merge_requests_controller.rb
index 04f311f58e9..5259bf90dd0 100644
--- a/app/controllers/projects/merge_requests_controller.rb
+++ b/app/controllers/projects/merge_requests_controller.rb
@@ -43,6 +43,7 @@ class Projects::MergeRequestsController < Projects::MergeRequests::ApplicationCo
push_frontend_feature_flag(:rebase_without_ci_ui, project, default_enabled: :yaml)
push_frontend_feature_flag(:markdown_continue_lists, project, default_enabled: :yaml)
push_frontend_feature_flag(:secure_vulnerability_training, project, default_enabled: :yaml)
+ push_frontend_feature_flag(:issue_assignees_widget, @project, default_enabled: :yaml)
# Usage data feature flags
push_frontend_feature_flag(:users_expanding_widgets_usage_data, project, default_enabled: :yaml)
push_frontend_feature_flag(:diff_settings_usage_data, default_enabled: :yaml)
diff --git a/app/helpers/listbox_helper.rb b/app/helpers/listbox_helper.rb
index d24680bc0b0..16caf862c7b 100644
--- a/app/helpers/listbox_helper.rb
+++ b/app/helpers/listbox_helper.rb
@@ -16,8 +16,10 @@ module ListboxHelper
# the sort key), `text` is the user-facing string for the item, and `href` is
# the path to redirect to when that item is selected.
#
- # The `selected` parameter is the currently selected `value`, and must
- # correspond to one of the `items`, or be `nil`. When `selected.nil?`, the first item is selected.
+ # The `selected` parameter is the currently selected `value`, and should
+ # correspond to one of the `items`, or be `nil`. When `selected` is `nil` or
+ # does not correspond to one of the items, the first item is
+ # selected.
#
# The final parameter `html_options` applies arbitrary attributes to the
# returned tag. Some of these are passed to the underlying Vue component as
@@ -37,9 +39,12 @@ module ListboxHelper
webpack_bundle_tag 'redirect_listbox'
end
- selected ||= items.first[:value]
selected_option = items.find { |opt| opt[:value] == selected }
- raise ArgumentError, "cannot find #{selected} in #{items}" unless selected_option
+
+ unless selected_option
+ selected_option = items.first
+ selected = selected_option[:value]
+ end
button = button_tag(type: :button, class: DROPDOWN_BUTTON_CLASSES) do
content_tag(:span, selected_option[:text], class: DROPDOWN_INNER_CLASS) +
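
Note: with this change the helper no longer raises `ArgumentError` for an unknown `selected` value. A minimal standalone sketch of the fallback, assuming a plain array of item hashes:

```ruby
# Standalone sketch of the new fallback behaviour (not the full helper).
items = [
  { value: 'updated_desc', text: 'Last updated' },
  { value: 'created_asc',  text: 'Oldest created' }
]
selected = 'does_not_exist'

selected_option = items.find { |opt| opt[:value] == selected }

unless selected_option
  selected_option = items.first       # fall back to the first item
  selected = selected_option[:value]  # instead of raising ArgumentError
end

selected_option[:text] # => "Last updated"
```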
diff --git a/app/models/project_team.rb b/app/models/project_team.rb
index d5e0d112aeb..4b89d95c1a3 100644
--- a/app/models/project_team.rb
+++ b/app/models/project_team.rb
@@ -198,31 +198,15 @@ class ProjectTeam
end
def contribution_check_for_user_ids(user_ids)
- user_ids = user_ids.uniq
- key = "contribution_check_for_users:#{project.id}"
-
- Gitlab::SafeRequestStore[key] ||= {}
- contributors = Gitlab::SafeRequestStore[key] || {}
-
- user_ids -= contributors.keys
-
- return contributors if user_ids.empty?
-
- resource_contributors = project.merge_requests
- .merged
- .where(author_id: user_ids, target_branch: project.default_branch.to_s)
- .pluck(:author_id)
- .product([true]).to_h
-
- contributors.merge!(resource_contributors)
-
- missing_resource_ids = user_ids - resource_contributors.keys
-
- missing_resource_ids.each do |resource_id|
- contributors[resource_id] = false
+ Gitlab::SafeRequestLoader.execute(resource_key: "contribution_check_for_users:#{project.id}",
+ resource_ids: user_ids,
+ default_value: false) do |user_ids|
+ project.merge_requests
+ .merged
+ .where(author_id: user_ids, target_branch: project.default_branch.to_s)
+ .pluck(:author_id)
+ .product([true]).to_h
end
-
- contributors
end
def contributor?(user_id)
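
Note: `Gitlab::SafeRequestLoader` itself is not shown in this diff; based on the call above, a sketch of its calling convention:

```ruby
# Sketch of the calling convention, inferred from the usage above.
# The block receives the ids that still need computing and returns a hash
# keyed by id; ids missing from that hash fall back to default_value.
Gitlab::SafeRequestLoader.execute(resource_key: "contribution_check_for_users:#{project.id}",
                                  resource_ids: user_ids,
                                  default_value: false) do |missing_ids|
  project.merge_requests
         .merged
         .where(author_id: missing_ids, target_branch: project.default_branch.to_s)
         .pluck(:author_id)
         .product([true]).to_h   # e.g. { 1 => true }
end
# => e.g. { 1 => true, 2 => false } (results cached in the request store)
```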
diff --git a/app/models/wiki.rb b/app/models/wiki.rb
index 307f31edfef..622070abd88 100644
--- a/app/models/wiki.rb
+++ b/app/models/wiki.rb
@@ -87,8 +87,7 @@ class Wiki
end
def create_wiki_repository
- repository.create_if_not_exists
- change_head_to_default_branch
+ repository.create_if_not_exists(default_branch)
raise CouldNotCreateWikiError unless repository_exists?
rescue StandardError => err
@@ -322,16 +321,6 @@ class Wiki
def default_message(action, title)
"#{user.username} #{action} page: #{title}"
end
-
- def change_head_to_default_branch
- # If the wiki has commits in the 'HEAD' branch means that the current
- # HEAD is pointing to the right branch. If not, it could mean that either
- # the repo has just been created or that 'HEAD' is pointing
- # to the wrong branch and we need to rewrite it
- return if repository.raw_repository.commit_count('HEAD') != 0
-
- repository.raw_repository.write_ref('HEAD', "refs/heads/#{default_branch}")
- end
end
Wiki.prepend_mod_with('Wiki')
diff --git a/app/policies/group_policy.rb b/app/policies/group_policy.rb
index 1dcf4409048..7a49ad3d4aa 100644
--- a/app/policies/group_policy.rb
+++ b/app/policies/group_policy.rb
@@ -81,7 +81,7 @@ class GroupPolicy < Namespaces::GroupProjectNamespaceSharedPolicy
condition(:crm_enabled, score: 0, scope: :subject) { Feature.enabled?(:customer_relations, @subject) && @subject.crm_enabled? }
condition(:group_runner_registration_allowed) do
- Feature.disabled?(:runner_registration_control) || Gitlab::CurrentSettings.valid_runner_registrars.include?('group')
+ Feature.disabled?(:runner_registration_control, default_enabled: :yaml) || Gitlab::CurrentSettings.valid_runner_registrars.include?('group')
end
rule { can?(:read_group) & design_management_enabled }.policy do
diff --git a/app/policies/project_policy.rb b/app/policies/project_policy.rb
index e7b63d5e17f..09085bef9f0 100644
--- a/app/policies/project_policy.rb
+++ b/app/policies/project_policy.rb
@@ -195,7 +195,7 @@ class ProjectPolicy < BasePolicy
end
condition(:project_runner_registration_allowed) do
- Feature.disabled?(:runner_registration_control) || Gitlab::CurrentSettings.valid_runner_registrars.include?('project')
+ Feature.disabled?(:runner_registration_control, default_enabled: :yaml) || Gitlab::CurrentSettings.valid_runner_registrars.include?('project')
end
# `:read_project` may be prevented in EE, but `:read_project_for_iids` should
diff --git a/app/services/ci/runners/register_runner_service.rb b/app/services/ci/runners/register_runner_service.rb
index 196d2de1a65..7978d094d9b 100644
--- a/app/services/ci/runners/register_runner_service.rb
+++ b/app/services/ci/runners/register_runner_service.rb
@@ -47,7 +47,7 @@ module Ci
end
def runner_registrar_valid?(type)
- Feature.disabled?(:runner_registration_control) || Gitlab::CurrentSettings.valid_runner_registrars.include?(type)
+ Feature.disabled?(:runner_registration_control, default_enabled: :yaml) || Gitlab::CurrentSettings.valid_runner_registrars.include?(type)
end
def token_scope
diff --git a/app/services/issuable_links/create_service.rb b/app/services/issuable_links/create_service.rb
index 81685f81afa..64a64f50ded 100644
--- a/app/services/issuable_links/create_service.rb
+++ b/app/services/issuable_links/create_service.rb
@@ -36,6 +36,20 @@ module IssuableLinks
success
end
+ # rubocop: disable CodeReuse/ActiveRecord
+ def relate_issuables(referenced_issuable)
+ link = link_class.find_or_initialize_by(source: issuable, target: referenced_issuable)
+
+ set_link_type(link)
+
+ if link.changed? && link.save
+ create_notes(referenced_issuable)
+ end
+
+ link
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
private
def render_conflict_error?
@@ -96,6 +110,23 @@ module IssuableLinks
{}
end
+ def issuables_assigned_message
+ _('%{issuable}(s) already assigned' % { issuable: target_issuable_type.capitalize })
+ end
+
+ def issuables_not_found_message
+ _('No matching %{issuable} found. Make sure that you are adding a valid %{issuable} URL.' % { issuable: target_issuable_type })
+ end
+
+ def target_issuable_type
+ :issue
+ end
+
+ def create_notes(referenced_issuable)
+ SystemNoteService.relate_issuable(issuable, referenced_issuable, current_user)
+ SystemNoteService.relate_issuable(referenced_issuable, issuable, current_user)
+ end
+
def linkable_issuables(objects)
raise NotImplementedError
end
@@ -104,16 +135,12 @@ module IssuableLinks
raise NotImplementedError
end
- def relate_issuables(referenced_object)
+ def link_class
raise NotImplementedError
end
- def issuables_assigned_message
- _("Issue(s) already assigned")
- end
-
- def issuables_not_found_message
- _("No matching issue found. Make sure that you are adding a valid issue URL.")
+ def set_link_type(_link)
+ # no-op
end
end
end
diff --git a/app/services/issue_links/create_service.rb b/app/services/issue_links/create_service.rb
index a022d3e0bcf..1c6621ce0a1 100644
--- a/app/services/issue_links/create_service.rb
+++ b/app/services/issue_links/create_service.rb
@@ -2,44 +2,25 @@
module IssueLinks
class CreateService < IssuableLinks::CreateService
- # rubocop: disable CodeReuse/ActiveRecord
- def relate_issuables(referenced_issue)
- link = IssueLink.find_or_initialize_by(source: issuable, target: referenced_issue)
-
- set_link_type(link)
-
- if link.changed? && link.save
- create_notes(referenced_issue)
- end
-
- link
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
def linkable_issuables(issues)
@linkable_issuables ||= begin
issues.select { |issue| can?(current_user, :admin_issue_link, issue) }
end
end
- def create_notes(referenced_issue)
- SystemNoteService.relate_issue(issuable, referenced_issue, current_user)
- SystemNoteService.relate_issue(referenced_issue, issuable, current_user)
- end
-
def previous_related_issuables
@related_issues ||= issuable.related_issues(current_user).to_a
end
private
- def set_link_type(_link)
- # EE only
- end
-
def track_event
track_incident_action(current_user, issuable, :incident_relate)
end
+
+ def link_class
+ IssueLink
+ end
end
end
diff --git a/app/services/merge_requests/approval_service.rb b/app/services/merge_requests/approval_service.rb
index 527ed9d45b7..37c2676e51c 100644
--- a/app/services/merge_requests/approval_service.rb
+++ b/app/services/merge_requests/approval_service.rb
@@ -11,6 +11,7 @@ module MergeRequests
reset_approvals_cache(merge_request)
create_event(merge_request)
+ stream_audit_event(merge_request)
create_approval_note(merge_request)
mark_pending_todos_as_done(merge_request)
execute_approval_hooks(merge_request, current_user)
@@ -52,6 +53,10 @@ module MergeRequests
def create_event(merge_request)
event_service.approve_mr(merge_request, current_user)
end
+
+ def stream_audit_event(merge_request)
+ # Defined in EE
+ end
end
end
diff --git a/app/services/system_note_service.rb b/app/services/system_note_service.rb
index 8d77f03c0d9..9db39a5e174 100644
--- a/app/services/system_note_service.rb
+++ b/app/services/system_note_service.rb
@@ -49,8 +49,8 @@ module SystemNoteService
::SystemNotes::IssuablesService.new(noteable: issuable, project: project, author: author).change_issuable_contacts(added_count, removed_count)
end
- def relate_issue(noteable, noteable_ref, user)
- ::SystemNotes::IssuablesService.new(noteable: noteable, project: noteable.project, author: user).relate_issue(noteable_ref)
+ def relate_issuable(noteable, noteable_ref, user)
+ ::SystemNotes::IssuablesService.new(noteable: noteable, project: noteable.project, author: user).relate_issuable(noteable_ref)
end
def unrelate_issuable(noteable, noteable_ref, user)
diff --git a/app/services/system_notes/issuables_service.rb b/app/services/system_notes/issuables_service.rb
index ce592f8d4bb..89212288a6b 100644
--- a/app/services/system_notes/issuables_service.rb
+++ b/app/services/system_notes/issuables_service.rb
@@ -10,8 +10,9 @@ module SystemNotes
# "marked this issue as related to gitlab-foss#9001"
#
# Returns the created Note object
- def relate_issue(noteable_ref)
- body = "marked this issue as related to #{noteable_ref.to_reference(noteable.project)}"
+ def relate_issuable(noteable_ref)
+ issuable_type = noteable.to_ability_name.humanize(capitalize: false)
+ body = "marked this #{issuable_type} as related to #{noteable_ref.to_reference(noteable.resource_parent)}"
issue_activity_counter.track_issue_related_action(author: author) if noteable.is_a?(Issue)
diff --git a/app/views/admin/application_settings/ci_cd.html.haml b/app/views/admin/application_settings/ci_cd.html.haml
index 18ec43407c3..762dba69e6a 100644
--- a/app/views/admin/application_settings/ci_cd.html.haml
+++ b/app/views/admin/application_settings/ci_cd.html.haml
@@ -39,7 +39,7 @@
.settings-content
= render 'registry'
-- if Feature.enabled?(:runner_registration_control)
+- if Feature.enabled?(:runner_registration_control, default_enabled: :yaml)
%section.settings.as-runner.no-animate#js-runner-settings{ class: ('expanded' if expanded_by_default?) }
.settings-header
%h4
diff --git a/app/views/clusters/clusters/_cluster_list.html.haml b/app/views/clusters/clusters/_cluster_list.html.haml
deleted file mode 100644
index 8d92dc30a76..00000000000
--- a/app/views/clusters/clusters/_cluster_list.html.haml
+++ /dev/null
@@ -1,10 +0,0 @@
-- if !clusters.empty?
- .top-area.adjust
- .gl-display-block.gl-text-right.gl-my-4.gl-w-full
- - if clusterable.can_add_cluster?
- = link_to s_('ClusterIntegration|Connect cluster with certificate'), clusterable.new_path, class: 'btn gl-button btn-confirm js-add-cluster gl-py-2', data: { qa_selector: 'integrate_kubernetes_cluster_button' }
- - else
- %span.btn.gl-button.btn-confirm.js-add-cluster.disabled.gl-py-2
- = s_("ClusterIntegration|Connect cluster with certificate")
-
-.js-clusters-main-view{ data: js_clusters_list_data(clusterable) }
diff --git a/app/views/clusters/clusters/index.html.haml b/app/views/clusters/clusters/index.html.haml
index 457e34b306a..eec44945a77 100644
--- a/app/views/clusters/clusters/index.html.haml
+++ b/app/views/clusters/clusters/index.html.haml
@@ -9,4 +9,4 @@
.js-clusters-main-view{ data: js_clusters_data(clusterable) }
- else
- = render 'cluster_list', clusters: @clusters
+ .js-clusters-main-view{ data: js_clusters_list_data(clusterable) }
diff --git a/app/views/projects/merge_requests/creations/_new_compare.html.haml b/app/views/projects/merge_requests/creations/_new_compare.html.haml
index ea778517374..e2ac8ef5abc 100644
--- a/app/views/projects/merge_requests/creations/_new_compare.html.haml
+++ b/app/views/projects/merge_requests/creations/_new_compare.html.haml
@@ -29,8 +29,7 @@
= dropdown_content
= dropdown_loading
.card-footer
- .text-center
- .js-source-loading.mt-1.gl-spinner
+ = gl_loading_icon(css_class: 'js-source-loading gl-my-3')
%ul.list-unstyled.mr_source_commit
.col-lg-6
@@ -58,8 +57,7 @@
= dropdown_content
= dropdown_loading
.card-footer
- .text-center
- .js-target-loading.mt-1.gl-spinner
+ = gl_loading_icon(css_class: 'js-target-loading gl-my-3')
%ul.list-unstyled.mr_target_commit
- if @merge_request.errors.any?
diff --git a/app/views/projects/settings/_general.html.haml b/app/views/projects/settings/_general.html.haml
index 960b1d67610..3a62c6f41cc 100644
--- a/app/views/projects/settings/_general.html.haml
+++ b/app/views/projects/settings/_general.html.haml
@@ -23,7 +23,7 @@
.row
.form-group.col-md-9
= f.label :description, _('Project description (optional)'), class: 'label-bold'
- = f.text_area :description, class: 'form-control gl-form-input', rows: 3, maxlength: 250
+ = f.text_area :description, class: 'form-control gl-form-input', rows: 3
.row= render_if_exists 'projects/classification_policy_settings', f: f
diff --git a/app/views/shared/issuable/_sidebar.html.haml b/app/views/shared/issuable/_sidebar.html.haml
index 72fa979392e..37d31515307 100644
--- a/app/views/shared/issuable/_sidebar.html.haml
+++ b/app/views/shared/issuable/_sidebar.html.haml
@@ -27,7 +27,7 @@
- if issuable_sidebar[:supports_escalation]
.block.escalation-status{ data: { testid: 'escalation_status_container' } }
- #js-escalation-status{ data: { can_edit: issuable_sidebar.dig(:current_user, :can_update_escalation_status).to_s, project_path: issuable_sidebar[:project_full_path], issue_iid: issuable_sidebar[:iid] } }
+ #js-escalation-status{ data: { can_update: issuable_sidebar.dig(:current_user, :can_update_escalation_status).to_s, project_path: issuable_sidebar[:project_full_path], issue_iid: issuable_sidebar[:iid] } }
= render_if_exists 'shared/issuable/sidebar_escalation_policy', issuable_sidebar: issuable_sidebar
- if @project.group.present?
diff --git a/config/feature_flags/experiment/pipeline_editor_walkthrough.yml b/config/feature_flags/experiment/pipeline_editor_walkthrough.yml
deleted file mode 100644
index 6d8895cbab7..00000000000
--- a/config/feature_flags/experiment/pipeline_editor_walkthrough.yml
+++ /dev/null
@@ -1,8 +0,0 @@
----
-name: pipeline_editor_walkthrough
-introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/73050
-rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/345558
-milestone: '14.5'
-type: experiment
-group: group::activation
-default_enabled: false
diff --git a/config/initializers/7_prometheus_metrics.rb b/config/initializers/7_prometheus_metrics.rb
index a65b1041d83..81e55a139c2 100644
--- a/config/initializers/7_prometheus_metrics.rb
+++ b/config/initializers/7_prometheus_metrics.rb
@@ -1,5 +1,7 @@
# frozen_string_literal: true
+PUMA_EXTERNAL_METRICS_SERVER = Gitlab::Utils.to_boolean(ENV['PUMA_EXTERNAL_METRICS_SERVER'])
+
# Keep separate directories for separate processes
def prometheus_default_multiproc_dir
return unless Rails.env.development? || Rails.env.test?
@@ -72,8 +74,12 @@ Gitlab::Cluster::LifecycleEvents.on_master_start do
if Gitlab::Runtime.puma?
Gitlab::Metrics::Samplers::PumaSampler.instance.start
- # Starts a metrics server to export metrics from the Puma primary.
- Gitlab::Metrics::Exporter::WebExporter.instance.start
+ if Settings.monitoring.web_exporter.enabled && PUMA_EXTERNAL_METRICS_SERVER
+ require_relative '../../metrics_server/metrics_server'
+ MetricsServer.start_for_puma
+ else
+ Gitlab::Metrics::Exporter::WebExporter.instance.start
+ end
end
Gitlab::Ci::Parsers.instrument!
@@ -90,11 +96,13 @@ Gitlab::Cluster::LifecycleEvents.on_worker_start do
Gitlab::Metrics::Samplers::ThreadsSampler.initialize_instance(logger: logger).start
if Gitlab::Runtime.puma?
- # Since we are running a metrics server on the Puma primary, we would inherit
- # this thread after forking into workers, so we need to explicitly stop it here.
- # NOTE: This will not be necessary anymore after moving to an external server
- # process via https://gitlab.com/gitlab-org/gitlab/-/issues/350548
- Gitlab::Metrics::Exporter::WebExporter.instance.stop
+ # Since we are observing a metrics server from the Puma primary, we would inherit
+ # this supervision thread after forking into workers, so we need to explicitly stop it here.
+ if PUMA_EXTERNAL_METRICS_SERVER
+ ::MetricsServer::PumaProcessSupervisor.instance.stop
+ else
+ Gitlab::Metrics::Exporter::WebExporter.instance.stop
+ end
Gitlab::Metrics::Samplers::ActionCableSampler.instance(logger: logger).start
end
@@ -112,16 +120,24 @@ end
if Gitlab::Runtime.puma?
Gitlab::Cluster::LifecycleEvents.on_before_graceful_shutdown do
# We need to ensure that before we re-exec or shutdown server
- # we do stop the exporter
- Gitlab::Metrics::Exporter::WebExporter.instance.stop
+ # we also stop the metrics server
+ if PUMA_EXTERNAL_METRICS_SERVER
+ ::MetricsServer::PumaProcessSupervisor.instance.shutdown
+ else
+ Gitlab::Metrics::Exporter::WebExporter.instance.stop
+ end
end
Gitlab::Cluster::LifecycleEvents.on_before_master_restart do
# We need to ensure that before we re-exec server
- # we do stop the exporter
+ # we also stop the metrics server
#
# We do it again, for being extra safe,
# but it should not be needed
- Gitlab::Metrics::Exporter::WebExporter.instance.stop
+ if PUMA_EXTERNAL_METRICS_SERVER
+ ::MetricsServer::PumaProcessSupervisor.instance.shutdown
+ else
+ Gitlab::Metrics::Exporter::WebExporter.instance.stop
+ end
end
end
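
Note: both conditions above must hold for the external server path to be taken — `Settings.monitoring.web_exporter.enabled` must be true and `PUMA_EXTERNAL_METRICS_SERVER` must be set to a truthy value (it is parsed with `Gitlab::Utils.to_boolean`, so presumably `PUMA_EXTERNAL_METRICS_SERVER=true` in Puma's environment); otherwise the in-process `WebExporter` keeps serving metrics, as before.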
diff --git a/config/initializers/sidekiq.rb b/config/initializers/sidekiq.rb
index baf252a5d10..25984c45318 100644
--- a/config/initializers/sidekiq.rb
+++ b/config/initializers/sidekiq.rb
@@ -30,8 +30,6 @@ Sidekiq.configure_server do |config|
config.options[:strict] = false
config.options[:queues] = Gitlab::SidekiqConfig.expand_queues(config.options[:queues])
- Sidekiq.logger.info "Listening on queues #{config.options[:queues].uniq.sort}"
-
if enable_json_logs
config.log_formatter = Gitlab::SidekiqLogging::JSONFormatter.new
config.options[:job_logger] = Gitlab::SidekiqLogging::StructuredLogger
@@ -41,6 +39,8 @@ Sidekiq.configure_server do |config|
config.error_handlers.reject! { |handler| handler.is_a?(Sidekiq::ExceptionHandler::Logger) }
end
+ Sidekiq.logger.info "Listening on queues #{config.options[:queues].uniq.sort}"
+
config.redis = queues_config_hash
config.server_middleware(&Gitlab::SidekiqMiddleware.server_configurator(
diff --git a/config/routes/project.rb b/config/routes/project.rb
index eae30d3408a..420dca92caa 100644
--- a/config/routes/project.rb
+++ b/config/routes/project.rb
@@ -322,6 +322,7 @@ constraints(::Constraints::ProjectUrlConstrainer.new) do
resources :google_cloud, only: [:index]
namespace :google_cloud do
+ resources :revoke_oauth, only: [:create]
resources :service_accounts, only: [:index, :create]
resources :gcp_regions, only: [:index, :create]
diff --git a/doc/administration/audit_event_streaming.md b/doc/administration/audit_event_streaming.md
index 4f2fc8c26e2..a579e9618ee 100644
--- a/doc/administration/audit_event_streaming.md
+++ b/doc/administration/audit_event_streaming.md
@@ -259,3 +259,47 @@ Fetch:
"target_id": 29
}
```
+
+## Audit event streaming on merge request approval actions
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/271162) in GitLab 14.9.
+
+Stream audit events that relate to merge approval actions performed within a project.
+
+### Request headers
+
+Request headers are formatted as follows:
+
+```plaintext
+POST /logs HTTP/1.1
+Host: <DESTINATION_HOST>
+Content-Type: application/x-www-form-urlencoded
+X-Gitlab-Event-Streaming-Token: <DESTINATION_TOKEN>
+```
+
+### Example request body
+
+```json
+{
+ "id": 1,
+ "author_id": 1,
+ "entity_id": 6,
+ "entity_type": "Project",
+ "details": {
+ "author_name": "example_username",
+ "target_id": 20,
+ "target_type": "MergeRequest",
+ "target_details": "merge request title",
+ "custom_message": "Approved merge request",
+ "ip_address": "127.0.0.1",
+ "entity_path": "example-group/example-project"
+ },
+ "ip_address": "127.0.0.1",
+ "author_name": "example_username",
+ "entity_path": "example-group/example-project",
+ "target_details": "merge request title",
+ "created_at": "2022-03-09T06:53:11.181Z",
+ "target_type": "MergeRequest",
+ "target_id": 20
+}
+```
diff --git a/doc/administration/integration/plantuml.md b/doc/administration/integration/plantuml.md
index aa17fd44ef0..5a9699b3a39 100644
--- a/doc/administration/integration/plantuml.md
+++ b/doc/administration/integration/plantuml.md
@@ -227,6 +227,16 @@ these steps:
gitlab_rails['env'] = { 'PLANTUML_ENCODING' => 'deflate' }
```
+ In the GitLab Helm chart, you can set it by adding a variable to the
+ [global.extraEnv](https://gitlab.com/gitlab-org/charts/gitlab/blob/master/doc/charts/globals.md#extraenv)
+ section, like this:
+
+ ```yaml
+ global:
+ extraEnv:
+ PLANTUML_ENCODING: deflate
+ ```
+
- For GitLab versions 13.1 and later, PlantUML integration now
[requires a header prefix in the URL](https://github.com/plantuml/plantuml/issues/117#issuecomment-6235450160)
to distinguish different encoding types.
diff --git a/doc/api/project_snippets.md b/doc/api/project_snippets.md
index efbd8bf9431..90bd2cd6f83 100644
--- a/doc/api/project_snippets.md
+++ b/doc/api/project_snippets.md
@@ -15,7 +15,7 @@ Constants for snippet visibility levels are:
| visibility | Description |
| ---------- | ----------- |
-| `private` | The snippet is visible only to the snippet creator |
+| `private` | The snippet is visible only to project members |
| `internal` | The snippet is visible for any logged in user except [external users](../user/permissions.md#external-users) |
| `public` | The snippet can be accessed without any authentication |
diff --git a/doc/development/product_qualified_lead_guide/index.md b/doc/development/product_qualified_lead_guide/index.md
index ceb2b9b2933..6943f931d79 100644
--- a/doc/development/product_qualified_lead_guide/index.md
+++ b/doc/development/product_qualified_lead_guide/index.md
@@ -92,7 +92,48 @@ The flow of a PQL lead is as follows:
1. Marketo does scoring and sends the form to Salesforce.
1. Our Sales team uses Salesforce to connect to the leads.
-### Lead flow on GitLab.com
+### Trial lead flow
+
+#### Trial lead flow on GitLab.com
+
+```mermaid
+sequenceDiagram
+ Trial Frontend Forms ->>TrialsController#create_lead: GitLab.com frontend sends [lead] to backend
+ TrialsController#create_lead->>CreateLeadService: [lead]
+ TrialsController#create_lead->>ApplyTrialService: [lead] Apply the trial
+ CreateLeadService->>SubscriptionPortalClient#generate_trial(sync_to_gl=false): [lead] Creates customer account on CustomersDot
+ ApplyTrialService->>SubscriptionPortalClient#generate_trial(sync_to_gl=true): [lead] Asks CustomersDot to apply the trial on namespace
+ SubscriptionPortalClient#generate_trial(sync_to_gl=false)->>CustomersDot|TrialsController#create(sync_to_gl=false): GitLab.com sends [lead] to CustomersDot
+ SubscriptionPortalClient#generate_trial(sync_to_gl=true)->>CustomersDot|TrialsController#create(sync_to_gl=true): GitLab.com asks CustomersDot to apply the trial
+
+
+```
+
+#### Trial lead flow on CustomersDot (sync_to_gl)
+
+```mermaid
+sequenceDiagram
+ CustomersDot|TrialsController#create->>HostedPlans|CreateTrialService#execute: Save [lead] to leads table for monitoring purposes
+ HostedPlans|CreateTrialService#execute->>BaseTrialService#create_account: Creates a customer record in customers table
+ HostedPlans|CreateTrialService#create_platypus_lead->>PlatypusLogLeadService: Creates a platypus lead
+ HostedPlans|CreateTrialService#create_platypus_lead->>Platypus|CreateLeadWorker: Async worker to submit [lead] to Platypus
+ Platypus|CreateLeadWorker->>Platypus|CreateLeadService: [lead]
+ Platypus|CreateLeadService->>PlatypusApp#post: [lead]
+ PlatypusApp#post->>Platypus: [lead] is sent to Platypus
+```
+
+#### Applying the trial to a namespace on CustomersDot
+
+```mermaid
+sequenceDiagram
+ HostedPlans|CreateTrialService->load_namespace#Gitlab api/namespaces: Load namespace details
+ HostedPlans|CreateTrialService->create_order#: Creates an order in orders table
+ HostedPlans|CreateTrialService->create_trial_history#: Creates a record in trial_histories table
+```
+
+### Hand raise lead flow
+
+#### Hand raise flow on GitLab.com
```mermaid
sequenceDiagram
@@ -100,9 +141,9 @@ sequenceDiagram
TrialsController#create_hand_raise_lead->>CreateHandRaiseLeadService: [lead]
CreateHandRaiseLeadService->>SubscriptionPortalClient: [lead]
SubscriptionPortalClient->>CustomersDot|TrialsController#create_hand_raise_lead: GitLab.com sends [lead] to CustomersDot
-```
+```
-### Lead flow on CustomersDot and later
+#### Hand raise flow on CustomersDot
```mermaid
sequenceDiagram
@@ -111,9 +152,9 @@ sequenceDiagram
Platypus|CreateLeadWorker->>Platypus|CreateLeadService: [lead]
Platypus|CreateLeadService->>PlatypusApp#post: [lead]
PlatypusApp#post->>Platypus: [lead] is sent to Platypus
-```
+```
-### Lead flow after Platypus
+### PQL flow after Platypus for all lead types
```mermaid
sequenceDiagram
@@ -128,7 +169,3 @@ sequenceDiagram
- Check the `leads` table in CustomersDot.
- Set up staging credentials for Platypus, and track the leads on the [Platypus Dashboard](https://staging.ci.nexus.gitlabenvironment.cloud/admin/queues/queue/new-lead-queue).
- Ask for access to the Marketo Sandbox and validate the leads there.
-
-## Trials
-
-Trials follow the same flow as the PQL leads.
diff --git a/doc/development/service_ping/review_guidelines.md b/doc/development/service_ping/review_guidelines.md
index c7e602725d3..ee2d8f4f4a1 100644
--- a/doc/development/service_ping/review_guidelines.md
+++ b/doc/development/service_ping/review_guidelines.md
@@ -57,6 +57,7 @@ are regular backend changes.
generating the metric's YAML definition.
- Add the `~database` label and ask for a [database review](../database_review.md) for
metrics that are based on Database.
+- Add `~Data Warehouse::Impact Check` for any database metric that has a query change. Changes in queries can affect [data operations](https://about.gitlab.com/handbook/business-technology/data-team/how-we-work/triage/#gitlabcom-db-structure-changes).
- For tracking using Redis HLL (HyperLogLog):
- Check the Redis slot.
- Check if a [feature flag is needed](implement.md#recommendations).
diff --git a/doc/install/docker.md b/doc/install/docker.md
index 8223e05e3df..a25ed629681 100644
--- a/doc/install/docker.md
+++ b/doc/install/docker.md
@@ -189,7 +189,7 @@ services:
external_url 'http://gitlab.example.com:8929'
gitlab_rails['gitlab_shell_ssh_port'] = 2224
ports:
- - '8929:8929'
+ - '8929:80'
- '2224:22'
volumes:
- '$GITLAB_HOME/config:/etc/gitlab'
@@ -198,7 +198,7 @@ services:
shm_size: '256m'
```
-This is the same as using `--publish 8929:8929 --publish 2224:22`.
+This is the same as using `--publish 8929:80 --publish 2224:22`.
### Install GitLab using Docker swarm mode
diff --git a/doc/update/plan_your_upgrade.md b/doc/update/plan_your_upgrade.md
index 665d2f6783e..6eebfd1b278 100644
--- a/doc/update/plan_your_upgrade.md
+++ b/doc/update/plan_your_upgrade.md
@@ -94,6 +94,8 @@ to roll back GitLab to a working state if there's a problem with the upgrade:
### Restore GitLab
+If you have a test environment that mimics your production one, we recommend testing the restoration to ensure that everything works as you expect.
+
To restore your GitLab backup:
- Before restoring, make sure to read about the
diff --git a/lib/gitlab/database/transaction/context.rb b/lib/gitlab/database/transaction/context.rb
index a902537f02e..6defe9ae443 100644
--- a/lib/gitlab/database/transaction/context.rb
+++ b/lib/gitlab/database/transaction/context.rb
@@ -6,8 +6,10 @@ module Gitlab
class Context
attr_reader :context
- LOG_SAVEPOINTS_THRESHOLD = 1 # 1 `SAVEPOINT` created in a transaction
- LOG_DURATION_S_THRESHOLD = 120 # transaction that is running for 2 minutes or longer
+ LOG_SAVEPOINTS_THRESHOLD = 1 # 1 `SAVEPOINT` created in a transaction
+ LOG_DURATION_S_THRESHOLD = 120 # transaction that is running for 2 minutes or longer
+ LOG_EXTERNAL_HTTP_COUNT_THRESHOLD = 50 # 50 external HTTP requests executed within transaction
+ LOG_EXTERNAL_HTTP_DURATION_S_THRESHOLD = 1 # 1 second spent in HTTP requests in total within transaction
LOG_THROTTLE_DURATION = 1
def initialize
@@ -43,6 +45,11 @@ module Gitlab
(@context[:backtraces] ||= []).push(cleaned_backtrace)
end
+ def initialize_external_http_tracking
+ @context[:external_http_count_start] = external_http_requests_count_total
+ @context[:external_http_duration_start] = external_http_requests_duration_total
+ end
+
def duration
return unless @context[:start_time].present?
@@ -57,10 +64,16 @@ module Gitlab
duration.to_i >= LOG_DURATION_S_THRESHOLD
end
+ def external_http_requests_threshold_exceeded?
+ external_http_requests_count >= LOG_EXTERNAL_HTTP_COUNT_THRESHOLD ||
+ external_http_requests_duration >= LOG_EXTERNAL_HTTP_DURATION_S_THRESHOLD
+ end
+
def should_log?
return false if logged_already?
- savepoints_threshold_exceeded? || duration_threshold_exceeded?
+ savepoints_threshold_exceeded? || duration_threshold_exceeded? ||
+ external_http_requests_threshold_exceeded?
end
def commit
@@ -75,6 +88,14 @@ module Gitlab
@context[:backtraces].to_a
end
+ def external_http_requests_count
+ @requests_count ||= external_http_requests_count_total - @context[:external_http_count_start].to_i
+ end
+
+ def external_http_requests_duration
+ @requests_duration ||= external_http_requests_duration_total - @context[:external_http_duration_start].to_f
+ end
+
private
def queries
@@ -108,6 +129,8 @@ module Gitlab
savepoints_count: @context[:savepoints].to_i,
rollbacks_count: @context[:rollbacks].to_i,
releases_count: @context[:releases].to_i,
+ external_http_requests_count: external_http_requests_count,
+ external_http_requests_duration: external_http_requests_duration,
sql: queries,
savepoint_backtraces: backtraces
}
@@ -118,6 +141,14 @@ module Gitlab
def application_info(attributes)
Gitlab::AppJsonLogger.info(attributes)
end
+
+ def external_http_requests_count_total
+ ::Gitlab::Metrics::Subscribers::ExternalHttp.request_count.to_i
+ end
+
+ def external_http_requests_duration_total
+ ::Gitlab::Metrics::Subscribers::ExternalHttp.duration.to_f
+ end
end
end
end
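
Note: with these thresholds, a transaction also becomes loggable once it performs 50 or more external HTTP requests, or spends at least one second in them in total, in addition to the existing savepoint and duration triggers.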
diff --git a/lib/gitlab/database/transaction/observer.rb b/lib/gitlab/database/transaction/observer.rb
index ad6886a3d52..87e76257c24 100644
--- a/lib/gitlab/database/transaction/observer.rb
+++ b/lib/gitlab/database/transaction/observer.rb
@@ -21,6 +21,7 @@ module Gitlab
context.set_start_time
context.set_depth(0)
context.track_sql(event.payload[:sql])
+ context.initialize_external_http_tracking
elsif cmd.start_with?('SAVEPOINT', 'EXCEPTION')
context.set_depth(manager.open_transactions)
context.increment_savepoints
diff --git a/lib/gitlab/merge_requests/mergeability/check_result.rb b/lib/gitlab/merge_requests/mergeability/check_result.rb
index d0788c7d7c7..5284d20d423 100644
--- a/lib/gitlab/merge_requests/mergeability/check_result.rb
+++ b/lib/gitlab/merge_requests/mergeability/check_result.rb
@@ -22,8 +22,8 @@ module Gitlab
def self.from_hash(data)
new(
- status: data.fetch(:status),
- payload: data.fetch(:payload))
+ status: data.fetch('status').to_sym,
+ payload: data.fetch('payload'))
end
def initialize(status:, payload: {})
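
Note: stored results come back with string keys (for example after a JSON round trip), which is why `from_hash` now symbolizes the status. A hedged round-trip sketch, assuming the class exposes a `status` reader:

```ruby
# Hedged sketch: rebuild a result from a string-keyed hash as returned by the store.
stored = { 'status' => 'success', 'payload' => { 'test' => 'test' } }

result = Gitlab::MergeRequests::Mergeability::CheckResult.from_hash(stored)
result.status # => :success (assuming the usual attribute reader)
# note: the payload hash keeps its string keys; only the status is symbolized
```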
diff --git a/lib/gitlab/merge_requests/mergeability/results_store.rb b/lib/gitlab/merge_requests/mergeability/results_store.rb
index bb6489f8526..2f7b8888b2f 100644
--- a/lib/gitlab/merge_requests/mergeability/results_store.rb
+++ b/lib/gitlab/merge_requests/mergeability/results_store.rb
@@ -9,7 +9,11 @@ module Gitlab
end
def read(merge_check:)
- interface.retrieve_check(merge_check: merge_check)
+ result_hash = interface.retrieve_check(merge_check: merge_check)
+
+ return if result_hash.blank?
+
+ CheckResult.from_hash(result_hash)
end
def write(merge_check:, result_hash:)
diff --git a/lib/gitlab/process_supervisor.rb b/lib/gitlab/process_supervisor.rb
index f0d2bbc33bd..18fd24aa582 100644
--- a/lib/gitlab/process_supervisor.rb
+++ b/lib/gitlab/process_supervisor.rb
@@ -9,7 +9,7 @@ module Gitlab
# The supervisor will also trap termination signals if provided and
# propagate those to the supervised processes. Any supervised processes
# that do not terminate within a specified grace period will be killed.
- class ProcessSupervisor
+ class ProcessSupervisor < Gitlab::Daemon
DEFAULT_HEALTH_CHECK_INTERVAL_SECONDS = 5
DEFAULT_TERMINATE_INTERVAL_SECONDS = 1
DEFAULT_TERMINATE_TIMEOUT_SECONDS = 10
@@ -21,13 +21,18 @@ module Gitlab
check_terminate_interval_seconds: DEFAULT_TERMINATE_INTERVAL_SECONDS,
terminate_timeout_seconds: DEFAULT_TERMINATE_TIMEOUT_SECONDS,
term_signals: %i(INT TERM),
- forwarded_signals: [])
+ forwarded_signals: [],
+ **options)
+ super(**options)
@term_signals = term_signals
@forwarded_signals = forwarded_signals
@health_check_interval_seconds = health_check_interval_seconds
@check_terminate_interval_seconds = check_terminate_interval_seconds
@terminate_timeout_seconds = terminate_timeout_seconds
+
+ @pids = []
+ @alive = false
end
# Starts a supervision loop for the given process ID(s).
@@ -39,32 +44,66 @@ module Gitlab
# start observing those processes instead. Otherwise it will shut down.
def supervise(pid_or_pids, &on_process_death)
@pids = Array(pid_or_pids)
+ @on_process_death = on_process_death
trap_signals!
+ start
+ end
+
+ # Shuts down the supervisor and all supervised processes with the given signal.
+ def shutdown(signal = :TERM)
+ return unless @alive
+
+ stop_processes(signal)
+ stop
+ end
+
+ def supervised_pids
+ @pids
+ end
+
+ private
+
+ def start_working
@alive = true
+ end
+
+ def stop_working
+ @alive = false
+ end
+
+ def run_thread
while @alive
sleep(@health_check_interval_seconds)
- check_process_health(&on_process_death)
+ check_process_health
end
end
- private
-
- def check_process_health(&on_process_death)
+ def check_process_health
unless all_alive?
- dead_pids = @pids - live_pids
- @pids = Array(yield(dead_pids))
+ existing_pids = live_pids # Capture this value for the duration of the block.
+ dead_pids = @pids - existing_pids
+ new_pids = Array(@on_process_death.call(dead_pids))
+ @pids = existing_pids + new_pids
@alive = @pids.any?
end
end
+ def stop_processes(signal)
+ # Set this prior to shutting down so that shutdown hooks which read `alive`
+ # know the supervisor is about to shut down.
+ @alive = false
+
+ # Shut down supervised processes.
+ signal_all(signal)
+ wait_for_termination
+ end
+
def trap_signals!
ProcessManagement.trap_signals(@term_signals) do |signal|
- @alive = false
- signal_all(signal)
- wait_for_termination
+ stop_processes(signal)
end
ProcessManagement.trap_signals(@forwarded_signals) do |signal|
diff --git a/lib/google_api/cloud_platform/client.rb b/lib/google_api/cloud_platform/client.rb
index 43a2480d5b7..360c9a6c52f 100644
--- a/lib/google_api/cloud_platform/client.rb
+++ b/lib/google_api/cloud_platform/client.rb
@@ -22,6 +22,7 @@ module GoogleApi
"https://www.googleapis.com/auth/monitoring"
].freeze
ROLES_LIST = %w[roles/iam.serviceAccountUser roles/artifactregistry.admin roles/cloudbuild.builds.builder roles/run.admin roles/storage.admin roles/cloudsql.admin roles/browser].freeze
+ REVOKE_URL = 'https://oauth2.googleapis.com/revoke'
class << self
def session_key_for_token
@@ -146,6 +147,11 @@ module GoogleApi
enable_service(gcp_project_id, 'cloudbuild.googleapis.com')
end
+ def revoke_authorizations
+ uri = URI(REVOKE_URL)
+ Gitlab::HTTP.post(uri, body: { 'token' => access_token })
+ end
+
private
def enable_service(gcp_project_id, service_name)
@@ -211,7 +217,7 @@ module GoogleApi
end
def cloud_resource_manager_service
- @gpc_service ||= Google::Apis::CloudresourcemanagerV1::CloudResourceManagerService.new.tap { |s| s. authorization = access_token }
+ @gpc_service ||= Google::Apis::CloudresourcemanagerV1::CloudResourceManagerService.new.tap { |s| s.authorization = access_token }
end
end
end
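
Note: a hedged usage sketch of the new method, mirroring the `RevokeOauthController` added earlier in this diff:

```ruby
# Usage sketch, mirroring the controller added earlier in this diff.
client = GoogleApi::CloudPlatform::Client.new(token_in_session, nil)
response = client.revoke_authorizations   # POSTs the access token to REVOKE_URL

if response.success?
  # clear the stored token from the session and report success
else
  # report the revocation failure
end
```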
diff --git a/lib/tasks/ci/build_artifacts.rake b/lib/tasks/ci/build_artifacts.rake
new file mode 100644
index 00000000000..4f4faef5a62
--- /dev/null
+++ b/lib/tasks/ci/build_artifacts.rake
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+require 'httparty'
+require 'csv'
+
+namespace :ci do
+ namespace :build_artifacts do
+ desc "GitLab | CI | Fetch projects with incorrect artifact size on GitLab.com"
+ task :project_with_incorrect_artifact_size do
+ csv_url = ENV['SISENSE_PROJECT_IDS_WITH_INCORRECT_ARTIFACTS_URL']
+
+ # rubocop: disable Gitlab/HTTParty
+ body = HTTParty.get(csv_url)
+ # rubocop: enable Gitlab/HTTParty
+
+ table = CSV.parse(body.parsed_response, headers: true)
+ puts table['PROJECT_ID'].join(' ')
+ end
+ end
+end
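
Note: the task reads the CSV from the `SISENSE_PROJECT_IDS_WITH_INCORRECT_ARTIFACTS_URL` environment variable, so an invocation would presumably look like `SISENSE_PROJECT_IDS_WITH_INCORRECT_ARTIFACTS_URL=<csv-url> bundle exec rake ci:build_artifacts:project_with_incorrect_artifact_size`, printing the affected project IDs separated by spaces.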
diff --git a/locale/gitlab.pot b/locale/gitlab.pot
index 2f7d3e03bb9..ea75cdcf984 100644
--- a/locale/gitlab.pot
+++ b/locale/gitlab.pot
@@ -691,6 +691,9 @@ msgstr ""
msgid "%{issuableType} will be removed! Are you sure?"
msgstr ""
+msgid "%{issuable}(s) already assigned"
+msgstr ""
+
msgid "%{issueType} actions"
msgstr ""
@@ -8068,9 +8071,6 @@ msgstr ""
msgid "ClusterIntegration|Clusters are utilized by selecting the nearest ancestor with a matching environment scope. For example, project clusters will override group clusters. %{linkStart}More information%{linkEnd}"
msgstr ""
-msgid "ClusterIntegration|Connect cluster with certificate"
-msgstr ""
-
msgid "ClusterIntegration|Connect existing cluster"
msgstr ""
@@ -17093,6 +17093,12 @@ msgstr ""
msgid "GoogleCloud|Google Cloud project"
msgstr ""
+msgid "GoogleCloud|Google OAuth2 token revocation request failed"
+msgstr ""
+
+msgid "GoogleCloud|Google OAuth2 token revocation requested"
+msgstr ""
+
msgid "GoogleCloud|I understand the responsibilities involved with managing service account keys"
msgstr ""
@@ -17102,6 +17108,12 @@ msgstr ""
msgid "GoogleCloud|Refs"
msgstr ""
+msgid "GoogleCloud|Revoke authorizations"
+msgstr ""
+
+msgid "GoogleCloud|Revoke authorizations granted to GitLab. This does not invalidate service accounts."
+msgstr ""
+
msgid "Got it"
msgstr ""
@@ -19344,6 +19356,9 @@ msgstr ""
msgid "IncidentManagement|Achieved SLA"
msgstr ""
+msgid "IncidentManagement|Acknowledged"
+msgstr ""
+
msgid "IncidentManagement|All"
msgstr ""
@@ -19353,6 +19368,15 @@ msgstr ""
msgid "IncidentManagement|All alerts promoted to incidents are automatically displayed within the list. You can also create a new incident using the button below."
msgstr ""
+msgid "IncidentManagement|An error occurred while fetching the incident status. Please reload the page."
+msgstr ""
+
+msgid "IncidentManagement|An error occurred while updating the incident status. Please reload the page and try again."
+msgstr ""
+
+msgid "IncidentManagement|Assign paging status"
+msgstr ""
+
msgid "IncidentManagement|Assignees"
msgstr ""
@@ -19410,6 +19434,12 @@ msgstr ""
msgid "IncidentManagement|Published to status page"
msgstr ""
+msgid "IncidentManagement|Resolved"
+msgstr ""
+
+msgid "IncidentManagement|Setting the status to Acknowledged or Resolved stops paging when escalation policies are selected for the incident."
+msgstr ""
+
msgid "IncidentManagement|Severity"
msgstr ""
@@ -19425,6 +19455,9 @@ msgstr ""
msgid "IncidentManagement|Time to SLA"
msgstr ""
+msgid "IncidentManagement|Triggered"
+msgstr ""
+
msgid "IncidentManagement|Unassigned"
msgstr ""
@@ -20474,9 +20507,6 @@ msgstr ""
msgid "Issue weight"
msgstr ""
-msgid "Issue(s) already assigned"
-msgstr ""
-
msgid "IssueAnalytics|Age"
msgstr ""
@@ -24158,6 +24188,33 @@ msgstr ""
msgid "NamespaceStorageSize|push to your repository, create pipelines, create issues or add comments. To reduce storage capacity, delete unused repositories, artifacts, wikis, issues, and pipelines."
msgstr ""
+msgid "NamespaceStorage|%{name_with_link} namespace has %{percent} or less namespace storage space remaining."
+msgstr ""
+
+msgid "NamespaceStorage|%{name_with_link} namespace has exceeded its namespace storage limit."
+msgstr ""
+
+msgid "NamespaceStorage|%{name}(%{url}) namespace has %{percent} or less namespace storage space remaining."
+msgstr ""
+
+msgid "NamespaceStorage|%{name}(%{url}) namespace has exceeded its namespace storage limit."
+msgstr ""
+
+msgid "NamespaceStorage|Action required: Less than %{percentage_of_available_storage}%% of namespace storage remains for %{namespace_name}"
+msgstr ""
+
+msgid "NamespaceStorage|Action required: Storage has been exceeded for %{namespace_name}"
+msgstr ""
+
+msgid "NamespaceStorage|Buy more storage"
+msgstr ""
+
+msgid "NamespaceStorage|We recommend that you buy additional storage to ensure your service is not interrupted."
+msgstr ""
+
+msgid "NamespaceStorage|We recommend that you buy additional storage to resume normal service."
+msgstr ""
+
msgid "NamespaceUserCap|Pending users must be reviewed and approved by a group owner. Learn more about %{user_caps_link_start}user caps%{link_end} and %{users_pending_approval_link_start}users pending approval%{link_end}."
msgstr ""
@@ -24838,7 +24895,7 @@ msgstr ""
msgid "No matches found"
msgstr ""
-msgid "No matching issue found. Make sure that you are adding a valid issue URL."
+msgid "No matching %{issuable} found. Make sure that you are adding a valid %{issuable} URL."
msgstr ""
msgid "No matching labels"
diff --git a/metrics_server/dependencies.rb b/metrics_server/dependencies.rb
index 02cec1173e0..bfa6aae8ef8 100644
--- a/metrics_server/dependencies.rb
+++ b/metrics_server/dependencies.rb
@@ -31,5 +31,6 @@ require_relative '../lib/gitlab/metrics/exporter/gc_request_middleware'
require_relative '../lib/gitlab/health_checks/probes/collection'
require_relative '../lib/gitlab/health_checks/probes/status'
require_relative '../lib/gitlab/process_management'
+require_relative '../lib/gitlab/process_supervisor'
# rubocop:enable Naming/FileName
diff --git a/metrics_server/metrics_server.rb b/metrics_server/metrics_server.rb
index 70769459019..5411aaa0b5f 100644
--- a/metrics_server/metrics_server.rb
+++ b/metrics_server/metrics_server.rb
@@ -5,7 +5,28 @@ require_relative '../config/boot'
require_relative 'dependencies'
class MetricsServer # rubocop:disable Gitlab/NamespacedClass
+ # The singleton instance used to supervise the Puma metrics server.
+ PumaProcessSupervisor = Class.new(Gitlab::ProcessSupervisor)
+
class << self
+ def start_for_puma
+ metrics_dir = ::Prometheus::Client.configuration.multiprocess_files_dir
+
+ start_server = proc do
+ MetricsServer.spawn('puma', metrics_dir: metrics_dir).tap do |pid|
+ Gitlab::AppLogger.info("Starting Puma metrics server with pid #{pid}")
+ end
+ end
+
+ supervisor = PumaProcessSupervisor.instance
+ supervisor.supervise(start_server.call) do
+ next unless supervisor.alive
+
+ Gitlab::AppLogger.info('Puma metrics server terminated, restarting...')
+ start_server.call
+ end
+ end
+
def spawn(target, metrics_dir:, gitlab_config: nil, wipe_metrics_dir: false)
ensure_valid_target!(target)
diff --git a/sidekiq_cluster/cli.rb b/sidekiq_cluster/cli.rb
index f366cb26b8e..737d31bbc88 100644
--- a/sidekiq_cluster/cli.rb
+++ b/sidekiq_cluster/cli.rb
@@ -10,6 +10,7 @@ require 'time'
# we may run into "already initialized" warnings, hence the check.
require_relative '../lib/gitlab' unless Object.const_defined?('Gitlab')
require_relative '../lib/gitlab/utils'
+require_relative '../lib/gitlab/daemon'
require_relative '../lib/gitlab/sidekiq_config/cli_methods'
require_relative '../lib/gitlab/sidekiq_config/worker_matcher'
require_relative '../lib/gitlab/sidekiq_logging/json_formatter'
@@ -121,11 +122,12 @@ module Gitlab
ProcessManagement.write_pid(@pid) if @pid
- supervisor = Gitlab::ProcessSupervisor.new(
+ supervisor = SidekiqProcessSupervisor.instance(
health_check_interval_seconds: @interval,
terminate_timeout_seconds: @soft_timeout_seconds + TIMEOUT_GRACE_PERIOD_SECONDS,
term_signals: TERMINATE_SIGNALS,
- forwarded_signals: FORWARD_SIGNALS
+ forwarded_signals: FORWARD_SIGNALS,
+ synchronous: true
)
metrics_server_pid = start_metrics_server
@@ -136,7 +138,7 @@ module Gitlab
# If we're not in the process of shutting down the cluster,
# and the metrics server died, restart it.
if supervisor.alive && dead_pids.include?(metrics_server_pid)
- @logger.info('Metrics server terminated, restarting...')
+ @logger.info('Sidekiq metrics server terminated, restarting...')
metrics_server_pid = restart_metrics_server(wipe_metrics_dir: false)
all_pids = worker_pids + Array(metrics_server_pid)
else
diff --git a/sidekiq_cluster/sidekiq_cluster.rb b/sidekiq_cluster/sidekiq_cluster.rb
index 3ba3211b0e4..c68cbe7c163 100644
--- a/sidekiq_cluster/sidekiq_cluster.rb
+++ b/sidekiq_cluster/sidekiq_cluster.rb
@@ -16,6 +16,9 @@ module Gitlab
# before we kill the process.
TIMEOUT_GRACE_PERIOD_SECONDS = 5
+ # The singleton instance used to supervise cluster processes.
+ SidekiqProcessSupervisor = Class.new(Gitlab::ProcessSupervisor)
+
# Starts Sidekiq workers for the pairs of processes.
#
# Example:
diff --git a/spec/commands/sidekiq_cluster/cli_spec.rb b/spec/commands/sidekiq_cluster/cli_spec.rb
index 6baaa98eff9..2cb3f67b03d 100644
--- a/spec/commands/sidekiq_cluster/cli_spec.rb
+++ b/spec/commands/sidekiq_cluster/cli_spec.rb
@@ -40,6 +40,8 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, stub_settings_source: true do # rubo
}
end
+ let(:supervisor) { instance_double(Gitlab::SidekiqCluster::SidekiqProcessSupervisor) }
+
before do
stub_env('RAILS_ENV', 'test')
@@ -47,8 +49,11 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, stub_settings_source: true do # rubo
config_file.close
allow(::Settings).to receive(:source).and_return(config_file.path)
-
::Settings.reload!
+
+ allow(Gitlab::ProcessManagement).to receive(:write_pid)
+ allow(Gitlab::SidekiqCluster::SidekiqProcessSupervisor).to receive(:instance).and_return(supervisor)
+ allow(supervisor).to receive(:supervise)
end
after do
@@ -63,11 +68,6 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, stub_settings_source: true do # rubo
end
context 'with arguments' do
- before do
- allow(Gitlab::ProcessManagement).to receive(:write_pid)
- allow_next_instance_of(Gitlab::ProcessSupervisor) { |it| allow(it).to receive(:supervise) }
- end
-
it 'starts the Sidekiq workers' do
expect(Gitlab::SidekiqCluster).to receive(:start)
.with([['foo']], default_options)
@@ -298,8 +298,6 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, stub_settings_source: true do # rubo
context 'without --dryrun' do
before do
allow(Gitlab::SidekiqCluster).to receive(:start).and_return([])
- allow(Gitlab::ProcessManagement).to receive(:write_pid)
- allow_next_instance_of(Gitlab::ProcessSupervisor) { |it| allow(it).to receive(:supervise) }
end
context 'when there are no sidekiq_health_checks settings set' do
@@ -429,14 +427,11 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, stub_settings_source: true do # rubo
before do
allow(Gitlab::SidekiqCluster).to receive(:start).and_return(sidekiq_worker_pids)
- allow(Gitlab::ProcessManagement).to receive(:write_pid)
end
it 'stops the entire process cluster if one of the workers has been terminated' do
- allow_next_instance_of(Gitlab::ProcessSupervisor) do |it|
- allow(it).to receive(:supervise).and_yield([2])
- end
-
+ expect(supervisor).to receive(:alive).and_return(true)
+ expect(supervisor).to receive(:supervise).and_yield([2])
expect(MetricsServer).to receive(:fork).once.and_return(metrics_server_pid)
expect(Gitlab::ProcessManagement).to receive(:signal_processes).with([42, 99], :TERM)
@@ -445,11 +440,8 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, stub_settings_source: true do # rubo
context 'when the supervisor is alive' do
it 'restarts the metrics server when it is down' do
- allow_next_instance_of(Gitlab::ProcessSupervisor) do |it|
- allow(it).to receive(:alive).and_return(true)
- allow(it).to receive(:supervise).and_yield([metrics_server_pid])
- end
-
+ expect(supervisor).to receive(:alive).and_return(true)
+ expect(supervisor).to receive(:supervise).and_yield([metrics_server_pid])
expect(MetricsServer).to receive(:fork).twice.and_return(metrics_server_pid)
cli.run(%w(foo))
@@ -458,11 +450,8 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, stub_settings_source: true do # rubo
context 'when the supervisor is shutting down' do
it 'does not restart the metrics server' do
- allow_next_instance_of(Gitlab::ProcessSupervisor) do |it|
- allow(it).to receive(:alive).and_return(false)
- allow(it).to receive(:supervise).and_yield([metrics_server_pid])
- end
-
+ expect(supervisor).to receive(:alive).and_return(false)
+ expect(supervisor).to receive(:supervise).and_yield([metrics_server_pid])
expect(MetricsServer).to receive(:fork).once.and_return(metrics_server_pid)
cli.run(%w(foo))
diff --git a/spec/controllers/projects/ci/pipeline_editor_controller_spec.rb b/spec/controllers/projects/ci/pipeline_editor_controller_spec.rb
index d55aad20689..37406d704f1 100644
--- a/spec/controllers/projects/ci/pipeline_editor_controller_spec.rb
+++ b/spec/controllers/projects/ci/pipeline_editor_controller_spec.rb
@@ -36,17 +36,5 @@ RSpec.describe Projects::Ci::PipelineEditorController do
expect(response).to have_gitlab_http_status(:not_found)
end
end
-
- describe 'pipeline_editor_walkthrough experiment' do
- before do
- project.add_developer(user)
- end
-
- subject(:action) { show_request }
-
- it_behaves_like 'tracks assignment and records the subject', :pipeline_editor_walkthrough, :namespace do
- subject { project.namespace }
- end
- end
end
end
diff --git a/spec/features/incidents/incident_details_spec.rb b/spec/features/incidents/incident_details_spec.rb
index dbcd524cad0..dad3dfd3440 100644
--- a/spec/features/incidents/incident_details_spec.rb
+++ b/spec/features/incidents/incident_details_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe 'Incident details', :js do
let_it_be(:project) { create(:project) }
let_it_be(:developer) { create(:user) }
let_it_be(:incident) { create(:incident, project: project, author: developer, description: 'description') }
+ let_it_be(:escalation_status) { create(:incident_management_issuable_escalation_status, issue: incident) }
before_all do
project.add_developer(developer)
@@ -46,6 +47,42 @@ RSpec.describe 'Incident details', :js do
expect(page).to have_selector('.right-sidebar[data-issuable-type="issue"]')
expect(sidebar).to have_selector('.incident-severity')
expect(sidebar).to have_selector('.milestone')
+ expect(sidebar).to have_selector('[data-testid="escalation_status_container"]')
+ end
+ end
+
+ context 'escalation status' do
+ let(:sidebar) { page.find('.right-sidebar') }
+ let(:widget) { sidebar.find('[data-testid="escalation_status_container"]') }
+ let(:expected_dropdown_options) { escalation_status.class::STATUSES.keys.take(3).map { |key| key.to_s.titleize } }
+
+ it 'has an interactable escalation status widget' do
+ expect(current_status).to have_text(escalation_status.status_name.to_s.titleize)
+
+ # list the available statuses
+ widget.find('[data-testid="edit-button"]').click
+ expect(dropdown_options.map(&:text)).to eq(expected_dropdown_options)
+ expect(widget).not_to have_selector('#escalation-status-help')
+
+ # update the status
+ select_resolved(dropdown_options)
+ expect(current_status).to have_text('Resolved')
+ expect(escalation_status.reload).to be_resolved
+ end
+
+ private
+
+ def dropdown_options
+ widget.all('[data-testid="status-dropdown-item"]', count: 3)
+ end
+
+ def select_resolved(options)
+ options.last.click
+ wait_for_requests
+ end
+
+ def current_status
+ widget.find('[data-testid="collapsed-content"]')
end
end
end
diff --git a/spec/features/issues/issue_sidebar_spec.rb b/spec/features/issues/issue_sidebar_spec.rb
index 868946814c3..aaa478378a9 100644
--- a/spec/features/issues/issue_sidebar_spec.rb
+++ b/spec/features/issues/issue_sidebar_spec.rb
@@ -106,6 +106,7 @@ RSpec.describe 'Issue Sidebar' do
end
context 'when GraphQL assignees widget feature flag is enabled' do
+ # TODO: Move to shared examples when feature flag is removed: https://gitlab.com/gitlab-org/gitlab/-/issues/328185
context 'when a privileged user can invite' do
it 'shows a link for inviting members and launches invite modal' do
project.add_maintainer(user)
@@ -236,6 +237,12 @@ RSpec.describe 'Issue Sidebar' do
it_behaves_like 'labels sidebar widget'
end
+ context 'escalation status', :js do
+ it 'is not available for default issue type' do
+ expect(page).not_to have_selector('.block.escalation-status')
+ end
+ end
+
context 'interacting with collapsed sidebar', :js do
collapsed_sidebar_selector = 'aside.right-sidebar.right-sidebar-collapsed'
expanded_sidebar_selector = 'aside.right-sidebar.right-sidebar-expanded'
diff --git a/spec/features/merge_request/user_edits_assignees_sidebar_spec.rb b/spec/features/merge_request/user_edits_assignees_sidebar_spec.rb
index 5894ec923c2..92b9b785148 100644
--- a/spec/features/merge_request/user_edits_assignees_sidebar_spec.rb
+++ b/spec/features/merge_request/user_edits_assignees_sidebar_spec.rb
@@ -17,66 +17,172 @@ RSpec.describe 'Merge request > User edits assignees sidebar', :js do
let(:sidebar_assignee_block) { page.find('.js-issuable-sidebar .assignee') }
let(:sidebar_assignee_avatar_link) { sidebar_assignee_block.find_all('a').find { |a| a['href'].include? assignee.username } }
let(:sidebar_assignee_tooltip) { sidebar_assignee_avatar_link['title'] || '' }
- let(:sidebar_assignee_dropdown_item) { sidebar_assignee_block.find(".dropdown-menu li[data-user-id=\"#{assignee.id}\"]") }
- let(:sidebar_assignee_dropdown_tooltip) { sidebar_assignee_dropdown_item.find('a')['data-title'] || '' }
- context 'when user is an owner' do
+ context 'when GraphQL assignees widget feature flag is disabled' do
+ let(:sidebar_assignee_dropdown_item) { sidebar_assignee_block.find(".dropdown-menu li[data-user-id=\"#{assignee.id}\"]") }
+ let(:sidebar_assignee_dropdown_tooltip) { sidebar_assignee_dropdown_item.find('a')['data-title'] || '' }
+
before do
- stub_const('Autocomplete::UsersFinder::LIMIT', users_find_limit)
+ stub_feature_flags(issue_assignees_widget: false)
+ end
- sign_in(project.first_owner)
+ context 'when user is an owner' do
+ before do
+ stub_const('Autocomplete::UsersFinder::LIMIT', users_find_limit)
- merge_request.assignees << assignee
+ sign_in(project.first_owner)
- visit project_merge_request_path(project, merge_request)
+ merge_request.assignees << assignee
- wait_for_requests
+ visit project_merge_request_path(project, merge_request)
+
+ wait_for_requests
+ end
+
+ shared_examples 'when assigned' do |expected_tooltip: ''|
+ it 'shows assignee name' do
+ expect(sidebar_assignee_block).to have_text(assignee.name)
+ end
+
+ it "shows assignee tooltip '#{expected_tooltip}'" do
+ expect(sidebar_assignee_tooltip).to eql(expected_tooltip)
+ end
+
+ context 'when edit is clicked' do
+ before do
+ sidebar_assignee_block.click_link('Edit')
+
+ wait_for_requests
+ end
+
+        it "shows assignee tooltip '#{expected_tooltip}'" do
+ expect(sidebar_assignee_dropdown_tooltip).to eql(expected_tooltip)
+ end
+ end
+ end
+
+ context 'when assigned to maintainer' do
+ let(:assignee) { project_maintainers.last }
+
+ it_behaves_like 'when assigned', expected_tooltip: ''
+ end
+
+ context 'when assigned to developer' do
+ let(:assignee) { project_developers.last }
+
+ it_behaves_like 'when assigned', expected_tooltip: 'Cannot merge'
+ end
end
- shared_examples 'when assigned' do |expected_tooltip: ''|
- it 'shows assignee name' do
- expect(sidebar_assignee_block).to have_text(assignee.name)
+ context 'with invite members considerations' do
+ let_it_be(:user) { create(:user) }
+
+ before do
+ sign_in(user)
end
- it "shows assignee tooltip '#{expected_tooltip}'" do
- expect(sidebar_assignee_tooltip).to eql(expected_tooltip)
+ include_examples 'issuable invite members' do
+ let(:issuable_path) { project_merge_request_path(project, merge_request) }
end
+ end
+ end
+
+ context 'when GraphQL assignees widget feature flag is enabled' do
+    let(:sidebar_assignee_dropdown_item) { sidebar_assignee_block.find(".dropdown-item", text: assignee.username) }
+    let(:sidebar_assignee_dropdown_tooltip) { sidebar_assignee_dropdown_item['title'] }
+
+ context 'when user is an owner' do
+ before do
+ stub_const('Autocomplete::UsersFinder::LIMIT', users_find_limit)
+
+ sign_in(project.first_owner)
+
+ merge_request.assignees << assignee
- context 'when edit is clicked' do
- before do
- sidebar_assignee_block.click_link('Edit')
+ visit project_merge_request_path(project, merge_request)
- wait_for_requests
+ wait_for_requests
+ end
+
+ shared_examples 'when assigned' do |expected_tooltip: ''|
+ it 'shows assignee name' do
+ expect(sidebar_assignee_block).to have_text(assignee.name)
end
- it "shows assignee tooltip '#{expected_tooltip}" do
- expect(sidebar_assignee_dropdown_tooltip).to eql(expected_tooltip)
+ it "shows assignee tooltip '#{expected_tooltip}'" do
+ expect(sidebar_assignee_tooltip).to eql(expected_tooltip)
+ end
+
+ context 'when edit is clicked' do
+ before do
+ open_assignees_dropdown
+ end
+
+          it "shows assignee tooltip '#{expected_tooltip}'" do
+ expect(sidebar_assignee_dropdown_tooltip).to eql(expected_tooltip)
+ end
end
end
- end
- context 'when assigned to maintainer' do
- let(:assignee) { project_maintainers.last }
+ context 'when assigned to maintainer' do
+ let(:assignee) { project_maintainers.last }
- it_behaves_like 'when assigned', expected_tooltip: ''
- end
+ it_behaves_like 'when assigned', expected_tooltip: ''
+ end
- context 'when assigned to developer' do
- let(:assignee) { project_developers.last }
+ context 'when assigned to developer' do
+ let(:assignee) { project_developers.last }
- it_behaves_like 'when assigned', expected_tooltip: 'Cannot merge'
+ it_behaves_like 'when assigned', expected_tooltip: 'Cannot merge'
+ end
end
- end
- context 'with invite members considerations' do
- let_it_be(:user) { create(:user) }
+ context 'with invite members considerations' do
+ let_it_be(:user) { create(:user) }
- before do
- sign_in(user)
+ before do
+ sign_in(user)
+ end
+
+ # TODO: Move to shared examples when feature flag is removed: https://gitlab.com/gitlab-org/gitlab/-/issues/328185
+ context 'when a privileged user can invite' do
+ it 'shows a link for inviting members and launches invite modal' do
+ project.add_maintainer(user)
+ visit project_merge_request_path(project, merge_request)
+
+ open_assignees_dropdown
+
+ page.within '.dropdown-menu-user' do
+ expect(page).to have_link('Invite members')
+ expect(page).to have_selector('[data-track-action="click_invite_members"]')
+ expect(page).to have_selector('[data-track-label="edit_assignee"]')
+ end
+
+ click_link 'Invite members'
+
+ expect(page).to have_content("You're inviting members to the")
+ end
+ end
+
+ context 'when user cannot invite members in assignee dropdown' do
+ it 'shows author in assignee dropdown and no invite link' do
+ project.add_developer(user)
+ visit project_merge_request_path(project, merge_request)
+
+ open_assignees_dropdown
+
+ page.within '.dropdown-menu-user' do
+ expect(page).not_to have_link('Invite members')
+ end
+ end
+ end
end
+ end
- include_examples 'issuable invite members' do
- let(:issuable_path) { project_merge_request_path(project, merge_request) }
+ def open_assignees_dropdown
+ page.within('.assignee') do
+ click_button('Edit')
+ wait_for_requests
end
end
end
diff --git a/spec/features/merge_request/user_sees_suggest_pipeline_spec.rb b/spec/features/merge_request/user_sees_suggest_pipeline_spec.rb
index 2191849edd9..448ef750508 100644
--- a/spec/features/merge_request/user_sees_suggest_pipeline_spec.rb
+++ b/spec/features/merge_request/user_sees_suggest_pipeline_spec.rb
@@ -42,9 +42,6 @@ RSpec.describe 'Merge request > User sees suggest pipeline', :js do
wait_for_requests
- # Drawer is open
- expect(page).to have_content('This template creates a simple test pipeline. To use it:')
-
# Editor shows template
expect(page).to have_content('This file is a template, and might need editing before it works on your project.')
diff --git a/spec/features/projects/ci/editor_spec.rb b/spec/features/projects/ci/editor_spec.rb
index 7f35881cb21..ad4381a526a 100644
--- a/spec/features/projects/ci/editor_spec.rb
+++ b/spec/features/projects/ci/editor_spec.rb
@@ -81,8 +81,6 @@ RSpec.describe 'Pipeline Editor', :js do
context 'when a change is made' do
before do
- click_button 'Collapse'
-
page.within('#source-editor-') do
find('textarea').send_keys '123'
# It takes some time after sending keys for the vue
@@ -127,8 +125,6 @@ RSpec.describe 'Pipeline Editor', :js do
describe 'Editor content' do
it 'user can reset their CI configuration' do
- click_button 'Collapse'
-
page.within('#source-editor-') do
find('textarea').send_keys '123'
end
@@ -151,8 +147,6 @@ RSpec.describe 'Pipeline Editor', :js do
end
it 'user can cancel reseting their CI configuration' do
- click_button 'Collapse'
-
page.within('#source-editor-') do
find('textarea').send_keys '123'
end
diff --git a/spec/features/search/user_searches_for_code_spec.rb b/spec/features/search/user_searches_for_code_spec.rb
index a0016f82f0a..53c95b4a446 100644
--- a/spec/features/search/user_searches_for_code_spec.rb
+++ b/spec/features/search/user_searches_for_code_spec.rb
@@ -80,46 +80,103 @@ RSpec.describe 'User searches for code' do
end
end
- context 'search code within refs', :js do
- let(:ref_name) { 'v1.0.0' }
+ context 'when :new_header_search is true' do
+ context 'search code within refs', :js do
+ let(:ref_name) { 'v1.0.0' }
+
+ before do
+        # This feature is disabled by default in spec_helper.rb.
+        # A feature-breaking bug was missed previously, so both scenarios are tested here to prevent a regression.
+ # This can be removed as part of closing https://gitlab.com/gitlab-org/gitlab/-/issues/339348.
+ stub_feature_flags(new_header_search: true)
+ visit(project_tree_path(project, ref_name))
+
+ submit_search('gitlab-grack')
+ select_search_scope('Code')
+ end
- before do
- visit(project_tree_path(project, ref_name))
+ it 'shows ref switcher in code result summary' do
+ expect(find('.js-project-refs-dropdown')).to have_text(ref_name)
+ end
- submit_search('gitlab-grack')
- select_search_scope('Code')
- end
+ it 'persists branch name across search' do
+ find('.gl-search-box-by-click-search-button').click
+ expect(find('.js-project-refs-dropdown')).to have_text(ref_name)
+ end
- it 'shows ref switcher in code result summary' do
- expect(find('.js-project-refs-dropdown')).to have_text(ref_name)
- end
- it 'persists branch name across search' do
- find('.gl-search-box-by-click-search-button').click
- expect(find('.js-project-refs-dropdown')).to have_text(ref_name)
- end
+      # This example verifies that the ref switcher lists
+      # tags as well as branches, not branches alone.
+      it 'ref switcher lists all the branches and tags' do
+ find('.js-project-refs-dropdown').click
+ expect(find('.dropdown-page-one .dropdown-content')).to have_link('sha-starting-with-large-number')
+ expect(find('.dropdown-page-one .dropdown-content')).to have_link('v1.0.0')
+ end
- # this example is use to test the desgine that the refs is not
- # only repersent the branch as well as the tags.
- it 'ref swither list all the branchs and tags' do
- find('.js-project-refs-dropdown').click
- expect(find('.dropdown-page-one .dropdown-content')).to have_link('sha-starting-with-large-number')
- expect(find('.dropdown-page-one .dropdown-content')).to have_link('v1.0.0')
- end
+ it 'search result changes when refs switched' do
+ expect(find('.results')).not_to have_content('path = gitlab-grack')
- it 'search result changes when refs switched' do
- expect(find('.results')).not_to have_content('path = gitlab-grack')
+ find('.js-project-refs-dropdown').click
+ find('.dropdown-page-one .dropdown-content').click_link('master')
- find('.js-project-refs-dropdown').click
- find('.dropdown-page-one .dropdown-content').click_link('master')
+ expect(page).to have_selector('.results', text: 'path = gitlab-grack')
+ end
- expect(page).to have_selector('.results', text: 'path = gitlab-grack')
+      it 'persists refs across browser tabs' do
+ ref = 'feature'
+ find('.js-project-refs-dropdown').click
+ link = find_link(ref)[:href]
+ expect(link.include?("repository_ref=" + ref)).to be(true)
+ end
end
+ end
- it 'persist refs over browser tabs' do
- ref = 'feature'
- find('.js-project-refs-dropdown').click
- link = find_link(ref)[:href]
- expect(link.include?("repository_ref=" + ref)).to be(true)
+ context 'when :new_header_search is false' do
+ context 'search code within refs', :js do
+ let(:ref_name) { 'v1.0.0' }
+
+ before do
+        # This feature is disabled by default in spec_helper.rb.
+        # A feature-breaking bug was missed previously, so both scenarios are tested here to prevent a regression.
+ # This can be removed as part of closing https://gitlab.com/gitlab-org/gitlab/-/issues/339348.
+ stub_feature_flags(new_header_search: false)
+ visit(project_tree_path(project, ref_name))
+
+ submit_search('gitlab-grack')
+ select_search_scope('Code')
+ end
+
+ it 'shows ref switcher in code result summary' do
+ expect(find('.js-project-refs-dropdown')).to have_text(ref_name)
+ end
+
+ it 'persists branch name across search' do
+ find('.gl-search-box-by-click-search-button').click
+ expect(find('.js-project-refs-dropdown')).to have_text(ref_name)
+ end
+
+      # This example verifies that the ref switcher lists
+      # tags as well as branches, not branches alone.
+      it 'ref switcher lists all the branches and tags' do
+ find('.js-project-refs-dropdown').click
+ expect(find('.dropdown-page-one .dropdown-content')).to have_link('sha-starting-with-large-number')
+ expect(find('.dropdown-page-one .dropdown-content')).to have_link('v1.0.0')
+ end
+
+ it 'search result changes when refs switched' do
+ expect(find('.results')).not_to have_content('path = gitlab-grack')
+
+ find('.js-project-refs-dropdown').click
+ find('.dropdown-page-one .dropdown-content').click_link('master')
+
+ expect(page).to have_selector('.results', text: 'path = gitlab-grack')
+ end
+
+      it 'persists refs across browser tabs' do
+ ref = 'feature'
+ find('.js-project-refs-dropdown').click
+ link = find_link(ref)[:href]
+ expect(link.include?("repository_ref=" + ref)).to be(true)
+ end
end
end
diff --git a/spec/frontend/google_cloud/components/app_spec.js b/spec/frontend/google_cloud/components/app_spec.js
index 70d33ff67f1..50b05fb30e0 100644
--- a/spec/frontend/google_cloud/components/app_spec.js
+++ b/spec/frontend/google_cloud/components/app_spec.js
@@ -28,6 +28,7 @@ const HOME_PROPS = {
emptyIllustrationUrl: '#url-empty-illustration',
enableCloudRunUrl: '#url-enable-cloud-run',
enableCloudStorageUrl: '#enableCloudStorageUrl',
+ revokeOauthUrl: '#revokeOauthUrl',
};
describe('google_cloud App component', () => {
diff --git a/spec/frontend/google_cloud/components/home_spec.js b/spec/frontend/google_cloud/components/home_spec.js
index 1ce97097374..42e3d72577d 100644
--- a/spec/frontend/google_cloud/components/home_spec.js
+++ b/spec/frontend/google_cloud/components/home_spec.js
@@ -24,6 +24,7 @@ describe('google_cloud Home component', () => {
emptyIllustrationUrl: '#url-empty-illustration',
enableCloudRunUrl: '#url-enable-cloud-run',
enableCloudStorageUrl: '#enableCloudStorageUrl',
+ revokeOauthUrl: '#revokeOauthUrl',
};
beforeEach(() => {
diff --git a/spec/frontend/google_cloud/components/revoke_oauth_spec.js b/spec/frontend/google_cloud/components/revoke_oauth_spec.js
new file mode 100644
index 00000000000..87580dbf6de
--- /dev/null
+++ b/spec/frontend/google_cloud/components/revoke_oauth_spec.js
@@ -0,0 +1,47 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlButton, GlForm } from '@gitlab/ui';
+import RevokeOauth, {
+ GOOGLE_CLOUD_REVOKE_TITLE,
+ GOOGLE_CLOUD_REVOKE_DESCRIPTION,
+} from '~/google_cloud/components/revoke_oauth.vue';
+
+describe('RevokeOauth component', () => {
+ let wrapper;
+
+ const findTitle = () => wrapper.find('h2');
+ const findDescription = () => wrapper.find('p');
+ const findForm = () => wrapper.findComponent(GlForm);
+ const findButton = () => wrapper.findComponent(GlButton);
+ const propsData = {
+ url: 'url_general_feedback',
+ };
+
+ beforeEach(() => {
+ wrapper = shallowMount(RevokeOauth, { propsData });
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('contains title', () => {
+ const title = findTitle();
+ expect(title.text()).toContain('Revoke authorizations');
+ });
+
+ it('contains description', () => {
+ const description = findDescription();
+ expect(description.text()).toContain(GOOGLE_CLOUD_REVOKE_DESCRIPTION);
+ });
+
+ it('contains form', () => {
+ const form = findForm();
+ expect(form.attributes('action')).toBe(propsData.url);
+ expect(form.attributes('method')).toBe('post');
+ });
+
+ it('contains button', () => {
+ const button = findButton();
+ expect(button.text()).toContain(GOOGLE_CLOUD_REVOKE_TITLE);
+ });
+});
diff --git a/spec/frontend/header_search/store/getters_spec.js b/spec/frontend/header_search/store/getters_spec.js
index 1ca0e2f2cb5..d3510de1439 100644
--- a/spec/frontend/header_search/store/getters_spec.js
+++ b/spec/frontend/header_search/store/getters_spec.js
@@ -37,15 +37,20 @@ describe('Header Search Store Getters', () => {
});
describe.each`
- group | project | scope | forSnippets | expectedPath
- ${null} | ${null} | ${null} | ${false} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar`}
- ${null} | ${null} | ${null} | ${true} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&snippets=true`}
- ${MOCK_GROUP} | ${null} | ${null} | ${false} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&group_id=${MOCK_GROUP.id}`}
- ${null} | ${MOCK_PROJECT} | ${null} | ${false} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}`}
- ${MOCK_GROUP} | ${MOCK_PROJECT} | ${null} | ${false} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}&group_id=${MOCK_GROUP.id}`}
- ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${true} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}&group_id=${MOCK_GROUP.id}&scope=issues&snippets=true`}
- `('searchQuery', ({ group, project, scope, forSnippets, expectedPath }) => {
- describe(`when group is ${group?.name}, project is ${project?.name}, scope is ${scope}, and for_snippets is ${forSnippets}`, () => {
+ group | project | scope | forSnippets | codeSearch | ref | expectedPath
+ ${null} | ${null} | ${null} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar`}
+ ${null} | ${null} | ${null} | ${true} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&snippets=true`}
+ ${null} | ${null} | ${null} | ${false} | ${true} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&search_code=true`}
+ ${null} | ${null} | ${null} | ${false} | ${false} | ${'test-branch'} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&repository_ref=test-branch`}
+ ${MOCK_GROUP} | ${null} | ${null} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&group_id=${MOCK_GROUP.id}`}
+ ${null} | ${MOCK_PROJECT} | ${null} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}`}
+ ${MOCK_GROUP} | ${MOCK_PROJECT} | ${null} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}&group_id=${MOCK_GROUP.id}`}
+ ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}&group_id=${MOCK_GROUP.id}&scope=issues`}
+ ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${true} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}&group_id=${MOCK_GROUP.id}&scope=issues&snippets=true`}
+ ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${true} | ${true} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}&group_id=${MOCK_GROUP.id}&scope=issues&snippets=true&search_code=true`}
+ ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${true} | ${true} | ${'test-branch'} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}&group_id=${MOCK_GROUP.id}&scope=issues&snippets=true&search_code=true&repository_ref=test-branch`}
+ `('searchQuery', ({ group, project, scope, forSnippets, codeSearch, ref, expectedPath }) => {
+ describe(`when group is ${group?.name}, project is ${project?.name}, scope is ${scope}, for_snippets is ${forSnippets}, code_search is ${codeSearch}, and ref is ${ref}`, () => {
beforeEach(() => {
createState({
searchContext: {
@@ -53,6 +58,8 @@ describe('Header Search Store Getters', () => {
project,
scope,
for_snippets: forSnippets,
+ code_search: codeSearch,
+ ref,
},
});
state.search = MOCK_SEARCH;
@@ -137,15 +144,20 @@ describe('Header Search Store Getters', () => {
});
describe.each`
- group | project | scope | forSnippets | expectedPath
- ${null} | ${null} | ${null} | ${false} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar`}
- ${null} | ${null} | ${null} | ${true} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&snippets=true`}
- ${MOCK_GROUP} | ${null} | ${null} | ${false} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&group_id=${MOCK_GROUP.id}`}
- ${null} | ${MOCK_PROJECT} | ${null} | ${false} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}`}
- ${MOCK_GROUP} | ${MOCK_PROJECT} | ${null} | ${false} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}&group_id=${MOCK_GROUP.id}`}
- ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${true} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}&group_id=${MOCK_GROUP.id}&scope=issues&snippets=true`}
- `('projectUrl', ({ group, project, scope, forSnippets, expectedPath }) => {
- describe(`when group is ${group?.name}, project is ${project?.name}, scope is ${scope}, and for_snippets is ${forSnippets}`, () => {
+ group | project | scope | forSnippets | codeSearch | ref | expectedPath
+ ${null} | ${null} | ${null} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar`}
+ ${null} | ${null} | ${null} | ${true} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&snippets=true`}
+ ${null} | ${null} | ${null} | ${false} | ${true} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&search_code=true`}
+ ${null} | ${null} | ${null} | ${false} | ${false} | ${'test-branch'} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&repository_ref=test-branch`}
+ ${MOCK_GROUP} | ${null} | ${null} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&group_id=${MOCK_GROUP.id}`}
+ ${null} | ${MOCK_PROJECT} | ${null} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}`}
+ ${MOCK_GROUP} | ${MOCK_PROJECT} | ${null} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}&group_id=${MOCK_GROUP.id}`}
+ ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}&group_id=${MOCK_GROUP.id}&scope=issues`}
+ ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${true} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}&group_id=${MOCK_GROUP.id}&scope=issues&snippets=true`}
+ ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${true} | ${true} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}&group_id=${MOCK_GROUP.id}&scope=issues&snippets=true&search_code=true`}
+ ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${true} | ${true} | ${'test-branch'} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&project_id=${MOCK_PROJECT.id}&group_id=${MOCK_GROUP.id}&scope=issues&snippets=true&search_code=true&repository_ref=test-branch`}
+ `('projectUrl', ({ group, project, scope, forSnippets, codeSearch, ref, expectedPath }) => {
+ describe(`when group is ${group?.name}, project is ${project?.name}, scope is ${scope}, for_snippets is ${forSnippets}, code_search is ${codeSearch}, and ref is ${ref}`, () => {
beforeEach(() => {
createState({
searchContext: {
@@ -153,6 +165,8 @@ describe('Header Search Store Getters', () => {
project,
scope,
for_snippets: forSnippets,
+ code_search: codeSearch,
+ ref,
},
});
state.search = MOCK_SEARCH;
@@ -165,15 +179,20 @@ describe('Header Search Store Getters', () => {
});
describe.each`
- group | project | scope | forSnippets | expectedPath
- ${null} | ${null} | ${null} | ${false} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar`}
- ${null} | ${null} | ${null} | ${true} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&snippets=true`}
- ${MOCK_GROUP} | ${null} | ${null} | ${false} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&group_id=${MOCK_GROUP.id}`}
- ${null} | ${MOCK_PROJECT} | ${null} | ${false} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar`}
- ${MOCK_GROUP} | ${MOCK_PROJECT} | ${null} | ${false} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&group_id=${MOCK_GROUP.id}`}
- ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${true} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&group_id=${MOCK_GROUP.id}&scope=issues&snippets=true`}
- `('groupUrl', ({ group, project, scope, forSnippets, expectedPath }) => {
- describe(`when group is ${group?.name}, project is ${project?.name}, scope is ${scope}, and for_snippets is ${forSnippets}`, () => {
+ group | project | scope | forSnippets | codeSearch | ref | expectedPath
+ ${null} | ${null} | ${null} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar`}
+ ${null} | ${null} | ${null} | ${true} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&snippets=true`}
+ ${null} | ${null} | ${null} | ${false} | ${true} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&search_code=true`}
+ ${null} | ${null} | ${null} | ${false} | ${false} | ${'test-branch'} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&repository_ref=test-branch`}
+ ${MOCK_GROUP} | ${null} | ${null} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&group_id=${MOCK_GROUP.id}`}
+ ${null} | ${MOCK_PROJECT} | ${null} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar`}
+ ${MOCK_GROUP} | ${MOCK_PROJECT} | ${null} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&group_id=${MOCK_GROUP.id}`}
+ ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&group_id=${MOCK_GROUP.id}&scope=issues`}
+ ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${true} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&group_id=${MOCK_GROUP.id}&scope=issues&snippets=true`}
+ ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${true} | ${true} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&group_id=${MOCK_GROUP.id}&scope=issues&snippets=true&search_code=true`}
+ ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${true} | ${true} | ${'test-branch'} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&group_id=${MOCK_GROUP.id}&scope=issues&snippets=true&search_code=true&repository_ref=test-branch`}
+ `('groupUrl', ({ group, project, scope, forSnippets, codeSearch, ref, expectedPath }) => {
+ describe(`when group is ${group?.name}, project is ${project?.name}, scope is ${scope}, for_snippets is ${forSnippets}, code_search is ${codeSearch}, and ref is ${ref}`, () => {
beforeEach(() => {
createState({
searchContext: {
@@ -181,6 +200,8 @@ describe('Header Search Store Getters', () => {
project,
scope,
for_snippets: forSnippets,
+ code_search: codeSearch,
+ ref,
},
});
state.search = MOCK_SEARCH;
@@ -193,15 +214,20 @@ describe('Header Search Store Getters', () => {
});
describe.each`
- group | project | scope | forSnippets | expectedPath
- ${null} | ${null} | ${null} | ${false} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar`}
- ${null} | ${null} | ${null} | ${true} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&snippets=true`}
- ${MOCK_GROUP} | ${null} | ${null} | ${false} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar`}
- ${null} | ${MOCK_PROJECT} | ${null} | ${false} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar`}
- ${MOCK_GROUP} | ${MOCK_PROJECT} | ${null} | ${false} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar`}
- ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${true} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&scope=issues&snippets=true`}
- `('allUrl', ({ group, project, scope, forSnippets, expectedPath }) => {
- describe(`when group is ${group?.name}, project is ${project?.name}, scope is ${scope}, and for_snippets is ${forSnippets}`, () => {
+ group | project | scope | forSnippets | codeSearch | ref | expectedPath
+ ${null} | ${null} | ${null} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar`}
+ ${null} | ${null} | ${null} | ${true} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&snippets=true`}
+ ${null} | ${null} | ${null} | ${false} | ${true} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&search_code=true`}
+ ${null} | ${null} | ${null} | ${false} | ${false} | ${'test-branch'} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&repository_ref=test-branch`}
+ ${MOCK_GROUP} | ${null} | ${null} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar`}
+ ${null} | ${MOCK_PROJECT} | ${null} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar`}
+ ${MOCK_GROUP} | ${MOCK_PROJECT} | ${null} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar`}
+ ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${false} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&scope=issues`}
+ ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${true} | ${false} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&scope=issues&snippets=true`}
+ ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${true} | ${true} | ${null} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&scope=issues&snippets=true&search_code=true`}
+ ${MOCK_GROUP} | ${MOCK_PROJECT} | ${'issues'} | ${true} | ${true} | ${'test-branch'} | ${`${MOCK_SEARCH_PATH}?search=${MOCK_SEARCH}&nav_source=navbar&scope=issues&snippets=true&search_code=true&repository_ref=test-branch`}
+ `('allUrl', ({ group, project, scope, forSnippets, codeSearch, ref, expectedPath }) => {
+ describe(`when group is ${group?.name}, project is ${project?.name}, scope is ${scope}, for_snippets is ${forSnippets}, code_search is ${codeSearch}, and ref is ${ref}`, () => {
beforeEach(() => {
createState({
searchContext: {
@@ -209,6 +235,8 @@ describe('Header Search Store Getters', () => {
project,
scope,
for_snippets: forSnippets,
+ code_search: codeSearch,
+ ref,
},
});
state.search = MOCK_SEARCH;
diff --git a/spec/frontend/pipeline_editor/components/drawer/pipeline_editor_drawer_spec.js b/spec/frontend/pipeline_editor/components/drawer/pipeline_editor_drawer_spec.js
index 4df7768b035..ba06f113120 100644
--- a/spec/frontend/pipeline_editor/components/drawer/pipeline_editor_drawer_spec.js
+++ b/spec/frontend/pipeline_editor/components/drawer/pipeline_editor_drawer_spec.js
@@ -1,7 +1,6 @@
import { GlButton } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { nextTick } from 'vue';
-import { stubExperiments } from 'helpers/experimentation_helper';
import { useLocalStorageSpy } from 'helpers/local_storage_helper';
import FirstPipelineCard from '~/pipeline_editor/components/drawer/cards/first_pipeline_card.vue';
import GettingStartedCard from '~/pipeline_editor/components/drawer/cards/getting_started_card.vue';
@@ -38,7 +37,6 @@ describe('Pipeline editor drawer', () => {
beforeEach(() => {
originalObjects.push(window.gon, window.gl);
- stubExperiments({ pipeline_editor_walkthrough: 'control' });
});
afterEach(() => {
@@ -48,33 +46,15 @@ describe('Pipeline editor drawer', () => {
});
describe('default expanded state', () => {
- describe('when experiment control', () => {
- it('sets the drawer to be opened by default', async () => {
- createComponent();
- expect(findDrawerContent().exists()).toBe(false);
- await nextTick();
- expect(findDrawerContent().exists()).toBe(true);
- });
- });
-
- describe('when experiment candidate', () => {
- beforeEach(() => {
- stubExperiments({ pipeline_editor_walkthrough: 'candidate' });
- });
-
- it('sets the drawer to be closed by default', async () => {
- createComponent();
- expect(findDrawerContent().exists()).toBe(false);
- await nextTick();
- expect(findDrawerContent().exists()).toBe(false);
- });
+ it('sets the drawer to be closed by default', async () => {
+ createComponent();
+ expect(findDrawerContent().exists()).toBe(false);
});
});
describe('when the drawer is collapsed', () => {
beforeEach(async () => {
createComponent();
- await clickToggleBtn();
});
it('shows the left facing arrow icon', () => {
@@ -101,6 +81,7 @@ describe('Pipeline editor drawer', () => {
describe('when the drawer is expanded', () => {
beforeEach(async () => {
createComponent();
+ await clickToggleBtn();
});
it('shows the right facing arrow icon', () => {
diff --git a/spec/frontend/pipeline_editor/components/pipeline_editor_tabs_spec.js b/spec/frontend/pipeline_editor/components/pipeline_editor_tabs_spec.js
index f6154f50bc0..fee52db9b64 100644
--- a/spec/frontend/pipeline_editor/components/pipeline_editor_tabs_spec.js
+++ b/spec/frontend/pipeline_editor/components/pipeline_editor_tabs_spec.js
@@ -7,7 +7,6 @@ import WalkthroughPopover from '~/pipeline_editor/components/walkthrough_popover
import CiLint from '~/pipeline_editor/components/lint/ci_lint.vue';
import PipelineEditorTabs from '~/pipeline_editor/components/pipeline_editor_tabs.vue';
import EditorTab from '~/pipeline_editor/components/ui/editor_tab.vue';
-import { stubExperiments } from 'helpers/experimentation_helper';
import {
CREATE_TAB,
EDITOR_APP_STATUS_EMPTY,
@@ -245,50 +244,30 @@ describe('Pipeline editor tabs component', () => {
});
});
- describe('pipeline_editor_walkthrough experiment', () => {
- describe('when in control path', () => {
- beforeEach(() => {
- stubExperiments({ pipeline_editor_walkthrough: 'control' });
- });
-
- it('does not show walkthrough popover', async () => {
- createComponent({ mountFn: mount });
+ describe('pipeline editor walkthrough', () => {
+ describe('when isNewCiConfigFile prop is true (default)', () => {
+ beforeEach(async () => {
+ createComponent({
+ mountFn: mount,
+ });
await nextTick();
- expect(findWalkthroughPopover().exists()).toBe(false);
});
- });
- describe('when in candidate path', () => {
- beforeEach(() => {
- stubExperiments({ pipeline_editor_walkthrough: 'candidate' });
- });
-
- describe('when isNewCiConfigFile prop is true (default)', () => {
- beforeEach(async () => {
- createComponent({
- mountFn: mount,
- });
- await nextTick();
- });
-
- it('shows walkthrough popover', async () => {
- expect(findWalkthroughPopover().exists()).toBe(true);
- });
+ it('shows walkthrough popover', async () => {
+ expect(findWalkthroughPopover().exists()).toBe(true);
});
+ });
- describe('when isNewCiConfigFile prop is false', () => {
- it('does not show walkthrough popover', async () => {
- createComponent({ props: { isNewCiConfigFile: false }, mountFn: mount });
- await nextTick();
- expect(findWalkthroughPopover().exists()).toBe(false);
- });
+ describe('when isNewCiConfigFile prop is false', () => {
+ it('does not show walkthrough popover', async () => {
+ createComponent({ props: { isNewCiConfigFile: false }, mountFn: mount });
+ await nextTick();
+ expect(findWalkthroughPopover().exists()).toBe(false);
});
});
});
it('sets listeners on walkthrough popover', async () => {
- stubExperiments({ pipeline_editor_walkthrough: 'candidate' });
-
const handler = jest.fn();
createComponent({
diff --git a/spec/frontend/sidebar/components/assignees/sidebar_assignees_widget_spec.js b/spec/frontend/sidebar/components/assignees/sidebar_assignees_widget_spec.js
index db1cffbd2cb..5fd364afbe4 100644
--- a/spec/frontend/sidebar/components/assignees/sidebar_assignees_widget_spec.js
+++ b/spec/frontend/sidebar/components/assignees/sidebar_assignees_widget_spec.js
@@ -1,4 +1,4 @@
-import { GlSearchBoxByType, GlDropdown } from '@gitlab/ui';
+import { GlSearchBoxByType } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
@@ -76,7 +76,16 @@ describe('Sidebar assignees widget', () => {
SidebarEditableItem,
UserSelect,
GlSearchBoxByType,
- GlDropdown,
+ GlDropdown: {
+ template: `
+ <div>
+ <slot name="footer"></slot>
+ </div>
+ `,
+ methods: {
+ show: jest.fn(),
+ },
+ },
},
});
};
diff --git a/spec/frontend/sidebar/components/assignees/sidebar_participant_spec.js b/spec/frontend/sidebar/components/assignees/sidebar_participant_spec.js
index 88a5f4ea8b7..71424aaead3 100644
--- a/spec/frontend/sidebar/components/assignees/sidebar_participant_spec.js
+++ b/spec/frontend/sidebar/components/assignees/sidebar_participant_spec.js
@@ -1,5 +1,6 @@
-import { GlAvatarLabeled } from '@gitlab/ui';
+import { GlAvatarLabeled, GlIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
+import { IssuableType } from '~/issues/constants';
import SidebarParticipant from '~/sidebar/components/assignees/sidebar_participant.vue';
const user = {
@@ -13,14 +14,24 @@ describe('Sidebar participant component', () => {
let wrapper;
const findAvatar = () => wrapper.findComponent(GlAvatarLabeled);
+ const findIcon = () => wrapper.findComponent(GlIcon);
- const createComponent = (status = null) => {
+ const createComponent = ({
+ status = null,
+ issuableType = IssuableType.Issue,
+ canMerge = false,
+ } = {}) => {
wrapper = shallowMount(SidebarParticipant, {
propsData: {
user: {
...user,
+ canMerge,
status,
},
+ issuableType,
+ },
+ stubs: {
+ GlAvatarLabeled,
},
});
};
@@ -29,15 +40,35 @@ describe('Sidebar participant component', () => {
wrapper.destroy();
});
- it('when user is not busy', () => {
+ it('does not show `Busy` status when user is not busy', () => {
createComponent();
expect(findAvatar().props('label')).toBe(user.name);
});
- it('when user is busy', () => {
- createComponent({ availability: 'BUSY' });
+ it('shows `Busy` status when user is busy', () => {
+ createComponent({ status: { availability: 'BUSY' } });
expect(findAvatar().props('label')).toBe(`${user.name} (Busy)`);
});
+
+ it('does not render a warning icon', () => {
+ createComponent();
+
+ expect(findIcon().exists()).toBe(false);
+ });
+
+ describe('when on merge request sidebar', () => {
+ it('when project member cannot merge', () => {
+ createComponent({ issuableType: IssuableType.MergeRequest });
+
+ expect(findIcon().exists()).toBe(true);
+ });
+
+ it('when project member can merge', () => {
+ createComponent({ issuableType: IssuableType.MergeRequest, canMerge: true });
+
+ expect(findIcon().exists()).toBe(false);
+ });
+ });
});
diff --git a/spec/frontend/sidebar/components/incidents/escalation_status_spec.js b/spec/frontend/sidebar/components/incidents/escalation_status_spec.js
new file mode 100644
index 00000000000..7a736624fc0
--- /dev/null
+++ b/spec/frontend/sidebar/components/incidents/escalation_status_spec.js
@@ -0,0 +1,52 @@
+import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import EscalationStatus from '~/sidebar/components/incidents/escalation_status.vue';
+import {
+ STATUS_LABELS,
+ STATUS_TRIGGERED,
+ STATUS_ACKNOWLEDGED,
+} from '~/sidebar/components/incidents/constants';
+
+describe('EscalationStatus', () => {
+ let wrapper;
+
+ function createComponent(props) {
+ wrapper = mountExtended(EscalationStatus, {
+ propsData: {
+ value: STATUS_TRIGGERED,
+ ...props,
+ },
+ });
+ }
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const findDropdownComponent = () => wrapper.findComponent(GlDropdown);
+ const findDropdownItems = () => wrapper.findAllComponents(GlDropdownItem);
+
+ describe('status', () => {
+ it('shows the current status', () => {
+ createComponent({ value: STATUS_ACKNOWLEDGED });
+
+ expect(findDropdownComponent().props('text')).toBe(STATUS_LABELS[STATUS_ACKNOWLEDGED]);
+ });
+
+ it('shows the None option when status is null', () => {
+ createComponent({ value: null });
+
+ expect(findDropdownComponent().props('text')).toBe('None');
+ });
+ });
+
+ describe('events', () => {
+ it('selects an item', async () => {
+ createComponent();
+
+ await findDropdownItems().at(1).vm.$emit('click');
+
+ expect(wrapper.emitted().input[0][0]).toBe(STATUS_ACKNOWLEDGED);
+ });
+ });
+});
diff --git a/spec/frontend/sidebar/components/incidents/escalation_utils_spec.js b/spec/frontend/sidebar/components/incidents/escalation_utils_spec.js
new file mode 100644
index 00000000000..edd65db0325
--- /dev/null
+++ b/spec/frontend/sidebar/components/incidents/escalation_utils_spec.js
@@ -0,0 +1,18 @@
+import { STATUS_ACKNOWLEDGED } from '~/sidebar/components/incidents/constants';
+import { getStatusLabel } from '~/sidebar/components/incidents/utils';
+
+describe('EscalationUtils', () => {
+ describe('getStatusLabel', () => {
+ it('returns a label when provided with a valid status', () => {
+ const label = getStatusLabel(STATUS_ACKNOWLEDGED);
+
+ expect(label).toEqual('Acknowledged');
+ });
+
+ it("returns 'None' when status is null", () => {
+ const label = getStatusLabel(null);
+
+ expect(label).toEqual('None');
+ });
+ });
+});
diff --git a/spec/frontend/sidebar/components/incidents/mock_data.js b/spec/frontend/sidebar/components/incidents/mock_data.js
new file mode 100644
index 00000000000..bbb6c61b162
--- /dev/null
+++ b/spec/frontend/sidebar/components/incidents/mock_data.js
@@ -0,0 +1,39 @@
+import { STATUS_TRIGGERED, STATUS_ACKNOWLEDGED } from '~/sidebar/components/incidents/constants';
+
+export const fetchData = {
+ workspace: {
+ __typename: 'Project',
+ id: 'gid://gitlab/Project/2',
+ issuable: {
+ __typename: 'Issue',
+ id: 'gid://gitlab/Issue/4',
+ escalationStatus: STATUS_TRIGGERED,
+ },
+ },
+};
+
+export const mutationData = {
+ issueSetEscalationStatus: {
+ __typename: 'IssueSetEscalationStatusPayload',
+ errors: [],
+ clientMutationId: null,
+ issue: {
+ __typename: 'Issue',
+ id: 'gid://gitlab/Issue/4',
+ escalationStatus: STATUS_ACKNOWLEDGED,
+ },
+ },
+};
+
+export const fetchError = {
+ workspace: {
+ __typename: 'Project',
+ },
+};
+
+export const mutationError = {
+ issueSetEscalationStatus: {
+ __typename: 'IssueSetEscalationStatusPayload',
+ errors: ['hello'],
+ },
+};
diff --git a/spec/frontend/sidebar/components/incidents/sidebar_escalation_status_spec.js b/spec/frontend/sidebar/components/incidents/sidebar_escalation_status_spec.js
new file mode 100644
index 00000000000..6173088b247
--- /dev/null
+++ b/spec/frontend/sidebar/components/incidents/sidebar_escalation_status_spec.js
@@ -0,0 +1,202 @@
+import { createLocalVue } from '@vue/test-utils';
+import { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import SidebarEscalationStatus from '~/sidebar/components/incidents/sidebar_escalation_status.vue';
+import SidebarEditableItem from '~/sidebar/components/sidebar_editable_item.vue';
+import { escalationStatusQuery, escalationStatusMutation } from '~/sidebar/constants';
+import waitForPromises from 'helpers/wait_for_promises';
+import EscalationStatus from 'ee_else_ce/sidebar/components/incidents/escalation_status.vue';
+import { STATUS_ACKNOWLEDGED } from '~/sidebar/components/incidents/constants';
+import { createAlert } from '~/flash';
+import { logError } from '~/lib/logger';
+import { fetchData, fetchError, mutationData, mutationError } from './mock_data';
+
+jest.mock('~/lib/logger');
+jest.mock('~/flash');
+
+const localVue = createLocalVue();
+
+describe('SidebarEscalationStatus', () => {
+ let wrapper;
+ const queryResolverMock = jest.fn();
+ const mutationResolverMock = jest.fn();
+
+ function createMockApolloProvider({ hasFetchError = false, hasMutationError = false } = {}) {
+ localVue.use(VueApollo);
+
+ queryResolverMock.mockResolvedValue({ data: hasFetchError ? fetchError : fetchData });
+ mutationResolverMock.mockResolvedValue({
+ data: hasMutationError ? mutationError : mutationData,
+ });
+
+ const requestHandlers = [
+ [escalationStatusQuery, queryResolverMock],
+ [escalationStatusMutation, mutationResolverMock],
+ ];
+
+ return createMockApollo(requestHandlers);
+ }
+
+ function createComponent({ mockApollo } = {}) {
+ let config;
+
+ if (mockApollo) {
+ config = { apolloProvider: mockApollo };
+ } else {
+ config = { mocks: { $apollo: { queries: { status: { loading: false } } } } };
+ }
+
+ wrapper = mountExtended(SidebarEscalationStatus, {
+ propsData: {
+ iid: '1',
+ projectPath: 'gitlab-org/gitlab',
+ issuableType: 'issue',
+ },
+ provide: {
+ canUpdate: true,
+ },
+ directives: {
+ GlTooltip: createMockDirective(),
+ },
+ localVue,
+ ...config,
+ });
+ }
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const findSidebarComponent = () => wrapper.findComponent(SidebarEditableItem);
+ const findStatusComponent = () => wrapper.findComponent(EscalationStatus);
+ const findEditButton = () => wrapper.findByTestId('edit-button');
+ const findIcon = () => wrapper.findByTestId('status-icon');
+
+ const clickEditButton = async () => {
+ findEditButton().vm.$emit('click');
+ await nextTick();
+ };
+ const selectAcknowledgedStatus = async () => {
+ findStatusComponent().vm.$emit('input', STATUS_ACKNOWLEDGED);
+ // wait for apollo requests
+ await waitForPromises();
+ };
+
+ describe('sidebar', () => {
+ it('renders the sidebar component', () => {
+ createComponent();
+ expect(findSidebarComponent().exists()).toBe(true);
+ });
+
+ describe('status icon', () => {
+ it('is visible', () => {
+ createComponent();
+
+ expect(findIcon().exists()).toBe(true);
+ expect(findIcon().isVisible()).toBe(true);
+ });
+
+ it('has correct tooltip', async () => {
+ const mockApollo = createMockApolloProvider();
+ createComponent({ mockApollo });
+
+ // wait for apollo requests
+ await waitForPromises();
+
+ const tooltip = getBinding(findIcon().element, 'gl-tooltip');
+
+ expect(tooltip).toBeDefined();
+ expect(tooltip.value).toBe('Status: Triggered');
+ });
+ });
+
+ describe('status dropdown', () => {
+ beforeEach(async () => {
+ const mockApollo = createMockApolloProvider();
+ createComponent({ mockApollo });
+
+ // wait for apollo requests
+ await waitForPromises();
+ });
+
+ it('is closed by default', () => {
+ expect(findStatusComponent().exists()).toBe(true);
+ expect(findStatusComponent().isVisible()).toBe(false);
+ });
+
+ it('is shown after clicking the edit button', async () => {
+ await clickEditButton();
+
+ expect(findStatusComponent().isVisible()).toBe(true);
+ });
+
+ it('is hidden after clicking the edit button, when open already', async () => {
+ await clickEditButton();
+ await clickEditButton();
+
+ expect(findStatusComponent().isVisible()).toBe(false);
+ });
+ });
+
+ describe('update Status event', () => {
+ beforeEach(async () => {
+ const mockApollo = createMockApolloProvider();
+ createComponent({ mockApollo });
+
+ // wait for apollo requests
+ await waitForPromises();
+
+ await clickEditButton();
+ await selectAcknowledgedStatus();
+ });
+
+ it('calls the mutation', async () => {
+ const mutationVariables = {
+ iid: '1',
+ projectPath: 'gitlab-org/gitlab',
+ status: STATUS_ACKNOWLEDGED,
+ };
+
+ expect(mutationResolverMock).toHaveBeenCalledWith(mutationVariables);
+ });
+
+ it('closes the dropdown', async () => {
+ expect(findStatusComponent().isVisible()).toBe(false);
+ });
+
+ it('updates the status', async () => {
+ expect(findStatusComponent().attributes('value')).toBe(STATUS_ACKNOWLEDGED);
+ });
+ });
+
+ describe('mutation errors', () => {
+ it('should error upon fetch', async () => {
+ const mockApollo = createMockApolloProvider({ hasFetchError: true });
+ createComponent({ mockApollo });
+
+ // wait for apollo requests
+ await waitForPromises();
+
+ expect(createAlert).toHaveBeenCalled();
+ expect(logError).toHaveBeenCalled();
+ });
+
+ it('should error upon mutation', async () => {
+ const mockApollo = createMockApolloProvider({ hasMutationError: true });
+ createComponent({ mockApollo });
+
+ // wait for apollo requests
+ await waitForPromises();
+
+ await clickEditButton();
+ await selectAcknowledgedStatus();
+
+ expect(createAlert).toHaveBeenCalled();
+ expect(logError).toHaveBeenCalled();
+ });
+ });
+ });
+});
diff --git a/spec/frontend/sidebar/mock_data.js b/spec/frontend/sidebar/mock_data.js
index 30972484a08..fbca00636b6 100644
--- a/spec/frontend/sidebar/mock_data.js
+++ b/spec/frontend/sidebar/mock_data.js
@@ -428,7 +428,7 @@ const mockUser1 = {
export const mockUser2 = {
__typename: 'UserCore',
- id: 'gid://gitlab/User/4',
+ id: 'gid://gitlab/User/5',
avatarUrl: '/avatar2',
name: 'rookie',
username: 'rookie',
@@ -457,6 +457,33 @@ export const searchResponse = {
},
};
+export const searchResponseOnMR = {
+ data: {
+ workspace: {
+ __typename: 'Project',
+ id: '1',
+ users: {
+ nodes: [
+ {
+ id: 'gid://gitlab/User/1',
+ user: mockUser1,
+ mergeRequestInteraction: {
+ canMerge: true,
+ },
+ },
+ {
+ id: 'gid://gitlab/User/4',
+ user: mockUser2,
+ mergeRequestInteraction: {
+ canMerge: false,
+ },
+ },
+ ],
+ },
+ },
+ },
+};
+
export const projectMembersResponse = {
data: {
workspace: {
diff --git a/spec/frontend/vue_shared/components/user_select_spec.js b/spec/frontend/vue_shared/components/user_select_spec.js
index 411a15e1c74..cb476910944 100644
--- a/spec/frontend/vue_shared/components/user_select_spec.js
+++ b/spec/frontend/vue_shared/components/user_select_spec.js
@@ -1,4 +1,4 @@
-import { GlSearchBoxByType, GlDropdown } from '@gitlab/ui';
+import { GlSearchBoxByType } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { cloneDeep } from 'lodash';
import Vue, { nextTick } from 'vue';
@@ -6,11 +6,14 @@ import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import searchUsersQuery from '~/graphql_shared/queries/users_search.query.graphql';
-import { ASSIGNEES_DEBOUNCE_DELAY } from '~/sidebar/constants';
+import searchUsersQueryOnMR from '~/graphql_shared/queries/users_search_with_mr_permissions.graphql';
+import { IssuableType } from '~/issues/constants';
+import { DEFAULT_DEBOUNCE_AND_THROTTLE_MS } from '~/lib/utils/constants';
import getIssueParticipantsQuery from '~/vue_shared/components/sidebar/queries/get_issue_participants.query.graphql';
import UserSelect from '~/vue_shared/components/user_select/user_select.vue';
import {
searchResponse,
+ searchResponseOnMR,
projectMembersResponse,
participantsQueryResponse,
} from '../../sidebar/mock_data';
@@ -28,7 +31,7 @@ const assignee = {
const mockError = jest.fn().mockRejectedValue('Error!');
const waitForSearch = async () => {
- jest.advanceTimersByTime(ASSIGNEES_DEBOUNCE_DELAY);
+ jest.advanceTimersByTime(DEFAULT_DEBOUNCE_AND_THROTTLE_MS);
await nextTick();
await waitForPromises();
};
@@ -58,6 +61,7 @@ describe('User select dropdown', () => {
} = {}) => {
fakeApollo = createMockApollo([
[searchUsersQuery, searchQueryHandler],
+ [searchUsersQueryOnMR, jest.fn().mockResolvedValue(searchResponseOnMR)],
[getIssueParticipantsQuery, participantsQueryHandler],
]);
wrapper = shallowMount(UserSelect, {
@@ -76,7 +80,18 @@ describe('User select dropdown', () => {
...props,
},
stubs: {
- GlDropdown,
+ GlDropdown: {
+ template: `
+ <div>
+ <slot name="header"></slot>
+ <slot></slot>
+ <slot name="footer"></slot>
+ </div>
+ `,
+ methods: {
+ hide: jest.fn(),
+ },
+ },
},
});
};
@@ -132,11 +147,19 @@ describe('User select dropdown', () => {
expect(findSelectedParticipants()).toHaveLength(1);
});
+ it('does not render a `Cannot merge` tooltip', async () => {
+ createComponent();
+ await waitForPromises();
+
+ expect(findUnselectedParticipants().at(0).attributes('title')).toBe('');
+ });
+
describe('when search is empty', () => {
it('renders a merged list of participants and project members', async () => {
createComponent();
await waitForPromises();
- expect(findUnselectedParticipants()).toHaveLength(3);
+
+ expect(findUnselectedParticipants()).toHaveLength(4);
});
it('renders `Unassigned` link with the checkmark when there are no selected users', async () => {
@@ -162,7 +185,7 @@ describe('User select dropdown', () => {
},
});
await waitForPromises();
- findUnassignLink().vm.$emit('click');
+ findUnassignLink().trigger('click');
expect(wrapper.emitted('input')).toEqual([[[]]]);
});
@@ -175,7 +198,7 @@ describe('User select dropdown', () => {
});
await waitForPromises();
- findSelectedParticipants().at(0).vm.$emit('click', new Event('click'));
+ findSelectedParticipants().at(0).trigger('click');
expect(wrapper.emitted('input')).toEqual([[[]]]);
});
@@ -187,8 +210,9 @@ describe('User select dropdown', () => {
});
await waitForPromises();
- findUnselectedParticipants().at(0).vm.$emit('click');
- expect(wrapper.emitted('input')).toEqual([
+ findUnselectedParticipants().at(0).trigger('click');
+
+ expect(wrapper.emitted('input')).toMatchObject([
[
[
{
@@ -214,7 +238,7 @@ describe('User select dropdown', () => {
});
await waitForPromises();
- findUnselectedParticipants().at(0).vm.$emit('click');
+ findUnselectedParticipants().at(0).trigger('click');
expect(wrapper.emitted('input')[0][0]).toHaveLength(2);
});
});
@@ -232,7 +256,7 @@ describe('User select dropdown', () => {
createComponent();
await waitForPromises();
findSearchField().vm.$emit('input', 'roo');
- jest.advanceTimersByTime(ASSIGNEES_DEBOUNCE_DELAY);
+ jest.advanceTimersByTime(DEFAULT_DEBOUNCE_AND_THROTTLE_MS);
await nextTick();
expect(findParticipantsLoading().exists()).toBe(true);
@@ -273,4 +297,19 @@ describe('User select dropdown', () => {
expect(findEmptySearchResults().exists()).toBe(true);
});
});
+
+ describe('when on merge request sidebar', () => {
+ beforeEach(() => {
+ createComponent({ props: { issuableType: IssuableType.MergeRequest, issuableId: 1 } });
+ return waitForPromises();
+ });
+
+ it('does not render a `Cannot merge` tooltip for a user that has merge permission', () => {
+ expect(findUnselectedParticipants().at(0).attributes('title')).toBe('');
+ });
+
+ it('renders a `Cannot merge` tooltip for a user that does not have merge permission', () => {
+ expect(findUnselectedParticipants().at(1).attributes('title')).toBe('Cannot merge');
+ });
+ });
});
diff --git a/spec/helpers/listbox_helper_spec.rb b/spec/helpers/listbox_helper_spec.rb
index 8935d69d4f7..0a27aa04b37 100644
--- a/spec/helpers/listbox_helper_spec.rb
+++ b/spec/helpers/listbox_helper_spec.rb
@@ -65,10 +65,13 @@ RSpec.describe ListboxHelper do
end
context 'when selected does not match any item' do
- let(:selected) { 'qux' }
+ where(selected: [nil, 'qux'])
- it 'raises an error' do
- expect { subject }.to raise_error(ArgumentError, /cannot find qux/)
+ with_them do
+ it 'selects first item' do
+ expect(subject.at_css('button').content).to eq('Foo')
+ expect(subject.attributes['data-selected'].value).to eq('foo')
+ end
end
end
end
diff --git a/spec/lib/gitlab/database/transaction/context_spec.rb b/spec/lib/gitlab/database/transaction/context_spec.rb
index 37cfc841d48..33a47150060 100644
--- a/spec/lib/gitlab/database/transaction/context_spec.rb
+++ b/spec/lib/gitlab/database/transaction/context_spec.rb
@@ -135,4 +135,24 @@ RSpec.describe Gitlab::Database::Transaction::Context do
it_behaves_like 'logs transaction data'
end
+
+ context 'when there are too many external HTTP requests' do
+ before do
+ allow(::Gitlab::Metrics::Subscribers::ExternalHttp)
+ .to receive(:request_count)
+ .and_return(100)
+ end
+
+ it_behaves_like 'logs transaction data'
+ end
+
+ context 'when there are too many too long external HTTP requests' do
+ before do
+ allow(::Gitlab::Metrics::Subscribers::ExternalHttp)
+ .to receive(:duration)
+ .and_return(5.5)
+ end
+
+ it_behaves_like 'logs transaction data'
+ end
end
diff --git a/spec/lib/gitlab/database/transaction/observer_spec.rb b/spec/lib/gitlab/database/transaction/observer_spec.rb
index e5cc0106c9b..074c18d406e 100644
--- a/spec/lib/gitlab/database/transaction/observer_spec.rb
+++ b/spec/lib/gitlab/database/transaction/observer_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe Gitlab::Database::Transaction::Observer do
User.first
expect(transaction_context).to be_a(::Gitlab::Database::Transaction::Context)
- expect(context.keys).to match_array(%i(start_time depth savepoints queries backtraces))
+ expect(context.keys).to match_array(%i(start_time depth savepoints queries backtraces external_http_count_start external_http_duration_start))
expect(context[:depth]).to eq(2)
expect(context[:savepoints]).to eq(1)
expect(context[:queries].length).to eq(1)
@@ -38,6 +38,71 @@ RSpec.describe Gitlab::Database::Transaction::Observer do
expect(context[:backtraces].length).to eq(1)
end
+ describe 'tracking external network requests', :request_store do
+ it 'tracks external requests' do
+ perform_stubbed_external_http_request(duration: 0.25)
+ perform_stubbed_external_http_request(duration: 1.25)
+
+ ActiveRecord::Base.transaction do
+ User.first
+
+ expect(context[:external_http_count_start]).to eq(2)
+ expect(context[:external_http_duration_start]).to eq(1.5)
+
+ perform_stubbed_external_http_request(duration: 1)
+ perform_stubbed_external_http_request(duration: 3)
+
+ expect(transaction_context.external_http_requests_count).to eq 2
+ expect(transaction_context.external_http_requests_duration).to eq 4
+ end
+ end
+
+ context 'when external HTTP requests duration has been exceeded' do
+ it 'logs transaction details including exceeding thresholds' do
+ expect(Gitlab::AppJsonLogger).to receive(:info).with(
+ hash_including(
+ external_http_requests_count: 2,
+ external_http_requests_duration: 12
+ )
+ )
+
+ ActiveRecord::Base.transaction do
+ User.first
+
+ perform_stubbed_external_http_request(duration: 2)
+ perform_stubbed_external_http_request(duration: 10)
+ end
+ end
+ end
+
+ context 'when external HTTP requests count has been exceeded' do
+ it 'logs transaction details including exceeding thresholds' do
+ expect(Gitlab::AppJsonLogger).to receive(:info).with(
+ hash_including(external_http_requests_count: 55)
+ )
+
+ ActiveRecord::Base.transaction do
+ User.first
+
+ 55.times { perform_stubbed_external_http_request(duration: 0.01) }
+ end
+ end
+ end
+
+ def perform_stubbed_external_http_request(duration:)
+ ::Gitlab::Metrics::Subscribers::ExternalHttp.new.request(
+ instance_double(
+ 'ActiveSupport::Notifications::Event',
+ payload: {
+ method: 'GET', code: '200', duration: duration,
+ scheme: 'http', host: 'example.gitlab.com', port: 80, path: '/'
+ },
+ time: Time.current
+ )
+ )
+ end
+ end
+
describe '.extract_sql_command' do
using RSpec::Parameterized::TableSyntax
diff --git a/spec/lib/gitlab/merge_requests/mergeability/check_result_spec.rb b/spec/lib/gitlab/merge_requests/mergeability/check_result_spec.rb
index 4f437e57600..50cfa6b64ea 100644
--- a/spec/lib/gitlab/merge_requests/mergeability/check_result_spec.rb
+++ b/spec/lib/gitlab/merge_requests/mergeability/check_result_spec.rb
@@ -70,8 +70,8 @@ RSpec.describe Gitlab::MergeRequests::Mergeability::CheckResult do
let(:payload) { { test: 'test' } }
let(:hash) do
{
- status: status,
- payload: payload
+ 'status' => status,
+ 'payload' => payload
}
end
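The move from symbol to string keys above matches what a hash looks like after a round trip through a serializer such as JSON: symbol keys come back as strings, so the spec now compares against the string-keyed form (whether this cache actually uses JSON is an assumption of the sketch below).

    require 'json'

    hash = { status: :success, payload: { test: 'test' } }

    restored = JSON.parse(hash.to_json)
    restored            # => {"status"=>"success", "payload"=>{"test"=>"test"}}
    restored['status']  # => "success"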
diff --git a/spec/lib/gitlab/merge_requests/mergeability/results_store_spec.rb b/spec/lib/gitlab/merge_requests/mergeability/results_store_spec.rb
index d376dcb5b18..ed11f8ea6bb 100644
--- a/spec/lib/gitlab/merge_requests/mergeability/results_store_spec.rb
+++ b/spec/lib/gitlab/merge_requests/mergeability/results_store_spec.rb
@@ -10,10 +10,22 @@ RSpec.describe Gitlab::MergeRequests::Mergeability::ResultsStore do
let(:merge_request) { double }
describe '#read' do
- it 'calls #retrieve on the interface' do
- expect(interface).to receive(:retrieve_check).with(merge_check: merge_check)
+ let(:result_hash) { { 'status' => 'success', 'payload' => {} } }
- results_store.read(merge_check: merge_check)
+ it 'calls #retrieve_check on the interface' do
+ expect(interface).to receive(:retrieve_check).with(merge_check: merge_check).and_return(result_hash)
+
+ cached_result = results_store.read(merge_check: merge_check)
+
+ expect(cached_result.status).to eq(result_hash['status'].to_sym)
+ expect(cached_result.payload).to eq(result_hash['payload'])
+ end
+
+ context 'when #retrieve_check returns nil' do
+ it 'returns nil' do
+ expect(interface).to receive(:retrieve_check).with(merge_check: merge_check).and_return(nil)
+ expect(results_store.read(merge_check: merge_check)).to be_nil
+ end
end
end
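A hypothetical sketch, not the actual ResultsStore implementation, of a #read that would satisfy both expectations above: return nil when nothing was cached, otherwise rebuild a result whose status is symbolized and whose payload passes through unchanged (OpenStruct stands in for the real result class):

    require 'ostruct'

    def read(interface, merge_check:)
      hash = interface.retrieve_check(merge_check: merge_check)
      return if hash.nil?

      # OpenStruct is only a stand-in; the real code builds a CheckResult.
      OpenStruct.new(status: hash['status'].to_sym, payload: hash['payload'])
    end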
diff --git a/spec/lib/gitlab/process_supervisor_spec.rb b/spec/lib/gitlab/process_supervisor_spec.rb
index d264c77d5fb..60b127dadda 100644
--- a/spec/lib/gitlab/process_supervisor_spec.rb
+++ b/spec/lib/gitlab/process_supervisor_spec.rb
@@ -6,7 +6,9 @@ RSpec.describe Gitlab::ProcessSupervisor do
let(:health_check_interval_seconds) { 0.1 }
let(:check_terminate_interval_seconds) { 1 }
let(:forwarded_signals) { [] }
- let(:process_id) do
+ let(:process_ids) { [spawn_process, spawn_process] }
+
+ def spawn_process
Process.spawn('while true; do sleep 1; done').tap do |pid|
Process.detach(pid)
end
@@ -22,54 +24,52 @@ RSpec.describe Gitlab::ProcessSupervisor do
end
after do
- if Gitlab::ProcessManagement.process_alive?(process_id)
- Process.kill('KILL', process_id)
+ process_ids.each do |pid|
+ Process.kill('KILL', pid)
+ rescue Errno::ESRCH
+ # Ignore if a process wasn't actually alive.
end
end
describe '#supervise' do
- context 'while supervised process is alive' do
+ context 'while supervised processes are alive' do
it 'does not invoke callback' do
- expect(Gitlab::ProcessManagement.process_alive?(process_id)).to be(true)
+ expect(Gitlab::ProcessManagement.all_alive?(process_ids)).to be(true)
pids_killed = []
- thread = Thread.new do
- supervisor.supervise(process_id) do |dead_pids|
- pids_killed = dead_pids
- []
- end
+ supervisor.supervise(process_ids) do |dead_pids|
+ pids_killed = dead_pids
+ []
end
# Wait several times the poll frequency of the supervisor.
sleep health_check_interval_seconds * 10
- thread.terminate
expect(pids_killed).to be_empty
- expect(Gitlab::ProcessManagement.process_alive?(process_id)).to be(true)
+ expect(Gitlab::ProcessManagement.all_alive?(process_ids)).to be(true)
end
end
- context 'when supervised process dies' do
- it 'triggers callback with the dead PIDs' do
- expect(Gitlab::ProcessManagement.process_alive?(process_id)).to be(true)
+ context 'when a supervised process dies' do
+ it 'triggers callback with the dead PIDs and adds new PIDs to supervised PIDs' do
+ expect(Gitlab::ProcessManagement.all_alive?(process_ids)).to be(true)
pids_killed = []
- thread = Thread.new do
- supervisor.supervise(process_id) do |dead_pids|
- pids_killed = dead_pids
- []
- end
+ supervisor.supervise(process_ids) do |dead_pids|
+ pids_killed = dead_pids
+ [42] # Fake starting a new process in place of the terminated one.
end
# Terminate the supervised process.
- Process.kill('TERM', process_id)
+ Process.kill('TERM', process_ids.first)
await_condition(sleep_sec: health_check_interval_seconds) do
- pids_killed == [process_id]
+ pids_killed == [process_ids.first]
end
- thread.terminate
- expect(Gitlab::ProcessManagement.process_alive?(process_id)).to be(false)
+ expect(Gitlab::ProcessManagement.process_alive?(process_ids.first)).to be(false)
+ expect(Gitlab::ProcessManagement.process_alive?(process_ids.last)).to be(true)
+ expect(supervisor.supervised_pids).to match_array([process_ids.last, 42])
end
end
@@ -78,7 +78,7 @@ RSpec.describe Gitlab::ProcessSupervisor do
allow(supervisor).to receive(:sleep)
allow(Gitlab::ProcessManagement).to receive(:trap_signals)
allow(Gitlab::ProcessManagement).to receive(:all_alive?).and_return(false)
- allow(Gitlab::ProcessManagement).to receive(:signal_processes).with([process_id], anything)
+ allow(Gitlab::ProcessManagement).to receive(:signal_processes).with(process_ids, anything)
end
context 'termination signals' do
@@ -87,21 +87,21 @@ RSpec.describe Gitlab::ProcessSupervisor do
allow(Gitlab::ProcessManagement).to receive(:any_alive?).and_return(false)
expect(Gitlab::ProcessManagement).to receive(:trap_signals).ordered.with(%i(INT TERM)).and_yield(:TERM)
- expect(Gitlab::ProcessManagement).to receive(:signal_processes).ordered.with([process_id], :TERM)
+ expect(Gitlab::ProcessManagement).to receive(:signal_processes).ordered.with(process_ids, :TERM)
expect(supervisor).not_to receive(:sleep).with(check_terminate_interval_seconds)
- supervisor.supervise(process_id) { [] }
+ supervisor.supervise(process_ids) { [] }
end
end
context 'when TERM does not result in timely shutdown of processes' do
it 'issues a KILL signal after the grace period expires' do
expect(Gitlab::ProcessManagement).to receive(:trap_signals).with(%i(INT TERM)).and_yield(:TERM)
- expect(Gitlab::ProcessManagement).to receive(:signal_processes).ordered.with([process_id], :TERM)
+ expect(Gitlab::ProcessManagement).to receive(:signal_processes).ordered.with(process_ids, :TERM)
expect(supervisor).to receive(:sleep).ordered.with(check_terminate_interval_seconds).at_least(:once)
- expect(Gitlab::ProcessManagement).to receive(:signal_processes).ordered.with([process_id], '-KILL')
+ expect(Gitlab::ProcessManagement).to receive(:signal_processes).ordered.with(process_ids, '-KILL')
- supervisor.supervise(process_id) { [] }
+ supervisor.supervise(process_ids) { [] }
end
end
end
@@ -111,10 +111,53 @@ RSpec.describe Gitlab::ProcessSupervisor do
it 'forwards given signals to the observed processes' do
expect(Gitlab::ProcessManagement).to receive(:trap_signals).with(%i(USR1)).and_yield(:USR1)
- expect(Gitlab::ProcessManagement).to receive(:signal_processes).ordered.with([process_id], :USR1)
+ expect(Gitlab::ProcessManagement).to receive(:signal_processes).ordered.with(process_ids, :USR1)
+
+ supervisor.supervise(process_ids) { [] }
+ end
+ end
+ end
+ end
+
+ describe '#shutdown' do
+ context 'when supervisor is supervising processes' do
+ before do
+ supervisor.supervise(process_ids)
+ end
+
+ context 'when supervisor is alive' do
+ it 'signals TERM then KILL to all supervised processes' do
+ expect(Gitlab::ProcessManagement).to receive(:signal_processes).ordered.with(process_ids, :TERM)
+ expect(Gitlab::ProcessManagement).to receive(:signal_processes).ordered.with(process_ids, '-KILL')
- supervisor.supervise(process_id) { [] }
+ supervisor.shutdown
end
+
+ it 'stops the supervisor' do
+ expect { supervisor.shutdown }.to change { supervisor.alive }.from(true).to(false)
+ end
+ end
+
+ context 'when supervisor has already shut down' do
+ before do
+ supervisor.shutdown
+ end
+
+ it 'does nothing' do
+ expect(supervisor.alive).to be(false)
+ expect(Gitlab::ProcessManagement).not_to receive(:signal_processes)
+
+ supervisor.shutdown
+ end
+ end
+ end
+
+ context 'when supervisor never started' do
+ it 'does nothing' do
+ expect(supervisor.alive).to be(false)
+ expect(Gitlab::ProcessManagement).not_to receive(:signal_processes)
+
+ supervisor.shutdown
end
end
end
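Self-contained sketch of the supervise contract these examples pin down: the block receives the PIDs found dead and returns replacement PIDs, which rejoin the supervised set (42 plays the same stand-in role as in the spec above):

    supervised_pids = [1001, 1002]
    dead_pids       = [1001]

    # The callback "respawns" each dead process and reports the new PIDs.
    replacements    = dead_pids.map { 42 }
    supervised_pids = (supervised_pids - dead_pids) + replacements

    supervised_pids # => [1002, 42]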
diff --git a/spec/lib/google_api/cloud_platform/client_spec.rb b/spec/lib/google_api/cloud_platform/client_spec.rb
index 29e5445cfaa..a81ed38382b 100644
--- a/spec/lib/google_api/cloud_platform/client_spec.rb
+++ b/spec/lib/google_api/cloud_platform/client_spec.rb
@@ -334,4 +334,20 @@ RSpec.describe GoogleApi::CloudPlatform::Client do
is_expected.to eq(operation)
end
end
+
+ describe '#revoke_authorizations' do
+ subject { client.revoke_authorizations }
+
+ it 'calls the revoke endpoint' do
+ stub_request(:post, "https://oauth2.googleapis.com/revoke")
+ .with(
+ body: "token=token",
+ headers: {
+ 'Accept' => '*/*',
+ 'Accept-Encoding' => 'gzip;q=1.0,deflate;q=0.6,identity;q=0.3',
+ 'User-Agent' => 'Ruby'
+ })
+ .to_return(status: 200, body: "", headers: {})
+ end
+ end
end
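The headers in the stub above (User-Agent 'Ruby' and the three-part Accept-Encoding list) are exactly what Ruby's Net::HTTP sends by default, so the stub is shaped to match a form-encoded POST like the one below; that the client really goes through Net::HTTP is an assumption of this sketch:

    require 'net/http'
    require 'uri'

    uri      = URI('https://oauth2.googleapis.com/revoke')
    response = Net::HTTP.post_form(uri, 'token' => 'token') # request body: "token=token"

    response.code # => "200" once a stub like the one above is registered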
diff --git a/spec/metrics_server/metrics_server_spec.rb b/spec/metrics_server/metrics_server_spec.rb
index 860a3299d85..591840dcba2 100644
--- a/spec/metrics_server/metrics_server_spec.rb
+++ b/spec/metrics_server/metrics_server_spec.rb
@@ -1,13 +1,10 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
+require 'spec_helper'
require_relative '../../metrics_server/metrics_server'
-require_relative '../support/helpers/next_instance_of'
RSpec.describe MetricsServer do # rubocop:disable RSpec/FilePath
- include NextInstanceOf
-
let(:prometheus_config) { ::Prometheus::Client.configuration }
let(:metrics_dir) { Dir.mktmpdir }
@@ -205,4 +202,47 @@ RSpec.describe MetricsServer do # rubocop:disable RSpec/FilePath
it_behaves_like 'a metrics exporter', 'sidekiq', 'sidekiq_exporter'
end
+
+ describe '.start_for_puma' do
+ let(:supervisor) { instance_double(Gitlab::ProcessSupervisor) }
+
+ before do
+ allow(Gitlab::ProcessSupervisor).to receive(:instance).and_return(supervisor)
+ end
+
+ it 'spawns a server process and supervises it' do
+ expect(Process).to receive(:spawn).with(
+ include('METRICS_SERVER_TARGET' => 'puma'), end_with('bin/metrics-server'), anything
+ ).once.and_return(42)
+ expect(supervisor).to receive(:supervise).with(42)
+
+ described_class.start_for_puma
+ end
+
+ context 'when the supervisor callback is invoked' do
+ context 'and the supervisor is alive' do
+ it 'restarts the metrics server' do
+ expect(supervisor).to receive(:alive).and_return(true)
+ expect(supervisor).to receive(:supervise).and_yield
+ expect(Process).to receive(:spawn).with(
+ include('METRICS_SERVER_TARGET' => 'puma'), end_with('bin/metrics-server'), anything
+ ).twice.and_return(42)
+
+ described_class.start_for_puma
+ end
+ end
+
+ context 'and the supervisor is not alive' do
+ it 'does not restart the server' do
+ expect(supervisor).to receive(:alive).and_return(false)
+ expect(supervisor).to receive(:supervise).and_yield
+ expect(Process).to receive(:spawn).with(
+ include('METRICS_SERVER_TARGET' => 'puma'), end_with('bin/metrics-server'), anything
+ ).once.and_return(42)
+
+ described_class.start_for_puma
+ end
+ end
+ end
+ end
end
diff --git a/spec/requests/projects/google_cloud/revoke_oauth_controller_spec.rb b/spec/requests/projects/google_cloud/revoke_oauth_controller_spec.rb
new file mode 100644
index 00000000000..07590d3710e
--- /dev/null
+++ b/spec/requests/projects/google_cloud/revoke_oauth_controller_spec.rb
@@ -0,0 +1,86 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::GoogleCloud::RevokeOauthController do
+ include SessionHelpers
+
+ describe 'POST #create', :snowplow, :clean_gitlab_redis_sessions, :aggregate_failures do
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:url) { project_google_cloud_revoke_oauth_index_path(project).to_s }
+
+ let(:user) { project.creator }
+
+ before do
+ sign_in(user)
+
+ stub_session(GoogleApi::CloudPlatform::Client.session_key_for_token => 'token')
+
+ allow_next_instance_of(GoogleApi::CloudPlatform::Client) do |client|
+ allow(client).to receive(:validate_token).and_return(true)
+ end
+ end
+
+ context 'when GCP token is invalid' do
+ before do
+ allow_next_instance_of(GoogleApi::CloudPlatform::Client) do |client|
+ allow(client).to receive(:validate_token).and_return(false)
+ end
+ end
+
+ it 'redirects to Google OAuth2 authorize URL' do
+ sign_in(user)
+
+ post url
+
+ expect(response).to redirect_to(assigns(:authorize_url))
+ end
+ end
+
+ context 'when revocation is successful' do
+ before do
+ stub_request(:post, "https://oauth2.googleapis.com/revoke")
+ .to_return(status: 200, body: "", headers: {})
+ end
+
+ it 'calls revoke endpoint and redirects' do
+ post url
+
+ expect(request.session[GoogleApi::CloudPlatform::Client.session_key_for_token]).to be_nil
+ expect(response).to redirect_to(project_google_cloud_index_path(project))
+ expect(flash[:notice]).to eq('Google OAuth2 token revocation requested')
+ expect_snowplow_event(
+ category: 'Projects::GoogleCloud',
+ action: 'revoke_oauth#create',
+ label: 'create',
+ property: 'success',
+ project: project,
+ user: user
+ )
+ end
+ end
+
+ context 'when revocation fails' do
+ before do
+ stub_request(:post, "https://oauth2.googleapis.com/revoke")
+ .to_return(status: 400, body: "", headers: {})
+ end
+
+ it 'calls revoke endpoint and redirects' do
+ post url
+
+ expect(request.session[GoogleApi::CloudPlatform::Client.session_key_for_token]).to be_nil
+ expect(response).to redirect_to(project_google_cloud_index_path(project))
+ expect(flash[:alert]).to eq('Google OAuth2 token revocation request failed')
+ expect_snowplow_event(
+ category: 'Projects::GoogleCloud',
+ action: 'revoke_oauth#create',
+ label: 'create',
+ property: 'failed',
+ project: project,
+ user: user
+ )
+ end
+ end
+ end
+end
diff --git a/spec/requests/projects/google_cloud_controller_spec.rb b/spec/requests/projects/google_cloud_controller_spec.rb
index 943c70e01bc..d0814990989 100644
--- a/spec/requests/projects/google_cloud_controller_spec.rb
+++ b/spec/requests/projects/google_cloud_controller_spec.rb
@@ -141,6 +141,38 @@ RSpec.describe Projects::GoogleCloudController do
)
end
end
+
+ context 'but google oauth2 token is not valid' do
+ it 'does not return revoke oauth url' do
+ allow_next_instance_of(GoogleApi::CloudPlatform::Client) do |client|
+ allow(client).to receive(:validate_token).and_return(false)
+ end
+
+ sign_in(user)
+
+ get url
+
+ expect(response).to be_successful
+ expect_snowplow_event(
+ category: 'Projects::GoogleCloud',
+ action: 'google_cloud#index',
+ label: 'index',
+ extra: {
+ screen: 'home',
+ serviceAccounts: [],
+ createServiceAccountUrl: project_google_cloud_service_accounts_path(project),
+ enableCloudRunUrl: project_google_cloud_deployments_cloud_run_path(project),
+ enableCloudStorageUrl: project_google_cloud_deployments_cloud_storage_path(project),
+ emptyIllustrationUrl: ActionController::Base.helpers.image_path('illustrations/pipelines_empty.svg'),
+ configureGcpRegionsUrl: project_google_cloud_gcp_regions_path(project),
+ gcpRegions: [],
+ revokeOauthUrl: nil
+ },
+ project: project,
+ user: user
+ )
+ end
+ end
end
end
end
diff --git a/spec/services/issue_links/create_service_spec.rb b/spec/services/issue_links/create_service_spec.rb
index 1bca717acb7..9cb5980716a 100644
--- a/spec/services/issue_links/create_service_spec.rb
+++ b/spec/services/issue_links/create_service_spec.rb
@@ -4,180 +4,42 @@ require 'spec_helper'
RSpec.describe IssueLinks::CreateService do
describe '#execute' do
- let(:namespace) { create :namespace }
- let(:project) { create :project, namespace: namespace }
- let(:issue) { create :issue, project: project }
- let(:user) { create :user }
- let(:params) do
- {}
- end
+ let_it_be(:user) { create :user }
+ let_it_be(:namespace) { create :namespace }
+ let_it_be(:project) { create :project, namespace: namespace }
+ let_it_be(:issuable) { create :issue, project: project }
+ let_it_be(:issuable2) { create :issue, project: project }
+ let_it_be(:guest_issuable) { create :issue }
+ let_it_be(:another_project) { create :project, namespace: project.namespace }
+ let_it_be(:issuable3) { create :issue, project: another_project }
+ let_it_be(:issuable_a) { create :issue, project: project }
+ let_it_be(:issuable_b) { create :issue, project: project }
+ let_it_be(:issuable_link) { create :issue_link, source: issuable, target: issuable_b, link_type: IssueLink::TYPE_RELATES_TO }
+
+ let(:issuable_parent) { issuable.project }
+ let(:issuable_type) { :issue }
+ let(:issuable_link_class) { IssueLink }
+ let(:params) { {} }
before do
project.add_developer(user)
+ guest_issuable.project.add_guest(user)
+ another_project.add_developer(user)
end
- subject { described_class.new(issue, user, params).execute }
+ it_behaves_like 'issuable link creation'
- context 'when the reference list is empty' do
- let(:params) do
- { issuable_references: [] }
- end
+ context 'when target is an incident' do
+ let_it_be(:issue) { create(:incident, project: project) }
- it 'returns error' do
- is_expected.to eq(message: 'No matching issue found. Make sure that you are adding a valid issue URL.', status: :error, http_status: 404)
- end
- end
-
- context 'when Issue not found' do
let(:params) do
- { issuable_references: ["##{non_existing_record_iid}"] }
- end
-
- it 'returns error' do
- is_expected.to eq(message: 'No matching issue found. Make sure that you are adding a valid issue URL.', status: :error, http_status: 404)
+ { issuable_references: [issuable2.to_reference, issuable3.to_reference(another_project)] }
end
- it 'no relationship is created' do
- expect { subject }.not_to change(IssueLink, :count)
- end
- end
-
- context 'when user has no permission to target project Issue' do
- let(:target_issuable) { create :issue }
-
- let(:params) do
- { issuable_references: [target_issuable.to_reference(project)] }
- end
-
- it 'returns error' do
- target_issuable.project.add_guest(user)
-
- is_expected.to eq(message: 'No matching issue found. Make sure that you are adding a valid issue URL.', status: :error, http_status: 404)
- end
-
- it 'no relationship is created' do
- expect { subject }.not_to change(IssueLink, :count)
- end
- end
-
- context 'source and target are the same issue' do
- let(:params) do
- { issuable_references: [issue.to_reference] }
- end
-
- it 'does not create notes' do
- expect(SystemNoteService).not_to receive(:relate_issue)
-
- subject
- end
-
- it 'no relationship is created' do
- expect { subject }.not_to change(IssueLink, :count)
- end
- end
-
- context 'when there is an issue to relate' do
- let(:issue_a) { create :issue, project: project }
- let(:another_project) { create :project, namespace: project.namespace }
- let(:another_project_issue) { create :issue, project: another_project }
-
- let(:issue_a_ref) { issue_a.to_reference }
- let(:another_project_issue_ref) { another_project_issue.to_reference(project) }
-
- let(:params) do
- { issuable_references: [issue_a_ref, another_project_issue_ref] }
- end
-
- before do
- another_project.add_developer(user)
- end
-
- it 'creates relationships' do
- expect { subject }.to change(IssueLink, :count).from(0).to(2)
-
- expect(IssueLink.find_by!(target: issue_a)).to have_attributes(source: issue, link_type: 'relates_to')
- expect(IssueLink.find_by!(target: another_project_issue)).to have_attributes(source: issue, link_type: 'relates_to')
- end
-
- it 'returns success status' do
- is_expected.to eq(status: :success)
- end
-
- it 'creates notes' do
- # First two-way relation notes
- expect(SystemNoteService).to receive(:relate_issue)
- .with(issue, issue_a, user)
- expect(SystemNoteService).to receive(:relate_issue)
- .with(issue_a, issue, user)
-
- # Second two-way relation notes
- expect(SystemNoteService).to receive(:relate_issue)
- .with(issue, another_project_issue, user)
- expect(SystemNoteService).to receive(:relate_issue)
- .with(another_project_issue, issue, user)
-
- subject
- end
-
- context 'issue is an incident' do
- let(:issue) { create(:incident, project: project) }
-
- it_behaves_like 'an incident management tracked event', :incident_management_incident_relate do
- let(:current_user) { user }
- end
- end
- end
-
- context 'when reference of any already related issue is present' do
- let(:issue_a) { create :issue, project: project }
- let(:issue_b) { create :issue, project: project }
- let(:issue_c) { create :issue, project: project }
-
- before do
- create :issue_link, source: issue, target: issue_b, link_type: IssueLink::TYPE_RELATES_TO
- create :issue_link, source: issue, target: issue_c, link_type: IssueLink::TYPE_RELATES_TO
- end
-
- let(:params) do
- {
- issuable_references: [
- issue_a.to_reference,
- issue_b.to_reference,
- issue_c.to_reference
- ],
- link_type: IssueLink::TYPE_RELATES_TO
- }
- end
-
- it 'creates notes only for new relations' do
- expect(SystemNoteService).to receive(:relate_issue).with(issue, issue_a, anything)
- expect(SystemNoteService).to receive(:relate_issue).with(issue_a, issue, anything)
- expect(SystemNoteService).not_to receive(:relate_issue).with(issue, issue_b, anything)
- expect(SystemNoteService).not_to receive(:relate_issue).with(issue_b, issue, anything)
- expect(SystemNoteService).not_to receive(:relate_issue).with(issue, issue_c, anything)
- expect(SystemNoteService).not_to receive(:relate_issue).with(issue_c, issue, anything)
-
- subject
- end
- end
-
- context 'when there are invalid references' do
- let(:issue_a) { create :issue, project: project }
-
- let(:params) do
- { issuable_references: [issue.to_reference, issue_a.to_reference] }
- end
-
- it 'creates links only for valid references' do
- expect { subject }.to change { IssueLink.count }.by(1)
- end
+ subject { described_class.new(issue, user, params).execute }
- it 'returns error status' do
- expect(subject).to eq(
- status: :error,
- http_status: 422,
- message: "#{issue.to_reference} cannot be added: cannot be related to itself"
- )
+ it_behaves_like 'an incident management tracked event', :incident_management_incident_relate do
+ let(:current_user) { user }
end
end
end
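The slimmed-down spec above relies on RSpec's shared-example contract: it_behaves_like 'issuable link creation' pulls in examples that refer to names such as issuable, issuable2, issuable_link_class, issuable_type and params, all of which the host spec must supply via let/let_it_be. A minimal sketch of that mechanism with made-up names:

    RSpec.shared_examples 'issuable link creation (sketch)' do
      it 'reads names defined by the host spec' do
        expect(issuable_type).to eq(:issue)
        expect(issuable_link_class).to eq(Array) # Array is only a stand-in class
      end
    end

    RSpec.describe 'a host spec' do
      let(:issuable_type)       { :issue }
      let(:issuable_link_class) { Array }

      it_behaves_like 'issuable link creation (sketch)'
    end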
diff --git a/spec/services/system_note_service_spec.rb b/spec/services/system_note_service_spec.rb
index e0528aace5a..c322ec35e86 100644
--- a/spec/services/system_note_service_spec.rb
+++ b/spec/services/system_note_service_spec.rb
@@ -100,7 +100,7 @@ RSpec.describe SystemNoteService do
end
end
- describe '.relate_issue' do
+ describe '.relate_issuable' do
let(:noteable_ref) { double }
let(:noteable) { double }
@@ -110,10 +110,10 @@ RSpec.describe SystemNoteService do
it 'calls IssuableService' do
expect_next_instance_of(::SystemNotes::IssuablesService) do |service|
- expect(service).to receive(:relate_issue).with(noteable_ref)
+ expect(service).to receive(:relate_issuable).with(noteable_ref)
end
- described_class.relate_issue(noteable, noteable_ref, double)
+ described_class.relate_issuable(noteable, noteable_ref, double)
end
end
diff --git a/spec/services/system_notes/issuables_service_spec.rb b/spec/services/system_notes/issuables_service_spec.rb
index 767f5d10c4b..5bc7ea82976 100644
--- a/spec/services/system_notes/issuables_service_spec.rb
+++ b/spec/services/system_notes/issuables_service_spec.rb
@@ -14,10 +14,10 @@ RSpec.describe ::SystemNotes::IssuablesService do
let(:service) { described_class.new(noteable: noteable, project: project, author: author) }
- describe '#relate_issue' do
+ describe '#relate_issuable' do
let(:noteable_ref) { create(:issue) }
- subject { service.relate_issue(noteable_ref) }
+ subject { service.relate_issuable(noteable_ref) }
it_behaves_like 'a system note' do
let(:action) { 'relate' }
diff --git a/spec/support/matchers/pushed_frontend_feature_flags_matcher.rb b/spec/support/matchers/pushed_frontend_feature_flags_matcher.rb
index b49d4da8cda..ecd174edec9 100644
--- a/spec/support/matchers/pushed_frontend_feature_flags_matcher.rb
+++ b/spec/support/matchers/pushed_frontend_feature_flags_matcher.rb
@@ -5,15 +5,19 @@ RSpec::Matchers.define :have_pushed_frontend_feature_flags do |expected|
"\"#{key}\":#{value}"
end
+ def html(actual)
+ actual.try(:html) || actual
+ end
+
match do |actual|
expected.all? do |feature_flag_name, enabled|
- page.html.include?(to_js(feature_flag_name, enabled))
+ html(actual).include?(to_js(feature_flag_name, enabled))
end
end
failure_message do |actual|
missing = expected.select do |feature_flag_name, enabled|
- !page.html.include?(to_js(feature_flag_name, enabled))
+ !html(actual).include?(to_js(feature_flag_name, enabled))
end
formatted_missing_flags = missing.map { |feature_flag_name, enabled| to_js(feature_flag_name, enabled) }.join("\n")
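The html(actual) helper added above duck-types its argument so the matcher works against either a Capybara page (anything responding to #html) or a raw HTML string. A small sketch, assuming ActiveSupport's Object#try is loaded as it is in GitLab specs:

    require 'active_support/core_ext/object/try'

    def html(actual)
      actual.try(:html) || actual
    end

    PageLike = Struct.new(:html)

    html('<p>raw markup</p>')                # => "<p>raw markup</p>"
    html(PageLike.new('<p>from a page</p>')) # => "<p>from a page</p>"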
diff --git a/spec/support/shared_examples/features/multiple_assignees_widget_mr_shared_examples.rb b/spec/support/shared_examples/features/multiple_assignees_widget_mr_shared_examples.rb
new file mode 100644
index 00000000000..bbde448a1a1
--- /dev/null
+++ b/spec/support/shared_examples/features/multiple_assignees_widget_mr_shared_examples.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'multiple assignees widget merge request' do |action, save_button_title|
+ it "#{action} a MR with multiple assignees", :js do
+ find('.js-assignee-search').click
+ page.within '.dropdown-menu-user' do
+ click_link user.name
+ click_link user2.name
+ end
+
+ # Extra click needed in order to toggle the dropdown
+ find('.js-assignee-search').click
+
+ expect(all('input[name="merge_request[assignee_ids][]"]', visible: false).map(&:value))
+ .to match_array([user.id.to_s, user2.id.to_s])
+
+ page.within '.js-assignee-search' do
+ expect(page).to have_content "#{user2.name} + 1 more"
+ end
+
+ click_button save_button_title
+
+ page.within '.issuable-sidebar' do
+ page.within '.assignee' do
+ expect(page).to have_content '2 Assignees'
+
+ click_button('Edit')
+
+ expect(page).to have_content user.name
+ expect(page).to have_content user2.name
+ end
+ end
+
+ page.within '.dropdown-menu-user' do
+ click_link user.name
+ end
+
+ page.within '.issuable-sidebar' do
+ page.within '.assignee' do
+ # Closing dropdown to persist
+ click_button('Apply')
+
+ expect(page).to have_content user2.name
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/models/wiki_shared_examples.rb b/spec/support/shared_examples/models/wiki_shared_examples.rb
index bc5956e3eec..b3f79d9fe6e 100644
--- a/spec/support/shared_examples/models/wiki_shared_examples.rb
+++ b/spec/support/shared_examples/models/wiki_shared_examples.rb
@@ -599,36 +599,13 @@ RSpec.shared_examples 'wiki model' do
context 'when repository is empty' do
let(:wiki_container) { wiki_container_without_repo }
- it 'changes the HEAD reference to the default branch' do
- wiki.repository.create_if_not_exists
- wiki.repository.raw_repository.write_ref('HEAD', 'refs/heads/bar')
+ it 'creates the repository with the default branch' do
+ wiki.repository.create_if_not_exists(default_branch)
subject
expect(File.read(head_path).squish).to eq "ref: refs/heads/#{default_branch}"
end
end
-
- context 'when repository is not empty' do
- before do
- wiki.create_page('index', 'test content')
- end
-
- it 'does nothing when HEAD points to the right branch' do
- expect(wiki.repository.raw_repository).not_to receive(:write_ref)
-
- subject
- end
-
- context 'when HEAD points to the wrong branch' do
- it 'rewrites HEAD with the right branch' do
- wiki.repository.raw_repository.write_ref('HEAD', 'refs/heads/bar')
-
- subject
-
- expect(File.read(head_path).squish).to eq "ref: refs/heads/#{default_branch}"
- end
- end
- end
end
end
diff --git a/spec/support/shared_examples/services/issuable_links/create_links_shared_examples.rb b/spec/support/shared_examples/services/issuable_links/create_links_shared_examples.rb
new file mode 100644
index 00000000000..6146aae6b9b
--- /dev/null
+++ b/spec/support/shared_examples/services/issuable_links/create_links_shared_examples.rb
@@ -0,0 +1,133 @@
+# frozen_string_literal: true
+
+shared_examples 'issuable link creation' do
+ describe '#execute' do
+ subject { described_class.new(issuable, user, params).execute }
+
+ context 'when the reference list is empty' do
+ let(:params) do
+ { issuable_references: [] }
+ end
+
+ it 'returns error' do
+ is_expected.to eq(message: "No matching #{issuable_type} found. Make sure that you are adding a valid #{issuable_type} URL.", status: :error, http_status: 404)
+ end
+ end
+
+ context 'when Issuable not found' do
+ let(:params) do
+ { issuable_references: ["##{non_existing_record_iid}"] }
+ end
+
+ it 'returns error' do
+ is_expected.to eq(message: "No matching #{issuable_type} found. Make sure that you are adding a valid #{issuable_type} URL.", status: :error, http_status: 404)
+ end
+
+ it 'no relationship is created' do
+ expect { subject }.not_to change(issuable_link_class, :count)
+ end
+ end
+
+ context 'when user has no permission to target issuable' do
+ let(:params) do
+ { issuable_references: [guest_issuable.to_reference(issuable_parent)] }
+ end
+
+ it 'returns error' do
+ is_expected.to eq(message: "No matching #{issuable_type} found. Make sure that you are adding a valid #{issuable_type} URL.", status: :error, http_status: 404)
+ end
+
+ it 'no relationship is created' do
+ expect { subject }.not_to change(issuable_link_class, :count)
+ end
+ end
+
+ context 'source and target are the same issuable' do
+ let(:params) do
+ { issuable_references: [issuable.to_reference] }
+ end
+
+ it 'does not create notes' do
+ expect(SystemNoteService).not_to receive(:relate_issuable)
+
+ subject
+ end
+
+ it 'no relationship is created' do
+ expect { subject }.not_to change(issuable_link_class, :count)
+ end
+ end
+
+ context 'when there is an issuable to relate' do
+ let(:params) do
+ { issuable_references: [issuable2.to_reference, issuable3.to_reference(issuable_parent)] }
+ end
+
+ it 'creates relationships' do
+ expect { subject }.to change(issuable_link_class, :count).by(2)
+
+ expect(issuable_link_class.find_by!(target: issuable2)).to have_attributes(source: issuable, link_type: 'relates_to')
+ expect(issuable_link_class.find_by!(target: issuable3)).to have_attributes(source: issuable, link_type: 'relates_to')
+ end
+
+ it 'returns success status' do
+ is_expected.to eq(status: :success)
+ end
+
+ it 'creates notes' do
+ # First two-way relation notes
+ expect(SystemNoteService).to receive(:relate_issuable)
+ .with(issuable, issuable2, user)
+ expect(SystemNoteService).to receive(:relate_issuable)
+ .with(issuable2, issuable, user)
+
+ # Second two-way relation notes
+ expect(SystemNoteService).to receive(:relate_issuable)
+ .with(issuable, issuable3, user)
+ expect(SystemNoteService).to receive(:relate_issuable)
+ .with(issuable3, issuable, user)
+
+ subject
+ end
+ end
+
+ context 'when reference of any already related issue is present' do
+ let(:params) do
+ {
+ issuable_references: [
+ issuable_a.to_reference,
+ issuable_b.to_reference
+ ],
+ link_type: IssueLink::TYPE_RELATES_TO
+ }
+ end
+
+ it 'creates notes only for new relations' do
+ expect(SystemNoteService).to receive(:relate_issuable).with(issuable, issuable_a, anything)
+ expect(SystemNoteService).to receive(:relate_issuable).with(issuable_a, issuable, anything)
+ expect(SystemNoteService).not_to receive(:relate_issuable).with(issuable, issuable_b, anything)
+ expect(SystemNoteService).not_to receive(:relate_issuable).with(issuable_b, issuable, anything)
+
+ subject
+ end
+ end
+
+ context 'when there are invalid references' do
+ let(:params) do
+ { issuable_references: [issuable.to_reference, issuable_a.to_reference] }
+ end
+
+ it 'creates links only for valid references' do
+ expect { subject }.to change { issuable_link_class.count }.by(1)
+ end
+
+ it 'returns error status' do
+ expect(subject).to eq(
+ status: :error,
+ http_status: 422,
+ message: "#{issuable.to_reference} cannot be added: cannot be related to itself"
+ )
+ end
+ end
+ end
+end
diff --git a/workhorse/internal/upload/artifacts_store_test.go b/workhorse/internal/upload/artifacts_store_test.go
index 0e27efa7189..97e66fc37a4 100644
--- a/workhorse/internal/upload/artifacts_store_test.go
+++ b/workhorse/internal/upload/artifacts_store_test.go
@@ -17,8 +17,8 @@ import (
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/api"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/objectstore/test"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/testhelper"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination/objectstore/test"
)
func createTestZipArchive(t *testing.T) (data []byte, md5Hash string) {
diff --git a/workhorse/internal/upload/artifacts_upload_test.go b/workhorse/internal/upload/artifacts_upload_test.go
index a41fe04af99..0a9e4ef3869 100644
--- a/workhorse/internal/upload/artifacts_upload_test.go
+++ b/workhorse/internal/upload/artifacts_upload_test.go
@@ -19,10 +19,10 @@ import (
"gitlab.com/gitlab-org/labkit/log"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/api"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/filestore"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/helper"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/proxy"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/testhelper"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/upstream/roundtripper"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/zipartifacts"
@@ -60,7 +60,7 @@ func testArtifactsUploadServer(t *testing.T, authResponse *api.Response, bodyPro
w.Write(data)
})
mux.HandleFunc(Path, func(w http.ResponseWriter, r *http.Request) {
- opts, err := filestore.GetOpts(authResponse)
+ opts, err := destination.GetOpts(authResponse)
require.NoError(t, err)
if r.Method != "POST" {
diff --git a/workhorse/internal/upload/artifacts_uploader.go b/workhorse/internal/upload/artifacts_uploader.go
index 5a808b8de23..2a91a05fe3d 100644
--- a/workhorse/internal/upload/artifacts_uploader.go
+++ b/workhorse/internal/upload/artifacts_uploader.go
@@ -16,8 +16,8 @@ import (
"gitlab.com/gitlab-org/labkit/log"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/api"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/filestore"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/helper"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/zipartifacts"
)
@@ -35,7 +35,7 @@ var zipSubcommandsErrorsCounter = promauto.NewCounterVec(
}, []string{"error"})
type artifactsUploadProcessor struct {
- opts *filestore.SaveFileOpts
+ opts *destination.UploadOpts
format string
SavedFileTracker
@@ -57,11 +57,11 @@ func Artifacts(myAPI *api.API, h http.Handler, p Preparer) http.Handler {
}, "/authorize")
}
-func (a *artifactsUploadProcessor) generateMetadataFromZip(ctx context.Context, file *filestore.FileHandler) (*filestore.FileHandler, error) {
+func (a *artifactsUploadProcessor) generateMetadataFromZip(ctx context.Context, file *destination.FileHandler) (*destination.FileHandler, error) {
metaReader, metaWriter := io.Pipe()
defer metaWriter.Close()
- metaOpts := &filestore.SaveFileOpts{
+ metaOpts := &destination.UploadOpts{
LocalTempPath: a.opts.LocalTempPath,
TempFilePrefix: "metadata.gz",
}
@@ -86,12 +86,12 @@ func (a *artifactsUploadProcessor) generateMetadataFromZip(ctx context.Context,
type saveResult struct {
error
- *filestore.FileHandler
+ *destination.FileHandler
}
done := make(chan saveResult)
go func() {
var result saveResult
- result.FileHandler, result.error = filestore.SaveFileFromReader(ctx, metaReader, -1, metaOpts)
+ result.FileHandler, result.error = destination.Upload(ctx, metaReader, -1, metaOpts)
done <- result
}()
@@ -119,7 +119,7 @@ func (a *artifactsUploadProcessor) generateMetadataFromZip(ctx context.Context,
return result.FileHandler, result.error
}
-func (a *artifactsUploadProcessor) ProcessFile(ctx context.Context, formName string, file *filestore.FileHandler, writer *multipart.Writer) error {
+func (a *artifactsUploadProcessor) ProcessFile(ctx context.Context, formName string, file *destination.FileHandler, writer *multipart.Writer) error {
// ProcessFile for artifacts requires file form-data field name to eq `file`
if formName != "file" {
diff --git a/workhorse/internal/upload/body_uploader.go b/workhorse/internal/upload/body_uploader.go
index e9d372b9c47..d831f9f43a1 100644
--- a/workhorse/internal/upload/body_uploader.go
+++ b/workhorse/internal/upload/body_uploader.go
@@ -8,8 +8,8 @@ import (
"strings"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/api"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/filestore"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/helper"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
)
// RequestBody is a request middleware. It will store the request body to
@@ -23,7 +23,7 @@ func RequestBody(rails PreAuthorizer, h http.Handler, p Preparer) http.Handler {
return
}
- fh, err := filestore.SaveFileFromReader(r.Context(), r.Body, r.ContentLength, opts)
+ fh, err := destination.Upload(r.Context(), r.Body, r.ContentLength, opts)
if err != nil {
helper.Fail500(w, r, fmt.Errorf("RequestBody: upload failed: %v", err))
return
diff --git a/workhorse/internal/upload/body_uploader_test.go b/workhorse/internal/upload/body_uploader_test.go
index b3d561ac131..47490db8780 100644
--- a/workhorse/internal/upload/body_uploader_test.go
+++ b/workhorse/internal/upload/body_uploader_test.go
@@ -15,8 +15,8 @@ import (
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/api"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/filestore"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/testhelper"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
)
const (
@@ -169,8 +169,8 @@ type alwaysLocalPreparer struct {
prepareError error
}
-func (a *alwaysLocalPreparer) Prepare(_ *api.Response) (*filestore.SaveFileOpts, Verifier, error) {
- opts, err := filestore.GetOpts(&api.Response{TempPath: os.TempDir()})
+func (a *alwaysLocalPreparer) Prepare(_ *api.Response) (*destination.UploadOpts, Verifier, error) {
+ opts, err := destination.GetOpts(&api.Response{TempPath: os.TempDir()})
if err != nil {
return nil, nil, err
}
@@ -180,7 +180,7 @@ func (a *alwaysLocalPreparer) Prepare(_ *api.Response) (*filestore.SaveFileOpts,
type alwaysFailsVerifier struct{}
-func (alwaysFailsVerifier) Verify(handler *filestore.FileHandler) error {
+func (alwaysFailsVerifier) Verify(handler *destination.FileHandler) error {
return fmt.Errorf("Verification failed")
}
@@ -188,7 +188,7 @@ type mockVerifier struct {
invoked bool
}
-func (m *mockVerifier) Verify(handler *filestore.FileHandler) error {
+func (m *mockVerifier) Verify(handler *destination.FileHandler) error {
m.invoked = true
return nil
diff --git a/workhorse/internal/filestore/file_handler.go b/workhorse/internal/upload/destination/destination.go
index dac8d4d6247..df0d67aea30 100644
--- a/workhorse/internal/filestore/file_handler.go
+++ b/workhorse/internal/upload/destination/destination.go
@@ -1,4 +1,4 @@
-package filestore
+package destination
import (
"context"
@@ -14,8 +14,9 @@ import (
"gitlab.com/gitlab-org/labkit/log"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/objectstore"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/secret"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination/filestore"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination/objectstore"
)
type SizeError error
@@ -107,9 +108,9 @@ type consumer interface {
Consume(context.Context, io.Reader, time.Time) (int64, error)
}
-// SaveFileFromReader persists the provided reader content to all the location specified in opts. A cleanup will be performed once ctx is Done
+// Upload persists the provided reader content to all the location specified in opts. A cleanup will be performed once ctx is Done
// Make sure the provided context will not expire before finalizing upload with GitLab Rails.
-func SaveFileFromReader(ctx context.Context, reader io.Reader, size int64, opts *SaveFileOpts) (*FileHandler, error) {
+func Upload(ctx context.Context, reader io.Reader, size int64, opts *UploadOpts) (*FileHandler, error) {
fh := &FileHandler{
Name: opts.TempFilePrefix,
RemoteID: opts.RemoteID,
@@ -126,7 +127,7 @@ func SaveFileFromReader(ctx context.Context, reader io.Reader, size int64, opts
switch {
case opts.IsLocal():
clientMode = "local"
- uploadDestination, err = fh.uploadLocalFile(ctx, opts)
+ uploadDestination, err = fh.newLocalFile(ctx, opts)
case opts.UseWorkhorseClientEnabled() && opts.ObjectStorageConfig.IsGoCloud():
clientMode = fmt.Sprintf("go_cloud:%s", opts.ObjectStorageConfig.Provider)
p := &objectstore.GoCloudObjectParams{
@@ -210,16 +211,16 @@ func SaveFileFromReader(ctx context.Context, reader io.Reader, size int64, opts
return fh, nil
}
-func (fh *FileHandler) uploadLocalFile(ctx context.Context, opts *SaveFileOpts) (consumer, error) {
+func (fh *FileHandler) newLocalFile(ctx context.Context, opts *UploadOpts) (consumer, error) {
// make sure TempFolder exists
err := os.MkdirAll(opts.LocalTempPath, 0700)
if err != nil {
- return nil, fmt.Errorf("uploadLocalFile: mkdir %q: %v", opts.LocalTempPath, err)
+ return nil, fmt.Errorf("newLocalFile: mkdir %q: %v", opts.LocalTempPath, err)
}
file, err := ioutil.TempFile(opts.LocalTempPath, opts.TempFilePrefix)
if err != nil {
- return nil, fmt.Errorf("uploadLocalFile: create file: %v", err)
+ return nil, fmt.Errorf("newLocalFile: create file: %v", err)
}
go func() {
@@ -228,32 +229,5 @@ func (fh *FileHandler) uploadLocalFile(ctx context.Context, opts *SaveFileOpts)
}()
fh.LocalPath = file.Name()
- return &localUpload{file}, nil
-}
-
-type localUpload struct{ io.WriteCloser }
-
-func (loc *localUpload) Consume(_ context.Context, r io.Reader, _ time.Time) (int64, error) {
- n, err := io.Copy(loc.WriteCloser, r)
- errClose := loc.Close()
- if err == nil {
- err = errClose
- }
- return n, err
-}
-
-// SaveFileFromDisk open the local file fileName and calls SaveFileFromReader
-func SaveFileFromDisk(ctx context.Context, fileName string, opts *SaveFileOpts) (fh *FileHandler, err error) {
- file, err := os.Open(fileName)
- if err != nil {
- return nil, err
- }
- defer file.Close()
-
- fi, err := file.Stat()
- if err != nil {
- return nil, err
- }
-
- return SaveFileFromReader(ctx, file, fi.Size(), opts)
+ return &filestore.LocalFile{File: file}, nil
}
diff --git a/workhorse/internal/filestore/file_handler_test.go b/workhorse/internal/upload/destination/destination_test.go
index 2fd034bb761..ddf0ea24d60 100644
--- a/workhorse/internal/filestore/file_handler_test.go
+++ b/workhorse/internal/upload/destination/destination_test.go
@@ -1,4 +1,4 @@
-package filestore_test
+package destination_test
import (
"context"
@@ -17,13 +17,13 @@ import (
"gocloud.dev/blob"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/config"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/filestore"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/objectstore/test"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/testhelper"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination/objectstore/test"
)
func testDeadline() time.Time {
- return time.Now().Add(filestore.DefaultObjectStoreTimeout)
+ return time.Now().Add(destination.DefaultObjectStoreTimeout)
}
func requireFileGetsRemovedAsync(t *testing.T, filePath string) {
@@ -39,7 +39,7 @@ func requireObjectStoreDeletedAsync(t *testing.T, expectedDeletes int, osStub *t
require.Eventually(t, func() bool { return osStub.DeletesCnt() == expectedDeletes }, time.Second, time.Millisecond, "Object not deleted")
}
-func TestSaveFileWrongSize(t *testing.T) {
+func TestUploadWrongSize(t *testing.T) {
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
@@ -47,15 +47,15 @@ func TestSaveFileWrongSize(t *testing.T) {
require.NoError(t, err)
defer os.RemoveAll(tmpFolder)
- opts := &filestore.SaveFileOpts{LocalTempPath: tmpFolder, TempFilePrefix: "test-file"}
- fh, err := filestore.SaveFileFromReader(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize+1, opts)
+ opts := &destination.UploadOpts{LocalTempPath: tmpFolder, TempFilePrefix: "test-file"}
+ fh, err := destination.Upload(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize+1, opts)
require.Error(t, err)
- _, isSizeError := err.(filestore.SizeError)
+ _, isSizeError := err.(destination.SizeError)
require.True(t, isSizeError, "Should fail with SizeError")
require.Nil(t, fh)
}
-func TestSaveFileWithKnownSizeExceedLimit(t *testing.T) {
+func TestUploadWithKnownSizeExceedLimit(t *testing.T) {
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
@@ -63,15 +63,15 @@ func TestSaveFileWithKnownSizeExceedLimit(t *testing.T) {
require.NoError(t, err)
defer os.RemoveAll(tmpFolder)
- opts := &filestore.SaveFileOpts{LocalTempPath: tmpFolder, TempFilePrefix: "test-file", MaximumSize: test.ObjectSize - 1}
- fh, err := filestore.SaveFileFromReader(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize, opts)
+ opts := &destination.UploadOpts{LocalTempPath: tmpFolder, TempFilePrefix: "test-file", MaximumSize: test.ObjectSize - 1}
+ fh, err := destination.Upload(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize, opts)
require.Error(t, err)
- _, isSizeError := err.(filestore.SizeError)
+ _, isSizeError := err.(destination.SizeError)
require.True(t, isSizeError, "Should fail with SizeError")
require.Nil(t, fh)
}
-func TestSaveFileWithUnknownSizeExceedLimit(t *testing.T) {
+func TestUploadWithUnknownSizeExceedLimit(t *testing.T) {
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
@@ -79,22 +79,13 @@ func TestSaveFileWithUnknownSizeExceedLimit(t *testing.T) {
require.NoError(t, err)
defer os.RemoveAll(tmpFolder)
- opts := &filestore.SaveFileOpts{LocalTempPath: tmpFolder, TempFilePrefix: "test-file", MaximumSize: test.ObjectSize - 1}
- fh, err := filestore.SaveFileFromReader(ctx, strings.NewReader(test.ObjectContent), -1, opts)
- require.Equal(t, err, filestore.ErrEntityTooLarge)
+ opts := &destination.UploadOpts{LocalTempPath: tmpFolder, TempFilePrefix: "test-file", MaximumSize: test.ObjectSize - 1}
+ fh, err := destination.Upload(ctx, strings.NewReader(test.ObjectContent), -1, opts)
+ require.Equal(t, err, destination.ErrEntityTooLarge)
require.Nil(t, fh)
}
-func TestSaveFromDiskNotExistingFile(t *testing.T) {
- ctx, cancel := context.WithCancel(context.Background())
- defer cancel()
- fh, err := filestore.SaveFileFromDisk(ctx, "/I/do/not/exist", &filestore.SaveFileOpts{})
- require.Error(t, err, "SaveFileFromDisk should fail")
- require.True(t, os.IsNotExist(err), "Provided file should not exists")
- require.Nil(t, fh, "On error FileHandler should be nil")
-}
-
-func TestSaveFileWrongETag(t *testing.T) {
+func TestUploadWrongETag(t *testing.T) {
tests := []struct {
name string
multipart bool
@@ -110,7 +101,7 @@ func TestSaveFileWrongETag(t *testing.T) {
objectURL := ts.URL + test.ObjectPath
- opts := &filestore.SaveFileOpts{
+ opts := &destination.UploadOpts{
RemoteID: "test-file",
RemoteURL: objectURL,
PresignedPut: objectURL + "?Signature=ASignature",
@@ -126,7 +117,7 @@ func TestSaveFileWrongETag(t *testing.T) {
osStub.InitiateMultipartUpload(test.ObjectPath)
}
ctx, cancel := context.WithCancel(context.Background())
- fh, err := filestore.SaveFileFromReader(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize, opts)
+ fh, err := destination.Upload(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize, opts)
require.Nil(t, fh)
require.Error(t, err)
require.Equal(t, 1, osStub.PutsCnt(), "File not uploaded")
@@ -138,32 +129,7 @@ func TestSaveFileWrongETag(t *testing.T) {
}
}
-func TestSaveFileFromDiskToLocalPath(t *testing.T) {
- f, err := ioutil.TempFile("", "workhorse-test")
- require.NoError(t, err)
- defer os.Remove(f.Name())
-
- _, err = fmt.Fprint(f, test.ObjectContent)
- require.NoError(t, err)
-
- tmpFolder, err := ioutil.TempDir("", "workhorse-test-tmp")
- require.NoError(t, err)
- defer os.RemoveAll(tmpFolder)
-
- ctx, cancel := context.WithCancel(context.Background())
- defer cancel()
-
- opts := &filestore.SaveFileOpts{LocalTempPath: tmpFolder}
- fh, err := filestore.SaveFileFromDisk(ctx, f.Name(), opts)
- require.NoError(t, err)
- require.NotNil(t, fh)
-
- require.NotEmpty(t, fh.LocalPath, "File not persisted on disk")
- _, err = os.Stat(fh.LocalPath)
- require.NoError(t, err)
-}
-
-func TestSaveFile(t *testing.T) {
+func TestUpload(t *testing.T) {
testhelper.ConfigureSecret()
type remote int
@@ -189,7 +155,7 @@ func TestSaveFile(t *testing.T) {
for _, spec := range tests {
t.Run(spec.name, func(t *testing.T) {
- var opts filestore.SaveFileOpts
+ var opts destination.UploadOpts
var expectedDeletes, expectedPuts int
osStub, ts := test.StartObjectStore()
@@ -231,7 +197,7 @@ func TestSaveFile(t *testing.T) {
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
- fh, err := filestore.SaveFileFromReader(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize, &opts)
+ fh, err := destination.Upload(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize, &opts)
require.NoError(t, err)
require.NotNil(t, fh)
@@ -279,7 +245,7 @@ func TestSaveFile(t *testing.T) {
}
}
-func TestSaveFileWithS3WorkhorseClient(t *testing.T) {
+func TestUploadWithS3WorkhorseClient(t *testing.T) {
tests := []struct {
name string
objectSize int64
@@ -298,7 +264,7 @@ func TestSaveFileWithS3WorkhorseClient(t *testing.T) {
name: "unknown object size with limit",
objectSize: -1,
maxSize: test.ObjectSize - 1,
- expectedErr: filestore.ErrEntityTooLarge,
+ expectedErr: destination.ErrEntityTooLarge,
},
}
@@ -312,12 +278,12 @@ func TestSaveFileWithS3WorkhorseClient(t *testing.T) {
defer cancel()
remoteObject := "tmp/test-file/1"
- opts := filestore.SaveFileOpts{
+ opts := destination.UploadOpts{
RemoteID: "test-file",
Deadline: testDeadline(),
UseWorkhorseClient: true,
RemoteTempObjectID: remoteObject,
- ObjectStorageConfig: filestore.ObjectStorageConfig{
+ ObjectStorageConfig: destination.ObjectStorageConfig{
Provider: "AWS",
S3Credentials: s3Creds,
S3Config: s3Config,
@@ -325,7 +291,7 @@ func TestSaveFileWithS3WorkhorseClient(t *testing.T) {
MaximumSize: tc.maxSize,
}
- _, err := filestore.SaveFileFromReader(ctx, strings.NewReader(test.ObjectContent), tc.objectSize, &opts)
+ _, err := destination.Upload(ctx, strings.NewReader(test.ObjectContent), tc.objectSize, &opts)
if tc.expectedErr == nil {
require.NoError(t, err)
@@ -338,7 +304,7 @@ func TestSaveFileWithS3WorkhorseClient(t *testing.T) {
}
}
-func TestSaveFileWithAzureWorkhorseClient(t *testing.T) {
+func TestUploadWithAzureWorkhorseClient(t *testing.T) {
mux, bucketDir, cleanup := test.SetupGoCloudFileBucket(t, "azblob")
defer cleanup()
@@ -346,48 +312,48 @@ func TestSaveFileWithAzureWorkhorseClient(t *testing.T) {
defer cancel()
remoteObject := "tmp/test-file/1"
- opts := filestore.SaveFileOpts{
+ opts := destination.UploadOpts{
RemoteID: "test-file",
Deadline: testDeadline(),
UseWorkhorseClient: true,
RemoteTempObjectID: remoteObject,
- ObjectStorageConfig: filestore.ObjectStorageConfig{
+ ObjectStorageConfig: destination.ObjectStorageConfig{
Provider: "AzureRM",
URLMux: mux,
GoCloudConfig: config.GoCloudConfig{URL: "azblob://test-container"},
},
}
- _, err := filestore.SaveFileFromReader(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize, &opts)
+ _, err := destination.Upload(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize, &opts)
require.NoError(t, err)
test.GoCloudObjectExists(t, bucketDir, remoteObject)
}
-func TestSaveFileWithUnknownGoCloudScheme(t *testing.T) {
+func TestUploadWithUnknownGoCloudScheme(t *testing.T) {
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
mux := new(blob.URLMux)
remoteObject := "tmp/test-file/1"
- opts := filestore.SaveFileOpts{
+ opts := destination.UploadOpts{
RemoteID: "test-file",
Deadline: testDeadline(),
UseWorkhorseClient: true,
RemoteTempObjectID: remoteObject,
- ObjectStorageConfig: filestore.ObjectStorageConfig{
+ ObjectStorageConfig: destination.ObjectStorageConfig{
Provider: "SomeCloud",
URLMux: mux,
GoCloudConfig: config.GoCloudConfig{URL: "foo://test-container"},
},
}
- _, err := filestore.SaveFileFromReader(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize, &opts)
+ _, err := destination.Upload(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize, &opts)
require.Error(t, err)
}
-func TestSaveMultipartInBodyFailure(t *testing.T) {
+func TestUploadMultipartInBodyFailure(t *testing.T) {
osStub, ts := test.StartObjectStore()
defer ts.Close()
@@ -395,7 +361,7 @@ func TestSaveMultipartInBodyFailure(t *testing.T) {
// this is the only way to get an in-body failure from our ObjectStoreStub
objectPath := "/bucket-but-no-object-key"
objectURL := ts.URL + objectPath
- opts := filestore.SaveFileOpts{
+ opts := destination.UploadOpts{
RemoteID: "test-file",
RemoteURL: objectURL,
PartSize: test.ObjectSize,
@@ -409,13 +375,13 @@ func TestSaveMultipartInBodyFailure(t *testing.T) {
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
- fh, err := filestore.SaveFileFromReader(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize, &opts)
+ fh, err := destination.Upload(ctx, strings.NewReader(test.ObjectContent), test.ObjectSize, &opts)
require.Nil(t, fh)
require.Error(t, err)
require.EqualError(t, err, test.MultipartUploadInternalError().Error())
}
-func TestSaveRemoteFileWithLimit(t *testing.T) {
+func TestUploadRemoteFileWithLimit(t *testing.T) {
testhelper.ConfigureSecret()
type remote int
@@ -449,20 +415,20 @@ func TestSaveRemoteFileWithLimit(t *testing.T) {
testData: test.ObjectContent,
objectSize: -1,
maxSize: test.ObjectSize - 1,
- expectedErr: filestore.ErrEntityTooLarge,
+ expectedErr: destination.ErrEntityTooLarge,
},
{
name: "large object with unknown size with limit",
testData: string(make([]byte, 20000)),
objectSize: -1,
maxSize: 19000,
- expectedErr: filestore.ErrEntityTooLarge,
+ expectedErr: destination.ErrEntityTooLarge,
},
}
for _, tc := range tests {
t.Run(tc.name, func(t *testing.T) {
- var opts filestore.SaveFileOpts
+ var opts destination.UploadOpts
for _, remoteType := range remoteTypes {
tmpFolder, err := ioutil.TempDir("", "workhorse-test-tmp")
@@ -502,7 +468,7 @@ func TestSaveRemoteFileWithLimit(t *testing.T) {
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
- fh, err := filestore.SaveFileFromReader(ctx, strings.NewReader(tc.testData), tc.objectSize, &opts)
+ fh, err := destination.Upload(ctx, strings.NewReader(tc.testData), tc.objectSize, &opts)
if tc.expectedErr == nil {
require.NoError(t, err)
@@ -516,7 +482,7 @@ func TestSaveRemoteFileWithLimit(t *testing.T) {
}
}
-func checkFileHandlerWithFields(t *testing.T, fh *filestore.FileHandler, fields map[string]string, prefix string) {
+func checkFileHandlerWithFields(t *testing.T, fh *destination.FileHandler, fields map[string]string, prefix string) {
key := func(field string) string {
if prefix == "" {
return field
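
The tests above exercise the renamed entry point: destination.Upload keeps the shape of the old SaveFileFromReader call (a context, a reader, a size with -1 meaning unknown, and a pointer to UploadOpts). A minimal local-disk sketch built only from fields visible in these hunks; the temp path, prefix, and payload are made-up illustrations:

package main

import (
	"context"
	"strings"
	"time"

	"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
)

func main() {
	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()

	// Local-disk destination; path and prefix are illustrative values.
	opts := destination.UploadOpts{
		LocalTempPath:  "/tmp/uploads",
		TempFilePrefix: "example-upload",
		Deadline:       time.Now().Add(time.Minute),
	}

	content := "example payload"
	// Passing -1 instead of the length would mean "size unknown".
	fh, err := destination.Upload(ctx, strings.NewReader(content), int64(len(content)), &opts)
	if err != nil {
		// destination.ErrEntityTooLarge is the sentinel returned when a size limit is exceeded.
		panic(err)
	}
	_ = fh // *destination.FileHandler describes where and how the file was stored
}
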
diff --git a/workhorse/internal/upload/destination/filestore/filestore.go b/workhorse/internal/upload/destination/filestore/filestore.go
new file mode 100644
index 00000000000..f593cc30aeb
--- /dev/null
+++ b/workhorse/internal/upload/destination/filestore/filestore.go
@@ -0,0 +1,20 @@
+package filestore
+
+import (
+ "context"
+ "io"
+ "time"
+)
+
+type LocalFile struct {
+ File io.WriteCloser
+}
+
+func (lf *LocalFile) Consume(_ context.Context, r io.Reader, _ time.Time) (int64, error) {
+ n, err := io.Copy(lf.File, r)
+ errClose := lf.File.Close()
+ if err == nil {
+ err = errClose
+ }
+ return n, err
+}
diff --git a/workhorse/internal/upload/destination/filestore/filestore_test.go b/workhorse/internal/upload/destination/filestore/filestore_test.go
new file mode 100644
index 00000000000..ec67eae96b9
--- /dev/null
+++ b/workhorse/internal/upload/destination/filestore/filestore_test.go
@@ -0,0 +1,38 @@
+package filestore
+
+import (
+ "context"
+ "io/ioutil"
+ "os"
+ "strings"
+ "testing"
+ "time"
+
+ "github.com/stretchr/testify/require"
+)
+
+func TestConsume(t *testing.T) {
+ f, err := ioutil.TempFile("", "filestore-local-file")
+ if f != nil {
+ defer os.Remove(f.Name())
+ }
+ require.NoError(t, err)
+ defer f.Close()
+
+ localFile := &LocalFile{File: f}
+
+ ctx, cancel := context.WithCancel(context.Background())
+ defer cancel()
+
+ content := "file content"
+ reader := strings.NewReader(content)
+ var deadline time.Time
+
+ n, err := localFile.Consume(ctx, reader, deadline)
+ require.NoError(t, err)
+ require.Equal(t, int64(len(content)), n)
+
+ consumedContent, err := ioutil.ReadFile(f.Name())
+ require.NoError(t, err)
+ require.Equal(t, content, string(consumedContent))
+}
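
Consume only needs an io.WriteCloser, so LocalFile is not tied to *os.File: it copies the reader into the sink, closes it, and reports the close error only when the copy itself succeeded. A small sketch of that flexibility, where nopWriteCloser and consumeToBuilder are invented for illustration:

package example

import (
	"context"
	"io"
	"strings"
	"time"

	"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination/filestore"
)

// nopWriteCloser adapts any io.Writer to the io.WriteCloser LocalFile expects.
type nopWriteCloser struct{ io.Writer }

func (nopWriteCloser) Close() error { return nil }

// consumeToBuilder streams a fixed payload into a strings.Builder via LocalFile.
func consumeToBuilder() (int64, error) {
	var sb strings.Builder
	lf := &filestore.LocalFile{File: nopWriteCloser{&sb}}
	// The deadline argument is ignored by Consume, so the zero value is fine.
	return lf.Consume(context.Background(), strings.NewReader("payload"), time.Time{})
}
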
diff --git a/workhorse/internal/filestore/multi_hash.go b/workhorse/internal/upload/destination/multi_hash.go
index 40efd3a5c1f..7d4884af3dc 100644
--- a/workhorse/internal/filestore/multi_hash.go
+++ b/workhorse/internal/upload/destination/multi_hash.go
@@ -1,4 +1,4 @@
-package filestore
+package destination
import (
"crypto/md5"
diff --git a/workhorse/internal/objectstore/gocloud_object.go b/workhorse/internal/upload/destination/objectstore/gocloud_object.go
index 38545086994..38545086994 100644
--- a/workhorse/internal/objectstore/gocloud_object.go
+++ b/workhorse/internal/upload/destination/objectstore/gocloud_object.go
diff --git a/workhorse/internal/objectstore/gocloud_object_test.go b/workhorse/internal/upload/destination/objectstore/gocloud_object_test.go
index f320a65dbfb..57b3a35b41e 100644
--- a/workhorse/internal/objectstore/gocloud_object_test.go
+++ b/workhorse/internal/upload/destination/objectstore/gocloud_object_test.go
@@ -9,9 +9,9 @@ import (
"github.com/stretchr/testify/require"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/objectstore"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/objectstore/test"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/testhelper"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination/objectstore"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination/objectstore/test"
)
func TestGoCloudObjectUpload(t *testing.T) {
diff --git a/workhorse/internal/objectstore/multipart.go b/workhorse/internal/upload/destination/objectstore/multipart.go
index 4c5b64b27ee..4c5b64b27ee 100644
--- a/workhorse/internal/objectstore/multipart.go
+++ b/workhorse/internal/upload/destination/objectstore/multipart.go
diff --git a/workhorse/internal/objectstore/multipart_test.go b/workhorse/internal/upload/destination/objectstore/multipart_test.go
index 42ab5b4e535..4aff3467e30 100644
--- a/workhorse/internal/objectstore/multipart_test.go
+++ b/workhorse/internal/upload/destination/objectstore/multipart_test.go
@@ -11,8 +11,8 @@ import (
"github.com/stretchr/testify/require"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/objectstore"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/objectstore/test"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination/objectstore"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination/objectstore/test"
)
func TestMultipartUploadWithUpcaseETags(t *testing.T) {
diff --git a/workhorse/internal/objectstore/object.go b/workhorse/internal/upload/destination/objectstore/object.go
index b7c4f12f009..b7c4f12f009 100644
--- a/workhorse/internal/objectstore/object.go
+++ b/workhorse/internal/upload/destination/objectstore/object.go
diff --git a/workhorse/internal/objectstore/object_test.go b/workhorse/internal/upload/destination/objectstore/object_test.go
index b9c1fb2087b..24117891b6d 100644
--- a/workhorse/internal/objectstore/object_test.go
+++ b/workhorse/internal/upload/destination/objectstore/object_test.go
@@ -11,8 +11,8 @@ import (
"github.com/stretchr/testify/require"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/objectstore"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/objectstore/test"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination/objectstore"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination/objectstore/test"
)
const testTimeout = 10 * time.Second
diff --git a/workhorse/internal/objectstore/prometheus.go b/workhorse/internal/upload/destination/objectstore/prometheus.go
index 20762fb52bc..20762fb52bc 100644
--- a/workhorse/internal/objectstore/prometheus.go
+++ b/workhorse/internal/upload/destination/objectstore/prometheus.go
diff --git a/workhorse/internal/objectstore/s3_complete_multipart_api.go b/workhorse/internal/upload/destination/objectstore/s3_complete_multipart_api.go
index b84f5757f49..b84f5757f49 100644
--- a/workhorse/internal/objectstore/s3_complete_multipart_api.go
+++ b/workhorse/internal/upload/destination/objectstore/s3_complete_multipart_api.go
diff --git a/workhorse/internal/objectstore/s3_object.go b/workhorse/internal/upload/destination/objectstore/s3_object.go
index ce0cccc7eb1..ce0cccc7eb1 100644
--- a/workhorse/internal/objectstore/s3_object.go
+++ b/workhorse/internal/upload/destination/objectstore/s3_object.go
diff --git a/workhorse/internal/objectstore/s3_object_test.go b/workhorse/internal/upload/destination/objectstore/s3_object_test.go
index c7426e3843b..b81b0ae2024 100644
--- a/workhorse/internal/objectstore/s3_object_test.go
+++ b/workhorse/internal/upload/destination/objectstore/s3_object_test.go
@@ -18,9 +18,9 @@ import (
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/config"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/objectstore"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/objectstore/test"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/testhelper"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination/objectstore"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination/objectstore/test"
)
type failedReader struct {
diff --git a/workhorse/internal/objectstore/s3_session.go b/workhorse/internal/upload/destination/objectstore/s3_session.go
index a0c1f099145..a0c1f099145 100644
--- a/workhorse/internal/objectstore/s3_session.go
+++ b/workhorse/internal/upload/destination/objectstore/s3_session.go
diff --git a/workhorse/internal/objectstore/s3_session_test.go b/workhorse/internal/upload/destination/objectstore/s3_session_test.go
index 5d57b4f9af8..5d57b4f9af8 100644
--- a/workhorse/internal/objectstore/s3_session_test.go
+++ b/workhorse/internal/upload/destination/objectstore/s3_session_test.go
diff --git a/workhorse/internal/objectstore/test/consts.go b/workhorse/internal/upload/destination/objectstore/test/consts.go
index 7a1bcc28d45..7a1bcc28d45 100644
--- a/workhorse/internal/objectstore/test/consts.go
+++ b/workhorse/internal/upload/destination/objectstore/test/consts.go
diff --git a/workhorse/internal/objectstore/test/gocloud_stub.go b/workhorse/internal/upload/destination/objectstore/test/gocloud_stub.go
index cf22075e407..cf22075e407 100644
--- a/workhorse/internal/objectstore/test/gocloud_stub.go
+++ b/workhorse/internal/upload/destination/objectstore/test/gocloud_stub.go
diff --git a/workhorse/internal/objectstore/test/objectstore_stub.go b/workhorse/internal/upload/destination/objectstore/test/objectstore_stub.go
index ec5e271b759..d51a2de7456 100644
--- a/workhorse/internal/objectstore/test/objectstore_stub.go
+++ b/workhorse/internal/upload/destination/objectstore/test/objectstore_stub.go
@@ -13,7 +13,7 @@ import (
"strings"
"sync"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/objectstore"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination/objectstore"
)
type partsEtagMap map[int]string
diff --git a/workhorse/internal/objectstore/test/objectstore_stub_test.go b/workhorse/internal/upload/destination/objectstore/test/objectstore_stub_test.go
index 8c0d52a2d79..8c0d52a2d79 100644
--- a/workhorse/internal/objectstore/test/objectstore_stub_test.go
+++ b/workhorse/internal/upload/destination/objectstore/test/objectstore_stub_test.go
diff --git a/workhorse/internal/objectstore/test/s3_stub.go b/workhorse/internal/upload/destination/objectstore/test/s3_stub.go
index 6b83426b852..6b83426b852 100644
--- a/workhorse/internal/objectstore/test/s3_stub.go
+++ b/workhorse/internal/upload/destination/objectstore/test/s3_stub.go
diff --git a/workhorse/internal/objectstore/upload_strategy.go b/workhorse/internal/upload/destination/objectstore/upload_strategy.go
index 5707ba5f24e..5707ba5f24e 100644
--- a/workhorse/internal/objectstore/upload_strategy.go
+++ b/workhorse/internal/upload/destination/objectstore/upload_strategy.go
diff --git a/workhorse/internal/objectstore/uploader.go b/workhorse/internal/upload/destination/objectstore/uploader.go
index aedfbe55ead..aedfbe55ead 100644
--- a/workhorse/internal/objectstore/uploader.go
+++ b/workhorse/internal/upload/destination/objectstore/uploader.go
diff --git a/workhorse/internal/filestore/reader.go b/workhorse/internal/upload/destination/reader.go
index b1045b991fc..925a9468e14 100644
--- a/workhorse/internal/filestore/reader.go
+++ b/workhorse/internal/upload/destination/reader.go
@@ -1,4 +1,4 @@
-package filestore
+package destination
import "io"
diff --git a/workhorse/internal/filestore/reader_test.go b/workhorse/internal/upload/destination/reader_test.go
index 424d921ecaf..a26f7746a13 100644
--- a/workhorse/internal/filestore/reader_test.go
+++ b/workhorse/internal/upload/destination/reader_test.go
@@ -1,4 +1,4 @@
-package filestore
+package destination
import (
"fmt"
diff --git a/workhorse/internal/filestore/save_file_opts.go b/workhorse/internal/upload/destination/upload_opts.go
index 544101d693a..750a79d7bc2 100644
--- a/workhorse/internal/filestore/save_file_opts.go
+++ b/workhorse/internal/upload/destination/upload_opts.go
@@ -1,4 +1,4 @@
-package filestore
+package destination
import (
"errors"
@@ -27,8 +27,8 @@ type ObjectStorageConfig struct {
GoCloudConfig config.GoCloudConfig
}
-// SaveFileOpts represents all the options available for saving a file to object store
-type SaveFileOpts struct {
+// UploadOpts represents all the options available for saving a file to object store
+type UploadOpts struct {
// TempFilePrefix is the prefix used to create temporary local file
TempFilePrefix string
// LocalTempPath is the directory where to write a local copy of the file
@@ -66,28 +66,28 @@ type SaveFileOpts struct {
}
// UseWorkhorseClientEnabled checks if the options require direct access to object storage
-func (s *SaveFileOpts) UseWorkhorseClientEnabled() bool {
+func (s *UploadOpts) UseWorkhorseClientEnabled() bool {
return s.UseWorkhorseClient && s.ObjectStorageConfig.IsValid() && s.RemoteTempObjectID != ""
}
// IsLocal checks if the options require the writing of the file on disk
-func (s *SaveFileOpts) IsLocal() bool {
+func (s *UploadOpts) IsLocal() bool {
return s.LocalTempPath != ""
}
// IsMultipart checks if the options requires a Multipart upload
-func (s *SaveFileOpts) IsMultipart() bool {
+func (s *UploadOpts) IsMultipart() bool {
return s.PartSize > 0
}
-// GetOpts converts GitLab api.Response to a proper SaveFileOpts
-func GetOpts(apiResponse *api.Response) (*SaveFileOpts, error) {
+// GetOpts converts GitLab api.Response to a proper UploadOpts
+func GetOpts(apiResponse *api.Response) (*UploadOpts, error) {
timeout := time.Duration(apiResponse.RemoteObject.Timeout) * time.Second
if timeout == 0 {
timeout = DefaultObjectStoreTimeout
}
- opts := SaveFileOpts{
+ opts := UploadOpts{
LocalTempPath: apiResponse.TempPath,
RemoteID: apiResponse.RemoteObject.ID,
RemoteURL: apiResponse.RemoteObject.GetURL,
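
GetOpts and the renamed helper methods above decide where an upload lands. A sketch of how a caller might branch on them, assuming an *api.Response is already in hand; describeOpts and its return strings are hypothetical:

package example

import (
	"gitlab.com/gitlab-org/gitlab/workhorse/internal/api"
	"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
)

// describeOpts converts the Rails response into UploadOpts and reports which
// path the helper methods would select; only methods shown above are used.
func describeOpts(apiResponse *api.Response) (string, error) {
	opts, err := destination.GetOpts(apiResponse)
	if err != nil {
		return "", err
	}

	switch {
	case opts.UseWorkhorseClientEnabled():
		return "direct upload through the Workhorse object storage client", nil
	case opts.IsMultipart():
		return "multipart upload to object storage", nil
	case opts.IsLocal():
		return "write to a local temporary path", nil
	default:
		return "single-request upload to object storage", nil
	}
}
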
diff --git a/workhorse/internal/filestore/save_file_opts_test.go b/workhorse/internal/upload/destination/upload_opts_test.go
index f389b2054e5..fde726c985d 100644
--- a/workhorse/internal/filestore/save_file_opts_test.go
+++ b/workhorse/internal/upload/destination/upload_opts_test.go
@@ -1,4 +1,4 @@
-package filestore_test
+package destination_test
import (
"testing"
@@ -8,11 +8,11 @@ import (
"gitlab.com/gitlab-org/gitlab/workhorse/internal/api"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/config"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/filestore"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/objectstore/test"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination/objectstore/test"
)
-func TestSaveFileOptsLocalAndRemote(t *testing.T) {
+func TestUploadOptsLocalAndRemote(t *testing.T) {
tests := []struct {
name string
localTempPath string
@@ -43,7 +43,7 @@ func TestSaveFileOptsLocalAndRemote(t *testing.T) {
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
- opts := filestore.SaveFileOpts{
+ opts := destination.UploadOpts{
LocalTempPath: test.localTempPath,
PresignedPut: test.presignedPut,
PartSize: test.partSize,
@@ -106,7 +106,7 @@ func TestGetOpts(t *testing.T) {
},
}
deadline := time.Now().Add(time.Duration(apiResponse.RemoteObject.Timeout) * time.Second)
- opts, err := filestore.GetOpts(apiResponse)
+ opts, err := destination.GetOpts(apiResponse)
require.NoError(t, err)
require.Equal(t, apiResponse.TempPath, opts.LocalTempPath)
@@ -155,22 +155,22 @@ func TestGetOptsFail(t *testing.T) {
for _, tc := range testCases {
t.Run(tc.desc, func(t *testing.T) {
- _, err := filestore.GetOpts(tc.in)
+ _, err := destination.GetOpts(tc.in)
require.Error(t, err, "expect input to be rejected")
})
}
}
func TestGetOptsDefaultTimeout(t *testing.T) {
- deadline := time.Now().Add(filestore.DefaultObjectStoreTimeout)
- opts, err := filestore.GetOpts(&api.Response{TempPath: "/foo/bar"})
+ deadline := time.Now().Add(destination.DefaultObjectStoreTimeout)
+ opts, err := destination.GetOpts(&api.Response{TempPath: "/foo/bar"})
require.NoError(t, err)
require.WithinDuration(t, deadline, opts.Deadline, time.Minute)
}
func TestUseWorkhorseClientEnabled(t *testing.T) {
- cfg := filestore.ObjectStorageConfig{
+ cfg := destination.ObjectStorageConfig{
Provider: "AWS",
S3Config: config.S3Config{
Bucket: "test-bucket",
@@ -195,7 +195,7 @@ func TestUseWorkhorseClientEnabled(t *testing.T) {
name string
UseWorkhorseClient bool
remoteTempObjectID string
- objectStorageConfig filestore.ObjectStorageConfig
+ objectStorageConfig destination.ObjectStorageConfig
expected bool
}{
{
@@ -243,7 +243,7 @@ func TestUseWorkhorseClientEnabled(t *testing.T) {
name: "missing S3 bucket",
UseWorkhorseClient: true,
remoteTempObjectID: "test-object",
- objectStorageConfig: filestore.ObjectStorageConfig{
+ objectStorageConfig: destination.ObjectStorageConfig{
Provider: "AWS",
S3Config: config.S3Config{},
},
@@ -269,7 +269,7 @@ func TestUseWorkhorseClientEnabled(t *testing.T) {
},
}
deadline := time.Now().Add(time.Duration(apiResponse.RemoteObject.Timeout) * time.Second)
- opts, err := filestore.GetOpts(apiResponse)
+ opts, err := destination.GetOpts(apiResponse)
require.NoError(t, err)
opts.ObjectStorageConfig = test.objectStorageConfig
@@ -323,7 +323,7 @@ func TestGoCloudConfig(t *testing.T) {
},
}
deadline := time.Now().Add(time.Duration(apiResponse.RemoteObject.Timeout) * time.Second)
- opts, err := filestore.GetOpts(apiResponse)
+ opts, err := destination.GetOpts(apiResponse)
require.NoError(t, err)
opts.ObjectStorageConfig.URLMux = mux
diff --git a/workhorse/internal/upload/lfs_preparer.go b/workhorse/internal/upload/lfs_preparer.go
index 29ef5c4a5c9..e7c5cf16a30 100644
--- a/workhorse/internal/upload/lfs_preparer.go
+++ b/workhorse/internal/upload/lfs_preparer.go
@@ -5,7 +5,7 @@ import (
"gitlab.com/gitlab-org/gitlab/workhorse/internal/api"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/config"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/filestore"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
)
type object struct {
@@ -13,7 +13,7 @@ type object struct {
oid string
}
-func (l *object) Verify(fh *filestore.FileHandler) error {
+func (l *object) Verify(fh *destination.FileHandler) error {
if fh.Size != l.size {
return fmt.Errorf("LFSObject: expected size %d, wrote %d", l.size, fh.Size)
}
@@ -35,7 +35,7 @@ func NewLfsPreparer(c config.Config, objectPreparer Preparer) Preparer {
return &uploadPreparer{objectPreparer: objectPreparer}
}
-func (l *uploadPreparer) Prepare(a *api.Response) (*filestore.SaveFileOpts, Verifier, error) {
+func (l *uploadPreparer) Prepare(a *api.Response) (*destination.UploadOpts, Verifier, error) {
opts, _, err := l.objectPreparer.Prepare(a)
if err != nil {
return nil, nil, err
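
The LFS object above is one Verifier: it compares the stored size against what Rails authorized (the object also carries an oid for the same purpose). A hypothetical size-only Verifier in the same spirit, relying just on the fh.Size field visible in this hunk:

package example

import (
	"fmt"

	"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
)

// sizeVerifier aborts the upload when the stored size differs from the expected one.
type sizeVerifier struct {
	expected int64
}

func (v *sizeVerifier) Verify(fh *destination.FileHandler) error {
	if fh.Size != v.expected {
		return fmt.Errorf("expected %d bytes, wrote %d", v.expected, fh.Size)
	}
	return nil
}
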
diff --git a/workhorse/internal/upload/object_storage_preparer.go b/workhorse/internal/upload/object_storage_preparer.go
index ecf3a249ecd..f28f598c895 100644
--- a/workhorse/internal/upload/object_storage_preparer.go
+++ b/workhorse/internal/upload/object_storage_preparer.go
@@ -3,7 +3,7 @@ package upload
import (
"gitlab.com/gitlab-org/gitlab/workhorse/internal/api"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/config"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/filestore"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
)
type ObjectStoragePreparer struct {
@@ -18,8 +18,8 @@ func NewObjectStoragePreparer(c config.Config) Preparer {
return &ObjectStoragePreparer{credentials: c.ObjectStorageCredentials, config: c.ObjectStorageConfig}
}
-func (p *ObjectStoragePreparer) Prepare(a *api.Response) (*filestore.SaveFileOpts, Verifier, error) {
- opts, err := filestore.GetOpts(a)
+func (p *ObjectStoragePreparer) Prepare(a *api.Response) (*destination.UploadOpts, Verifier, error) {
+ opts, err := destination.GetOpts(a)
if err != nil {
return nil, nil, err
}
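
NewObjectStoragePreparer and NewLfsPreparer compose: the object storage preparer resolves the destination options, and the LFS preparer wraps it to attach its Verifier. A wiring sketch; buildLfsPreparer is an invented helper name:

package example

import (
	"gitlab.com/gitlab-org/gitlab/workhorse/internal/config"
	"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload"
)

// buildLfsPreparer wires the two preparers shown above: the object storage
// preparer picks where the file goes, the LFS preparer adds its check on top.
func buildLfsPreparer(cfg config.Config) upload.Preparer {
	objectPreparer := upload.NewObjectStoragePreparer(cfg)
	return upload.NewLfsPreparer(cfg, objectPreparer)
}
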
diff --git a/workhorse/internal/upload/preparer.go b/workhorse/internal/upload/preparer.go
index 4c656e6a3f1..46a4cac01b5 100644
--- a/workhorse/internal/upload/preparer.go
+++ b/workhorse/internal/upload/preparer.go
@@ -2,7 +2,7 @@ package upload
import (
"gitlab.com/gitlab-org/gitlab/workhorse/internal/api"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/filestore"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
)
// Verifier is an optional pluggable behavior for upload paths. If
@@ -12,22 +12,22 @@ import (
// checksum of the uploaded file.
type Verifier interface {
// Verify can abort the upload by returning an error
- Verify(handler *filestore.FileHandler) error
+ Verify(handler *destination.FileHandler) error
}
// Preparer is a pluggable behavior that interprets a Rails API response
// and either tells Workhorse how to handle the upload, via the
-// SaveFileOpts and Verifier, or it rejects the request by returning a
+// UploadOpts and Verifier, or it rejects the request by returning a
// non-nil error. Its intended use is to make sure the upload gets stored
// in the right location: either a local directory, or one of several
// supported object storage backends.
type Preparer interface {
- Prepare(a *api.Response) (*filestore.SaveFileOpts, Verifier, error)
+ Prepare(a *api.Response) (*destination.UploadOpts, Verifier, error)
}
type DefaultPreparer struct{}
-func (s *DefaultPreparer) Prepare(a *api.Response) (*filestore.SaveFileOpts, Verifier, error) {
- opts, err := filestore.GetOpts(a)
+func (s *DefaultPreparer) Prepare(a *api.Response) (*destination.UploadOpts, Verifier, error) {
+ opts, err := destination.GetOpts(a)
return opts, nil, err
}
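
Preparer is the extension point the rename touches most: it now hands back *destination.UploadOpts instead of *filestore.SaveFileOpts. A hypothetical custom Preparer that pins uploads to one local directory, using only GetOpts and the LocalTempPath field shown earlier:

package example

import (
	"gitlab.com/gitlab-org/gitlab/workhorse/internal/api"
	"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload"
	"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
)

// localOnlyPreparer is a hypothetical Preparer that pins every upload to a
// fixed local directory and skips verification.
type localOnlyPreparer struct {
	tempPath string
}

func (p *localOnlyPreparer) Prepare(a *api.Response) (*destination.UploadOpts, upload.Verifier, error) {
	opts, err := destination.GetOpts(a)
	if err != nil {
		return nil, nil, err
	}
	opts.LocalTempPath = p.tempPath // illustrative override of the Rails-provided path
	return opts, nil, nil           // nil Verifier: accept whatever was stored
}

var _ upload.Preparer = &localOnlyPreparer{}
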
diff --git a/workhorse/internal/upload/rewrite.go b/workhorse/internal/upload/rewrite.go
index bbabe840ef5..ff5190226af 100644
--- a/workhorse/internal/upload/rewrite.go
+++ b/workhorse/internal/upload/rewrite.go
@@ -21,8 +21,8 @@ import (
"golang.org/x/image/tiff"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/api"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/filestore"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/lsif_transformer/parser"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/exif"
)
@@ -68,7 +68,7 @@ type rewriter struct {
finalizedFields map[string]bool
}
-func rewriteFormFilesFromMultipart(r *http.Request, writer *multipart.Writer, preauth *api.Response, filter MultipartFormProcessor, opts *filestore.SaveFileOpts) error {
+func rewriteFormFilesFromMultipart(r *http.Request, writer *multipart.Writer, preauth *api.Response, filter MultipartFormProcessor, opts *destination.UploadOpts) error {
// Create multipart reader
reader, err := r.MultipartReader()
if err != nil {
@@ -128,7 +128,7 @@ func parseAndNormalizeContentDisposition(header textproto.MIMEHeader) (string, s
return params["name"], params["filename"]
}
-func (rew *rewriter) handleFilePart(ctx context.Context, name string, p *multipart.Part, opts *filestore.SaveFileOpts) error {
+func (rew *rewriter) handleFilePart(ctx context.Context, name string, p *multipart.Part, opts *destination.UploadOpts) error {
if rew.filter.Count() >= maxFilesAllowed {
return ErrTooManyFilesUploaded
}
@@ -164,10 +164,10 @@ func (rew *rewriter) handleFilePart(ctx context.Context, name string, p *multipa
defer inputReader.Close()
- fh, err := filestore.SaveFileFromReader(ctx, inputReader, -1, opts)
+ fh, err := destination.Upload(ctx, inputReader, -1, opts)
if err != nil {
switch err {
- case filestore.ErrEntityTooLarge, exif.ErrRemovingExif:
+ case destination.ErrEntityTooLarge, exif.ErrRemovingExif:
return err
default:
return fmt.Errorf("persisting multipart file: %v", err)
diff --git a/workhorse/internal/upload/saved_file_tracker.go b/workhorse/internal/upload/saved_file_tracker.go
index e6f9a8c9a88..b70a303a4a4 100644
--- a/workhorse/internal/upload/saved_file_tracker.go
+++ b/workhorse/internal/upload/saved_file_tracker.go
@@ -6,8 +6,8 @@ import (
"mime/multipart"
"net/http"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/filestore"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/secret"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
)
type SavedFileTracker struct {
@@ -26,7 +26,7 @@ func (s *SavedFileTracker) Count() int {
return len(s.rewrittenFields)
}
-func (s *SavedFileTracker) ProcessFile(_ context.Context, fieldName string, file *filestore.FileHandler, _ *multipart.Writer) error {
+func (s *SavedFileTracker) ProcessFile(_ context.Context, fieldName string, file *destination.FileHandler, _ *multipart.Writer) error {
if _, ok := s.rewrittenFields[fieldName]; ok {
return fmt.Errorf("the %v field has already been processed", fieldName)
}
diff --git a/workhorse/internal/upload/saved_file_tracker_test.go b/workhorse/internal/upload/saved_file_tracker_test.go
index ba927db253e..4f323bf8888 100644
--- a/workhorse/internal/upload/saved_file_tracker_test.go
+++ b/workhorse/internal/upload/saved_file_tracker_test.go
@@ -10,8 +10,8 @@ import (
"github.com/stretchr/testify/require"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/filestore"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/testhelper"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
)
func TestSavedFileTracking(t *testing.T) {
@@ -23,7 +23,7 @@ func TestSavedFileTracking(t *testing.T) {
tracker := SavedFileTracker{Request: r}
require.Equal(t, "accelerate", tracker.Name())
- file := &filestore.FileHandler{}
+ file := &destination.FileHandler{}
ctx := context.Background()
tracker.ProcessFile(ctx, "test", file, nil)
require.Equal(t, 1, tracker.Count())
@@ -40,7 +40,7 @@ func TestSavedFileTracking(t *testing.T) {
func TestDuplicatedFileProcessing(t *testing.T) {
tracker := SavedFileTracker{}
- file := &filestore.FileHandler{}
+ file := &destination.FileHandler{}
require.NoError(t, tracker.ProcessFile(context.Background(), "file", file, nil))
diff --git a/workhorse/internal/upload/uploads.go b/workhorse/internal/upload/uploads.go
index f2679adebee..8272a3d920d 100644
--- a/workhorse/internal/upload/uploads.go
+++ b/workhorse/internal/upload/uploads.go
@@ -11,8 +11,8 @@ import (
"github.com/golang-jwt/jwt/v4"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/api"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/filestore"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/helper"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/exif"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/zipartifacts"
)
@@ -31,7 +31,7 @@ type MultipartClaims struct {
// MultipartFormProcessor abstracts away implementation differences
// between generic MIME multipart file uploads and CI artifact uploads.
type MultipartFormProcessor interface {
- ProcessFile(ctx context.Context, formName string, file *filestore.FileHandler, writer *multipart.Writer) error
+ ProcessFile(ctx context.Context, formName string, file *destination.FileHandler, writer *multipart.Writer) error
ProcessField(ctx context.Context, formName string, writer *multipart.Writer) error
Finalize(ctx context.Context) error
Name() string
@@ -40,7 +40,7 @@ type MultipartFormProcessor interface {
// interceptMultipartFiles is the core of the implementation of
// Multipart.
-func interceptMultipartFiles(w http.ResponseWriter, r *http.Request, h http.Handler, preauth *api.Response, filter MultipartFormProcessor, opts *filestore.SaveFileOpts) {
+func interceptMultipartFiles(w http.ResponseWriter, r *http.Request, h http.Handler, preauth *api.Response, filter MultipartFormProcessor, opts *destination.UploadOpts) {
var body bytes.Buffer
writer := multipart.NewWriter(&body)
defer writer.Close()
@@ -55,7 +55,7 @@ func interceptMultipartFiles(w http.ResponseWriter, r *http.Request, h http.Hand
helper.CaptureAndFail(w, r, err, err.Error(), http.StatusBadRequest)
case http.ErrNotMultipart:
h.ServeHTTP(w, r)
- case filestore.ErrEntityTooLarge:
+ case destination.ErrEntityTooLarge:
helper.RequestEntityTooLarge(w, r, err)
case zipartifacts.ErrBadMetadata:
helper.RequestEntityTooLarge(w, r, err)
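
MultipartFormProcessor is the other consumer of *destination.FileHandler after the rename. A hypothetical implementation that only counts files; Count is included because rewrite.go calls it on the filter, even though that method sits outside the lines shown here:

package example

import (
	"context"
	"mime/multipart"

	"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
)

// countingProcessor is a hypothetical processor that only counts uploaded files.
type countingProcessor struct {
	files int
}

func (p *countingProcessor) ProcessFile(ctx context.Context, formName string, file *destination.FileHandler, writer *multipart.Writer) error {
	p.files++
	return nil
}

func (p *countingProcessor) ProcessField(ctx context.Context, formName string, writer *multipart.Writer) error {
	return nil
}

func (p *countingProcessor) Finalize(ctx context.Context) error { return nil }

func (p *countingProcessor) Name() string { return "counting" }

// Count mirrors the method rewrite.go calls to enforce maxFilesAllowed.
func (p *countingProcessor) Count() int { return p.files }
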
diff --git a/workhorse/internal/upload/uploads_test.go b/workhorse/internal/upload/uploads_test.go
index 81c8b4e3c18..9d787b10d1c 100644
--- a/workhorse/internal/upload/uploads_test.go
+++ b/workhorse/internal/upload/uploads_test.go
@@ -22,9 +22,9 @@ import (
"gitlab.com/gitlab-org/gitlab/workhorse/internal/api"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/helper"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/objectstore/test"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/proxy"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/testhelper"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination/objectstore/test"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/upstream/roundtripper"
)