Welcome to mirror list, hosted at ThFree Co, Russian Federation.

gitlab.com/gitlab-org/gitlab-foss.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorGitLab Bot <gitlab-bot@gitlab.com>2021-11-19 21:12:50 +0300
committerGitLab Bot <gitlab-bot@gitlab.com>2021-11-19 21:12:50 +0300
commit78f7d2e7266a80502f91d5c3aeb5689c72f156a2 (patch)
treebd7dca46bb745b27d30ff9e0fab2c5aa92b37c69
parent03a3b1a4caac4c04e81ee592fdb3b9c47dbb9623 (diff)
Add latest changes from gitlab-org/gitlab@master
-rw-r--r--.rubocop_manual_todo.yml1
-rw-r--r--Gemfile2
-rw-r--r--Gemfile.lock10
-rw-r--r--app/assets/javascripts/boards/components/issue_board_filtered_search.vue20
-rw-r--r--app/assets/javascripts/issues_list/components/issue_card_time_info.vue18
-rw-r--r--app/assets/javascripts/issues_list/components/issues_list_app.vue84
-rw-r--r--app/assets/javascripts/issues_list/index.js2
-rw-r--r--app/assets/javascripts/issues_list/queries/iteration.fragment.graphql10
-rw-r--r--app/assets/javascripts/issues_list/queries/search_iterations.query.graphql18
-rw-r--r--app/assets/javascripts/vue_merge_request_widget/components/states/ready_to_merge.vue2
-rw-r--r--app/assets/javascripts/vue_shared/components/filtered_search_bar/constants.js8
-rw-r--r--app/assets/javascripts/vue_shared/components/filtered_search_bar/queries/epic.fragment.graphql15
-rw-r--r--app/assets/javascripts/vue_shared/components/filtered_search_bar/queries/search_epics.query.graphql16
-rw-r--r--app/assets/javascripts/vue_shared/components/filtered_search_bar/tokens/epic_token.vue129
-rw-r--r--app/assets/javascripts/vue_shared/components/filtered_search_bar/tokens/iteration_token.vue134
-rw-r--r--app/assets/javascripts/vue_shared/components/filtered_search_bar/tokens/weight_token.vue66
-rw-r--r--app/graphql/mutations/issues/set_crm_contacts.rb18
-rw-r--r--app/models/application_record.rb4
-rw-r--r--app/models/customer_relations/contact.rb7
-rw-r--r--app/models/customer_relations/issue_contact.rb8
-rw-r--r--app/services/ci/create_pipeline_service.rb37
-rw-r--r--app/services/issues/set_crm_contacts_service.rb79
-rw-r--r--app/services/merge_requests/squash_service.rb6
-rw-r--r--config/feature_flags/development/customer_relations.yml2
-rw-r--r--config/feature_flags/ops/ci_pipeline_creation_logger.yml8
-rw-r--r--config/metrics/counts_28d/20211109114953_i_quickactions_add_contacts_monthly.yml25
-rw-r--r--config/metrics/counts_28d/20211109120251_i_quickactions_remove_contacts_monthly.yml25
-rw-r--r--config/metrics/counts_7d/20211109114948_i_quickactions_add_contacts_weekly.yml25
-rw-r--r--config/metrics/counts_7d/20211109120245_i_quickactions_remove_contacts_weekly.yml25
-rw-r--r--doc/administration/troubleshooting/gitlab_rails_cheat_sheet.md37
-rw-r--r--doc/api/graphql/reference/index.md6
-rw-r--r--doc/development/testing_guide/best_practices.md37
-rw-r--r--doc/topics/plan_and_track.md3
-rw-r--r--lib/gitlab/ci/config.rb56
-rw-r--r--lib/gitlab/ci/config/external/context.rb10
-rw-r--r--lib/gitlab/ci/config/external/mapper.rb54
-rw-r--r--lib/gitlab/ci/config/external/processor.rb11
-rw-r--r--lib/gitlab/ci/pipeline/chain/base.rb2
-rw-r--r--lib/gitlab/ci/pipeline/chain/command.rb13
-rw-r--r--lib/gitlab/ci/pipeline/chain/config/process.rb25
-rw-r--r--lib/gitlab/ci/pipeline/chain/create.rb6
-rw-r--r--lib/gitlab/ci/pipeline/chain/seed.rb29
-rw-r--r--lib/gitlab/ci/pipeline/chain/sequence.rb14
-rw-r--r--lib/gitlab/ci/pipeline/logger.rb102
-rw-r--r--lib/gitlab/ci/yaml_processor.rb2
-rw-r--r--lib/gitlab/quick_actions/issue_actions.rb46
-rw-r--r--lib/gitlab/usage_data_counters/known_events/quickactions.yml8
-rw-r--r--lib/tasks/gettext.rake13
-rw-r--r--locale/gitlab.pot34
-rw-r--r--package.json12
-rw-r--r--spec/factories/customer_relations/contacts.rb1
-rw-r--r--spec/features/admin/admin_projects_spec.rb3
-rw-r--r--spec/features/admin/admin_runners_spec.rb3
-rw-r--r--spec/features/admin/users/user_spec.rb5
-rw-r--r--spec/features/groups/members/manage_groups_spec.rb3
-rw-r--r--spec/features/groups/members/manage_members_spec.rb3
-rw-r--r--spec/features/merge_request/user_posts_diff_notes_spec.rb7
-rw-r--r--spec/features/merge_request/user_squashes_merge_request_spec.rb38
-rw-r--r--spec/features/profiles/two_factor_auths_spec.rb8
-rw-r--r--spec/features/projects/members/groups_with_access_list_spec.rb3
-rw-r--r--spec/features/projects/members/list_spec.rb3
-rw-r--r--spec/features/projects/settings/user_manages_project_members_spec.rb3
-rw-r--r--spec/frontend/boards/mock_data.js14
-rw-r--r--spec/frontend/issues_list/components/issue_card_time_info_spec.js2
-rw-r--r--spec/frontend/issues_list/components/issues_list_app_spec.js78
-rw-r--r--spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js4
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js104
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/epic_token_spec.js169
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/iteration_token_spec.js116
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/weight_token_spec.js38
-rw-r--r--spec/lib/gitlab/ci/config/external/context_spec.rb10
-rw-r--r--spec/lib/gitlab/ci/config/external/processor_spec.rb13
-rw-r--r--spec/lib/gitlab/ci/pipeline/logger_spec.rb131
-rw-r--r--spec/models/customer_relations/contact_spec.rb23
-rw-r--r--spec/models/customer_relations/issue_contact_spec.rb28
-rw-r--r--spec/requests/api/graphql/mutations/issues/set_crm_contacts_spec.rb16
-rw-r--r--spec/services/ci/create_pipeline_service/logger_spec.rb139
-rw-r--r--spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb1102
-rw-r--r--spec/services/ci/pipeline_processing/shared_processing_service.rb1040
-rw-r--r--spec/services/ci/pipeline_processing/shared_processing_service_tests_with_yaml.rb61
-rw-r--r--spec/services/issues/set_crm_contacts_service_spec.rb88
-rw-r--r--spec/services/merge_requests/merge_to_ref_service_spec.rb5
-rw-r--r--spec/services/merge_requests/squash_service_spec.rb32
-rw-r--r--spec/services/quick_actions/interpret_service_spec.rb54
-rw-r--r--spec/support/helpers/modal_helpers.rb27
-rw-r--r--spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb4
-rw-r--r--yarn.lock96
87 files changed, 2411 insertions, 2387 deletions
diff --git a/.rubocop_manual_todo.yml b/.rubocop_manual_todo.yml
index 9031799c421..a7892e477b0 100644
--- a/.rubocop_manual_todo.yml
+++ b/.rubocop_manual_todo.yml
@@ -2609,7 +2609,6 @@ Style/OpenStructUse:
- 'spec/support/helpers/login_helpers.rb'
- 'spec/support/helpers/repo_helpers.rb'
- 'spec/support/shared_examples/controllers/githubish_import_controller_shared_examples.rb'
- - 'spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb'
- 'spec/tooling/rspec_flaky/flaky_example_spec.rb'
- 'tooling/rspec_flaky/flaky_example.rb'
diff --git a/Gemfile b/Gemfile
index 920798694c1..76e6cef28e0 100644
--- a/Gemfile
+++ b/Gemfile
@@ -494,7 +494,7 @@ gem 'flipper', '~> 0.21.0'
gem 'flipper-active_record', '~> 0.21.0'
gem 'flipper-active_support_cache_store', '~> 0.21.0'
gem 'unleash', '~> 3.2.2'
-gem 'gitlab-experiment', '~> 0.6.4'
+gem 'gitlab-experiment', '~> 0.6.5'
# Structured logging
gem 'lograge', '~> 0.5'
diff --git a/Gemfile.lock b/Gemfile.lock
index df713319288..1bd733cde9f 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -461,7 +461,7 @@ GEM
gitlab-dangerfiles (2.5.0)
danger (>= 8.3.1)
danger-gitlab (>= 8.0.0)
- gitlab-experiment (0.6.4)
+ gitlab-experiment (0.6.5)
activesupport (>= 3.0)
request_store (>= 1.0)
scientist (~> 1.6, >= 1.6.0)
@@ -634,7 +634,7 @@ GEM
mime-types (~> 3.0)
multi_xml (>= 0.5.2)
httpclient (2.8.3)
- i18n (1.8.10)
+ i18n (1.8.11)
concurrent-ruby (~> 1.0)
i18n_data (0.8.0)
icalendar (2.4.1)
@@ -1161,7 +1161,7 @@ GEM
sawyer (0.8.2)
addressable (>= 2.3.5)
faraday (> 0.8, < 2.0)
- scientist (1.6.0)
+ scientist (1.6.2)
sd_notify (0.1.0)
securecompare (1.0.0)
seed-fu (2.3.7)
@@ -1376,7 +1376,7 @@ GEM
nokogiri (~> 1.8)
yajl-ruby (1.4.1)
yard (0.9.26)
- zeitwerk (2.4.2)
+ zeitwerk (2.5.1)
PLATFORMS
ruby
@@ -1471,7 +1471,7 @@ DEPENDENCIES
github-markup (~> 1.7.0)
gitlab-chronic (~> 0.10.5)
gitlab-dangerfiles (~> 2.5.0)
- gitlab-experiment (~> 0.6.4)
+ gitlab-experiment (~> 0.6.5)
gitlab-fog-azure-rm (~> 1.2.0)
gitlab-labkit (~> 0.21.1)
gitlab-license (~> 2.0)
diff --git a/app/assets/javascripts/boards/components/issue_board_filtered_search.vue b/app/assets/javascripts/boards/components/issue_board_filtered_search.vue
index c20b5e3f377..aa6ffa500ea 100644
--- a/app/assets/javascripts/boards/components/issue_board_filtered_search.vue
+++ b/app/assets/javascripts/boards/components/issue_board_filtered_search.vue
@@ -18,7 +18,6 @@ import AuthorToken from '~/vue_shared/components/filtered_search_bar/tokens/auth
import EmojiToken from '~/vue_shared/components/filtered_search_bar/tokens/emoji_token.vue';
import LabelToken from '~/vue_shared/components/filtered_search_bar/tokens/label_token.vue';
import MilestoneToken from '~/vue_shared/components/filtered_search_bar/tokens/milestone_token.vue';
-import WeightToken from '~/vue_shared/components/filtered_search_bar/tokens/weight_token.vue';
export default {
types: {
@@ -35,7 +34,6 @@ export default {
incident: __('Incident'),
issue: __('Issue'),
milestone: __('Milestone'),
- weight: __('Weight'),
},
components: { BoardFilteredSearch },
inject: ['isSignedIn'],
@@ -59,16 +57,7 @@ export default {
: this.fullPath.slice(0, this.fullPath.lastIndexOf('/'));
},
tokensCE() {
- const {
- label,
- author,
- assignee,
- issue,
- incident,
- type,
- milestone,
- weight,
- } = this.$options.i18n;
+ const { label, author, assignee, issue, incident, type, milestone } = this.$options.i18n;
const { types } = this.$options;
const { fetchAuthors, fetchLabels } = issueBoardFilters(
this.$apollo,
@@ -155,13 +144,6 @@ export default {
{ icon: 'issue-type-incident', value: types.INCIDENT, title: incident },
],
},
- {
- type: 'weight',
- title: weight,
- icon: 'weight',
- token: WeightToken,
- unique: true,
- },
];
},
tokens() {
diff --git a/app/assets/javascripts/issues_list/components/issue_card_time_info.vue b/app/assets/javascripts/issues_list/components/issue_card_time_info.vue
index 4a2f7861492..aece7372182 100644
--- a/app/assets/javascripts/issues_list/components/issue_card_time_info.vue
+++ b/app/assets/javascripts/issues_list/components/issue_card_time_info.vue
@@ -7,25 +7,16 @@ import {
isInPast,
isToday,
} from '~/lib/utils/datetime_utility';
-import { convertToCamelCase } from '~/lib/utils/text_utility';
import { __ } from '~/locale';
export default {
components: {
GlLink,
GlIcon,
- IssueHealthStatus: () =>
- import('ee_component/related_items_tree/components/issue_health_status.vue'),
- WeightCount: () => import('ee_component/issues/components/weight_count.vue'),
},
directives: {
GlTooltip: GlTooltipDirective,
},
- inject: {
- hasIssuableHealthStatusFeature: {
- default: false,
- },
- },
props: {
issue: {
type: Object,
@@ -54,12 +45,6 @@ export default {
timeEstimate() {
return this.issue.humanTimeEstimate || this.issue.timeStats?.humanTimeEstimate;
},
- showHealthStatus() {
- return this.hasIssuableHealthStatusFeature && this.issue.healthStatus;
- },
- healthStatus() {
- return convertToCamelCase(this.issue.healthStatus);
- },
},
methods: {
milestoneRemainingTime(dueDate, startDate) {
@@ -114,7 +99,6 @@ export default {
<gl-icon name="timer" />
{{ timeEstimate }}
</span>
- <weight-count class="issuable-weight gl-mr-3" :weight="issue.weight" />
- <issue-health-status v-if="showHealthStatus" :health-status="healthStatus" />
+ <slot></slot>
</span>
</template>
diff --git a/app/assets/javascripts/issues_list/components/issues_list_app.vue b/app/assets/javascripts/issues_list/components/issues_list_app.vue
index 013361495c9..62b672d3e7d 100644
--- a/app/assets/javascripts/issues_list/components/issues_list_app.vue
+++ b/app/assets/javascripts/issues_list/components/issues_list_app.vue
@@ -11,6 +11,7 @@ import {
import fuzzaldrinPlus from 'fuzzaldrin-plus';
import getIssuesQuery from 'ee_else_ce/issues_list/queries/get_issues.query.graphql';
import getIssuesCountsQuery from 'ee_else_ce/issues_list/queries/get_issues_counts.query.graphql';
+import IssueCardTimeInfo from 'ee_else_ce/issues_list/components/issue_card_time_info.vue';
import createFlash, { FLASH_TYPES } from '~/flash';
import { TYPE_USER } from '~/graphql_shared/constants';
import { convertToGraphQLId, getIdFromGraphQLId } from '~/graphql_shared/utils';
@@ -31,14 +32,11 @@ import {
TOKEN_TYPE_ASSIGNEE,
TOKEN_TYPE_AUTHOR,
TOKEN_TYPE_CONFIDENTIAL,
- TOKEN_TYPE_EPIC,
- TOKEN_TYPE_ITERATION,
TOKEN_TYPE_LABEL,
TOKEN_TYPE_MILESTONE,
TOKEN_TYPE_MY_REACTION,
TOKEN_TYPE_RELEASE,
TOKEN_TYPE_TYPE,
- TOKEN_TYPE_WEIGHT,
UPDATED_DESC,
urlSortParams,
} from '~/issues_list/constants';
@@ -61,39 +59,29 @@ import {
TOKEN_TITLE_ASSIGNEE,
TOKEN_TITLE_AUTHOR,
TOKEN_TITLE_CONFIDENTIAL,
- TOKEN_TITLE_EPIC,
- TOKEN_TITLE_ITERATION,
TOKEN_TITLE_LABEL,
TOKEN_TITLE_MILESTONE,
TOKEN_TITLE_MY_REACTION,
TOKEN_TITLE_RELEASE,
TOKEN_TITLE_TYPE,
- TOKEN_TITLE_WEIGHT,
} from '~/vue_shared/components/filtered_search_bar/constants';
import eventHub from '../eventhub';
import reorderIssuesMutation from '../queries/reorder_issues.mutation.graphql';
-import searchIterationsQuery from '../queries/search_iterations.query.graphql';
import searchLabelsQuery from '../queries/search_labels.query.graphql';
import searchMilestonesQuery from '../queries/search_milestones.query.graphql';
import searchUsersQuery from '../queries/search_users.query.graphql';
-import IssueCardTimeInfo from './issue_card_time_info.vue';
import NewIssueDropdown from './new_issue_dropdown.vue';
const AuthorToken = () =>
import('~/vue_shared/components/filtered_search_bar/tokens/author_token.vue');
const EmojiToken = () =>
import('~/vue_shared/components/filtered_search_bar/tokens/emoji_token.vue');
-const EpicToken = () => import('~/vue_shared/components/filtered_search_bar/tokens/epic_token.vue');
-const IterationToken = () =>
- import('~/vue_shared/components/filtered_search_bar/tokens/iteration_token.vue');
const LabelToken = () =>
import('~/vue_shared/components/filtered_search_bar/tokens/label_token.vue');
const MilestoneToken = () =>
import('~/vue_shared/components/filtered_search_bar/tokens/milestone_token.vue');
const ReleaseToken = () =>
import('~/vue_shared/components/filtered_search_bar/tokens/release_token.vue');
-const WeightToken = () =>
- import('~/vue_shared/components/filtered_search_bar/tokens/weight_token.vue');
export default {
i18n,
@@ -109,7 +97,6 @@ export default {
IssuableList,
IssueCardTimeInfo,
NewIssueDropdown,
- BlockingIssuesCount: () => import('ee_component/issues/components/blocking_issues_count.vue'),
},
directives: {
GlTooltip: GlTooltipDirective,
@@ -133,9 +120,6 @@ export default {
fullPath: {
default: '',
},
- groupPath: {
- default: '',
- },
hasAnyIssues: {
default: false,
},
@@ -148,9 +132,6 @@ export default {
hasIssueWeightsFeature: {
default: false,
},
- hasIterationsFeature: {
- default: false,
- },
hasMultipleIssueAssigneesFeature: {
default: false,
},
@@ -185,6 +166,13 @@ export default {
default: '',
},
},
+ props: {
+ eeSearchTokens: {
+ type: Array,
+ required: false,
+ default: () => [],
+ },
+ },
data() {
const state = getParameterByName(PARAM_STATE);
const defaultSortKey = state === IssuableStates.Closed ? UPDATED_DESC : CREATED_DESC;
@@ -389,39 +377,8 @@ export default {
});
}
- if (this.hasIterationsFeature) {
- tokens.push({
- type: TOKEN_TYPE_ITERATION,
- title: TOKEN_TITLE_ITERATION,
- icon: 'iteration',
- token: IterationToken,
- fetchIterations: this.fetchIterations,
- });
- }
-
- if (this.groupPath) {
- tokens.push({
- type: TOKEN_TYPE_EPIC,
- title: TOKEN_TITLE_EPIC,
- icon: 'epic',
- token: EpicToken,
- unique: true,
- symbol: '&',
- idProperty: 'id',
- useIdValue: true,
- recentSuggestionsStorageKey: `${this.fullPath}-issues-recent-tokens-epic_id`,
- fullPath: this.groupPath,
- });
- }
-
- if (this.hasIssueWeightsFeature) {
- tokens.push({
- type: TOKEN_TYPE_WEIGHT,
- title: TOKEN_TITLE_WEIGHT,
- icon: 'weight',
- token: WeightToken,
- unique: true,
- });
+ if (this.eeSearchTokens.length) {
+ tokens.push(...this.eeSearchTokens);
}
return tokens;
@@ -499,20 +456,6 @@ export default {
})
.then(({ data }) => data[this.namespace]?.milestones.nodes);
},
- fetchIterations(search) {
- const id = Number(search);
- const variables =
- !search || Number.isNaN(id)
- ? { fullPath: this.fullPath, search, isProject: this.isProject }
- : { fullPath: this.fullPath, id, isProject: this.isProject };
-
- return this.$apollo
- .query({
- query: searchIterationsQuery,
- variables,
- })
- .then(({ data }) => data[this.namespace]?.iterations.nodes);
- },
fetchUsers(search) {
return this.$apollo
.query({
@@ -746,12 +689,7 @@ export default {
<gl-icon name="thumb-down" />
{{ issuable.downvotes }}
</li>
- <blocking-issues-count
- class="blocking-issues gl-display-none gl-sm-display-block"
- :blocking-issues-count="issuable.blockingCount"
- :is-list-item="true"
- data-testid="blocking-issues"
- />
+ <slot :issuable="issuable"></slot>
</template>
<template #empty-state>
diff --git a/app/assets/javascripts/issues_list/index.js b/app/assets/javascripts/issues_list/index.js
index 77b67cdf763..89e9aec206e 100644
--- a/app/assets/javascripts/issues_list/index.js
+++ b/app/assets/javascripts/issues_list/index.js
@@ -2,7 +2,7 @@ import produce from 'immer';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import getIssuesQuery from 'ee_else_ce/issues_list/queries/get_issues.query.graphql';
-import IssuesListApp from '~/issues_list/components/issues_list_app.vue';
+import IssuesListApp from 'ee_else_ce/issues_list/components/issues_list_app.vue';
import createDefaultClient from '~/lib/graphql';
import { convertObjectPropsToCamelCase, parseBoolean } from '~/lib/utils/common_utils';
import IssuablesListApp from './components/issuables_list_app.vue';
diff --git a/app/assets/javascripts/issues_list/queries/iteration.fragment.graphql b/app/assets/javascripts/issues_list/queries/iteration.fragment.graphql
deleted file mode 100644
index 4f7217be7f7..00000000000
--- a/app/assets/javascripts/issues_list/queries/iteration.fragment.graphql
+++ /dev/null
@@ -1,10 +0,0 @@
-fragment Iteration on Iteration {
- id
- title
- startDate
- dueDate
- iterationCadence {
- id
- title
- }
-}
diff --git a/app/assets/javascripts/issues_list/queries/search_iterations.query.graphql b/app/assets/javascripts/issues_list/queries/search_iterations.query.graphql
deleted file mode 100644
index 93600c62905..00000000000
--- a/app/assets/javascripts/issues_list/queries/search_iterations.query.graphql
+++ /dev/null
@@ -1,18 +0,0 @@
-#import "./iteration.fragment.graphql"
-
-query searchIterations($fullPath: ID!, $search: String, $id: ID, $isProject: Boolean = false) {
- group(fullPath: $fullPath) @skip(if: $isProject) {
- iterations(title: $search, id: $id, includeAncestors: true) {
- nodes {
- ...Iteration
- }
- }
- }
- project(fullPath: $fullPath) @include(if: $isProject) {
- iterations(title: $search, id: $id, includeAncestors: true) {
- nodes {
- ...Iteration
- }
- }
- }
-}
diff --git a/app/assets/javascripts/vue_merge_request_widget/components/states/ready_to_merge.vue b/app/assets/javascripts/vue_merge_request_widget/components/states/ready_to_merge.vue
index f0950374182..08a44d81bf0 100644
--- a/app/assets/javascripts/vue_merge_request_widget/components/states/ready_to_merge.vue
+++ b/app/assets/javascripts/vue_merge_request_widget/components/states/ready_to_merge.vue
@@ -287,7 +287,7 @@ export default {
return false;
}
- return enableSquashBeforeMerge && this.commitsCount > 1;
+ return enableSquashBeforeMerge;
},
shouldShowMergeControls() {
if (this.glFeatures.restructuredMrWidget) {
diff --git a/app/assets/javascripts/vue_shared/components/filtered_search_bar/constants.js b/app/assets/javascripts/vue_shared/components/filtered_search_bar/constants.js
index d9290e86bca..c881d3cf6a4 100644
--- a/app/assets/javascripts/vue_shared/components/filtered_search_bar/constants.js
+++ b/app/assets/javascripts/vue_shared/components/filtered_search_bar/constants.js
@@ -2,7 +2,6 @@ import { __ } from '~/locale';
export const DEBOUNCE_DELAY = 200;
export const MAX_RECENT_TOKENS_SIZE = 3;
-export const WEIGHT_TOKEN_SUGGESTIONS_SIZE = 21;
export const FILTER_NONE = 'None';
export const FILTER_ANY = 'Any';
@@ -24,10 +23,6 @@ export const DEFAULT_LABEL_NONE = { value: FILTER_NONE, text: __('None'), title:
export const DEFAULT_LABEL_ANY = { value: FILTER_ANY, text: __('Any'), title: __('Any') };
export const DEFAULT_NONE_ANY = [DEFAULT_LABEL_NONE, DEFAULT_LABEL_ANY];
-export const DEFAULT_ITERATIONS = DEFAULT_NONE_ANY.concat([
- { value: FILTER_CURRENT, text: __('Current') },
-]);
-
export const DEFAULT_MILESTONES = DEFAULT_NONE_ANY.concat([
{ value: FILTER_UPCOMING, text: __('Upcoming'), title: __('Upcoming') },
{ value: FILTER_STARTED, text: __('Started'), title: __('Started') },
@@ -56,6 +51,3 @@ export const TOKEN_TITLE_TYPE = __('Type');
export const TOKEN_TITLE_RELEASE = __('Release');
export const TOKEN_TITLE_MY_REACTION = __('My-Reaction');
export const TOKEN_TITLE_CONFIDENTIAL = __('Confidential');
-export const TOKEN_TITLE_ITERATION = __('Iteration');
-export const TOKEN_TITLE_EPIC = __('Epic');
-export const TOKEN_TITLE_WEIGHT = __('Weight');
diff --git a/app/assets/javascripts/vue_shared/components/filtered_search_bar/queries/epic.fragment.graphql b/app/assets/javascripts/vue_shared/components/filtered_search_bar/queries/epic.fragment.graphql
deleted file mode 100644
index 9e9bda8ad3e..00000000000
--- a/app/assets/javascripts/vue_shared/components/filtered_search_bar/queries/epic.fragment.graphql
+++ /dev/null
@@ -1,15 +0,0 @@
-fragment EpicNode on Epic {
- id
- iid
- group {
- fullPath
- }
- title
- state
- reference
- referencePath: reference(full: true)
- webPath
- webUrl
- createdAt
- closedAt
-}
diff --git a/app/assets/javascripts/vue_shared/components/filtered_search_bar/queries/search_epics.query.graphql b/app/assets/javascripts/vue_shared/components/filtered_search_bar/queries/search_epics.query.graphql
deleted file mode 100644
index 4bb4b586fc9..00000000000
--- a/app/assets/javascripts/vue_shared/components/filtered_search_bar/queries/search_epics.query.graphql
+++ /dev/null
@@ -1,16 +0,0 @@
-#import "./epic.fragment.graphql"
-
-query searchEpics($fullPath: ID!, $search: String, $state: EpicState) {
- group(fullPath: $fullPath) {
- epics(
- search: $search
- state: $state
- includeAncestorGroups: true
- includeDescendantGroups: false
- ) {
- nodes {
- ...EpicNode
- }
- }
- }
-}
diff --git a/app/assets/javascripts/vue_shared/components/filtered_search_bar/tokens/epic_token.vue b/app/assets/javascripts/vue_shared/components/filtered_search_bar/tokens/epic_token.vue
deleted file mode 100644
index 9c2f5306654..00000000000
--- a/app/assets/javascripts/vue_shared/components/filtered_search_bar/tokens/epic_token.vue
+++ /dev/null
@@ -1,129 +0,0 @@
-<script>
-import { GlFilteredSearchSuggestion } from '@gitlab/ui';
-import createFlash from '~/flash';
-import { getIdFromGraphQLId } from '~/graphql_shared/utils';
-import { __ } from '~/locale';
-import { DEFAULT_NONE_ANY, FILTER_NONE_ANY, OPERATOR_IS_NOT } from '../constants';
-import searchEpicsQuery from '../queries/search_epics.query.graphql';
-
-import BaseToken from './base_token.vue';
-
-export default {
- prefix: '&',
- separator: '::',
- components: {
- BaseToken,
- GlFilteredSearchSuggestion,
- },
- props: {
- config: {
- type: Object,
- required: true,
- },
- value: {
- type: Object,
- required: true,
- },
- active: {
- type: Boolean,
- required: true,
- },
- },
- data() {
- return {
- epics: this.config.initialEpics || [],
- loading: false,
- };
- },
- computed: {
- idProperty() {
- return this.config.idProperty || 'iid';
- },
- currentValue() {
- const epicIid = Number(this.value.data);
- if (epicIid) {
- return epicIid;
- }
- return this.value.data;
- },
- defaultEpics() {
- return this.config.defaultEpics || DEFAULT_NONE_ANY;
- },
- availableDefaultEpics() {
- if (this.value.operator === OPERATOR_IS_NOT) {
- return this.defaultEpics.filter(
- (suggestion) => !FILTER_NONE_ANY.includes(suggestion.value),
- );
- }
- return this.defaultEpics;
- },
- },
- methods: {
- fetchEpics(search = '') {
- return this.$apollo
- .query({
- query: searchEpicsQuery,
- variables: { fullPath: this.config.fullPath, search },
- })
- .then(({ data }) => data.group?.epics.nodes);
- },
- fetchEpicsBySearchTerm(search) {
- this.loading = true;
- this.fetchEpics(search)
- .then((response) => {
- this.epics = Array.isArray(response) ? response : response?.data;
- })
- .catch(() => createFlash({ message: __('There was a problem fetching epics.') }))
- .finally(() => {
- this.loading = false;
- });
- },
- getActiveEpic(epics, data) {
- if (data && epics.length) {
- return epics.find((epic) => this.getValue(epic) === data);
- }
- return undefined;
- },
- getValue(epic) {
- return this.getEpicIdProperty(epic).toString();
- },
- displayValue(epic) {
- return `${this.$options.prefix}${this.getEpicIdProperty(epic)}${this.$options.separator}${
- epic?.title
- }`;
- },
- getEpicIdProperty(epic) {
- return getIdFromGraphQLId(epic[this.idProperty]);
- },
- },
-};
-</script>
-
-<template>
- <base-token
- :config="config"
- :value="value"
- :active="active"
- :suggestions-loading="loading"
- :suggestions="epics"
- :get-active-token-value="getActiveEpic"
- :default-suggestions="availableDefaultEpics"
- :recent-suggestions-storage-key="config.recentSuggestionsStorageKey"
- search-by="title"
- @fetch-suggestions="fetchEpicsBySearchTerm"
- v-on="$listeners"
- >
- <template #view="{ viewTokenProps: { inputValue, activeTokenValue } }">
- {{ activeTokenValue ? displayValue(activeTokenValue) : inputValue }}
- </template>
- <template #suggestions-list="{ suggestions }">
- <gl-filtered-search-suggestion
- v-for="epic in suggestions"
- :key="epic.id"
- :value="getValue(epic)"
- >
- {{ epic.title }}
- </gl-filtered-search-suggestion>
- </template>
- </base-token>
-</template>
diff --git a/app/assets/javascripts/vue_shared/components/filtered_search_bar/tokens/iteration_token.vue b/app/assets/javascripts/vue_shared/components/filtered_search_bar/tokens/iteration_token.vue
deleted file mode 100644
index 4a1dbf9d3fe..00000000000
--- a/app/assets/javascripts/vue_shared/components/filtered_search_bar/tokens/iteration_token.vue
+++ /dev/null
@@ -1,134 +0,0 @@
-<script>
-import { GlDropdownDivider, GlDropdownSectionHeader, GlFilteredSearchSuggestion } from '@gitlab/ui';
-import createFlash from '~/flash';
-import { __ } from '~/locale';
-import BaseToken from '~/vue_shared/components/filtered_search_bar/tokens/base_token.vue';
-import { formatDate } from '~/lib/utils/datetime_utility';
-import glFeatureFlagMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
-import { DEFAULT_ITERATIONS } from '../constants';
-
-export default {
- components: {
- BaseToken,
- GlDropdownDivider,
- GlDropdownSectionHeader,
- GlFilteredSearchSuggestion,
- },
- mixins: [glFeatureFlagMixin()],
- props: {
- active: {
- type: Boolean,
- required: true,
- },
- config: {
- type: Object,
- required: true,
- },
- value: {
- type: Object,
- required: true,
- },
- },
- data() {
- return {
- iterations: this.config.initialIterations || [],
- loading: false,
- };
- },
- computed: {
- defaultIterations() {
- return this.config.defaultIterations || DEFAULT_ITERATIONS;
- },
- },
- methods: {
- getActiveIteration(iterations, data) {
- return iterations.find((iteration) => iteration.id === data);
- },
- groupIterationsByCadence(iterations) {
- const cadences = [];
- iterations.forEach((iteration) => {
- if (!iteration.iterationCadence) {
- return;
- }
- const { title } = iteration.iterationCadence;
- const cadenceIteration = {
- id: iteration.id,
- title: iteration.title,
- period: this.getIterationPeriod(iteration),
- };
- const cadence = cadences.find((cad) => cad.title === title);
- if (cadence) {
- cadence.iterations.push(cadenceIteration);
- } else {
- cadences.push({ title, iterations: [cadenceIteration] });
- }
- });
- return cadences;
- },
- fetchIterations(searchTerm) {
- this.loading = true;
- this.config
- .fetchIterations(searchTerm)
- .then((response) => {
- this.iterations = Array.isArray(response) ? response : response.data;
- })
- .catch(() => {
- createFlash({ message: __('There was a problem fetching iterations.') });
- })
- .finally(() => {
- this.loading = false;
- });
- },
- /**
- * TODO: https://gitlab.com/gitlab-org/gitlab/-/issues/344619
- * This method also exists as a utility function in ee/../iterations/utils.js
- * Remove the duplication when iteration token is moved to EE.
- */
- getIterationPeriod({ startDate, dueDate }) {
- const start = formatDate(startDate, 'mmm d, yyyy', true);
- const due = formatDate(dueDate, 'mmm d, yyyy', true);
- return `${start} - ${due}`;
- },
- },
-};
-</script>
-
-<template>
- <base-token
- :active="active"
- :config="config"
- :value="value"
- :default-suggestions="defaultIterations"
- :suggestions="iterations"
- :suggestions-loading="loading"
- :get-active-token-value="getActiveIteration"
- @fetch-suggestions="fetchIterations"
- v-on="$listeners"
- >
- <template #view="{ viewTokenProps: { inputValue, activeTokenValue } }">
- {{ activeTokenValue ? activeTokenValue.title : inputValue }}
- </template>
- <template #suggestions-list="{ suggestions }">
- <template v-for="(cadence, index) in groupIterationsByCadence(suggestions)">
- <gl-dropdown-divider v-if="index !== 0" :key="index" />
- <gl-dropdown-section-header
- :key="cadence.title"
- class="gl-overflow-hidden"
- :title="cadence.title"
- >
- {{ cadence.title }}
- </gl-dropdown-section-header>
- <gl-filtered-search-suggestion
- v-for="iteration in cadence.iterations"
- :key="iteration.id"
- :value="iteration.id"
- >
- {{ iteration.title }}
- <div v-if="glFeatures.iterationCadences" class="gl-text-gray-400">
- {{ iteration.period }}
- </div>
- </gl-filtered-search-suggestion>
- </template>
- </template>
- </base-token>
-</template>
diff --git a/app/assets/javascripts/vue_shared/components/filtered_search_bar/tokens/weight_token.vue b/app/assets/javascripts/vue_shared/components/filtered_search_bar/tokens/weight_token.vue
deleted file mode 100644
index 280fb234576..00000000000
--- a/app/assets/javascripts/vue_shared/components/filtered_search_bar/tokens/weight_token.vue
+++ /dev/null
@@ -1,66 +0,0 @@
-<script>
-import { GlFilteredSearchSuggestion } from '@gitlab/ui';
-import BaseToken from '~/vue_shared/components/filtered_search_bar/tokens/base_token.vue';
-import { DEFAULT_NONE_ANY, WEIGHT_TOKEN_SUGGESTIONS_SIZE } from '../constants';
-
-const weights = Array.from(Array(WEIGHT_TOKEN_SUGGESTIONS_SIZE), (_, index) => index.toString());
-
-export default {
- components: {
- BaseToken,
- GlFilteredSearchSuggestion,
- },
- props: {
- active: {
- type: Boolean,
- required: true,
- },
- config: {
- type: Object,
- required: true,
- },
- value: {
- type: Object,
- required: true,
- },
- },
- data() {
- return {
- weights,
- };
- },
- computed: {
- defaultWeights() {
- return this.config.defaultWeights || DEFAULT_NONE_ANY;
- },
- },
- methods: {
- getActiveWeight(weightSuggestions, data) {
- return weightSuggestions.find((weight) => weight === data);
- },
- updateWeights(searchTerm) {
- const weight = parseInt(searchTerm, 10);
- this.weights = Number.isNaN(weight) ? weights : [String(weight)];
- },
- },
-};
-</script>
-
-<template>
- <base-token
- :active="active"
- :config="config"
- :value="value"
- :default-suggestions="defaultWeights"
- :suggestions="weights"
- :get-active-token-value="getActiveWeight"
- @fetch-suggestions="updateWeights"
- v-on="$listeners"
- >
- <template #suggestions-list="{ suggestions }">
- <gl-filtered-search-suggestion v-for="weight of suggestions" :key="weight" :value="weight">
- {{ weight }}
- </gl-filtered-search-suggestion>
- </template>
- </base-token>
-</template>
diff --git a/app/graphql/mutations/issues/set_crm_contacts.rb b/app/graphql/mutations/issues/set_crm_contacts.rb
index 7a9e6237eaa..4e49a45d52a 100644
--- a/app/graphql/mutations/issues/set_crm_contacts.rb
+++ b/app/graphql/mutations/issues/set_crm_contacts.rb
@@ -5,7 +5,7 @@ module Mutations
class SetCrmContacts < Base
graphql_name 'IssueSetCrmContacts'
- argument :crm_contact_ids,
+ argument :contact_ids,
[::Types::GlobalIDType[::CustomerRelations::Contact]],
required: true,
description: 'Customer relations contact IDs to set. Replaces existing contacts by default.'
@@ -15,27 +15,27 @@ module Mutations
required: false,
description: 'Changes the operation mode. Defaults to REPLACE.'
- def resolve(project_path:, iid:, crm_contact_ids:, operation_mode: Types::MutationOperationModeEnum.enum[:replace])
+ def resolve(project_path:, iid:, contact_ids:, operation_mode: Types::MutationOperationModeEnum.enum[:replace])
issue = authorized_find!(project_path: project_path, iid: iid)
project = issue.project
raise Gitlab::Graphql::Errors::ResourceNotAvailable, 'Feature disabled' unless Feature.enabled?(:customer_relations, project.group, default_enabled: :yaml)
- crm_contact_ids = crm_contact_ids.compact.map do |crm_contact_id|
- raise Gitlab::Graphql::Errors::ArgumentError, "Contact #{crm_contact_id} is invalid." unless crm_contact_id.respond_to?(:model_id)
+ contact_ids = contact_ids.compact.map do |contact_id|
+ raise Gitlab::Graphql::Errors::ArgumentError, "Contact #{contact_id} is invalid." unless contact_id.respond_to?(:model_id)
- crm_contact_id.model_id.to_i
+ contact_id.model_id.to_i
end
attribute_name = case operation_mode
when Types::MutationOperationModeEnum.enum[:append]
- :add_crm_contact_ids
+ :add_ids
when Types::MutationOperationModeEnum.enum[:remove]
- :remove_crm_contact_ids
+ :remove_ids
else
- :crm_contact_ids
+ :replace_ids
end
- response = ::Issues::SetCrmContactsService.new(project: project, current_user: current_user, params: { attribute_name => crm_contact_ids })
+ response = ::Issues::SetCrmContactsService.new(project: project, current_user: current_user, params: { attribute_name => contact_ids })
.execute(issue)
{
diff --git a/app/models/application_record.rb b/app/models/application_record.rb
index bcd8bdd6638..b64e6c59817 100644
--- a/app/models/application_record.rb
+++ b/app/models/application_record.rb
@@ -7,6 +7,10 @@ class ApplicationRecord < ActiveRecord::Base
self.abstract_class = true
+ # We should avoid using pluck https://docs.gitlab.com/ee/development/sql.html#plucking-ids
+ # but, if we are going to use it, let's try and limit the number of records
+ MAX_PLUCK = 1_000
+
alias_method :reset, :reload
def self.without_order
diff --git a/app/models/customer_relations/contact.rb b/app/models/customer_relations/contact.rb
index 5898bc3412f..d8669f1f4c2 100644
--- a/app/models/customer_relations/contact.rb
+++ b/app/models/customer_relations/contact.rb
@@ -25,6 +25,13 @@ class CustomerRelations::Contact < ApplicationRecord
validates :description, length: { maximum: 1024 }
validate :validate_email_format
+ def self.find_ids_by_emails(group_id, emails)
+ raise ArgumentError, "Cannot lookup more than #{MAX_PLUCK} emails" if emails.length > MAX_PLUCK
+
+ where(group_id: group_id, email: emails)
+ .pluck(:id)
+ end
+
private
def validate_email_format
diff --git a/app/models/customer_relations/issue_contact.rb b/app/models/customer_relations/issue_contact.rb
index 98faf8d6644..78f662b6a58 100644
--- a/app/models/customer_relations/issue_contact.rb
+++ b/app/models/customer_relations/issue_contact.rb
@@ -8,6 +8,14 @@ class CustomerRelations::IssueContact < ApplicationRecord
validate :contact_belongs_to_issue_group
+ def self.find_contact_ids_by_emails(issue_id, emails)
+ raise ArgumentError, "Cannot lookup more than #{MAX_PLUCK} emails" if emails.length > MAX_PLUCK
+
+ joins(:contact)
+ .where(issue_id: issue_id, customer_relations_contacts: { email: emails })
+ .pluck(:contact_id)
+ end
+
private
def contact_belongs_to_issue_group
diff --git a/app/services/ci/create_pipeline_service.rb b/app/services/ci/create_pipeline_service.rb
index 540e8f7b970..0548566c271 100644
--- a/app/services/ci/create_pipeline_service.rb
+++ b/app/services/ci/create_pipeline_service.rb
@@ -2,10 +2,14 @@
module Ci
class CreatePipelineService < BaseService
- attr_reader :pipeline
+ attr_reader :pipeline, :logger
CreateError = Class.new(StandardError)
+ LOG_MAX_DURATION_THRESHOLD = 3.seconds
+ LOG_MAX_PIPELINE_SIZE = 2_000
+ LOG_MAX_CREATION_THRESHOLD = 20.seconds
+
SEQUENCE = [Gitlab::Ci::Pipeline::Chain::Build,
Gitlab::Ci::Pipeline::Chain::Build::Associations,
Gitlab::Ci::Pipeline::Chain::Validate::Abilities,
@@ -53,6 +57,7 @@ module Ci
# @return [Ci::Pipeline] The created Ci::Pipeline object.
# rubocop: disable Metrics/ParameterLists
def execute(source, ignore_skip_ci: false, save_on_errors: true, trigger_request: nil, schedule: nil, merge_request: nil, external_pull_request: nil, bridge: nil, **options, &block)
+ @logger = build_logger
@pipeline = Ci::Pipeline.new
command = Gitlab::Ci::Pipeline::Chain::Command.new(
@@ -76,6 +81,7 @@ module Ci
push_options: params[:push_options] || {},
chat_data: params[:chat_data],
bridge: bridge,
+ logger: @logger,
**extra_options(**options))
# Ensure we never persist the pipeline when dry_run: true
@@ -98,6 +104,9 @@ module Ci
else
ServiceResponse.success(payload: pipeline)
end
+
+ ensure
+ @logger.commit(pipeline: pipeline, caller: self.class.name)
end
# rubocop: enable Metrics/ParameterLists
@@ -135,6 +144,32 @@ module Ci
def extra_options(content: nil, dry_run: false)
{ content: content, dry_run: dry_run }
end
+
+ def build_logger
+ Gitlab::Ci::Pipeline::Logger.new(project: project) do |l|
+ l.log_when do |observations|
+ observations.any? do |name, values|
+ values.any? &&
+ name.to_s.end_with?('duration_s') &&
+ values.max >= LOG_MAX_DURATION_THRESHOLD
+ end
+ end
+
+ l.log_when do |observations|
+ values = observations['pipeline_size_count']
+ next false if values.empty?
+
+ values.max >= LOG_MAX_PIPELINE_SIZE
+ end
+
+ l.log_when do |observations|
+ values = observations['pipeline_creation_duration_s']
+ next false if values.empty?
+
+ values.max >= LOG_MAX_CREATION_THRESHOLD
+ end
+ end
+ end
end
end
diff --git a/app/services/issues/set_crm_contacts_service.rb b/app/services/issues/set_crm_contacts_service.rb
index 13fe30b5ac8..d02c128d207 100644
--- a/app/services/issues/set_crm_contacts_service.rb
+++ b/app/services/issues/set_crm_contacts_service.rb
@@ -2,10 +2,9 @@
module Issues
class SetCrmContactsService < ::BaseProjectService
- attr_accessor :issue, :errors
-
MAX_ADDITIONAL_CONTACTS = 6
+ # Replacing contacts by email is not currently supported
def execute(issue)
@issue = issue
@errors = []
@@ -13,12 +12,15 @@ module Issues
return error_no_permissions unless allowed?
return error_invalid_params unless valid_params?
- determine_changes if params[:crm_contact_ids]
-
+ @existing_ids = issue.issue_customer_relations_contacts.map(&:contact_id)
+ determine_changes if params[:replace_ids].present?
return error_too_many if too_many?
- add_contacts if params[:add_crm_contact_ids]
- remove_contacts if params[:remove_crm_contact_ids]
+ add if params[:add_ids].present?
+ remove if params[:remove_ids].present?
+
+ add_by_email if params[:add_emails].present?
+ remove_by_email if params[:remove_emails].present?
if issue.valid?
ServiceResponse.success(payload: issue)
@@ -26,20 +28,31 @@ module Issues
# The default error isn't very helpful: "Issue customer relations contacts is invalid"
issue.errors.delete(:issue_customer_relations_contacts)
issue.errors.add(:issue_customer_relations_contacts, errors.to_sentence)
- ServiceResponse.error(payload: issue, message: issue.errors.full_messages)
+ ServiceResponse.error(payload: issue, message: issue.errors.full_messages.to_sentence)
end
end
private
+ attr_accessor :issue, :errors, :existing_ids
+
def determine_changes
- existing_contact_ids = issue.issue_customer_relations_contacts.map(&:contact_id)
- params[:add_crm_contact_ids] = params[:crm_contact_ids] - existing_contact_ids
- params[:remove_crm_contact_ids] = existing_contact_ids - params[:crm_contact_ids]
+ params[:add_ids] = params[:replace_ids] - existing_ids
+ params[:remove_ids] = existing_ids - params[:replace_ids]
+ end
+
+ def add
+ add_by_id(params[:add_ids])
+ end
+
+ def add_by_email
+ contact_ids = ::CustomerRelations::Contact.find_ids_by_emails(project_group.id, params[:add_emails])
+ add_by_id(contact_ids)
end
- def add_contacts
- params[:add_crm_contact_ids].uniq.each do |contact_id|
+ def add_by_id(contact_ids)
+ contact_ids -= existing_ids
+ contact_ids.uniq.each do |contact_id|
issue_contact = issue.issue_customer_relations_contacts.create(contact_id: contact_id)
unless issue_contact.persisted?
@@ -49,9 +62,19 @@ module Issues
end
end
- def remove_contacts
+ def remove
+ remove_by_id(params[:remove_ids])
+ end
+
+ def remove_by_email
+ contact_ids = ::CustomerRelations::IssueContact.find_contact_ids_by_emails(issue.id, params[:remove_emails])
+ remove_by_id(contact_ids)
+ end
+
+ def remove_by_id(contact_ids)
+ contact_ids &= existing_ids
issue.issue_customer_relations_contacts
- .where(contact_id: params[:remove_crm_contact_ids]) # rubocop: disable CodeReuse/ActiveRecord
+ .where(contact_id: contact_ids) # rubocop: disable CodeReuse/ActiveRecord
.delete_all
end
@@ -64,27 +87,43 @@ module Issues
end
def set_present?
- params[:crm_contact_ids].present?
+ params[:replace_ids].present?
end
def add_or_remove_present?
- params[:add_crm_contact_ids].present? || params[:remove_crm_contact_ids].present?
+ add_present? || remove_present?
+ end
+
+ def add_present?
+ params[:add_ids].present? || params[:add_emails].present?
+ end
+
+ def remove_present?
+ params[:remove_ids].present? || params[:remove_emails].present?
end
def too_many?
- params[:add_crm_contact_ids] && params[:add_crm_contact_ids].length > MAX_ADDITIONAL_CONTACTS
+ too_many_ids? || too_many_emails?
+ end
+
+ def too_many_ids?
+ params[:add_ids] && params[:add_ids].length > MAX_ADDITIONAL_CONTACTS
+ end
+
+ def too_many_emails?
+ params[:add_emails] && params[:add_emails].length > MAX_ADDITIONAL_CONTACTS
end
def error_no_permissions
- ServiceResponse.error(message: ['You have insufficient permissions to set customer relations contacts for this issue'])
+ ServiceResponse.error(message: _('You have insufficient permissions to set customer relations contacts for this issue'))
end
def error_invalid_params
- ServiceResponse.error(message: ['You cannot combine crm_contact_ids with add_crm_contact_ids or remove_crm_contact_ids'])
+ ServiceResponse.error(message: _('You cannot combine replace_ids with add_ids or remove_ids'))
end
def error_too_many
- ServiceResponse.error(payload: issue, message: ["You can only add up to #{MAX_ADDITIONAL_CONTACTS} contacts at one time"])
+ ServiceResponse.error(payload: issue, message: _("You can only add up to %{max_contacts} contacts at one time") % { max_contacts: MAX_ADDITIONAL_CONTACTS })
end
end
end
diff --git a/app/services/merge_requests/squash_service.rb b/app/services/merge_requests/squash_service.rb
index 102f78c6a9b..0600fd1d740 100644
--- a/app/services/merge_requests/squash_service.rb
+++ b/app/services/merge_requests/squash_service.rb
@@ -5,7 +5,7 @@ module MergeRequests
def execute
# If performing a squash would result in no change, then
# immediately return a success message without performing a squash
- if merge_request.commits_count < 2 && message.nil?
+ if merge_request.commits_count == 1 && message == merge_request.first_commit.safe_message
return success(squash_sha: merge_request.diff_head_sha)
end
@@ -17,7 +17,7 @@ module MergeRequests
private
def squash!
- squash_sha = repository.squash(current_user, merge_request, message || merge_request.default_squash_commit_message)
+ squash_sha = repository.squash(current_user, merge_request, message)
success(squash_sha: squash_sha)
rescue StandardError => e
@@ -39,7 +39,7 @@ module MergeRequests
end
def message
- params[:squash_commit_message].presence
+ params[:squash_commit_message].presence || merge_request.default_squash_commit_message
end
end
end
diff --git a/config/feature_flags/development/customer_relations.yml b/config/feature_flags/development/customer_relations.yml
index 207f675423a..58783708921 100644
--- a/config/feature_flags/development/customer_relations.yml
+++ b/config/feature_flags/development/customer_relations.yml
@@ -1,7 +1,7 @@
---
name: customer_relations
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/69472
-rollout_issue_url:
+rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/346082
milestone: '14.3'
type: development
group: group::product planning
diff --git a/config/feature_flags/ops/ci_pipeline_creation_logger.yml b/config/feature_flags/ops/ci_pipeline_creation_logger.yml
new file mode 100644
index 00000000000..8f886be2fd5
--- /dev/null
+++ b/config/feature_flags/ops/ci_pipeline_creation_logger.yml
@@ -0,0 +1,8 @@
+---
+name: ci_pipeline_creation_logger
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/72996
+rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/345779
+milestone: '14.5'
+type: ops
+group: group::pipeline execution
+default_enabled: false
diff --git a/config/metrics/counts_28d/20211109114953_i_quickactions_add_contacts_monthly.yml b/config/metrics/counts_28d/20211109114953_i_quickactions_add_contacts_monthly.yml
new file mode 100644
index 00000000000..ce6ab2f1580
--- /dev/null
+++ b/config/metrics/counts_28d/20211109114953_i_quickactions_add_contacts_monthly.yml
@@ -0,0 +1,25 @@
+---
+key_path: redis_hll_counters.quickactions.i_quickactions_add_contacts_monthly
+description: Count of MAU using the `/add_contacts` quick action
+product_section: dev
+product_stage: plan
+product_group: group::product planning
+product_category: service_desk
+value_type: number
+status: active
+milestone: '14.5'
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/73413
+time_frame: 28d
+data_source: redis_hll
+data_category: optional
+instrumentation_class: RedisHLLMetric
+options:
+ events:
+ - i_quickactions_add_contacts
+distribution:
+ - ce
+ - ee
+tier:
+ - free
+ - premium
+ - ultimate
diff --git a/config/metrics/counts_28d/20211109120251_i_quickactions_remove_contacts_monthly.yml b/config/metrics/counts_28d/20211109120251_i_quickactions_remove_contacts_monthly.yml
new file mode 100644
index 00000000000..7a544890e5a
--- /dev/null
+++ b/config/metrics/counts_28d/20211109120251_i_quickactions_remove_contacts_monthly.yml
@@ -0,0 +1,25 @@
+---
+key_path: redis_hll_counters.quickactions.i_quickactions_remove_contacts_monthly
+description: Count of MAU using the `/remove_contacts` quick action
+product_section: dev
+product_stage: plan
+product_group: group::product planning
+product_category: service_desk
+value_type: number
+status: active
+milestone: '14.5'
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/73413
+time_frame: 28d
+data_source: redis_hll
+data_category: optional
+instrumentation_class: RedisHLLMetric
+options:
+ events:
+ - i_quickactions_remove_contacts
+distribution:
+ - ce
+ - ee
+tier:
+ - free
+ - premium
+ - ultimate
diff --git a/config/metrics/counts_7d/20211109114948_i_quickactions_add_contacts_weekly.yml b/config/metrics/counts_7d/20211109114948_i_quickactions_add_contacts_weekly.yml
new file mode 100644
index 00000000000..356d969cea5
--- /dev/null
+++ b/config/metrics/counts_7d/20211109114948_i_quickactions_add_contacts_weekly.yml
@@ -0,0 +1,25 @@
+---
+key_path: redis_hll_counters.quickactions.i_quickactions_add_contacts_weekly
+description: Count of WAU using the `/add_contacts` quick action
+product_section: dev
+product_stage: plan
+product_group: group::product planning
+product_category: service_desk
+value_type: number
+status: active
+milestone: '14.5'
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/73413
+time_frame: 7d
+data_source: redis_hll
+data_category: optional
+instrumentation_class: RedisHLLMetric
+options:
+ events:
+ - i_quickactions_add_contacts
+distribution:
+ - ce
+ - ee
+tier:
+ - free
+ - premium
+ - ultimate
diff --git a/config/metrics/counts_7d/20211109120245_i_quickactions_remove_contacts_weekly.yml b/config/metrics/counts_7d/20211109120245_i_quickactions_remove_contacts_weekly.yml
new file mode 100644
index 00000000000..7a660ddffd2
--- /dev/null
+++ b/config/metrics/counts_7d/20211109120245_i_quickactions_remove_contacts_weekly.yml
@@ -0,0 +1,25 @@
+---
+key_path: redis_hll_counters.quickactions.i_quickactions_remove_contacts_weekly
+description: Count of WAU using the `/remove_contacts` quick action
+product_section: dev
+product_stage: plan
+product_group: group::product planning
+product_category: service_desk
+value_type: number
+status: active
+milestone: '14.5'
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/73413
+time_frame: 7d
+data_source: redis_hll
+data_category: optional
+instrumentation_class: RedisHLLMetric
+options:
+ events:
+ - i_quickactions_remove_contacts
+distribution:
+ - ce
+ - ee
+tier:
+ - free
+ - premium
+ - ultimate
diff --git a/doc/administration/troubleshooting/gitlab_rails_cheat_sheet.md b/doc/administration/troubleshooting/gitlab_rails_cheat_sheet.md
index 1d2c1cbabfd..4ed6d373bc3 100644
--- a/doc/administration/troubleshooting/gitlab_rails_cheat_sheet.md
+++ b/doc/administration/troubleshooting/gitlab_rails_cheat_sheet.md
@@ -97,14 +97,15 @@ Rails.cache.instance_variable_get(:@data).keys
## Profile a page
```ruby
+url = '<url/of/the/page>'
+
# Before 11.6.0
logger = Logger.new($stdout)
-admin_token = User.find_by_username('ADMIN_USERNAME').personal_access_tokens.first.token
-app.get("URL/?private_token=#{admin_token}")
+admin_token = User.find_by_username('<admin-username>').personal_access_tokens.first.token
+app.get("#{url}/?private_token=#{admin_token}")
# From 11.6.0
-admin = User.find_by_username('ADMIN_USERNAME')
-url = "/url/goes/here"
+admin = User.find_by_username('<admin-username>')
Gitlab::Profiler.with_user(admin) { app.get(url) }
```
@@ -112,8 +113,8 @@ Gitlab::Profiler.with_user(admin) { app.get(url) }
```ruby
logger = Logger.new($stdout)
-admin = User.find_by_username('ADMIN_USERNAME')
-Gitlab::Profiler.profile('URL', logger: logger, user: admin)
+admin = User.find_by_username('<admin-username>')
+Gitlab::Profiler.profile('<url/of/the/page>', logger: logger, user: admin)
```
## Time an operation
@@ -414,12 +415,14 @@ p.create_wiki ### creates the wiki project on the filesystem
### In case of issue boards not loading properly and it's getting time out. We need to call the Issue Rebalancing service to fix this
```ruby
-p = Project.find_by_full_path('PROJECT PATH')
+p = Project.find_by_full_path('<username-or-group>/<project-name>')
Issues::RelativePositionRebalancingService.new(p.root_namespace.all_projects).execute
```
-## Imports / Exports
+## Imports and exports
+
+### Import a project
```ruby
# Find the project and get the error
@@ -462,18 +465,19 @@ Clear the cache:
sudo gitlab-rake cache:clear
```
-### Export a repository
+### Export a project
It's typically recommended to export a project through [the web interface](../../user/project/settings/import_export.md#export-a-project-and-its-data) or through [the API](../../api/project_import_export.md). In situations where this is not working as expected, it may be preferable to export a project directly via the Rails console:
```ruby
-user = User.find_by_username('USERNAME')
-project = Project.find_by_full_path('PROJECT_PATH')
+user = User.find_by_username('<username>')
+# Sufficient permissions needed
+# Read https://docs.gitlab.com/ee/user/permissions.html#project-members-permissions
+
+project = Project.find_by_full_path('<username-or-group>/<project-name>')
Projects::ImportExport::ExportService.new(project, user).execute
```
-If the project you wish to export is available at `https://gitlab.example.com/baltig/pipeline-templates`, the value to use for `PROJECT_PATH` would be `baltig/pipeline-templates`.
-
If this all runs successfully, you see an output like the following before being returned to the Rails console prompt:
```ruby
@@ -482,6 +486,11 @@ If this all runs successfully, you see an output like the following before being
The exported project is located within a `.tar.gz` file in `/var/opt/gitlab/gitlab-rails/uploads/-/system/import_export_upload/export_file/`.
+If this fails, [enable verbose logging](navigating_gitlab_via_rails_console.md#looking-up-database-persisted-objects),
+then repeat the above procedure,
+and report the output to
+[GitLab Support](https://about.gitlab.com/support/).
+
## Repository
### Search sequence of pushes to a repository
@@ -782,7 +791,7 @@ end
emails = [email1, email2]
emails.each do |e|
- delete_bad_scim(e,'GROUPPATH')
+ delete_bad_scim(e,'<group-path>')
end
```
diff --git a/doc/api/graphql/reference/index.md b/doc/api/graphql/reference/index.md
index 8a426e94d1f..b86ffbe0d84 100644
--- a/doc/api/graphql/reference/index.md
+++ b/doc/api/graphql/reference/index.md
@@ -2846,7 +2846,7 @@ Input type: `IssueSetCrmContactsInput`
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="mutationissuesetcrmcontactsclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |
-| <a id="mutationissuesetcrmcontactscrmcontactids"></a>`crmContactIds` | [`[CustomerRelationsContactID!]!`](#customerrelationscontactid) | Customer relations contact IDs to set. Replaces existing contacts by default. |
+| <a id="mutationissuesetcrmcontactscontactids"></a>`contactIds` | [`[CustomerRelationsContactID!]!`](#customerrelationscontactid) | Customer relations contact IDs to set. Replaces existing contacts by default. |
| <a id="mutationissuesetcrmcontactsiid"></a>`iid` | [`String!`](#string) | IID of the issue to mutate. |
| <a id="mutationissuesetcrmcontactsoperationmode"></a>`operationMode` | [`MutationOperationMode`](#mutationoperationmode) | Changes the operation mode. Defaults to REPLACE. |
| <a id="mutationissuesetcrmcontactsprojectpath"></a>`projectPath` | [`ID!`](#id) | Project the issue to mutate is in. |
@@ -8339,6 +8339,7 @@ Represents an epic on an issue board.
| <a id="boardepicdownvotes"></a>`downvotes` | [`Int!`](#int) | Number of downvotes the epic has received. |
| <a id="boardepicduedate"></a>`dueDate` | [`Time`](#time) | Due date of the epic. |
| <a id="boardepicduedatefixed"></a>`dueDateFixed` | [`Time`](#time) | Fixed due date of the epic. |
+| <a id="boardepicduedatefrominheritedsource"></a>`dueDateFromInheritedSource` | [`Time`](#time) | Inherited due date of the epic from child epics or milestones. |
| <a id="boardepicduedatefrommilestones"></a>`dueDateFromMilestones` | [`Time`](#time) | Inherited due date of the epic from milestones. |
| <a id="boardepicduedateisfixed"></a>`dueDateIsFixed` | [`Boolean`](#boolean) | Indicates if the due date has been manually set. |
| <a id="boardepicevents"></a>`events` | [`EventConnection`](#eventconnection) | List of events associated with the object. (see [Connections](#connections)) |
@@ -8358,6 +8359,7 @@ Represents an epic on an issue board.
| <a id="boardepicrelativeposition"></a>`relativePosition` | [`Int`](#int) | Relative position of the epic in the epic tree. |
| <a id="boardepicstartdate"></a>`startDate` | [`Time`](#time) | Start date of the epic. |
| <a id="boardepicstartdatefixed"></a>`startDateFixed` | [`Time`](#time) | Fixed start date of the epic. |
+| <a id="boardepicstartdatefrominheritedsource"></a>`startDateFromInheritedSource` | [`Time`](#time) | Inherited start date of the epic from child epics or milestones. |
| <a id="boardepicstartdatefrommilestones"></a>`startDateFromMilestones` | [`Time`](#time) | Inherited start date of the epic from milestones. |
| <a id="boardepicstartdateisfixed"></a>`startDateIsFixed` | [`Boolean`](#boolean) | Indicates if the start date has been manually set. |
| <a id="boardepicstate"></a>`state` | [`EpicState!`](#epicstate) | State of the epic. |
@@ -9765,6 +9767,7 @@ Represents an epic.
| <a id="epicdownvotes"></a>`downvotes` | [`Int!`](#int) | Number of downvotes the epic has received. |
| <a id="epicduedate"></a>`dueDate` | [`Time`](#time) | Due date of the epic. |
| <a id="epicduedatefixed"></a>`dueDateFixed` | [`Time`](#time) | Fixed due date of the epic. |
+| <a id="epicduedatefrominheritedsource"></a>`dueDateFromInheritedSource` | [`Time`](#time) | Inherited due date of the epic from child epics or milestones. |
| <a id="epicduedatefrommilestones"></a>`dueDateFromMilestones` | [`Time`](#time) | Inherited due date of the epic from milestones. |
| <a id="epicduedateisfixed"></a>`dueDateIsFixed` | [`Boolean`](#boolean) | Indicates if the due date has been manually set. |
| <a id="epicevents"></a>`events` | [`EventConnection`](#eventconnection) | List of events associated with the object. (see [Connections](#connections)) |
@@ -9784,6 +9787,7 @@ Represents an epic.
| <a id="epicrelativeposition"></a>`relativePosition` | [`Int`](#int) | Relative position of the epic in the epic tree. |
| <a id="epicstartdate"></a>`startDate` | [`Time`](#time) | Start date of the epic. |
| <a id="epicstartdatefixed"></a>`startDateFixed` | [`Time`](#time) | Fixed start date of the epic. |
+| <a id="epicstartdatefrominheritedsource"></a>`startDateFromInheritedSource` | [`Time`](#time) | Inherited start date of the epic from child epics or milestones. |
| <a id="epicstartdatefrommilestones"></a>`startDateFromMilestones` | [`Time`](#time) | Inherited start date of the epic from milestones. |
| <a id="epicstartdateisfixed"></a>`startDateIsFixed` | [`Boolean`](#boolean) | Indicates if the start date has been manually set. |
| <a id="epicstate"></a>`state` | [`EpicState!`](#epicstate) | State of the epic. |
diff --git a/doc/development/testing_guide/best_practices.md b/doc/development/testing_guide/best_practices.md
index 6a739d9e1a5..152edee9921 100644
--- a/doc/development/testing_guide/best_practices.md
+++ b/doc/development/testing_guide/best_practices.md
@@ -483,6 +483,43 @@ expect(page).to have_css '[data-testid="weight"]', text: 2
expect(page).to have_css '.atwho-view ul', visible: true
```
+##### Interacting with modals
+
+Use the `within_modal` helper to interact with [GitLab UI modals](https://gitlab-org.gitlab.io/gitlab-ui/?path=/story/base-modal--default).
+
+```ruby
+include Spec::Support::Helpers::ModalHelpers
+
+within_modal do
+ expect(page).to have_link _('UI testing docs')
+
+ fill_in _('Search projects'), with: 'gitlab'
+
+ click_button 'Continue'
+end
+```
+
+Furthermore, you can use `accept_gl_confirm` for confirmation modals that only need to be accepted.
+This is helpful when migrating [`window.confirm()`](https://developer.mozilla.org/en-US/docs/Web/API/Window/confirm) to [`confirmAction`](https://gitlab.com/gitlab-org/gitlab/-/blob/ee280ed2b763d1278ad38c6e7e8a0aff092f617a/app/assets/javascripts/lib/utils/confirm_via_gl_modal/confirm_via_gl_modal.js#L3).
+
+```ruby
+include Spec::Support::Helpers::ModalHelpers
+
+accept_gl_confirm do
+ click_button 'Delete user'
+end
+```
+
+You can also pass the expected confirmation message and button text to `accept_gl_confirm`.
+
+```ruby
+include Spec::Support::Helpers::ModalHelpers
+
+accept_gl_confirm('Are you sure you want to delete this user?', button_text: 'Delete') do
+ click_button 'Delete user'
+end
+```
+
##### Other useful methods
After you retrieve an element using a [finder method](#finders), you can invoke a number of
diff --git a/doc/topics/plan_and_track.md b/doc/topics/plan_and_track.md
index abede62c00b..d4d69959b6a 100644
--- a/doc/topics/plan_and_track.md
+++ b/doc/topics/plan_and_track.md
@@ -40,4 +40,7 @@ Get work done as a team.
Align your work across teams.
- [Epics](../user/group/epics/index.md)
+ - [Multi-level epics](../user/group/epics/manage_epics.md#multi-level-child-epics)
+ - [Epic boards](../user/group/epics/epic_boards.md)
+ - [View health status](../user/project/issues/managing_issues.md#health-status)
- [Roadmaps](../user/group/roadmap/index.md)
diff --git a/lib/gitlab/ci/config.rb b/lib/gitlab/ci/config.rb
index 6f149385969..42b487fdf81 100644
--- a/lib/gitlab/ci/config.rb
+++ b/lib/gitlab/ci/config.rb
@@ -17,21 +17,27 @@ module Gitlab
Config::Yaml::Tags::TagError
].freeze
- attr_reader :root, :context, :source_ref_path, :source
+ attr_reader :root, :context, :source_ref_path, :source, :logger
- def initialize(config, project: nil, pipeline: nil, sha: nil, user: nil, parent_pipeline: nil, source: nil)
+ def initialize(config, project: nil, pipeline: nil, sha: nil, user: nil, parent_pipeline: nil, source: nil, logger: nil)
+ @logger = logger || ::Gitlab::Ci::Pipeline::Logger.new(project: project)
@source_ref_path = pipeline&.source_ref_path
- @context = build_context(project: project, pipeline: pipeline, sha: sha, user: user, parent_pipeline: parent_pipeline)
+ @context = self.logger.instrument(:config_build_context) do
+ build_context(project: project, pipeline: pipeline, sha: sha, user: user, parent_pipeline: parent_pipeline)
+ end
+
@context.set_deadline(TIMEOUT_SECONDS)
@source = source
- @config = expand_config(config)
-
- @root = Entry::Root.new(@config)
- @root.compose!
+ @config = self.logger.instrument(:config_expand) do
+ expand_config(config)
+ end
+ @root = self.logger.instrument(:config_compose) do
+ Entry::Root.new(@config).tap(&:compose!)
+ end
rescue *rescue_errors => e
raise Config::ConfigError, e.message
end
@@ -94,11 +100,25 @@ module Gitlab
end
def build_config(config)
- initial_config = Config::Yaml.load!(config)
- initial_config = Config::External::Processor.new(initial_config, @context).perform
- initial_config = Config::Extendable.new(initial_config).to_hash
- initial_config = Config::Yaml::Tags::Resolver.new(initial_config).to_hash
- Config::EdgeStagesInjector.new(initial_config).to_hash
+ initial_config = logger.instrument(:config_yaml_load) do
+ Config::Yaml.load!(config)
+ end
+
+ initial_config = logger.instrument(:config_external_process) do
+ Config::External::Processor.new(initial_config, @context).perform
+ end
+
+ initial_config = logger.instrument(:config_yaml_extend) do
+ Config::Extendable.new(initial_config).to_hash
+ end
+
+ initial_config = logger.instrument(:config_tags_resolve) do
+ Config::Yaml::Tags::Resolver.new(initial_config).to_hash
+ end
+
+ logger.instrument(:config_stages_inject) do
+ Config::EdgeStagesInjector.new(initial_config).to_hash
+ end
end
def find_sha(project)
@@ -115,10 +135,20 @@ module Gitlab
sha: sha || find_sha(project),
user: user,
parent_pipeline: parent_pipeline,
- variables: build_variables(project: project, pipeline: pipeline))
+ variables: build_variables(project: project, pipeline: pipeline),
+ logger: logger)
end
def build_variables(project:, pipeline:)
+ logger.instrument(:config_build_variables) do
+ build_variables_without_instrumentation(
+ project: project,
+ pipeline: pipeline
+ )
+ end
+ end
+
+ def build_variables_without_instrumentation(project:, pipeline:)
Gitlab::Ci::Variables::Collection.new.tap do |variables|
break variables unless project
diff --git a/lib/gitlab/ci/config/external/context.rb b/lib/gitlab/ci/config/external/context.rb
index 51624dc30ea..c94180a5306 100644
--- a/lib/gitlab/ci/config/external/context.rb
+++ b/lib/gitlab/ci/config/external/context.rb
@@ -10,9 +10,11 @@ module Gitlab
TimeoutError = Class.new(StandardError)
attr_reader :project, :sha, :user, :parent_pipeline, :variables
- attr_reader :expandset, :execution_deadline
+ attr_reader :expandset, :execution_deadline, :logger
- def initialize(project: nil, sha: nil, user: nil, parent_pipeline: nil, variables: [])
+ delegate :instrument, to: :logger
+
+ def initialize(project: nil, sha: nil, user: nil, parent_pipeline: nil, variables: [], logger: nil)
@project = project
@sha = sha
@user = user
@@ -20,6 +22,7 @@ module Gitlab
@variables = variables
@expandset = Set.new
@execution_deadline = 0
+ @logger = logger || Gitlab::Ci::Pipeline::Logger.new(project: project)
yield self if block_given?
end
@@ -40,6 +43,7 @@ module Gitlab
self.class.new(**attrs) do |ctx|
ctx.expandset = expandset
ctx.execution_deadline = execution_deadline
+ ctx.logger = logger
end
end
@@ -60,7 +64,7 @@ module Gitlab
protected
- attr_writer :expandset, :execution_deadline
+ attr_writer :expandset, :execution_deadline, :logger
private
diff --git a/lib/gitlab/ci/config/external/mapper.rb b/lib/gitlab/ci/config/external/mapper.rb
index 95f1a842c50..7036fddae0b 100644
--- a/lib/gitlab/ci/config/external/mapper.rb
+++ b/lib/gitlab/ci/config/external/mapper.rb
@@ -30,6 +30,18 @@ module Gitlab
def process
return [] if locations.empty?
+ logger.instrument(:config_mapper_process) do
+ process_without_instrumentation
+ end
+ end
+
+ private
+
+ attr_reader :locations, :context
+
+ delegate :expandset, :logger, to: :context
+
+ def process_without_instrumentation
locations
.compact
.map(&method(:normalize_location))
@@ -41,14 +53,14 @@ module Gitlab
.map(&method(:select_first_matching))
end
- private
-
- attr_reader :locations, :context
-
- delegate :expandset, to: :context
+ def normalize_location(location)
+ logger.instrument(:config_mapper_normalize) do
+ normalize_location_without_instrumentation(location)
+ end
+ end
# convert location if String to canonical form
- def normalize_location(location)
+ def normalize_location_without_instrumentation(location)
if location.is_a?(String)
expanded_location = expand_variables(location)
normalize_location_string(expanded_location)
@@ -58,6 +70,12 @@ module Gitlab
end
def verify_rules(location)
+ logger.instrument(:config_mapper_rules) do
+ verify_rules_without_instrumentation(location)
+ end
+ end
+
+ def verify_rules_without_instrumentation(location)
return unless Rules.new(location[:rules]).evaluate(context).pass?
location
@@ -72,6 +90,12 @@ module Gitlab
end
def expand_wildcard_paths(location)
+ logger.instrument(:config_mapper_wildcards) do
+ expand_wildcard_paths_without_instrumentation(location)
+ end
+ end
+
+ def expand_wildcard_paths_without_instrumentation(location)
# We only support local files for wildcard paths
return location unless location[:local] && location[:local].include?('*')
@@ -89,6 +113,12 @@ module Gitlab
end
def verify_duplicates!(location)
+ logger.instrument(:config_mapper_verify) do
+ verify_duplicates_without_instrumentation!(location)
+ end
+ end
+
+ def verify_duplicates_without_instrumentation!(location)
if expandset.count >= MAX_INCLUDES
raise TooManyIncludesError, "Maximum of #{MAX_INCLUDES} nested includes are allowed!"
end
@@ -106,6 +136,12 @@ module Gitlab
end
def select_first_matching(location)
+ logger.instrument(:config_mapper_select) do
+ select_first_matching_without_instrumentation(location)
+ end
+ end
+
+ def select_first_matching_without_instrumentation(location)
matching = FILE_CLASSES.map do |file_class|
file_class.new(location, context)
end.select(&:matching?)
@@ -116,6 +152,12 @@ module Gitlab
end
def expand_variables(data)
+ logger.instrument(:config_mapper_variables) do
+ expand_variables_without_instrumentation(data)
+ end
+ end
+
+ def expand_variables_without_instrumentation(data)
if data.is_a?(String)
expand(data)
else
diff --git a/lib/gitlab/ci/config/external/processor.rb b/lib/gitlab/ci/config/external/processor.rb
index de69a1b1e8f..6a4aee26d80 100644
--- a/lib/gitlab/ci/config/external/processor.rb
+++ b/lib/gitlab/ci/config/external/processor.rb
@@ -7,10 +7,13 @@ module Gitlab
class Processor
IncludeError = Class.new(StandardError)
+ attr_reader :context, :logger
+
def initialize(values, context)
@values = values
@external_files = External::Mapper.new(values, context).process
@content = {}
+ @logger = context.logger
rescue External::Mapper::Error,
OpenSSL::SSL::SSLError => e
raise IncludeError, e.message
@@ -29,13 +32,17 @@ module Gitlab
def validate_external_files!
@external_files.each do |file|
- raise IncludeError, file.error_message unless file.valid?
+ logger.instrument(:config_external_verify) do
+ raise IncludeError, file.error_message unless file.valid?
+ end
end
end
def merge_external_files!
@external_files.each do |file|
- @content.deep_merge!(file.to_hash)
+ logger.instrument(:config_external_merge) do
+ @content.deep_merge!(file.to_hash)
+ end
end
end
diff --git a/lib/gitlab/ci/pipeline/chain/base.rb b/lib/gitlab/ci/pipeline/chain/base.rb
index 9b494f3a7ec..28567437719 100644
--- a/lib/gitlab/ci/pipeline/chain/base.rb
+++ b/lib/gitlab/ci/pipeline/chain/base.rb
@@ -7,7 +7,7 @@ module Gitlab
class Base
attr_reader :pipeline, :command, :config
- delegate :project, :current_user, :parent_pipeline, to: :command
+ delegate :project, :current_user, :parent_pipeline, :logger, to: :command
def initialize(pipeline, command)
@pipeline = pipeline
diff --git a/lib/gitlab/ci/pipeline/chain/command.rb b/lib/gitlab/ci/pipeline/chain/command.rb
index beb8801096b..c466b8b36d0 100644
--- a/lib/gitlab/ci/pipeline/chain/command.rb
+++ b/lib/gitlab/ci/pipeline/chain/command.rb
@@ -11,7 +11,7 @@ module Gitlab
:trigger_request, :schedule, :merge_request, :external_pull_request,
:ignore_skip_ci, :save_incompleted,
:seeds_block, :variables_attributes, :push_options,
- :chat_data, :allow_mirror_update, :bridge, :content, :dry_run,
+ :chat_data, :allow_mirror_update, :bridge, :content, :dry_run, :logger,
# These attributes are set by Chains during processing:
:config_content, :yaml_processor_result, :workflow_rules_result, :pipeline_seed
) do
@@ -88,7 +88,14 @@ module Gitlab
@metrics ||= ::Gitlab::Ci::Pipeline::Metrics
end
+ def logger
+ self[:logger] ||= ::Gitlab::Ci::Pipeline::Logger.new(project: project)
+ end
+
def observe_step_duration(step_class, duration)
+ step = step_class.name.underscore.parameterize(separator: '_')
+ logger.observe("pipeline_step_#{step}_duration_s", duration)
+
if Feature.enabled?(:ci_pipeline_creation_step_duration_tracking, type: :ops, default_enabled: :yaml)
metrics.pipeline_creation_step_duration_histogram
.observe({ step: step_class.name }, duration.seconds)
@@ -96,11 +103,15 @@ module Gitlab
end
def observe_creation_duration(duration)
+ logger.observe(:pipeline_creation_duration_s, duration)
+
metrics.pipeline_creation_duration_histogram
.observe({}, duration.seconds)
end
def observe_pipeline_size(pipeline)
+ logger.observe(:pipeline_size_count, pipeline.total_size)
+
metrics.pipeline_size_histogram
.observe({ source: pipeline.source.to_s }, pipeline.total_size)
end
diff --git a/lib/gitlab/ci/pipeline/chain/config/process.rb b/lib/gitlab/ci/pipeline/chain/config/process.rb
index f3c937ddd28..64d1b001e3c 100644
--- a/lib/gitlab/ci/pipeline/chain/config/process.rb
+++ b/lib/gitlab/ci/pipeline/chain/config/process.rb
@@ -11,16 +11,21 @@ module Gitlab
def perform!
raise ArgumentError, 'missing config content' unless @command.config_content
- result = ::Gitlab::Ci::YamlProcessor.new(
- @command.config_content, {
- project: project,
- pipeline: @pipeline,
- sha: @pipeline.sha,
- source: @pipeline.source,
- user: current_user,
- parent_pipeline: parent_pipeline
- }
- ).execute
+ result = logger.instrument(:pipeline_config_process) do
+ processor = ::Gitlab::Ci::YamlProcessor.new(
+ @command.config_content, {
+ project: project,
+ pipeline: @pipeline,
+ sha: @pipeline.sha,
+ source: @pipeline.source,
+ user: current_user,
+ parent_pipeline: parent_pipeline,
+ logger: logger
+ }
+ )
+
+ processor.execute
+ end
add_warnings_to_pipeline(result.warnings)
diff --git a/lib/gitlab/ci/pipeline/chain/create.rb b/lib/gitlab/ci/pipeline/chain/create.rb
index 81ef3bb074d..9dba557eef6 100644
--- a/lib/gitlab/ci/pipeline/chain/create.rb
+++ b/lib/gitlab/ci/pipeline/chain/create.rb
@@ -8,8 +8,10 @@ module Gitlab
include Chain::Helpers
def perform!
- BulkInsertableAssociations.with_bulk_insert do
- pipeline.save!
+ logger.instrument(:pipeline_save) do
+ BulkInsertableAssociations.with_bulk_insert do
+ pipeline.save!
+ end
end
rescue ActiveRecord::RecordInvalid => e
error("Failed to persist the pipeline: #{e}")
diff --git a/lib/gitlab/ci/pipeline/chain/seed.rb b/lib/gitlab/ci/pipeline/chain/seed.rb
index ef7447fa83d..356eeb76908 100644
--- a/lib/gitlab/ci/pipeline/chain/seed.rb
+++ b/lib/gitlab/ci/pipeline/chain/seed.rb
@@ -13,8 +13,10 @@ module Gitlab
raise ArgumentError, 'missing workflow rules result' unless @command.workflow_rules_result
# Allocate next IID. This operation must be outside of transactions of pipeline creations.
- pipeline.ensure_project_iid!
- pipeline.ensure_ci_ref!
+ logger.instrument(:pipeline_allocate_seed_attributes) do
+ pipeline.ensure_project_iid!
+ pipeline.ensure_ci_ref!
+ end
# Protect the pipeline. This is assigned in Populate instead of
# Build to prevent erroring out on ambiguous refs.
@@ -23,8 +25,12 @@ module Gitlab
##
# Gather all runtime build/stage errors
#
- if pipeline_seed.errors
- return error(pipeline_seed.errors.join("\n"), config_error: true)
+ seed_errors = logger.instrument(:pipeline_seed_evaluation) do
+ pipeline_seed.errors
+ end
+
+ if seed_errors
+ return error(seed_errors.join("\n"), config_error: true)
end
@command.pipeline_seed = pipeline_seed
@@ -38,8 +44,11 @@ module Gitlab
def pipeline_seed
strong_memoize(:pipeline_seed) do
- stages_attributes = @command.yaml_processor_result.stages_attributes
- Gitlab::Ci::Pipeline::Seed::Pipeline.new(context, stages_attributes)
+ logger.instrument(:pipeline_seed_initialization) do
+ stages_attributes = @command.yaml_processor_result.stages_attributes
+
+ Gitlab::Ci::Pipeline::Seed::Pipeline.new(context, stages_attributes)
+ end
end
end
@@ -48,9 +57,11 @@ module Gitlab
end
def root_variables
- ::Gitlab::Ci::Variables::Helpers.merge_variables(
- @command.yaml_processor_result.root_variables, @command.workflow_rules_result.variables
- )
+ logger.instrument(:pipeline_seed_merge_variables) do
+ ::Gitlab::Ci::Variables::Helpers.merge_variables(
+ @command.yaml_processor_result.root_variables, @command.workflow_rules_result.variables
+ )
+ end
end
end
end
diff --git a/lib/gitlab/ci/pipeline/chain/sequence.rb b/lib/gitlab/ci/pipeline/chain/sequence.rb
index 845eb6c7a42..de147914850 100644
--- a/lib/gitlab/ci/pipeline/chain/sequence.rb
+++ b/lib/gitlab/ci/pipeline/chain/sequence.rb
@@ -9,30 +9,36 @@ module Gitlab
@pipeline = pipeline
@command = command
@sequence = sequence
- @start = Time.now
+ @start = current_monotonic_time
end
def build!
@sequence.each do |step_class|
- step_start = ::Gitlab::Metrics::System.monotonic_time
+ step_start = current_monotonic_time
step = step_class.new(@pipeline, @command)
step.perform!
@command.observe_step_duration(
step_class,
- ::Gitlab::Metrics::System.monotonic_time - step_start
+ current_monotonic_time - step_start
)
break if step.break?
end
- @command.observe_creation_duration(Time.now - @start)
+ @command.observe_creation_duration(current_monotonic_time - @start)
@command.observe_pipeline_size(@pipeline)
@command.observe_jobs_count_in_alive_pipelines
@pipeline
end
+
+ private
+
+ def current_monotonic_time
+ ::Gitlab::Metrics::System.monotonic_time
+ end
end
end
end
diff --git a/lib/gitlab/ci/pipeline/logger.rb b/lib/gitlab/ci/pipeline/logger.rb
new file mode 100644
index 00000000000..a3e7dda9be5
--- /dev/null
+++ b/lib/gitlab/ci/pipeline/logger.rb
@@ -0,0 +1,102 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Ci
+ module Pipeline
+ class Logger
+ include ::Gitlab::Utils::StrongMemoize
+
+ def self.current_monotonic_time
+ ::Gitlab::Metrics::System.monotonic_time
+ end
+
+ def initialize(project:, destination: Gitlab::AppJsonLogger)
+ @started_at = current_monotonic_time
+ @project = project
+ @destination = destination
+ @log_conditions = []
+
+ yield(self) if block_given?
+ end
+
+ def log_when(&block)
+ log_conditions.push(block)
+ end
+
+ def instrument(operation)
+ return yield unless enabled?
+
+ raise ArgumentError, 'block not given' unless block_given?
+
+ op_started_at = current_monotonic_time
+
+ result = yield
+
+ observe("#{operation}_duration_s", current_monotonic_time - op_started_at)
+
+ result
+ end
+
+ def observe(operation, value)
+ return unless enabled?
+
+ observations[operation.to_s].push(value)
+ end
+
+ def commit(pipeline:, caller:)
+ return unless log?
+
+ attributes = {
+ caller: caller,
+ project_id: project.id,
+ pipeline_id: pipeline.id,
+ persisted: pipeline.persisted?,
+ source: pipeline.source,
+ duration_s: age
+ }.stringify_keys.merge(observations_hash)
+
+ destination.info(attributes)
+ end
+
+ def observations_hash
+ observations.transform_values do |values|
+ next if values.empty?
+
+ {
+ 'count' => values.size,
+ 'min' => values.min,
+ 'max' => values.max,
+ 'avg' => values.sum / values.size
+ }
+ end.compact
+ end
+
+ private
+
+ attr_reader :project, :destination, :started_at, :log_conditions
+ delegate :current_monotonic_time, to: :class
+
+ def age
+ current_monotonic_time - started_at
+ end
+
+ def log?
+ return false unless enabled?
+ return true if log_conditions.empty?
+
+ log_conditions.any? { |cond| cond.call(observations) }
+ end
+
+ def enabled?
+ strong_memoize(:enabled) do
+ ::Feature.enabled?(:ci_pipeline_creation_logger, project, type: :ops, default_enabled: :yaml)
+ end
+ end
+
+ def observations
+ @observations ||= Hash.new { |hash, key| hash[key] = [] }
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/ci/yaml_processor.rb b/lib/gitlab/ci/yaml_processor.rb
index 1aa3dbc5e47..296b0cfded2 100644
--- a/lib/gitlab/ci/yaml_processor.rb
+++ b/lib/gitlab/ci/yaml_processor.rb
@@ -29,10 +29,8 @@ module Gitlab
run_logical_validations!
Result.new(ci_config: @ci_config, warnings: @ci_config&.warnings)
-
rescue Gitlab::Ci::Config::ConfigError => e
Result.new(ci_config: @ci_config, errors: [e.message], warnings: @ci_config&.warnings)
-
rescue ValidationError => e
Result.new(ci_config: @ci_config, errors: [e.message], warnings: @ci_config&.warnings)
end
diff --git a/lib/gitlab/quick_actions/issue_actions.rb b/lib/gitlab/quick_actions/issue_actions.rb
index a55ead519e2..5afcb386152 100644
--- a/lib/gitlab/quick_actions/issue_actions.rb
+++ b/lib/gitlab/quick_actions/issue_actions.rb
@@ -206,7 +206,7 @@ module Gitlab
end
desc _('Add Zoom meeting')
- explanation _('Adds a Zoom meeting')
+ explanation _('Adds a Zoom meeting.')
params '<Zoom URL>'
types Issue
condition do
@@ -223,7 +223,7 @@ module Gitlab
end
desc _('Remove Zoom meeting')
- explanation _('Remove Zoom meeting')
+ explanation _('Remove Zoom meeting.')
execution_message _('Zoom meeting removed')
types Issue
condition do
@@ -236,7 +236,7 @@ module Gitlab
end
desc _('Add email participant(s)')
- explanation _('Adds email participant(s)')
+ explanation _('Adds email participant(s).')
params 'email1@example.com email2@example.com (up to 6 emails)'
types Issue
condition do
@@ -285,6 +285,46 @@ module Gitlab
end
end
+ desc _('Add customer relation contacts')
+ explanation _('Add customer relation contact(s).')
+ params 'contact@example.com person@example.org'
+ types Issue
+ condition do
+ current_user.can?(:set_issue_crm_contacts, quick_action_target)
+ end
+ command :add_contacts do |contact_emails|
+ result = ::Issues::SetCrmContactsService
+ .new(project: project, current_user: current_user, params: { add_emails: contact_emails.split(' ') })
+ .execute(quick_action_target)
+
+ @execution_message[:add_contacts] =
+ if result.success?
+ _('One or more contacts were successfully added.')
+ else
+ result.message
+ end
+ end
+
+ desc _('Remove customer relation contacts')
+ explanation _('Remove customer relation contact(s).')
+ params 'contact@example.com person@example.org'
+ types Issue
+ condition do
+ current_user.can?(:set_issue_crm_contacts, quick_action_target)
+ end
+ command :remove_contacts do |contact_emails|
+ result = ::Issues::SetCrmContactsService
+ .new(project: project, current_user: current_user, params: { remove_emails: contact_emails.split(' ') })
+ .execute(quick_action_target)
+
+ @execution_message[:remove_contacts] =
+ if result.success?
+ _('One or more contacts were successfully removed.')
+ else
+ result.message
+ end
+ end
+
private
def zoom_link_service
diff --git a/lib/gitlab/usage_data_counters/known_events/quickactions.yml b/lib/gitlab/usage_data_counters/known_events/quickactions.yml
index dff2c4f8d03..d831ac02dd1 100644
--- a/lib/gitlab/usage_data_counters/known_events/quickactions.yml
+++ b/lib/gitlab/usage_data_counters/known_events/quickactions.yml
@@ -279,3 +279,11 @@
category: quickactions
redis_slot: quickactions
aggregation: weekly
+- name: i_quickactions_add_contacts
+ category: quickactions
+ redis_slot: quickactions
+ aggregation: weekly
+- name: i_quickactions_remove_contacts
+ category: quickactions
+ redis_slot: quickactions
+ aggregation: weekly
diff --git a/lib/tasks/gettext.rake b/lib/tasks/gettext.rake
index e03c78d5a40..17f9414ad52 100644
--- a/lib/tasks/gettext.rake
+++ b/lib/tasks/gettext.rake
@@ -58,6 +58,7 @@ namespace :gettext do
task lint: :environment do
require 'simple_po_parser'
require 'gitlab/utils'
+ require 'parallel'
FastGettext.silence_errors
files = Dir.glob(Rails.root.join('locale/*/gitlab.po'))
@@ -70,7 +71,9 @@ namespace :gettext do
linters.unshift(Gitlab::I18n::PoLinter.new(po_path: pot_file_path))
- failed_linters = linters.select { |linter| linter.errors.any? }
+ failed_linters = Parallel
+ .map(linters, progress: 'Linting po files') { |linter| linter if linter.errors.any? }
+ .compact
if failed_linters.empty?
puts 'All PO files are valid.'
@@ -129,14 +132,6 @@ namespace :gettext do
)
end
- # Disallow HTML from translatable strings
- # See: https://docs.gitlab.com/ee/development/i18n/externalization.html#html
- def html_todolist
- return @html_todolist if defined?(@html_todolist)
-
- @html_todolist = YAML.safe_load(File.read(Rails.root.join('lib/gitlab/i18n/html_todo.yml')))
- end
-
def report_errors_for_file(file, errors_for_file)
puts "Errors in `#{file}`:"
diff --git a/locale/gitlab.pot b/locale/gitlab.pot
index e271417bb10..2c73e384755 100644
--- a/locale/gitlab.pot
+++ b/locale/gitlab.pot
@@ -2019,6 +2019,12 @@ msgstr ""
msgid "Add commit messages as comments to Pivotal Tracker stories. %{docs_link}"
msgstr ""
+msgid "Add customer relation contact(s)."
+msgstr ""
+
+msgid "Add customer relation contacts"
+msgstr ""
+
msgid "Add deploy freeze"
msgstr ""
@@ -2220,7 +2226,7 @@ msgstr ""
msgid "Adds %{labels} %{label_text}."
msgstr ""
-msgid "Adds a Zoom meeting"
+msgid "Adds a Zoom meeting."
msgstr ""
msgid "Adds a to do."
@@ -2229,7 +2235,7 @@ msgstr ""
msgid "Adds an issue to an epic."
msgstr ""
-msgid "Adds email participant(s)"
+msgid "Adds email participant(s)."
msgstr ""
msgid "Adjust how frequently the GitLab UI polls for updates."
@@ -24229,6 +24235,12 @@ msgid_plural "%d more items"
msgstr[0] ""
msgstr[1] ""
+msgid "One or more contacts were successfully added."
+msgstr ""
+
+msgid "One or more contacts were successfully removed."
+msgstr ""
+
msgid "One or more groups that you don't have access to."
msgstr ""
@@ -28753,6 +28765,9 @@ msgstr ""
msgid "Remove Zoom meeting"
msgstr ""
+msgid "Remove Zoom meeting."
+msgstr ""
+
msgid "Remove access"
msgstr ""
@@ -28792,6 +28807,12 @@ msgstr ""
msgid "Remove child epic from an epic"
msgstr ""
+msgid "Remove customer relation contact(s)."
+msgstr ""
+
+msgid "Remove customer relation contacts"
+msgstr ""
+
msgid "Remove deploy key"
msgstr ""
@@ -39569,6 +39590,9 @@ msgstr ""
msgid "You can only %{action} files when you are on a branch"
msgstr ""
+msgid "You can only add up to %{max_contacts} contacts at one time"
+msgstr ""
+
msgid "You can only edit files when you are on a branch"
msgstr ""
@@ -39611,6 +39635,9 @@ msgstr ""
msgid "You cannot access the raw file. Please wait a minute."
msgstr ""
+msgid "You cannot combine replace_ids with add_ids or remove_ids"
+msgstr ""
+
msgid "You cannot impersonate a blocked user"
msgstr ""
@@ -39749,6 +39776,9 @@ msgstr ""
msgid "You have insufficient permissions to remove this HTTP integration"
msgstr ""
+msgid "You have insufficient permissions to set customer relations contacts for this issue"
+msgstr ""
+
msgid "You have insufficient permissions to update an on-call schedule for this project"
msgstr ""
diff --git a/package.json b/package.json
index 2f76133a40d..c76c46b4470 100644
--- a/package.json
+++ b/package.json
@@ -63,14 +63,14 @@
"@rails/ujs": "6.1.4-1",
"@sentry/browser": "5.30.0",
"@sourcegraph/code-host-integration": "0.0.60",
- "@tiptap/core": "^2.0.0-beta.138",
+ "@tiptap/core": "^2.0.0-beta.140",
"@tiptap/extension-blockquote": "^2.0.0-beta.24",
"@tiptap/extension-bold": "^2.0.0-beta.24",
"@tiptap/extension-bullet-list": "^2.0.0-beta.23",
"@tiptap/extension-code": "^2.0.0-beta.25",
- "@tiptap/extension-code-block-lowlight": "2.0.0-beta.55",
+ "@tiptap/extension-code-block-lowlight": "2.0.0-beta.57",
"@tiptap/extension-document": "^2.0.0-beta.15",
- "@tiptap/extension-dropcursor": "^2.0.0-beta.24",
+ "@tiptap/extension-dropcursor": "^2.0.0-beta.25",
"@tiptap/extension-gapcursor": "^2.0.0-beta.33",
"@tiptap/extension-hard-break": "^2.0.0-beta.30",
"@tiptap/extension-heading": "^2.0.0-beta.23",
@@ -85,14 +85,14 @@
"@tiptap/extension-strike": "^2.0.0-beta.26",
"@tiptap/extension-subscript": "^2.0.0-beta.9",
"@tiptap/extension-superscript": "^2.0.0-beta.9",
- "@tiptap/extension-table": "^2.0.0-beta.42",
+ "@tiptap/extension-table": "^2.0.0-beta.43",
"@tiptap/extension-table-cell": "^2.0.0-beta.20",
"@tiptap/extension-table-header": "^2.0.0-beta.22",
"@tiptap/extension-table-row": "^2.0.0-beta.19",
"@tiptap/extension-task-item": "^2.0.0-beta.28",
"@tiptap/extension-task-list": "^2.0.0-beta.23",
"@tiptap/extension-text": "^2.0.0-beta.15",
- "@tiptap/vue-2": "^2.0.0-beta.68",
+ "@tiptap/vue-2": "^2.0.0-beta.69",
"@toast-ui/editor": "^2.5.2",
"@toast-ui/vue-editor": "^2.5.2",
"apollo-cache-inmemory": "^1.6.6",
@@ -165,7 +165,7 @@
"prosemirror-model": "^1.15.0",
"prosemirror-state": "^1.3.4",
"prosemirror-tables": "^1.1.1",
- "prosemirror-view": "^1.23.1",
+ "prosemirror-view": "^1.23.2",
"raphael": "^2.2.7",
"raw-loader": "^4.0.2",
"scrollparent": "^2.0.1",
diff --git a/spec/factories/customer_relations/contacts.rb b/spec/factories/customer_relations/contacts.rb
index 437f8feea48..821c45d7514 100644
--- a/spec/factories/customer_relations/contacts.rb
+++ b/spec/factories/customer_relations/contacts.rb
@@ -6,6 +6,7 @@ FactoryBot.define do
first_name { generate(:name) }
last_name { generate(:name) }
+ email { generate(:email) }
trait :with_organization do
organization
diff --git a/spec/features/admin/admin_projects_spec.rb b/spec/features/admin/admin_projects_spec.rb
index a50ef34d327..76e9b999638 100644
--- a/spec/features/admin/admin_projects_spec.rb
+++ b/spec/features/admin/admin_projects_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe "Admin::Projects" do
include Spec::Support::Helpers::Features::MembersHelpers
include Spec::Support::Helpers::Features::InviteMembersModalHelper
include Select2Helper
+ include Spec::Support::Helpers::ModalHelpers
let(:user) { create :user }
let(:project) { create(:project) }
@@ -145,7 +146,7 @@ RSpec.describe "Admin::Projects" do
click_button 'Leave'
end
- page.within('[role="dialog"]') do
+ within_modal do
click_button('Leave')
end
diff --git a/spec/features/admin/admin_runners_spec.rb b/spec/features/admin/admin_runners_spec.rb
index 7e2751daefa..091a13c5aa2 100644
--- a/spec/features/admin/admin_runners_spec.rb
+++ b/spec/features/admin/admin_runners_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe "Admin Runners" do
include StubENV
+ include Spec::Support::Helpers::ModalHelpers
before do
stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
@@ -353,7 +354,7 @@ RSpec.describe "Admin Runners" do
end
it 'dismisses runner installation modal' do
- page.within('[role="dialog"]') do
+ within_modal do
click_button('Close', match: :first)
end
diff --git a/spec/features/admin/users/user_spec.rb b/spec/features/admin/users/user_spec.rb
index 73477fb93dd..ae940fecabe 100644
--- a/spec/features/admin/users/user_spec.rb
+++ b/spec/features/admin/users/user_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe 'Admin::Users::User' do
include Spec::Support::Helpers::Features::AdminUsersHelpers
+ include Spec::Support::Helpers::ModalHelpers
let_it_be(:user) { create(:omniauth_user, provider: 'twitter', extern_uid: '123456') }
let_it_be(:current_user) { create(:admin) }
@@ -113,7 +114,7 @@ RSpec.describe 'Admin::Users::User' do
click_action_in_user_dropdown(user_sole_owner_of_group.id, 'Delete user and contributions')
- page.within('[role="dialog"]') do
+ within_modal do
fill_in('username', with: user_sole_owner_of_group.name)
click_button('Delete user and contributions')
end
@@ -426,7 +427,7 @@ RSpec.describe 'Admin::Users::User' do
click_button 'Confirm user'
- page.within('[role="dialog"]') do
+ within_modal do
expect(page).to have_content("Confirm user #{unconfirmed_user.name}?")
expect(page).to have_content('This user has an unconfirmed email address. You may force a confirmation.')
diff --git a/spec/features/groups/members/manage_groups_spec.rb b/spec/features/groups/members/manage_groups_spec.rb
index d822a5ea871..2beecda23b5 100644
--- a/spec/features/groups/members/manage_groups_spec.rb
+++ b/spec/features/groups/members/manage_groups_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe 'Groups > Members > Manage groups', :js do
include Select2Helper
include Spec::Support::Helpers::Features::MembersHelpers
include Spec::Support::Helpers::Features::InviteMembersModalHelper
+ include Spec::Support::Helpers::ModalHelpers
let_it_be(:user) { create(:user) }
@@ -92,7 +93,7 @@ RSpec.describe 'Groups > Members > Manage groups', :js do
click_button 'Remove group'
end
- page.within('[role="dialog"]') do
+ within_modal do
click_button('Remove group')
end
diff --git a/spec/features/groups/members/manage_members_spec.rb b/spec/features/groups/members/manage_members_spec.rb
index 38e829bafcc..1cae7cdeb16 100644
--- a/spec/features/groups/members/manage_members_spec.rb
+++ b/spec/features/groups/members/manage_members_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe 'Groups > Members > Manage members' do
include Spec::Support::Helpers::Features::MembersHelpers
include Spec::Support::Helpers::Features::InviteMembersModalHelper
+ include Spec::Support::Helpers::ModalHelpers
let_it_be(:user1) { create(:user, name: 'John Doe') }
let_it_be(:user2) { create(:user, name: 'Mary Jane') }
@@ -170,7 +171,7 @@ RSpec.describe 'Groups > Members > Manage members' do
click_button 'Remove member'
end
- page.within('[role="dialog"]') do
+ within_modal do
expect(page).to have_unchecked_field 'Also unassign this user from related issues and merge requests'
click_button('Remove member')
end
diff --git a/spec/features/merge_request/user_posts_diff_notes_spec.rb b/spec/features/merge_request/user_posts_diff_notes_spec.rb
index 79e46e69157..9e314e18563 100644
--- a/spec/features/merge_request/user_posts_diff_notes_spec.rb
+++ b/spec/features/merge_request/user_posts_diff_notes_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe 'Merge request > User posts diff notes', :js do
include MergeRequestDiffHelpers
+ include Spec::Support::Helpers::ModalHelpers
let(:merge_request) { create(:merge_request) }
let(:project) { merge_request.source_project }
@@ -238,10 +239,8 @@ RSpec.describe 'Merge request > User posts diff notes', :js do
def should_allow_dismissing_a_comment(line_holder, diff_side = nil)
write_comment_on_line(line_holder, diff_side)
- find('.js-close-discussion-note-form').click
-
- page.within('.modal') do
- click_button 'OK'
+ accept_gl_confirm(s_('Notes|Are you sure you want to cancel creating this comment?')) do
+ find('.js-close-discussion-note-form').click
end
assert_comment_dismissal(line_holder)
diff --git a/spec/features/merge_request/user_squashes_merge_request_spec.rb b/spec/features/merge_request/user_squashes_merge_request_spec.rb
index 15f59c0d7bc..2a48657ac4f 100644
--- a/spec/features/merge_request/user_squashes_merge_request_spec.rb
+++ b/spec/features/merge_request/user_squashes_merge_request_spec.rb
@@ -22,7 +22,7 @@ RSpec.describe 'User squashes a merge request', :js do
committer_name: user.name)
merge_commit = an_object_having_attributes(sha: a_string_matching(/\h{40}/),
- message: a_string_starting_with("Merge branch 'csv' into 'master'"),
+ message: a_string_starting_with("Merge branch '#{source_branch}' into 'master'"),
author_name: user.name,
committer_name: user.name)
@@ -57,34 +57,34 @@ RSpec.describe 'User squashes a merge request', :js do
end
context 'when the MR has only one commit' do
- let(:source_branch) { 'master' }
- let(:target_branch) { 'branch-merged' }
- let(:protected_source_branch) { true }
+ let(:source_branch) { 'feature' }
+ let(:target_branch) { 'master' }
let(:source_sha) { project.commit(source_branch).sha }
let(:target_sha) { project.commit(target_branch).sha }
before do
- merge_request = create(:merge_request, source_project: project, target_project: project, source_branch: source_branch, target_branch: target_branch, squash: true)
-
- visit project_merge_request_path(project, merge_request)
+ visit project_new_merge_request_path(project, merge_request: { target_branch: target_branch, source_branch: source_branch })
+ check 'merge_request[squash]'
+ click_on 'Create merge request'
+ wait_for_requests
end
- it 'accepts the merge request without issuing a squash request', :sidekiq_inline do
- expect_next_instance_of(Gitlab::GitalyClient::OperationService) do |instance|
- expect(instance).not_to receive(:user_squash)
+ context 'when squash message differs from existing commit message' do
+ before do
+ accept_mr
end
- expect(project.repository.ancestor?(source_branch, target_branch)).to be_falsey
- expect(page).not_to have_field('squash')
-
- accept_mr
-
- expect(page).to have_content('Merged')
+ include_examples 'squash'
+ end
- latest_target_commits = project.repository.commits_between(source_sha, target_sha).map(&:raw)
+ context 'when squash message is the same as existing commit message' do
+ before do
+ click_button("Modify commit messages")
+ fill_in('Squash commit message', with: project.commit(source_branch).safe_message)
+ accept_mr
+ end
- expect(latest_target_commits.count).to eq(1)
- expect(project.repository.ancestor?(source_branch, target_branch)).to be_truthy
+ include_examples 'no squash'
end
end
diff --git a/spec/features/profiles/two_factor_auths_spec.rb b/spec/features/profiles/two_factor_auths_spec.rb
index a9256a73d7b..9a58950b8f3 100644
--- a/spec/features/profiles/two_factor_auths_spec.rb
+++ b/spec/features/profiles/two_factor_auths_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe 'Two factor auths' do
+ include Spec::Support::Helpers::ModalHelpers
+
context 'when signed in' do
before do
sign_in(user)
@@ -70,7 +72,7 @@ RSpec.describe 'Two factor auths' do
click_button 'Disable two-factor authentication'
- page.within('[role="dialog"]') do
+ within_modal do
click_button 'Disable'
end
@@ -80,7 +82,7 @@ RSpec.describe 'Two factor auths' do
click_button 'Disable two-factor authentication'
- page.within('[role="dialog"]') do
+ within_modal do
click_button 'Disable'
end
@@ -112,7 +114,7 @@ RSpec.describe 'Two factor auths' do
click_button 'Disable two-factor authentication'
- page.within('[role="dialog"]') do
+ within_modal do
click_button 'Disable'
end
diff --git a/spec/features/projects/members/groups_with_access_list_spec.rb b/spec/features/projects/members/groups_with_access_list_spec.rb
index eb32570448b..6adc3503492 100644
--- a/spec/features/projects/members/groups_with_access_list_spec.rb
+++ b/spec/features/projects/members/groups_with_access_list_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe 'Projects > Members > Groups with access list', :js do
include Spec::Support::Helpers::Features::MembersHelpers
+ include Spec::Support::Helpers::ModalHelpers
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group, :public) }
@@ -70,7 +71,7 @@ RSpec.describe 'Projects > Members > Groups with access list', :js do
click_button 'Remove group'
end
- page.within('[role="dialog"]') do
+ within_modal do
click_button('Remove group')
end
diff --git a/spec/features/projects/members/list_spec.rb b/spec/features/projects/members/list_spec.rb
index 25598146604..d0ef7aa3964 100644
--- a/spec/features/projects/members/list_spec.rb
+++ b/spec/features/projects/members/list_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe 'Project members list', :js do
include Spec::Support::Helpers::Features::MembersHelpers
include Spec::Support::Helpers::Features::InviteMembersModalHelper
+ include Spec::Support::Helpers::ModalHelpers
let_it_be(:user1) { create(:user, name: 'John Doe') }
let_it_be(:user2) { create(:user, name: 'Mary Jane') }
@@ -93,7 +94,7 @@ RSpec.describe 'Project members list', :js do
click_button 'Remove member'
end
- page.within('[role="dialog"]') do
+ within_modal do
expect(page).to have_unchecked_field 'Also unassign this user from related issues and merge requests'
click_button('Remove member')
end
diff --git a/spec/features/projects/settings/user_manages_project_members_spec.rb b/spec/features/projects/settings/user_manages_project_members_spec.rb
index 02a634a0fcc..31dc939e6b8 100644
--- a/spec/features/projects/settings/user_manages_project_members_spec.rb
+++ b/spec/features/projects/settings/user_manages_project_members_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe 'Projects > Settings > User manages project members' do
include Spec::Support::Helpers::Features::MembersHelpers
include Select2Helper
+ include Spec::Support::Helpers::ModalHelpers
let(:group) { create(:group, name: 'OpenSource') }
let(:project) { create(:project) }
@@ -26,7 +27,7 @@ RSpec.describe 'Projects > Settings > User manages project members' do
click_button 'Remove member'
end
- page.within('[role="dialog"]') do
+ within_modal do
expect(page).to have_unchecked_field 'Also unassign this user from related issues and merge requests'
click_button('Remove member')
end
diff --git a/spec/frontend/boards/mock_data.js b/spec/frontend/boards/mock_data.js
index 8fcad99f8a7..25764269677 100644
--- a/spec/frontend/boards/mock_data.js
+++ b/spec/frontend/boards/mock_data.js
@@ -7,7 +7,6 @@ import AuthorToken from '~/vue_shared/components/filtered_search_bar/tokens/auth
import EmojiToken from '~/vue_shared/components/filtered_search_bar/tokens/emoji_token.vue';
import LabelToken from '~/vue_shared/components/filtered_search_bar/tokens/label_token.vue';
import MilestoneToken from '~/vue_shared/components/filtered_search_bar/tokens/milestone_token.vue';
-import WeightToken from '~/vue_shared/components/filtered_search_bar/tokens/weight_token.vue';
export const boardObj = {
id: 1,
@@ -21,7 +20,6 @@ export const listObj = {
position: 0,
title: 'Test',
list_type: 'label',
- weight: 3,
label: {
id: 5000,
title: 'Test',
@@ -154,7 +152,6 @@ export const rawIssue = {
iid: '27',
dueDate: null,
timeEstimate: 0,
- weight: null,
confidential: false,
referencePath: 'gitlab-org/test-subgroup/gitlab-test#27',
path: '/gitlab-org/test-subgroup/gitlab-test/-/issues/27',
@@ -184,7 +181,6 @@ export const mockIssue = {
title: 'Issue 1',
dueDate: null,
timeEstimate: 0,
- weight: null,
confidential: false,
referencePath: `${mockIssueFullPath}#27`,
path: `/${mockIssueFullPath}/-/issues/27`,
@@ -216,7 +212,6 @@ export const mockIssue2 = {
title: 'Issue 2',
dueDate: null,
timeEstimate: 0,
- weight: null,
confidential: false,
referencePath: 'gitlab-org/test-subgroup/gitlab-test#28',
path: '/gitlab-org/test-subgroup/gitlab-test/-/issues/28',
@@ -234,7 +229,6 @@ export const mockIssue3 = {
referencePath: '#29',
dueDate: null,
timeEstimate: 0,
- weight: null,
confidential: false,
path: '/gitlab-org/gitlab-test/-/issues/28',
assignees,
@@ -249,7 +243,6 @@ export const mockIssue4 = {
referencePath: '#30',
dueDate: null,
timeEstimate: 0,
- weight: null,
confidential: false,
path: '/gitlab-org/gitlab-test/-/issues/28',
assignees,
@@ -622,13 +615,6 @@ export const mockTokens = (fetchLabels, fetchAuthors, fetchMilestones, hasEmoji)
{ icon: 'issue-type-incident', value: 'INCIDENT', title: 'Incident' },
],
},
- {
- icon: 'weight',
- title: __('Weight'),
- type: 'weight',
- token: WeightToken,
- unique: true,
- },
];
export const mockLabel1 = {
diff --git a/spec/frontend/issues_list/components/issue_card_time_info_spec.js b/spec/frontend/issues_list/components/issue_card_time_info_spec.js
index d195c159cbb..7c5faeb8dc1 100644
--- a/spec/frontend/issues_list/components/issue_card_time_info_spec.js
+++ b/spec/frontend/issues_list/components/issue_card_time_info_spec.js
@@ -3,7 +3,7 @@ import { shallowMount } from '@vue/test-utils';
import { useFakeDate } from 'helpers/fake_date';
import IssueCardTimeInfo from '~/issues_list/components/issue_card_time_info.vue';
-describe('IssuesListApp component', () => {
+describe('CE IssueCardTimeInfo component', () => {
useFakeDate(2020, 11, 11);
let wrapper;
diff --git a/spec/frontend/issues_list/components/issues_list_app_spec.js b/spec/frontend/issues_list/components/issues_list_app_spec.js
index c398acf081b..8c068fa1f8c 100644
--- a/spec/frontend/issues_list/components/issues_list_app_spec.js
+++ b/spec/frontend/issues_list/components/issues_list_app_spec.js
@@ -1,8 +1,8 @@
import { GlButton, GlEmptyState, GlLink } from '@gitlab/ui';
-import { createLocalVue, mount, shallowMount } from '@vue/test-utils';
+import { mount, shallowMount } from '@vue/test-utils';
import AxiosMockAdapter from 'axios-mock-adapter';
import { cloneDeep } from 'lodash';
-import { nextTick } from 'vue';
+import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import getIssuesQuery from 'ee_else_ce/issues_list/queries/get_issues.query.graphql';
import getIssuesCountsQuery from 'ee_else_ce/issues_list/queries/get_issues_counts.query.graphql';
@@ -34,14 +34,11 @@ import {
TOKEN_TYPE_ASSIGNEE,
TOKEN_TYPE_AUTHOR,
TOKEN_TYPE_CONFIDENTIAL,
- TOKEN_TYPE_EPIC,
- TOKEN_TYPE_ITERATION,
TOKEN_TYPE_LABEL,
TOKEN_TYPE_MILESTONE,
TOKEN_TYPE_MY_REACTION,
TOKEN_TYPE_RELEASE,
TOKEN_TYPE_TYPE,
- TOKEN_TYPE_WEIGHT,
urlSortParams,
} from '~/issues_list/constants';
import eventHub from '~/issues_list/eventhub';
@@ -55,12 +52,11 @@ jest.mock('~/lib/utils/scroll_utils', () => ({
scrollUp: jest.fn().mockName('scrollUpMock'),
}));
-describe('IssuesListApp component', () => {
+describe('CE IssuesListApp component', () => {
let axiosMock;
let wrapper;
- const localVue = createLocalVue();
- localVue.use(VueApollo);
+ Vue.use(VueApollo);
const defaultProvide = {
calendarPath: 'calendar/path',
@@ -71,6 +67,7 @@ describe('IssuesListApp component', () => {
hasAnyIssues: true,
hasAnyProjects: true,
hasBlockedIssuesFeature: true,
+ hasIssuableHealthStatusFeature: true,
hasIssueWeightsFeature: true,
hasIterationsFeature: true,
isProject: true,
@@ -113,7 +110,6 @@ describe('IssuesListApp component', () => {
const apolloProvider = createMockApollo(requestHandlers);
return mountFn(IssuesListApp, {
- localVue,
apolloProvider,
provide: {
...defaultProvide,
@@ -526,54 +522,6 @@ describe('IssuesListApp component', () => {
});
});
- describe('when iterations are not available', () => {
- beforeEach(() => {
- wrapper = mountComponent({
- provide: {
- projectIterationsPath: '',
- },
- });
- });
-
- it('does not render Iteration token', () => {
- expect(findIssuableList().props('searchTokens')).not.toMatchObject([
- { type: TOKEN_TYPE_ITERATION },
- ]);
- });
- });
-
- describe('when epics are not available', () => {
- beforeEach(() => {
- wrapper = mountComponent({
- provide: {
- groupPath: '',
- },
- });
- });
-
- it('does not render Epic token', () => {
- expect(findIssuableList().props('searchTokens')).not.toMatchObject([
- { type: TOKEN_TYPE_EPIC },
- ]);
- });
- });
-
- describe('when weights are not available', () => {
- beforeEach(() => {
- wrapper = mountComponent({
- provide: {
- groupPath: '',
- },
- });
- });
-
- it('does not render Weight token', () => {
- expect(findIssuableList().props('searchTokens')).not.toMatchObject([
- { type: TOKEN_TYPE_WEIGHT },
- ]);
- });
- });
-
describe('when all tokens are available', () => {
const originalGon = window.gon;
@@ -586,14 +534,11 @@ describe('IssuesListApp component', () => {
current_user_avatar_url: mockCurrentUser.avatar_url,
};
- wrapper = mountComponent({
- provide: {
- isSignedIn: true,
- projectIterationsPath: 'project/iterations/path',
- groupPath: 'group/path',
- hasIssueWeightsFeature: true,
- },
- });
+ wrapper = mountComponent({ provide: { isSignedIn: true } });
+ });
+
+ afterEach(() => {
+ window.gon = originalGon;
});
it('renders all tokens', () => {
@@ -610,9 +555,6 @@ describe('IssuesListApp component', () => {
{ type: TOKEN_TYPE_RELEASE },
{ type: TOKEN_TYPE_MY_REACTION },
{ type: TOKEN_TYPE_CONFIDENTIAL },
- { type: TOKEN_TYPE_ITERATION },
- { type: TOKEN_TYPE_EPIC },
- { type: TOKEN_TYPE_WEIGHT },
]);
});
});
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
index 016b6b2220b..a9cf085f84c 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
@@ -503,10 +503,10 @@ describe('ReadyToMerge', () => {
expect(findCheckboxElement().exists()).toBeFalsy();
});
- it('should not be rendered when there is only 1 commit', () => {
+ it('should be rendered when there is only 1 commit', () => {
createComponent({ mr: { commitsCount: 1, enableSquashBeforeMerge: true } });
- expect(findCheckboxElement().exists()).toBeFalsy();
+ expect(findCheckboxElement().exists()).toBe(true);
});
describe('squash options', () => {
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js b/spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js
index 238c5d16db5..e3e2ef5610d 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js
@@ -5,12 +5,9 @@ import { OPERATOR_IS_ONLY } from '~/vue_shared/components/filtered_search_bar/co
import AuthorToken from '~/vue_shared/components/filtered_search_bar/tokens/author_token.vue';
import BranchToken from '~/vue_shared/components/filtered_search_bar/tokens/branch_token.vue';
import EmojiToken from '~/vue_shared/components/filtered_search_bar/tokens/emoji_token.vue';
-import EpicToken from '~/vue_shared/components/filtered_search_bar/tokens/epic_token.vue';
-import IterationToken from '~/vue_shared/components/filtered_search_bar/tokens/iteration_token.vue';
import LabelToken from '~/vue_shared/components/filtered_search_bar/tokens/label_token.vue';
import MilestoneToken from '~/vue_shared/components/filtered_search_bar/tokens/milestone_token.vue';
import ReleaseToken from '~/vue_shared/components/filtered_search_bar/tokens/release_token.vue';
-import WeightToken from '~/vue_shared/components/filtered_search_bar/tokens/weight_token.vue';
export const mockAuthor1 = {
id: 1,
@@ -65,11 +62,6 @@ export const mockMilestones = [
mockEscapedMilestone,
];
-export const mockEpics = [
- { iid: 1, id: 1, title: 'Foo', group_full_path: 'gitlab-org' },
- { iid: 2, id: 2, title: 'Bar', group_full_path: 'gitlab-org/design' },
-];
-
export const mockEmoji1 = {
name: 'thumbsup',
};
@@ -102,27 +94,6 @@ export const mockAuthorToken = {
fetchAuthors: Api.projectUsers.bind(Api),
};
-export const mockIterationToken = {
- type: 'iteration',
- icon: 'iteration',
- title: 'Iteration',
- unique: true,
- token: IterationToken,
- fetchIterations: () => Promise.resolve(),
-};
-
-export const mockIterations = [
- {
- id: 1,
- title: 'Iteration 1',
- startDate: '2021-11-05',
- dueDate: '2021-11-10',
- iterationCadence: {
- title: 'Cadence 1',
- },
- },
-];
-
export const mockLabelToken = {
type: 'label_name',
icon: 'labels',
@@ -153,73 +124,6 @@ export const mockReleaseToken = {
fetchReleases: () => Promise.resolve(),
};
-export const mockEpicToken = {
- type: 'epic_iid',
- icon: 'clock',
- title: 'Epic',
- unique: true,
- symbol: '&',
- token: EpicToken,
- operators: OPERATOR_IS_ONLY,
- idProperty: 'iid',
- fullPath: 'gitlab-org',
-};
-
-export const mockEpicNode1 = {
- __typename: 'Epic',
- parent: null,
- id: 'gid://gitlab/Epic/40',
- iid: '2',
- title: 'Marketing epic',
- description: 'Mock epic description',
- state: 'opened',
- startDate: '2017-12-25',
- dueDate: '2018-02-15',
- webUrl: 'http://gdk.test:3000/groups/gitlab-org/marketing/-/epics/1',
- hasChildren: false,
- hasParent: false,
- confidential: false,
-};
-
-export const mockEpicNode2 = {
- __typename: 'Epic',
- parent: null,
- id: 'gid://gitlab/Epic/41',
- iid: '3',
- title: 'Another marketing',
- startDate: '2017-12-26',
- dueDate: '2018-03-10',
- state: 'opened',
- webUrl: 'http://gdk.test:3000/groups/gitlab-org/marketing/-/epics/2',
-};
-
-export const mockGroupEpicsQueryResponse = {
- data: {
- group: {
- id: 'gid://gitlab/Group/1',
- name: 'Gitlab Org',
- epics: {
- edges: [
- {
- node: {
- ...mockEpicNode1,
- },
- __typename: 'EpicEdge',
- },
- {
- node: {
- ...mockEpicNode2,
- },
- __typename: 'EpicEdge',
- },
- ],
- __typename: 'EpicConnection',
- },
- __typename: 'Group',
- },
- },
-};
-
export const mockReactionEmojiToken = {
type: 'my_reaction_emoji',
icon: 'thumb-up',
@@ -243,14 +147,6 @@ export const mockMembershipToken = {
],
};
-export const mockWeightToken = {
- type: 'weight',
- icon: 'weight',
- title: 'Weight',
- unique: true,
- token: WeightToken,
-};
-
export const mockMembershipTokenOptionsWithoutTitles = {
...mockMembershipToken,
options: [{ value: 'exclude' }, { value: 'only' }],
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/epic_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/epic_token_spec.js
deleted file mode 100644
index 6ee5d50d396..00000000000
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/epic_token_spec.js
+++ /dev/null
@@ -1,169 +0,0 @@
-import { GlFilteredSearchTokenSegment } from '@gitlab/ui';
-import { mount } from '@vue/test-utils';
-import MockAdapter from 'axios-mock-adapter';
-import Vue from 'vue';
-import VueApollo from 'vue-apollo';
-import createMockApollo from 'helpers/mock_apollo_helper';
-import waitForPromises from 'helpers/wait_for_promises';
-import createFlash from '~/flash';
-import axios from '~/lib/utils/axios_utils';
-
-import searchEpicsQuery from '~/vue_shared/components/filtered_search_bar/queries/search_epics.query.graphql';
-import EpicToken from '~/vue_shared/components/filtered_search_bar/tokens/epic_token.vue';
-import BaseToken from '~/vue_shared/components/filtered_search_bar/tokens/base_token.vue';
-
-import { mockEpicToken, mockEpics, mockGroupEpicsQueryResponse } from '../mock_data';
-
-jest.mock('~/flash');
-Vue.use(VueApollo);
-
-const defaultStubs = {
- Portal: true,
- GlFilteredSearchSuggestionList: {
- template: '<div></div>',
- methods: {
- getValue: () => '=',
- },
- },
-};
-
-describe('EpicToken', () => {
- let mock;
- let wrapper;
- let fakeApollo;
-
- const findBaseToken = () => wrapper.findComponent(BaseToken);
-
- function createComponent(
- options = {},
- epicsQueryHandler = jest.fn().mockResolvedValue(mockGroupEpicsQueryResponse),
- ) {
- fakeApollo = createMockApollo([[searchEpicsQuery, epicsQueryHandler]]);
- const {
- config = mockEpicToken,
- value = { data: '' },
- active = false,
- stubs = defaultStubs,
- } = options;
- return mount(EpicToken, {
- apolloProvider: fakeApollo,
- propsData: {
- config,
- value,
- active,
- },
- provide: {
- portalName: 'fake target',
- alignSuggestions: function fakeAlignSuggestions() {},
- suggestionsListClass: 'custom-class',
- },
- stubs,
- });
- }
-
- beforeEach(() => {
- mock = new MockAdapter(axios);
- wrapper = createComponent();
- });
-
- afterEach(() => {
- mock.restore();
- wrapper.destroy();
- });
-
- describe('computed', () => {
- beforeEach(async () => {
- wrapper = createComponent({
- data: {
- epics: mockEpics,
- },
- });
-
- await wrapper.vm.$nextTick();
- });
- });
-
- describe('methods', () => {
- describe('fetchEpicsBySearchTerm', () => {
- it('calls fetchEpics with provided searchTerm param', () => {
- jest.spyOn(wrapper.vm, 'fetchEpics');
-
- findBaseToken().vm.$emit('fetch-suggestions', 'foo');
-
- expect(wrapper.vm.fetchEpics).toHaveBeenCalledWith('foo');
- });
-
- it('sets response to `epics` when request is successful', async () => {
- jest.spyOn(wrapper.vm, 'fetchEpics').mockResolvedValue({
- data: mockEpics,
- });
-
- findBaseToken().vm.$emit('fetch-suggestions');
-
- await waitForPromises();
-
- expect(wrapper.vm.epics).toEqual(mockEpics);
- });
-
- it('calls `createFlash` with flash error message when request fails', async () => {
- jest.spyOn(wrapper.vm, 'fetchEpics').mockRejectedValue({});
-
- findBaseToken().vm.$emit('fetch-suggestions', 'foo');
-
- await waitForPromises();
-
- expect(createFlash).toHaveBeenCalledWith({
- message: 'There was a problem fetching epics.',
- });
- });
-
- it('sets `loading` to false when request completes', async () => {
- jest.spyOn(wrapper.vm, 'fetchEpics').mockRejectedValue({});
-
- findBaseToken().vm.$emit('fetch-suggestions', 'foo');
-
- await waitForPromises();
-
- expect(wrapper.vm.loading).toBe(false);
- });
- });
- });
-
- describe('template', () => {
- const getTokenValueEl = () => wrapper.findAllComponents(GlFilteredSearchTokenSegment).at(2);
-
- beforeEach(async () => {
- wrapper = createComponent({
- value: { data: `${mockEpics[0].title}::&${mockEpics[0].iid}` },
- data: { epics: mockEpics },
- });
-
- await wrapper.vm.$nextTick();
- });
-
- it('renders BaseToken component', () => {
- expect(findBaseToken().exists()).toBe(true);
- });
-
- it('renders token item when value is selected', () => {
- const tokenSegments = wrapper.findAll(GlFilteredSearchTokenSegment);
-
- expect(tokenSegments).toHaveLength(3);
- expect(tokenSegments.at(2).text()).toBe(`${mockEpics[0].title}::&${mockEpics[0].iid}`);
- });
-
- it.each`
- value | valueType | tokenValueString
- ${`${mockEpics[0].title}::&${mockEpics[0].iid}`} | ${'string'} | ${`${mockEpics[0].title}::&${mockEpics[0].iid}`}
- ${`${mockEpics[1].title}::&${mockEpics[1].iid}`} | ${'number'} | ${`${mockEpics[1].title}::&${mockEpics[1].iid}`}
- `('renders token item when selection is a $valueType', async ({ value, tokenValueString }) => {
- wrapper.setProps({
- value: { data: value },
- });
-
- await wrapper.vm.$nextTick();
-
- expect(getTokenValueEl().text()).toBe(tokenValueString);
- });
- });
-});
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/iteration_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/iteration_token_spec.js
deleted file mode 100644
index 44bc16adb97..00000000000
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/iteration_token_spec.js
+++ /dev/null
@@ -1,116 +0,0 @@
-import {
- GlFilteredSearchToken,
- GlFilteredSearchTokenSegment,
- GlFilteredSearchSuggestion,
-} from '@gitlab/ui';
-import { mount } from '@vue/test-utils';
-import waitForPromises from 'helpers/wait_for_promises';
-import createFlash from '~/flash';
-import IterationToken from '~/vue_shared/components/filtered_search_bar/tokens/iteration_token.vue';
-import { mockIterationToken, mockIterations } from '../mock_data';
-
-jest.mock('~/flash');
-
-describe('IterationToken', () => {
- const id = 123;
- let wrapper;
-
- const createComponent = ({
- config = mockIterationToken,
- value = { data: '' },
- active = false,
- stubs = {},
- provide = {},
- } = {}) =>
- mount(IterationToken, {
- propsData: {
- active,
- config,
- value,
- },
- provide: {
- portalName: 'fake target',
- alignSuggestions: function fakeAlignSuggestions() {},
- suggestionsListClass: () => 'custom-class',
- ...provide,
- },
- stubs,
- });
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- describe('when iteration cadence feature is available', () => {
- beforeEach(async () => {
- wrapper = createComponent({
- active: true,
- config: { ...mockIterationToken, initialIterations: mockIterations },
- value: { data: 'i' },
- stubs: { Portal: true },
- provide: {
- glFeatures: {
- iterationCadences: true,
- },
- },
- });
-
- await wrapper.setData({ loading: false });
- });
-
- it('renders iteration start date and due date', () => {
- const suggestions = wrapper.findAll(GlFilteredSearchSuggestion);
-
- expect(suggestions.at(3).text()).toContain('Nov 5, 2021 - Nov 10, 2021');
- });
- });
-
- it('renders iteration value', async () => {
- wrapper = createComponent({ value: { data: id } });
-
- await wrapper.vm.$nextTick();
-
- const tokenSegments = wrapper.findAllComponents(GlFilteredSearchTokenSegment);
-
- expect(tokenSegments).toHaveLength(3); // `Iteration` `=` `gitlab-org: #1`
- expect(tokenSegments.at(2).text()).toBe(id.toString());
- });
-
- it('fetches initial values', () => {
- const fetchIterationsSpy = jest.fn().mockResolvedValue();
-
- wrapper = createComponent({
- config: { ...mockIterationToken, fetchIterations: fetchIterationsSpy },
- value: { data: id },
- });
-
- expect(fetchIterationsSpy).toHaveBeenCalledWith(id);
- });
-
- it('fetches iterations on user input', () => {
- const search = 'hello';
- const fetchIterationsSpy = jest.fn().mockResolvedValue();
-
- wrapper = createComponent({
- config: { ...mockIterationToken, fetchIterations: fetchIterationsSpy },
- });
-
- wrapper.findComponent(GlFilteredSearchToken).vm.$emit('input', { data: search });
-
- expect(fetchIterationsSpy).toHaveBeenCalledWith(search);
- });
-
- it('renders error message when request fails', async () => {
- const fetchIterationsSpy = jest.fn().mockRejectedValue();
-
- wrapper = createComponent({
- config: { ...mockIterationToken, fetchIterations: fetchIterationsSpy },
- });
-
- await waitForPromises();
-
- expect(createFlash).toHaveBeenCalledWith({
- message: 'There was a problem fetching iterations.',
- });
- });
-});
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/weight_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/weight_token_spec.js
deleted file mode 100644
index 4277899f8db..00000000000
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/weight_token_spec.js
+++ /dev/null
@@ -1,38 +0,0 @@
-import { GlFilteredSearchTokenSegment } from '@gitlab/ui';
-import { mount } from '@vue/test-utils';
-import WeightToken from '~/vue_shared/components/filtered_search_bar/tokens/weight_token.vue';
-import { mockWeightToken } from '../mock_data';
-
-jest.mock('~/flash');
-
-describe('WeightToken', () => {
- const weight = '3';
- let wrapper;
-
- const createComponent = ({ config = mockWeightToken, value = { data: '' } } = {}) =>
- mount(WeightToken, {
- propsData: {
- active: false,
- config,
- value,
- },
- provide: {
- portalName: 'fake target',
- alignSuggestions: function fakeAlignSuggestions() {},
- suggestionsListClass: () => 'custom-class',
- },
- });
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- it('renders weight value', () => {
- wrapper = createComponent({ value: { data: weight } });
-
- const tokenSegments = wrapper.findAllComponents(GlFilteredSearchTokenSegment);
-
- expect(tokenSegments).toHaveLength(3); // `Weight` `=` `3`
- expect(tokenSegments.at(2).text()).toBe(weight);
- });
-});
diff --git a/spec/lib/gitlab/ci/config/external/context_spec.rb b/spec/lib/gitlab/ci/config/external/context_spec.rb
index 4b9adf7e87b..39cb32d90b7 100644
--- a/spec/lib/gitlab/ci/config/external/context_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/context_spec.rb
@@ -94,6 +94,15 @@ RSpec.describe Gitlab::Ci::Config::External::Context do
end
describe '#mutate' do
+ let(:attributes) do
+ {
+ project: project,
+ user: user,
+ sha: sha,
+ logger: double('logger')
+ }
+ end
+
shared_examples 'a mutated context' do
let(:mutated) { subject.mutate(new_attributes) }
@@ -107,6 +116,7 @@ RSpec.describe Gitlab::Ci::Config::External::Context do
it { expect(mutated).to have_attributes(new_attributes) }
it { expect(mutated.expandset).to eq(subject.expandset) }
it { expect(mutated.execution_deadline).to eq(mutated.execution_deadline) }
+ it { expect(mutated.logger).to eq(mutated.logger) }
end
context 'with attributes' do
diff --git a/spec/lib/gitlab/ci/config/external/processor_spec.rb b/spec/lib/gitlab/ci/config/external/processor_spec.rb
index 2e9e6f95071..97bd74721f2 100644
--- a/spec/lib/gitlab/ci/config/external/processor_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/processor_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Gitlab::Ci::Config::External::Processor do
include StubRequests
let_it_be(:project) { create(:project, :repository) }
- let_it_be(:another_project) { create(:project, :repository) }
+ let_it_be_with_reload(:another_project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
let(:sha) { '12345' }
@@ -251,6 +251,17 @@ RSpec.describe Gitlab::Ci::Config::External::Processor do
it 'properly expands all includes' do
is_expected.to include(:my_build, :remote_build, :rspec)
end
+
+ it 'propagates the pipeline logger' do
+ processor.perform
+
+ process_obs_count = processor
+ .logger
+ .observations_hash
+ .dig('config_mapper_process_duration_s', 'count')
+
+ expect(process_obs_count).to eq(3)
+ end
end
context 'when user is reporter of another project' do
diff --git a/spec/lib/gitlab/ci/pipeline/logger_spec.rb b/spec/lib/gitlab/ci/pipeline/logger_spec.rb
new file mode 100644
index 00000000000..1a6f449c255
--- /dev/null
+++ b/spec/lib/gitlab/ci/pipeline/logger_spec.rb
@@ -0,0 +1,131 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Gitlab::Ci::Pipeline::Logger do
+ let_it_be(:project) { build_stubbed(:project) }
+ let_it_be(:pipeline) { build_stubbed(:ci_pipeline, project: project) }
+
+ subject(:logger) { described_class.new(project: project) }
+
+ describe '#log_when' do
+ it 'stores blocks for later evaluation' do
+ logger.log_when { |obs| true }
+
+ expect(logger.send(:log_conditions).first).to be_a(Proc)
+ end
+ end
+
+ describe '#instrument' do
+ it "returns the block's value" do
+ expect(logger.instrument(:expensive_operation) { 123 }).to eq(123)
+ end
+
+ it 'records durations of instrumented operations' do
+ loggable_data = {
+ 'expensive_operation_duration_s' => {
+ 'count' => 1,
+ 'avg' => a_kind_of(Numeric),
+ 'max' => a_kind_of(Numeric),
+ 'min' => a_kind_of(Numeric)
+ }
+ }
+
+ logger.instrument(:expensive_operation) { 123 }
+ expect(logger.observations_hash).to match(a_hash_including(loggable_data))
+ end
+
+ it 'raises an error when block is not provided' do
+ expect { logger.instrument(:expensive_operation) }
+ .to raise_error(ArgumentError, 'block not given')
+ end
+ end
+
+ describe '#observe' do
+ it 'records durations of observed operations' do
+ loggable_data = {
+ 'pipeline_creation_duration_s' => {
+ 'avg' => 30, 'count' => 1, 'max' => 30, 'min' => 30
+ }
+ }
+
+ expect(logger.observe(:pipeline_creation_duration_s, 30)).to be_truthy
+ expect(logger.observations_hash).to match(a_hash_including(loggable_data))
+ end
+ end
+
+ describe '#commit' do
+ subject(:commit) { logger.commit(pipeline: pipeline, caller: 'source') }
+
+ before do
+ stub_feature_flags(ci_pipeline_creation_logger: flag)
+ allow(logger).to receive(:current_monotonic_time) { Time.current.to_i }
+
+ logger.instrument(:pipeline_save) { travel(60.seconds) }
+ logger.observe(:pipeline_creation_duration_s, 30)
+ logger.observe(:pipeline_creation_duration_s, 10)
+ end
+
+ context 'when the feature flag is enabled' do
+ let(:flag) { true }
+
+ let(:loggable_data) do
+ {
+ 'pipeline_id' => pipeline.id,
+ 'persisted' => true,
+ 'project_id' => project.id,
+ 'duration_s' => a_kind_of(Numeric),
+ 'caller' => 'source',
+ 'source' => pipeline.source,
+ 'pipeline_save_duration_s' => {
+ 'avg' => 60, 'count' => 1, 'max' => 60, 'min' => 60
+ },
+ 'pipeline_creation_duration_s' => {
+ 'avg' => 20, 'count' => 2, 'max' => 30, 'min' => 10
+ }
+ }
+ end
+
+ it 'logs to application.json' do
+ expect(Gitlab::AppJsonLogger)
+ .to receive(:info)
+ .with(a_hash_including(loggable_data))
+ .and_call_original
+
+ expect(commit).to be_truthy
+ end
+
+ context 'with log conditions' do
+ it 'does not log when the conditions are false' do
+ logger.log_when { |_obs| false }
+
+ expect(Gitlab::AppJsonLogger).not_to receive(:info)
+
+ expect(commit).to be_falsey
+ end
+
+ it 'logs when a condition is true' do
+ logger.log_when { |_obs| true }
+ logger.log_when { |_obs| false }
+
+ expect(Gitlab::AppJsonLogger)
+ .to receive(:info)
+ .with(a_hash_including(loggable_data))
+ .and_call_original
+
+ expect(commit).to be_truthy
+ end
+ end
+ end
+
+ context 'when the feature flag is disabled' do
+ let(:flag) { false }
+
+ it 'does not log' do
+ expect(Gitlab::AppJsonLogger).not_to receive(:info)
+
+ expect(commit).to be_falsey
+ end
+ end
+ end
+end
diff --git a/spec/models/customer_relations/contact_spec.rb b/spec/models/customer_relations/contact_spec.rb
index 3a2d4e2d0ca..7e26d324ac2 100644
--- a/spec/models/customer_relations/contact_spec.rb
+++ b/spec/models/customer_relations/contact_spec.rb
@@ -36,4 +36,27 @@ RSpec.describe CustomerRelations::Contact, type: :model do
expect(contact.phone).to eq('123456')
end
end
+
+ describe '#self.find_ids_by_emails' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:group_contacts) { create_list(:contact, 2, group: group) }
+ let_it_be(:other_contacts) { create_list(:contact, 2) }
+
+ it 'returns ids of contacts from group' do
+ contact_ids = described_class.find_ids_by_emails(group.id, group_contacts.pluck(:email))
+
+ expect(contact_ids).to match_array(group_contacts.pluck(:id))
+ end
+
+ it 'does not return ids of contacts from other groups' do
+ contact_ids = described_class.find_ids_by_emails(group.id, other_contacts.pluck(:email))
+
+ expect(contact_ids).to be_empty
+ end
+
+ it 'raises ArgumentError when called with too many emails' do
+ too_many_emails = described_class::MAX_PLUCK + 1
+ expect { described_class.find_ids_by_emails(group.id, Array(0..too_many_emails)) }.to raise_error(ArgumentError)
+ end
+ end
end
diff --git a/spec/models/customer_relations/issue_contact_spec.rb b/spec/models/customer_relations/issue_contact_spec.rb
index 3747d159833..474455a9884 100644
--- a/spec/models/customer_relations/issue_contact_spec.rb
+++ b/spec/models/customer_relations/issue_contact_spec.rb
@@ -4,6 +4,9 @@ require 'spec_helper'
RSpec.describe CustomerRelations::IssueContact do
let_it_be(:issue_contact, reload: true) { create(:issue_customer_relations_contact) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:issue) { create(:issue, project: project) }
subject { issue_contact }
@@ -19,9 +22,6 @@ RSpec.describe CustomerRelations::IssueContact do
let(:stubbed) { build_stubbed(:issue_customer_relations_contact) }
let(:created) { create(:issue_customer_relations_contact) }
- let(:group) { build(:group) }
- let(:project) { build(:project, group: group) }
- let(:issue) { build(:issue, project: project) }
let(:contact) { build(:contact, group: group) }
let(:for_issue) { build(:issue_customer_relations_contact, :for_issue, issue: issue) }
let(:for_contact) { build(:issue_customer_relations_contact, :for_contact, contact: contact) }
@@ -45,4 +45,26 @@ RSpec.describe CustomerRelations::IssueContact do
expect(built).not_to be_valid
end
end
+
+ describe '#self.find_contact_ids_by_emails' do
+ let_it_be(:for_issue) { create_list(:issue_customer_relations_contact, 2, :for_issue, issue: issue) }
+ let_it_be(:not_for_issue) { create_list(:issue_customer_relations_contact, 2) }
+
+ it 'returns ids of contacts from issue' do
+ contact_ids = described_class.find_contact_ids_by_emails(issue.id, for_issue.map(&:contact).pluck(:email))
+
+ expect(contact_ids).to match_array(for_issue.pluck(:contact_id))
+ end
+
+ it 'does not return ids of contacts from other issues' do
+ contact_ids = described_class.find_contact_ids_by_emails(issue.id, not_for_issue.map(&:contact).pluck(:email))
+
+ expect(contact_ids).to be_empty
+ end
+
+ it 'raises ArgumentError when called with too many emails' do
+ too_many_emails = described_class::MAX_PLUCK + 1
+ expect { described_class.find_contact_ids_by_emails(issue.id, Array(0..too_many_emails)) }.to raise_error(ArgumentError)
+ end
+ end
end
diff --git a/spec/requests/api/graphql/mutations/issues/set_crm_contacts_spec.rb b/spec/requests/api/graphql/mutations/issues/set_crm_contacts_spec.rb
index 3da702c55d7..2da69509ad6 100644
--- a/spec/requests/api/graphql/mutations/issues/set_crm_contacts_spec.rb
+++ b/spec/requests/api/graphql/mutations/issues/set_crm_contacts_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe 'Setting issues crm contacts' do
let(:issue) { create(:issue, project: project) }
let(:operation_mode) { Types::MutationOperationModeEnum.default_mode }
- let(:crm_contact_ids) { [global_id_of(contacts[1]), global_id_of(contacts[2])] }
+ let(:contact_ids) { [global_id_of(contacts[1]), global_id_of(contacts[2])] }
let(:does_not_exist_or_no_permission) { "The resource that you are attempting to access does not exist or you don't have permission to perform this action" }
let(:mutation) do
@@ -20,7 +20,7 @@ RSpec.describe 'Setting issues crm contacts' do
project_path: issue.project.full_path,
iid: issue.iid.to_s,
operation_mode: operation_mode,
- crm_contact_ids: crm_contact_ids
+ contact_ids: contact_ids
}
graphql_mutation(:issue_set_crm_contacts, variables,
@@ -83,7 +83,7 @@ RSpec.describe 'Setting issues crm contacts' do
end
context 'append' do
- let(:crm_contact_ids) { [global_id_of(contacts[3])] }
+ let(:contact_ids) { [global_id_of(contacts[3])] }
let(:operation_mode) { Types::MutationOperationModeEnum.enum[:append] }
it 'updates the issue with correct contacts' do
@@ -95,7 +95,7 @@ RSpec.describe 'Setting issues crm contacts' do
end
context 'remove' do
- let(:crm_contact_ids) { [global_id_of(contacts[0])] }
+ let(:contact_ids) { [global_id_of(contacts[0])] }
let(:operation_mode) { Types::MutationOperationModeEnum.enum[:remove] }
it 'updates the issue with correct contacts' do
@@ -107,7 +107,7 @@ RSpec.describe 'Setting issues crm contacts' do
end
context 'when the contact does not exist' do
- let(:crm_contact_ids) { ["gid://gitlab/CustomerRelations::Contact/#{non_existing_record_id}"] }
+ let(:contact_ids) { ["gid://gitlab/CustomerRelations::Contact/#{non_existing_record_id}"] }
it 'returns expected error' do
post_graphql_mutation(mutation, current_user: user)
@@ -120,7 +120,7 @@ RSpec.describe 'Setting issues crm contacts' do
context 'when the contact belongs to a different group' do
let(:group2) { create(:group) }
let(:contact) { create(:contact, group: group2) }
- let(:crm_contact_ids) { [global_id_of(contact)] }
+ let(:contact_ids) { [global_id_of(contact)] }
before do
group2.add_reporter(user)
@@ -137,7 +137,7 @@ RSpec.describe 'Setting issues crm contacts' do
context 'when attempting to add more than 6' do
let(:operation_mode) { Types::MutationOperationModeEnum.enum[:append] }
let(:gid) { global_id_of(contacts[0]) }
- let(:crm_contact_ids) { [gid, gid, gid, gid, gid, gid, gid] }
+ let(:contact_ids) { [gid, gid, gid, gid, gid, gid, gid] }
it 'returns expected error' do
post_graphql_mutation(mutation, current_user: user)
@@ -149,7 +149,7 @@ RSpec.describe 'Setting issues crm contacts' do
context 'when trying to remove non-existent contact' do
let(:operation_mode) { Types::MutationOperationModeEnum.enum[:remove] }
- let(:crm_contact_ids) { ["gid://gitlab/CustomerRelations::Contact/#{non_existing_record_id}"] }
+ let(:contact_ids) { ["gid://gitlab/CustomerRelations::Contact/#{non_existing_record_id}"] }
it 'raises expected error' do
post_graphql_mutation(mutation, current_user: user)
diff --git a/spec/services/ci/create_pipeline_service/logger_spec.rb b/spec/services/ci/create_pipeline_service/logger_spec.rb
new file mode 100644
index 00000000000..8f0c79b8b8e
--- /dev/null
+++ b/spec/services/ci/create_pipeline_service/logger_spec.rb
@@ -0,0 +1,139 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::CreatePipelineService do
+ context 'pipeline logger' do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { project.owner }
+
+ let(:ref) { 'refs/heads/master' }
+ let(:service) { described_class.new(project, user, { ref: ref }) }
+ let(:pipeline) { service.execute(:push).payload }
+ let(:file_location) { 'spec/fixtures/gitlab/ci/external_files/.gitlab-ci-template-1.yml' }
+
+ before do
+ stub_ci_pipeline_yaml_file(gitlab_ci_yaml)
+ end
+
+ let(:counters) do
+ {
+ 'count' => a_kind_of(Numeric),
+ 'avg' => a_kind_of(Numeric),
+ 'max' => a_kind_of(Numeric),
+ 'min' => a_kind_of(Numeric)
+ }
+ end
+
+ let(:loggable_data) do
+ {
+ 'caller' => 'Ci::CreatePipelineService',
+ 'source' => 'push',
+ 'pipeline_id' => a_kind_of(Numeric),
+ 'persisted' => true,
+ 'project_id' => project.id,
+ 'duration_s' => a_kind_of(Numeric),
+ 'pipeline_creation_duration_s' => counters,
+ 'pipeline_size_count' => counters,
+ 'pipeline_step_gitlab_ci_pipeline_chain_seed_duration_s' => counters
+ }
+ end
+
+ context 'when the duration is under the threshold' do
+ it 'does not create a log entry but it collects the data' do
+ expect(Gitlab::AppJsonLogger).not_to receive(:info)
+ expect(pipeline).to be_created_successfully
+
+ expect(service.logger.observations_hash)
+ .to match(
+ a_hash_including(
+ 'pipeline_creation_duration_s' => counters,
+ 'pipeline_size_count' => counters,
+ 'pipeline_step_gitlab_ci_pipeline_chain_seed_duration_s' => counters
+ )
+ )
+ end
+ end
+
+ context 'when the durations exceeds the threshold' do
+ let(:timer) do
+ proc do
+ @timer = @timer.to_i + 30
+ end
+ end
+
+ before do
+ allow(Gitlab::Ci::Pipeline::Logger)
+ .to receive(:current_monotonic_time) { timer.call }
+ end
+
+ it 'creates a log entry' do
+ expect(Gitlab::AppJsonLogger)
+ .to receive(:info)
+ .with(a_hash_including(loggable_data))
+ .and_call_original
+
+ expect(pipeline).to be_created_successfully
+ end
+
+ context 'when the pipeline is not persisted' do
+ let(:loggable_data) do
+ {
+ 'caller' => 'Ci::CreatePipelineService',
+ 'source' => 'push',
+ 'pipeline_id' => nil,
+ 'persisted' => false,
+ 'project_id' => project.id,
+ 'duration_s' => a_kind_of(Numeric),
+ 'pipeline_step_gitlab_ci_pipeline_chain_seed_duration_s' => counters
+ }
+ end
+
+ before do
+ allow_next_instance_of(Ci::Pipeline) do |pipeline|
+ expect(pipeline).to receive(:save!).and_raise { RuntimeError }
+ end
+ end
+
+ it 'creates a log entry' do
+ expect(Gitlab::AppJsonLogger)
+ .to receive(:info)
+ .with(a_hash_including(loggable_data))
+ .and_call_original
+
+ expect { pipeline }.to raise_error(RuntimeError)
+ end
+ end
+
+ context 'when the feature flag is disabled' do
+ before do
+ stub_feature_flags(ci_pipeline_creation_logger: false)
+ end
+
+ it 'does not create a log entry' do
+ expect(Gitlab::AppJsonLogger).not_to receive(:info)
+
+ expect(pipeline).to be_created_successfully
+ expect(service.logger.observations_hash).to eq({})
+ end
+ end
+ end
+
+ context 'when the size exceeds the threshold' do
+ before do
+ allow_next_instance_of(Ci::Pipeline) do |pipeline|
+ allow(pipeline).to receive(:total_size) { 5000 }
+ end
+ end
+
+ it 'creates a log entry' do
+ expect(Gitlab::AppJsonLogger)
+ .to receive(:info)
+ .with(a_hash_including(loggable_data))
+ .and_call_original
+
+ expect(pipeline).to be_created_successfully
+ end
+ end
+ end
+end
diff --git a/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb b/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb
index a66d3898c5c..02f8f2dd99f 100644
--- a/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb
+++ b/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb
@@ -1,12 +1,1106 @@
# frozen_string_literal: true
require 'spec_helper'
-require_relative 'shared_processing_service'
-require_relative 'shared_processing_service_tests_with_yaml'
RSpec.describe Ci::PipelineProcessing::AtomicProcessingService do
- it_behaves_like 'Pipeline Processing Service'
- it_behaves_like 'Pipeline Processing Service Tests With Yaml'
+ describe 'Pipeline Processing Service Tests With Yaml' do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { project.owner }
+
+ where(:test_file_path) do
+ Dir.glob(Rails.root.join('spec/services/ci/pipeline_processing/test_cases/*.yml'))
+ end
+
+ with_them do
+ let(:test_file) { YAML.load_file(test_file_path) }
+ let(:pipeline) { Ci::CreatePipelineService.new(project, user, ref: 'master').execute(:pipeline).payload }
+
+ before do
+ stub_ci_pipeline_yaml_file(YAML.dump(test_file['config']))
+ end
+
+ it 'follows transitions' do
+ expect(pipeline).to be_persisted
+ Sidekiq::Worker.drain_all # ensure that all async jobs are executed
+ check_expectation(test_file.dig('init', 'expect'), "init")
+
+ test_file['transitions'].each_with_index do |transition, idx|
+ event_on_jobs(transition['event'], transition['jobs'])
+ Sidekiq::Worker.drain_all # ensure that all async jobs are executed
+ check_expectation(transition['expect'], "transition:#{idx}")
+ end
+ end
+
+ private
+
+ def check_expectation(expectation, message)
+ expect(current_state.deep_stringify_keys).to eq(expectation), message
+ end
+
+ def current_state
+ # reload pipeline and all relations
+ pipeline.reload
+
+ {
+ pipeline: pipeline.status,
+ stages: pipeline.stages.pluck(:name, :status).to_h,
+ jobs: pipeline.latest_statuses.pluck(:name, :status).to_h
+ }
+ end
+
+ def event_on_jobs(event, job_names)
+ statuses = pipeline.latest_statuses.by_name(job_names).to_a
+ expect(statuses.count).to eq(job_names.count) # ensure that we have the same counts
+
+ statuses.each do |status|
+ if event == 'play'
+ status.play(user)
+ else
+ status.public_send("#{event}!")
+ end
+ end
+ end
+ end
+ end
+
+ describe 'Pipeline Processing Service' do
+ let(:project) { create(:project, :repository) }
+ let(:user) { project.owner }
+
+ let(:pipeline) do
+ create(:ci_empty_pipeline, ref: 'master', project: project)
+ end
+
+ context 'when simple pipeline is defined' do
+ before do
+ create_build('linux', stage_idx: 0)
+ create_build('mac', stage_idx: 0)
+ create_build('rspec', stage_idx: 1)
+ create_build('rubocop', stage_idx: 1)
+ create_build('deploy', stage_idx: 2)
+ end
+
+ it 'processes a pipeline', :sidekiq_inline do
+ expect(process_pipeline).to be_truthy
+
+ succeed_pending
+
+ expect(builds.success.count).to eq(2)
+
+ succeed_pending
+
+ expect(builds.success.count).to eq(4)
+
+ succeed_pending
+
+ expect(builds.success.count).to eq(5)
+ end
+
+ it 'does not process pipeline if existing stage is running' do
+ expect(process_pipeline).to be_truthy
+ expect(builds.pending.count).to eq(2)
+
+ expect(process_pipeline).to be_falsey
+ expect(builds.pending.count).to eq(2)
+ end
+ end
+
+ context 'custom stage with first job allowed to fail' do
+ before do
+ create_build('clean_job', stage_idx: 0, allow_failure: true)
+ create_build('test_job', stage_idx: 1, allow_failure: true)
+ end
+
+ it 'automatically triggers a next stage when build finishes', :sidekiq_inline do
+ expect(process_pipeline).to be_truthy
+ expect(builds_statuses).to eq ['pending']
+
+ fail_running_or_pending
+
+ expect(builds_statuses).to eq %w(failed pending)
+
+ fail_running_or_pending
+
+ expect(pipeline.reload).to be_success
+ end
+ end
+
+ context 'when optional manual actions are defined', :sidekiq_inline do
+ before do
+ create_build('build', stage_idx: 0)
+ create_build('test', stage_idx: 1)
+ create_build('test_failure', stage_idx: 2, when: 'on_failure')
+ create_build('deploy', stage_idx: 3)
+ create_build('production', stage_idx: 3, when: 'manual', allow_failure: true)
+ create_build('cleanup', stage_idx: 4, when: 'always')
+ create_build('clear:cache', stage_idx: 4, when: 'manual', allow_failure: true)
+ end
+
+ context 'when builds are successful' do
+ it 'properly processes the pipeline' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names).to eq ['build']
+ expect(builds_statuses).to eq ['pending']
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w(build test)
+ expect(builds_statuses).to eq %w(success pending)
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w(build test deploy production)
+ expect(builds_statuses).to eq %w(success success pending manual)
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w(build test deploy production cleanup clear:cache)
+ expect(builds_statuses).to eq %w(success success success manual pending manual)
+
+ succeed_running_or_pending
+
+ expect(builds_statuses).to eq %w(success success success manual success manual)
+ expect(pipeline.reload.status).to eq 'success'
+ end
+ end
+
+ context 'when test job fails' do
+ it 'properly processes the pipeline' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names).to eq ['build']
+ expect(builds_statuses).to eq ['pending']
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w(build test)
+ expect(builds_statuses).to eq %w(success pending)
+
+ fail_running_or_pending
+
+ expect(builds_names).to eq %w(build test test_failure)
+ expect(builds_statuses).to eq %w(success failed pending)
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w(build test test_failure cleanup)
+ expect(builds_statuses).to eq %w(success failed success pending)
+
+ succeed_running_or_pending
+
+ expect(builds_statuses).to eq %w(success failed success success)
+ expect(pipeline.reload.status).to eq 'failed'
+ end
+ end
+
+ context 'when test and test_failure jobs fail' do
+ it 'properly processes the pipeline' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names).to eq ['build']
+ expect(builds_statuses).to eq ['pending']
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w(build test)
+ expect(builds_statuses).to eq %w(success pending)
+
+ fail_running_or_pending
+
+ expect(builds_names).to eq %w(build test test_failure)
+ expect(builds_statuses).to eq %w(success failed pending)
+
+ fail_running_or_pending
+
+ expect(builds_names).to eq %w(build test test_failure cleanup)
+ expect(builds_statuses).to eq %w(success failed failed pending)
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w(build test test_failure cleanup)
+ expect(builds_statuses).to eq %w(success failed failed success)
+ expect(pipeline.reload.status).to eq('failed')
+ end
+ end
+
+ context 'when deploy job fails' do
+ it 'properly processes the pipeline' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names).to eq ['build']
+ expect(builds_statuses).to eq ['pending']
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w(build test)
+ expect(builds_statuses).to eq %w(success pending)
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w(build test deploy production)
+ expect(builds_statuses).to eq %w(success success pending manual)
+
+ fail_running_or_pending
+
+ expect(builds_names).to eq %w(build test deploy production cleanup)
+ expect(builds_statuses).to eq %w(success success failed manual pending)
+
+ succeed_running_or_pending
+
+ expect(builds_statuses).to eq %w(success success failed manual success)
+ expect(pipeline.reload).to be_failed
+ end
+ end
+
+ context 'when build is canceled in the second stage' do
+ it 'does not schedule builds after build has been canceled' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names).to eq ['build']
+ expect(builds_statuses).to eq ['pending']
+
+ succeed_running_or_pending
+
+ expect(builds.running_or_pending).not_to be_empty
+ expect(builds_names).to eq %w(build test)
+ expect(builds_statuses).to eq %w(success pending)
+
+ cancel_running_or_pending
+
+ expect(builds.running_or_pending).to be_empty
+ expect(builds_names).to eq %w[build test]
+ expect(builds_statuses).to eq %w[success canceled]
+ expect(pipeline.reload).to be_canceled
+ end
+ end
+
+ context 'when listing optional manual actions' do
+ it 'returns only for skipped builds' do
+ # currently all builds are created
+ expect(process_pipeline).to be_truthy
+ expect(manual_actions).to be_empty
+
+ # succeed stage build
+ succeed_running_or_pending
+
+ expect(manual_actions).to be_empty
+
+ # succeed stage test
+ succeed_running_or_pending
+
+ expect(manual_actions).to be_one # production
+
+ # succeed stage deploy
+ succeed_running_or_pending
+
+ expect(manual_actions).to be_many # production and clear cache
+ end
+ end
+ end
+
+ context 'when delayed jobs are defined', :sidekiq_inline do
+ context 'when the scene is timed incremental rollout' do
+ before do
+ create_build('build', stage_idx: 0)
+ create_build('rollout10%', **delayed_options, stage_idx: 1)
+ create_build('rollout100%', **delayed_options, stage_idx: 2)
+ create_build('cleanup', stage_idx: 3)
+
+ allow(Ci::BuildScheduleWorker).to receive(:perform_at)
+ end
+
+ context 'when builds are successful' do
+ it 'properly processes the pipeline' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names_and_statuses).to eq({ 'build': 'pending' })
+
+ succeed_pending
+
+ expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'scheduled' })
+
+ travel_to 2.minutes.from_now do
+ enqueue_scheduled('rollout10%')
+ end
+ succeed_pending
+
+ expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'success', 'rollout100%': 'scheduled' })
+
+ travel_to 2.minutes.from_now do
+ enqueue_scheduled('rollout100%')
+ end
+ succeed_pending
+
+ expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'success', 'rollout100%': 'success', 'cleanup': 'pending' })
+
+ succeed_pending
+
+ expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'success', 'rollout100%': 'success', 'cleanup': 'success' })
+ expect(pipeline.reload.status).to eq 'success'
+ end
+ end
+
+ context 'when build job fails' do
+ it 'properly processes the pipeline' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names_and_statuses).to eq({ 'build': 'pending' })
+
+ fail_running_or_pending
+
+ expect(builds_names_and_statuses).to eq({ 'build': 'failed' })
+ expect(pipeline.reload.status).to eq 'failed'
+ end
+ end
+
+ context 'when rollout 10% is unscheduled' do
+ it 'properly processes the pipeline' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names_and_statuses).to eq({ 'build': 'pending' })
+
+ succeed_pending
+
+ expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'scheduled' })
+
+ unschedule
+
+ expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'manual' })
+ expect(pipeline.reload.status).to eq 'manual'
+ end
+
+ context 'when user plays rollout 10%' do
+ it 'schedules rollout100%' do
+ process_pipeline
+ succeed_pending
+ unschedule
+ play_manual_action('rollout10%')
+ succeed_pending
+
+ expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'success', 'rollout100%': 'scheduled' })
+ expect(pipeline.reload.status).to eq 'scheduled'
+ end
+ end
+ end
+
+ context 'when rollout 10% fails' do
+ it 'properly processes the pipeline' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names_and_statuses).to eq({ 'build': 'pending' })
+
+ succeed_pending
+
+ expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'scheduled' })
+
+ travel_to 2.minutes.from_now do
+ enqueue_scheduled('rollout10%')
+ end
+ fail_running_or_pending
+
+ expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'failed' })
+ expect(pipeline.reload.status).to eq 'failed'
+ end
+
+ context 'when user retries rollout 10%' do
+ it 'does not schedule rollout10% again' do
+ process_pipeline
+ succeed_pending
+ enqueue_scheduled('rollout10%')
+ fail_running_or_pending
+ retry_build('rollout10%')
+
+ expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'pending' })
+ expect(pipeline.reload.status).to eq 'running'
+ end
+ end
+ end
+
+ context 'when rollout 10% is played immidiately' do
+ it 'properly processes the pipeline' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names_and_statuses).to eq({ 'build': 'pending' })
+
+ succeed_pending
+
+ expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'scheduled' })
+
+ play_manual_action('rollout10%')
+
+ expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'pending' })
+ expect(pipeline.reload.status).to eq 'running'
+ end
+ end
+ end
+
+ context 'when only one scheduled job exists in a pipeline' do
+ before do
+ create_build('delayed', **delayed_options, stage_idx: 0)
+
+ allow(Ci::BuildScheduleWorker).to receive(:perform_at)
+ end
+
+ it 'properly processes the pipeline' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names_and_statuses).to eq({ 'delayed': 'scheduled' })
+
+ expect(pipeline.reload.status).to eq 'scheduled'
+ end
+ end
+
+ context 'when there are two delayed jobs in a stage' do
+ before do
+ create_build('delayed1', **delayed_options, stage_idx: 0)
+ create_build('delayed2', **delayed_options, stage_idx: 0)
+ create_build('job', stage_idx: 1)
+
+ allow(Ci::BuildScheduleWorker).to receive(:perform_at)
+ end
+
+ it 'blocks the stage until all scheduled jobs finished' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names_and_statuses).to eq({ 'delayed1': 'scheduled', 'delayed2': 'scheduled' })
+
+ travel_to 2.minutes.from_now do
+ enqueue_scheduled('delayed1')
+ end
+
+ expect(builds_names_and_statuses).to eq({ 'delayed1': 'pending', 'delayed2': 'scheduled' })
+ expect(pipeline.reload.status).to eq 'running'
+ end
+ end
+
+ context 'when a delayed job is allowed to fail' do
+ before do
+ create_build('delayed', **delayed_options, allow_failure: true, stage_idx: 0)
+ create_build('job', stage_idx: 1)
+
+ allow(Ci::BuildScheduleWorker).to receive(:perform_at)
+ end
+
+ it 'blocks the stage and continues after it failed' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names_and_statuses).to eq({ 'delayed': 'scheduled' })
+
+ travel_to 2.minutes.from_now do
+ enqueue_scheduled('delayed')
+ end
+ fail_running_or_pending
+
+ expect(builds_names_and_statuses).to eq({ 'delayed': 'failed', 'job': 'pending' })
+ expect(pipeline.reload.status).to eq 'pending'
+ end
+ end
+ end
+
+ context 'when an exception is raised during a persistent ref creation' do
+ before do
+ successful_build('test', stage_idx: 0)
+
+ allow_next_instance_of(Ci::PersistentRef) do |instance|
+ allow(instance).to receive(:delete_refs) { raise ArgumentError }
+ end
+ end
+
+ it 'process the pipeline' do
+ expect { process_pipeline }.not_to raise_error
+ end
+ end
+
+ context 'when there are manual action in earlier stages' do
+ context 'when first stage has only optional manual actions' do
+ before do
+ create_build('build', stage_idx: 0, when: 'manual', allow_failure: true)
+ create_build('check', stage_idx: 1)
+ create_build('test', stage_idx: 2)
+
+ process_pipeline
+ end
+
+ it 'starts from the second stage' do
+ expect(all_builds_statuses).to eq %w[manual pending created]
+ end
+ end
+
+ context 'when second stage has only optional manual actions' do
+ before do
+ create_build('check', stage_idx: 0)
+ create_build('build', stage_idx: 1, when: 'manual', allow_failure: true)
+ create_build('test', stage_idx: 2)
+
+ process_pipeline
+ end
+
+ it 'skips second stage and continues on third stage', :sidekiq_inline do
+ expect(all_builds_statuses).to eq(%w[pending created created])
+
+ builds.first.success
+
+ expect(all_builds_statuses).to eq(%w[success manual pending])
+ end
+ end
+ end
+
+ context 'when there are only manual actions in stages' do
+ before do
+ create_build('image', stage_idx: 0, when: 'manual', allow_failure: true)
+ create_build('build', stage_idx: 1, when: 'manual', allow_failure: true)
+ create_build('deploy', stage_idx: 2, when: 'manual')
+ create_build('check', stage_idx: 3)
+
+ process_pipeline
+ end
+
+ it 'processes all jobs until blocking actions encountered' do
+ expect(all_builds_statuses).to eq(%w[manual manual manual created])
+ expect(all_builds_names).to eq(%w[image build deploy check])
+
+ expect(pipeline.reload).to be_blocked
+ end
+ end
+
+ context 'when there is only one manual action' do
+ before do
+ create_build('deploy', stage_idx: 0, when: 'manual', allow_failure: true)
+
+ process_pipeline
+ end
+
+ it 'skips the pipeline' do
+ expect(pipeline.reload).to be_skipped
+ end
+
+ context 'when the action was played' do
+ before do
+ play_manual_action('deploy')
+ end
+
+ it 'queues the action and pipeline', :sidekiq_inline do
+ expect(all_builds_statuses).to eq(%w[pending])
+
+ expect(pipeline.reload).to be_pending
+ end
+ end
+ end
+
+ context 'when blocking manual actions are defined', :sidekiq_inline do
+ before do
+ create_build('code:test', stage_idx: 0)
+ create_build('staging:deploy', stage_idx: 1, when: 'manual')
+ create_build('staging:test', stage_idx: 2, when: 'on_success')
+ create_build('production:deploy', stage_idx: 3, when: 'manual')
+ create_build('production:test', stage_idx: 4, when: 'always')
+ end
+
+ context 'when first stage succeeds' do
+ it 'blocks pipeline on stage with first manual action' do
+ process_pipeline
+
+ expect(builds_names).to eq %w[code:test]
+ expect(builds_statuses).to eq %w[pending]
+ expect(pipeline.reload.status).to eq 'pending'
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w[code:test staging:deploy]
+ expect(builds_statuses).to eq %w[success manual]
+ expect(pipeline.reload).to be_manual
+ end
+ end
+
+ context 'when first stage fails' do
+ it 'does not take blocking action into account' do
+ process_pipeline
+
+ expect(builds_names).to eq %w[code:test]
+ expect(builds_statuses).to eq %w[pending]
+ expect(pipeline.reload.status).to eq 'pending'
+
+ fail_running_or_pending
+
+ expect(builds_names).to eq %w[code:test production:test]
+ expect(builds_statuses).to eq %w[failed pending]
+
+ succeed_running_or_pending
+
+ expect(builds_statuses).to eq %w[failed success]
+ expect(pipeline.reload).to be_failed
+ end
+ end
+
+ context 'when pipeline is promoted sequentially up to the end' do
+ before do
+ # Users need ability to merge into a branch in order to trigger
+ # protected manual actions.
+ #
+ create(:protected_branch, :developers_can_merge,
+ name: 'master', project: project)
+ end
+
+ it 'properly processes entire pipeline' do
+ process_pipeline
+
+ expect(builds_names).to eq %w[code:test]
+ expect(builds_statuses).to eq %w[pending]
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w[code:test staging:deploy]
+ expect(builds_statuses).to eq %w[success manual]
+ expect(pipeline.reload).to be_manual
+
+ play_manual_action('staging:deploy')
+
+ expect(builds_statuses).to eq %w[success pending]
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w[code:test staging:deploy staging:test]
+ expect(builds_statuses).to eq %w[success success pending]
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w[code:test staging:deploy staging:test
+ production:deploy]
+ expect(builds_statuses).to eq %w[success success success manual]
+
+ expect(pipeline.reload).to be_manual
+ expect(pipeline.reload).to be_blocked
+ expect(pipeline.reload).not_to be_active
+ expect(pipeline.reload).not_to be_complete
+
+ play_manual_action('production:deploy')
+
+ expect(builds_statuses).to eq %w[success success success pending]
+ expect(pipeline.reload).to be_running
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w[code:test staging:deploy staging:test
+ production:deploy production:test]
+ expect(builds_statuses).to eq %w[success success success success pending]
+ expect(pipeline.reload).to be_running
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w[code:test staging:deploy staging:test
+ production:deploy production:test]
+ expect(builds_statuses).to eq %w[success success success success success]
+ expect(pipeline.reload).to be_success
+ end
+ end
+ end
+
+ context 'when second stage has only on_failure jobs', :sidekiq_inline do
+ before do
+ create_build('check', stage_idx: 0)
+ create_build('build', stage_idx: 1, when: 'on_failure')
+ create_build('test', stage_idx: 2)
+
+ process_pipeline
+ end
+
+ it 'skips second stage and continues on third stage' do
+ expect(all_builds_statuses).to eq(%w[pending created created])
+
+ builds.first.success
+
+ expect(all_builds_statuses).to eq(%w[success skipped pending])
+ end
+ end
+
+ context 'when failed build in the middle stage is retried', :sidekiq_inline do
+ context 'when failed build is the only unsuccessful build in the stage' do
+ before do
+ create_build('build:1', stage_idx: 0)
+ create_build('build:2', stage_idx: 0)
+ create_build('test:1', stage_idx: 1)
+ create_build('test:2', stage_idx: 1)
+ create_build('deploy:1', stage_idx: 2)
+ create_build('deploy:2', stage_idx: 2)
+ end
+
+ it 'does trigger builds in the next stage' do
+ expect(process_pipeline).to be_truthy
+ expect(builds_names).to eq ['build:1', 'build:2']
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq ['build:1', 'build:2', 'test:1', 'test:2']
+
+ pipeline.builds.find_by(name: 'test:1').success!
+ pipeline.builds.find_by(name: 'test:2').drop!
+
+ expect(builds_names).to eq ['build:1', 'build:2', 'test:1', 'test:2']
+
+ Ci::Build.retry(pipeline.builds.find_by(name: 'test:2'), user).reset.success!
+
+ expect(builds_names).to eq ['build:1', 'build:2', 'test:1', 'test:2',
+ 'test:2', 'deploy:1', 'deploy:2']
+ end
+ end
+ end
+
+ context 'when builds with auto-retries are configured', :sidekiq_inline do
+ before do
+ create_build('build:1', stage_idx: 0, user: user, options: { script: 'aa', retry: 2 })
+ create_build('test:1', stage_idx: 1, user: user, when: :on_failure)
+ create_build('test:2', stage_idx: 1, user: user, options: { script: 'aa', retry: 1 })
+ end
+
+ it 'automatically retries builds in a valid order' do
+ expect(process_pipeline).to be_truthy
+
+ fail_running_or_pending
+
+ expect(builds_names).to eq %w[build:1 build:1]
+ expect(builds_statuses).to eq %w[failed pending]
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w[build:1 build:1 test:2]
+ expect(builds_statuses).to eq %w[failed success pending]
+
+ succeed_running_or_pending
+
+ expect(builds_names).to eq %w[build:1 build:1 test:2]
+ expect(builds_statuses).to eq %w[failed success success]
+
+ expect(pipeline.reload).to be_success
+ end
+ end
+
+ context 'when pipeline with needs is created', :sidekiq_inline do
+ let!(:linux_build) { create_build('linux:build', stage: 'build', stage_idx: 0) }
+ let!(:mac_build) { create_build('mac:build', stage: 'build', stage_idx: 0) }
+ let!(:linux_rspec) { create_build('linux:rspec', stage: 'test', stage_idx: 1, scheduling_type: :dag) }
+ let!(:linux_rubocop) { create_build('linux:rubocop', stage: 'test', stage_idx: 1, scheduling_type: :dag) }
+ let!(:mac_rspec) { create_build('mac:rspec', stage: 'test', stage_idx: 1, scheduling_type: :dag) }
+ let!(:mac_rubocop) { create_build('mac:rubocop', stage: 'test', stage_idx: 1, scheduling_type: :dag) }
+ let!(:deploy) { create_build('deploy', stage: 'deploy', stage_idx: 2) }
+
+ let!(:linux_rspec_on_build) { create(:ci_build_need, build: linux_rspec, name: 'linux:build') }
+ let!(:linux_rubocop_on_build) { create(:ci_build_need, build: linux_rubocop, name: 'linux:build') }
+
+ let!(:mac_rspec_on_build) { create(:ci_build_need, build: mac_rspec, name: 'mac:build') }
+ let!(:mac_rubocop_on_build) { create(:ci_build_need, build: mac_rubocop, name: 'mac:build') }
+
+ it 'when linux:* finishes first it runs it out of order' do
+ expect(process_pipeline).to be_truthy
+
+ expect(stages).to eq(%w(pending created created))
+ expect(builds.pending).to contain_exactly(linux_build, mac_build)
+
+ # we follow the single path of linux
+ linux_build.reset.success!
+
+ expect(stages).to eq(%w(running pending created))
+ expect(builds.success).to contain_exactly(linux_build)
+ expect(builds.pending).to contain_exactly(mac_build, linux_rspec, linux_rubocop)
+
+ linux_rspec.reset.success!
+
+ expect(stages).to eq(%w(running running created))
+ expect(builds.success).to contain_exactly(linux_build, linux_rspec)
+ expect(builds.pending).to contain_exactly(mac_build, linux_rubocop)
+
+ linux_rubocop.reset.success!
+
+ expect(stages).to eq(%w(running running created))
+ expect(builds.success).to contain_exactly(linux_build, linux_rspec, linux_rubocop)
+ expect(builds.pending).to contain_exactly(mac_build)
+
+ mac_build.reset.success!
+ mac_rspec.reset.success!
+ mac_rubocop.reset.success!
+
+ expect(stages).to eq(%w(success success pending))
+ expect(builds.success).to contain_exactly(
+ linux_build, linux_rspec, linux_rubocop, mac_build, mac_rspec, mac_rubocop)
+ expect(builds.pending).to contain_exactly(deploy)
+ end
+
+ context 'when one of the jobs is run on a failure' do
+ let!(:linux_notify) { create_build('linux:notify', stage: 'deploy', stage_idx: 2, when: 'on_failure', scheduling_type: :dag) }
+
+ let!(:linux_notify_on_build) { create(:ci_build_need, build: linux_notify, name: 'linux:build') }
+
+ context 'when another job in build phase fails first' do
+ it 'does skip linux:notify' do
+ expect(process_pipeline).to be_truthy
+
+ mac_build.reset.drop!
+ linux_build.reset.success!
+
+ expect(linux_notify.reset).to be_skipped
+ end
+ end
+
+ context 'when linux:build job fails first' do
+ it 'does run linux:notify' do
+ expect(process_pipeline).to be_truthy
+
+ linux_build.reset.drop!
+
+ expect(linux_notify.reset).to be_pending
+ end
+ end
+ end
+
+ context 'when there is a job scheduled with dag but no need (needs: [])' do
+ let!(:deploy_pages) { create_build('deploy_pages', stage: 'deploy', stage_idx: 2, scheduling_type: :dag) }
+
+ it 'runs deploy_pages without waiting prior stages' do
+ expect(process_pipeline).to be_truthy
+
+ expect(stages).to eq(%w(pending created pending))
+ expect(builds.pending).to contain_exactly(linux_build, mac_build, deploy_pages)
+
+ linux_build.reset.success!
+ deploy_pages.reset.success!
+
+ expect(stages).to eq(%w(running pending running))
+ expect(builds.success).to contain_exactly(linux_build, deploy_pages)
+ expect(builds.pending).to contain_exactly(mac_build, linux_rspec, linux_rubocop)
+
+ linux_rspec.reset.success!
+ linux_rubocop.reset.success!
+ mac_build.reset.success!
+ mac_rspec.reset.success!
+ mac_rubocop.reset.success!
+
+ expect(stages).to eq(%w(success success running))
+ expect(builds.pending).to contain_exactly(deploy)
+ end
+ end
+ end
+
+ context 'when a needed job is skipped', :sidekiq_inline do
+ let!(:linux_build) { create_build('linux:build', stage: 'build', stage_idx: 0) }
+ let!(:linux_rspec) { create_build('linux:rspec', stage: 'test', stage_idx: 1) }
+ let!(:deploy) { create_build('deploy', stage: 'deploy', stage_idx: 2, scheduling_type: :dag) }
+
+ before do
+ create(:ci_build_need, build: deploy, name: 'linux:build')
+ end
+
+ it 'skips the jobs depending on it' do
+ expect(process_pipeline).to be_truthy
+
+ expect(stages).to eq(%w(pending created created))
+ expect(all_builds.pending).to contain_exactly(linux_build)
+
+ linux_build.reset.drop!
+
+ expect(stages).to eq(%w(failed skipped skipped))
+ expect(all_builds.failed).to contain_exactly(linux_build)
+ expect(all_builds.skipped).to contain_exactly(linux_rspec, deploy)
+ end
+ end
+
+ context 'when a needed job is manual', :sidekiq_inline do
+ let!(:linux_build) { create_build('linux:build', stage: 'build', stage_idx: 0, when: 'manual', allow_failure: true) }
+ let!(:deploy) { create_build('deploy', stage: 'deploy', stage_idx: 1, scheduling_type: :dag) }
+
+ before do
+ create(:ci_build_need, build: deploy, name: 'linux:build')
+ end
+
+ it 'makes deploy DAG to be skipped' do
+ expect(process_pipeline).to be_truthy
+
+ expect(stages).to eq(%w(skipped skipped))
+ expect(all_builds.manual).to contain_exactly(linux_build)
+ expect(all_builds.skipped).to contain_exactly(deploy)
+ end
+ end
+
+ context 'when a bridge job has parallel:matrix config', :sidekiq_inline do
+ let(:parent_config) do
+ <<-EOY
+ test:
+ stage: test
+ script: echo test
+
+ deploy:
+ stage: deploy
+ trigger:
+ include: .child.yml
+ parallel:
+ matrix:
+ - PROVIDER: ovh
+ STACK: [monitoring, app]
+ EOY
+ end
+
+ let(:child_config) do
+ <<-EOY
+ test:
+ stage: test
+ script: echo test
+ EOY
+ end
+
+ let(:pipeline) do
+ Ci::CreatePipelineService.new(project, user, { ref: 'master' }).execute(:push).payload
+ end
+
+ before do
+ allow_next_instance_of(Repository) do |repository|
+ allow(repository)
+ .to receive(:blob_data_at)
+ .with(an_instance_of(String), '.gitlab-ci.yml')
+ .and_return(parent_config)
+
+ allow(repository)
+ .to receive(:blob_data_at)
+ .with(an_instance_of(String), '.child.yml')
+ .and_return(child_config)
+ end
+ end
+
+ it 'creates pipeline with bridges, then passes the matrix variables to downstream jobs' do
+ expect(all_builds_names).to contain_exactly('test', 'deploy: [ovh, monitoring]', 'deploy: [ovh, app]')
+ expect(all_builds_statuses).to contain_exactly('pending', 'created', 'created')
+
+ succeed_pending
+
+ # bridge jobs directly transition to success
+ expect(all_builds_statuses).to contain_exactly('success', 'success', 'success')
+
+ bridge1 = all_builds.find_by(name: 'deploy: [ovh, monitoring]')
+ bridge2 = all_builds.find_by(name: 'deploy: [ovh, app]')
+
+ downstream_job1 = bridge1.downstream_pipeline.processables.first
+ downstream_job2 = bridge2.downstream_pipeline.processables.first
+
+ expect(downstream_job1.scoped_variables.to_hash).to include('PROVIDER' => 'ovh', 'STACK' => 'monitoring')
+ expect(downstream_job2.scoped_variables.to_hash).to include('PROVIDER' => 'ovh', 'STACK' => 'app')
+ end
+ end
+
+ context 'when a bridge job has invalid downstream project', :sidekiq_inline do
+ let(:config) do
+ <<-EOY
+ test:
+ stage: test
+ script: echo test
+
+ deploy:
+ stage: deploy
+ trigger:
+ project: invalid-project
+ EOY
+ end
+
+ let(:pipeline) do
+ Ci::CreatePipelineService.new(project, user, { ref: 'master' }).execute(:push).payload
+ end
+
+ before do
+ stub_ci_pipeline_yaml_file(config)
+ end
+
+ it 'creates a pipeline, then fails the bridge job' do
+ expect(all_builds_names).to contain_exactly('test', 'deploy')
+ expect(all_builds_statuses).to contain_exactly('pending', 'created')
+
+ succeed_pending
+
+ expect(all_builds_names).to contain_exactly('test', 'deploy')
+ expect(all_builds_statuses).to contain_exactly('success', 'failed')
+ end
+ end
+
+ private
+
+ def all_builds
+ pipeline.processables.order(:stage_idx, :id)
+ end
+
+ def builds
+ all_builds.where.not(status: [:created, :skipped])
+ end
+
+ def stages
+ pipeline.reset.stages.map(&:status)
+ end
+
+ def builds_names
+ builds.pluck(:name)
+ end
+
+ def builds_names_and_statuses
+ builds.each_with_object({}) do |b, h|
+ h[b.name.to_sym] = b.status
+ h
+ end
+ end
+
+ def all_builds_names
+ all_builds.pluck(:name)
+ end
+
+ def builds_statuses
+ builds.pluck(:status)
+ end
+
+ def all_builds_statuses
+ all_builds.pluck(:status)
+ end
+
+ def succeed_pending
+ builds.pending.each do |build|
+ build.reset.success
+ end
+ end
+
+ def succeed_running_or_pending
+ pipeline.builds.running_or_pending.each do |build|
+ build.reset.success
+ end
+ end
+
+ def fail_running_or_pending
+ pipeline.builds.running_or_pending.each do |build|
+ build.reset.drop
+ end
+ end
+
+ def cancel_running_or_pending
+ pipeline.builds.running_or_pending.each do |build|
+ build.reset.cancel
+ end
+ end
+
+ def play_manual_action(name)
+ builds.find_by(name: name).play(user)
+ end
+
+ def enqueue_scheduled(name)
+ builds.scheduled.find_by(name: name).enqueue_scheduled
+ end
+
+ def retry_build(name)
+ Ci::Build.retry(builds.find_by(name: name), user)
+ end
+
+ def manual_actions
+ pipeline.manual_actions.reload
+ end
+
+ def create_build(name, **opts)
+ create(:ci_build, :created, pipeline: pipeline, name: name, **with_stage_opts(opts))
+ end
+
+ def successful_build(name, **opts)
+ create(:ci_build, :success, pipeline: pipeline, name: name, **with_stage_opts(opts))
+ end
+
+ def with_stage_opts(opts)
+ { stage: "stage-#{opts[:stage_idx].to_i}" }.merge(opts)
+ end
+
+ def delayed_options
+ { when: 'delayed', options: { script: %w(echo), start_in: '1 minute' } }
+ end
+
+ def unschedule
+ pipeline.builds.scheduled.map(&:unschedule)
+ end
+ end
private
diff --git a/spec/services/ci/pipeline_processing/shared_processing_service.rb b/spec/services/ci/pipeline_processing/shared_processing_service.rb
deleted file mode 100644
index 8de9b308429..00000000000
--- a/spec/services/ci/pipeline_processing/shared_processing_service.rb
+++ /dev/null
@@ -1,1040 +0,0 @@
-# frozen_string_literal: true
-
-RSpec.shared_examples 'Pipeline Processing Service' do
- let(:project) { create(:project, :repository) }
- let(:user) { project.owner }
-
- let(:pipeline) do
- create(:ci_empty_pipeline, ref: 'master', project: project)
- end
-
- context 'when simple pipeline is defined' do
- before do
- create_build('linux', stage_idx: 0)
- create_build('mac', stage_idx: 0)
- create_build('rspec', stage_idx: 1)
- create_build('rubocop', stage_idx: 1)
- create_build('deploy', stage_idx: 2)
- end
-
- it 'processes a pipeline', :sidekiq_inline do
- expect(process_pipeline).to be_truthy
-
- succeed_pending
-
- expect(builds.success.count).to eq(2)
-
- succeed_pending
-
- expect(builds.success.count).to eq(4)
-
- succeed_pending
-
- expect(builds.success.count).to eq(5)
- end
-
- it 'does not process pipeline if existing stage is running' do
- expect(process_pipeline).to be_truthy
- expect(builds.pending.count).to eq(2)
-
- expect(process_pipeline).to be_falsey
- expect(builds.pending.count).to eq(2)
- end
- end
-
- context 'custom stage with first job allowed to fail' do
- before do
- create_build('clean_job', stage_idx: 0, allow_failure: true)
- create_build('test_job', stage_idx: 1, allow_failure: true)
- end
-
- it 'automatically triggers a next stage when build finishes', :sidekiq_inline do
- expect(process_pipeline).to be_truthy
- expect(builds_statuses).to eq ['pending']
-
- fail_running_or_pending
-
- expect(builds_statuses).to eq %w(failed pending)
-
- fail_running_or_pending
-
- expect(pipeline.reload).to be_success
- end
- end
-
- context 'when optional manual actions are defined', :sidekiq_inline do
- before do
- create_build('build', stage_idx: 0)
- create_build('test', stage_idx: 1)
- create_build('test_failure', stage_idx: 2, when: 'on_failure')
- create_build('deploy', stage_idx: 3)
- create_build('production', stage_idx: 3, when: 'manual', allow_failure: true)
- create_build('cleanup', stage_idx: 4, when: 'always')
- create_build('clear:cache', stage_idx: 4, when: 'manual', allow_failure: true)
- end
-
- context 'when builds are successful' do
- it 'properly processes the pipeline' do
- expect(process_pipeline).to be_truthy
- expect(builds_names).to eq ['build']
- expect(builds_statuses).to eq ['pending']
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w(build test)
- expect(builds_statuses).to eq %w(success pending)
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w(build test deploy production)
- expect(builds_statuses).to eq %w(success success pending manual)
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w(build test deploy production cleanup clear:cache)
- expect(builds_statuses).to eq %w(success success success manual pending manual)
-
- succeed_running_or_pending
-
- expect(builds_statuses).to eq %w(success success success manual success manual)
- expect(pipeline.reload.status).to eq 'success'
- end
- end
-
- context 'when test job fails' do
- it 'properly processes the pipeline' do
- expect(process_pipeline).to be_truthy
- expect(builds_names).to eq ['build']
- expect(builds_statuses).to eq ['pending']
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w(build test)
- expect(builds_statuses).to eq %w(success pending)
-
- fail_running_or_pending
-
- expect(builds_names).to eq %w(build test test_failure)
- expect(builds_statuses).to eq %w(success failed pending)
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w(build test test_failure cleanup)
- expect(builds_statuses).to eq %w(success failed success pending)
-
- succeed_running_or_pending
-
- expect(builds_statuses).to eq %w(success failed success success)
- expect(pipeline.reload.status).to eq 'failed'
- end
- end
-
- context 'when test and test_failure jobs fail' do
- it 'properly processes the pipeline' do
- expect(process_pipeline).to be_truthy
- expect(builds_names).to eq ['build']
- expect(builds_statuses).to eq ['pending']
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w(build test)
- expect(builds_statuses).to eq %w(success pending)
-
- fail_running_or_pending
-
- expect(builds_names).to eq %w(build test test_failure)
- expect(builds_statuses).to eq %w(success failed pending)
-
- fail_running_or_pending
-
- expect(builds_names).to eq %w(build test test_failure cleanup)
- expect(builds_statuses).to eq %w(success failed failed pending)
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w(build test test_failure cleanup)
- expect(builds_statuses).to eq %w(success failed failed success)
- expect(pipeline.reload.status).to eq('failed')
- end
- end
-
- context 'when deploy job fails' do
- it 'properly processes the pipeline' do
- expect(process_pipeline).to be_truthy
- expect(builds_names).to eq ['build']
- expect(builds_statuses).to eq ['pending']
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w(build test)
- expect(builds_statuses).to eq %w(success pending)
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w(build test deploy production)
- expect(builds_statuses).to eq %w(success success pending manual)
-
- fail_running_or_pending
-
- expect(builds_names).to eq %w(build test deploy production cleanup)
- expect(builds_statuses).to eq %w(success success failed manual pending)
-
- succeed_running_or_pending
-
- expect(builds_statuses).to eq %w(success success failed manual success)
- expect(pipeline.reload).to be_failed
- end
- end
-
- context 'when build is canceled in the second stage' do
- it 'does not schedule builds after build has been canceled' do
- expect(process_pipeline).to be_truthy
- expect(builds_names).to eq ['build']
- expect(builds_statuses).to eq ['pending']
-
- succeed_running_or_pending
-
- expect(builds.running_or_pending).not_to be_empty
- expect(builds_names).to eq %w(build test)
- expect(builds_statuses).to eq %w(success pending)
-
- cancel_running_or_pending
-
- expect(builds.running_or_pending).to be_empty
- expect(builds_names).to eq %w[build test]
- expect(builds_statuses).to eq %w[success canceled]
- expect(pipeline.reload).to be_canceled
- end
- end
-
- context 'when listing optional manual actions' do
- it 'returns only for skipped builds' do
- # currently all builds are created
- expect(process_pipeline).to be_truthy
- expect(manual_actions).to be_empty
-
- # succeed stage build
- succeed_running_or_pending
-
- expect(manual_actions).to be_empty
-
- # succeed stage test
- succeed_running_or_pending
-
- expect(manual_actions).to be_one # production
-
- # succeed stage deploy
- succeed_running_or_pending
-
- expect(manual_actions).to be_many # production and clear cache
- end
- end
- end
-
- context 'when delayed jobs are defined', :sidekiq_inline do
- context 'when the scene is timed incremental rollout' do
- before do
- create_build('build', stage_idx: 0)
- create_build('rollout10%', **delayed_options, stage_idx: 1)
- create_build('rollout100%', **delayed_options, stage_idx: 2)
- create_build('cleanup', stage_idx: 3)
-
- allow(Ci::BuildScheduleWorker).to receive(:perform_at)
- end
-
- context 'when builds are successful' do
- it 'properly processes the pipeline' do
- expect(process_pipeline).to be_truthy
- expect(builds_names_and_statuses).to eq({ 'build': 'pending' })
-
- succeed_pending
-
- expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'scheduled' })
-
- travel_to 2.minutes.from_now do
- enqueue_scheduled('rollout10%')
- end
- succeed_pending
-
- expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'success', 'rollout100%': 'scheduled' })
-
- travel_to 2.minutes.from_now do
- enqueue_scheduled('rollout100%')
- end
- succeed_pending
-
- expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'success', 'rollout100%': 'success', 'cleanup': 'pending' })
-
- succeed_pending
-
- expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'success', 'rollout100%': 'success', 'cleanup': 'success' })
- expect(pipeline.reload.status).to eq 'success'
- end
- end
-
- context 'when build job fails' do
- it 'properly processes the pipeline' do
- expect(process_pipeline).to be_truthy
- expect(builds_names_and_statuses).to eq({ 'build': 'pending' })
-
- fail_running_or_pending
-
- expect(builds_names_and_statuses).to eq({ 'build': 'failed' })
- expect(pipeline.reload.status).to eq 'failed'
- end
- end
-
- context 'when rollout 10% is unscheduled' do
- it 'properly processes the pipeline' do
- expect(process_pipeline).to be_truthy
- expect(builds_names_and_statuses).to eq({ 'build': 'pending' })
-
- succeed_pending
-
- expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'scheduled' })
-
- unschedule
-
- expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'manual' })
- expect(pipeline.reload.status).to eq 'manual'
- end
-
- context 'when user plays rollout 10%' do
- it 'schedules rollout100%' do
- process_pipeline
- succeed_pending
- unschedule
- play_manual_action('rollout10%')
- succeed_pending
-
- expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'success', 'rollout100%': 'scheduled' })
- expect(pipeline.reload.status).to eq 'scheduled'
- end
- end
- end
-
- context 'when rollout 10% fails' do
- it 'properly processes the pipeline' do
- expect(process_pipeline).to be_truthy
- expect(builds_names_and_statuses).to eq({ 'build': 'pending' })
-
- succeed_pending
-
- expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'scheduled' })
-
- travel_to 2.minutes.from_now do
- enqueue_scheduled('rollout10%')
- end
- fail_running_or_pending
-
- expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'failed' })
- expect(pipeline.reload.status).to eq 'failed'
- end
-
- context 'when user retries rollout 10%' do
- it 'does not schedule rollout10% again' do
- process_pipeline
- succeed_pending
- enqueue_scheduled('rollout10%')
- fail_running_or_pending
- retry_build('rollout10%')
-
- expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'pending' })
- expect(pipeline.reload.status).to eq 'running'
- end
- end
- end
-
- context 'when rollout 10% is played immidiately' do
- it 'properly processes the pipeline' do
- expect(process_pipeline).to be_truthy
- expect(builds_names_and_statuses).to eq({ 'build': 'pending' })
-
- succeed_pending
-
- expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'scheduled' })
-
- play_manual_action('rollout10%')
-
- expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'pending' })
- expect(pipeline.reload.status).to eq 'running'
- end
- end
- end
-
- context 'when only one scheduled job exists in a pipeline' do
- before do
- create_build('delayed', **delayed_options, stage_idx: 0)
-
- allow(Ci::BuildScheduleWorker).to receive(:perform_at)
- end
-
- it 'properly processes the pipeline' do
- expect(process_pipeline).to be_truthy
- expect(builds_names_and_statuses).to eq({ 'delayed': 'scheduled' })
-
- expect(pipeline.reload.status).to eq 'scheduled'
- end
- end
-
- context 'when there are two delayed jobs in a stage' do
- before do
- create_build('delayed1', **delayed_options, stage_idx: 0)
- create_build('delayed2', **delayed_options, stage_idx: 0)
- create_build('job', stage_idx: 1)
-
- allow(Ci::BuildScheduleWorker).to receive(:perform_at)
- end
-
- it 'blocks the stage until all scheduled jobs finished' do
- expect(process_pipeline).to be_truthy
- expect(builds_names_and_statuses).to eq({ 'delayed1': 'scheduled', 'delayed2': 'scheduled' })
-
- travel_to 2.minutes.from_now do
- enqueue_scheduled('delayed1')
- end
-
- expect(builds_names_and_statuses).to eq({ 'delayed1': 'pending', 'delayed2': 'scheduled' })
- expect(pipeline.reload.status).to eq 'running'
- end
- end
-
- context 'when a delayed job is allowed to fail' do
- before do
- create_build('delayed', **delayed_options, allow_failure: true, stage_idx: 0)
- create_build('job', stage_idx: 1)
-
- allow(Ci::BuildScheduleWorker).to receive(:perform_at)
- end
-
- it 'blocks the stage and continues after it failed' do
- expect(process_pipeline).to be_truthy
- expect(builds_names_and_statuses).to eq({ 'delayed': 'scheduled' })
-
- travel_to 2.minutes.from_now do
- enqueue_scheduled('delayed')
- end
- fail_running_or_pending
-
- expect(builds_names_and_statuses).to eq({ 'delayed': 'failed', 'job': 'pending' })
- expect(pipeline.reload.status).to eq 'pending'
- end
- end
- end
-
- context 'when an exception is raised during a persistent ref creation' do
- before do
- successful_build('test', stage_idx: 0)
-
- allow_next_instance_of(Ci::PersistentRef) do |instance|
- allow(instance).to receive(:delete_refs) { raise ArgumentError }
- end
- end
-
- it 'process the pipeline' do
- expect { process_pipeline }.not_to raise_error
- end
- end
-
- context 'when there are manual action in earlier stages' do
- context 'when first stage has only optional manual actions' do
- before do
- create_build('build', stage_idx: 0, when: 'manual', allow_failure: true)
- create_build('check', stage_idx: 1)
- create_build('test', stage_idx: 2)
-
- process_pipeline
- end
-
- it 'starts from the second stage' do
- expect(all_builds_statuses).to eq %w[manual pending created]
- end
- end
-
- context 'when second stage has only optional manual actions' do
- before do
- create_build('check', stage_idx: 0)
- create_build('build', stage_idx: 1, when: 'manual', allow_failure: true)
- create_build('test', stage_idx: 2)
-
- process_pipeline
- end
-
- it 'skips second stage and continues on third stage', :sidekiq_inline do
- expect(all_builds_statuses).to eq(%w[pending created created])
-
- builds.first.success
-
- expect(all_builds_statuses).to eq(%w[success manual pending])
- end
- end
- end
-
- context 'when there are only manual actions in stages' do
- before do
- create_build('image', stage_idx: 0, when: 'manual', allow_failure: true)
- create_build('build', stage_idx: 1, when: 'manual', allow_failure: true)
- create_build('deploy', stage_idx: 2, when: 'manual')
- create_build('check', stage_idx: 3)
-
- process_pipeline
- end
-
- it 'processes all jobs until blocking actions encountered' do
- expect(all_builds_statuses).to eq(%w[manual manual manual created])
- expect(all_builds_names).to eq(%w[image build deploy check])
-
- expect(pipeline.reload).to be_blocked
- end
- end
-
- context 'when there is only one manual action' do
- before do
- create_build('deploy', stage_idx: 0, when: 'manual', allow_failure: true)
-
- process_pipeline
- end
-
- it 'skips the pipeline' do
- expect(pipeline.reload).to be_skipped
- end
-
- context 'when the action was played' do
- before do
- play_manual_action('deploy')
- end
-
- it 'queues the action and pipeline', :sidekiq_inline do
- expect(all_builds_statuses).to eq(%w[pending])
-
- expect(pipeline.reload).to be_pending
- end
- end
- end
-
- context 'when blocking manual actions are defined', :sidekiq_inline do
- before do
- create_build('code:test', stage_idx: 0)
- create_build('staging:deploy', stage_idx: 1, when: 'manual')
- create_build('staging:test', stage_idx: 2, when: 'on_success')
- create_build('production:deploy', stage_idx: 3, when: 'manual')
- create_build('production:test', stage_idx: 4, when: 'always')
- end
-
- context 'when first stage succeeds' do
- it 'blocks pipeline on stage with first manual action' do
- process_pipeline
-
- expect(builds_names).to eq %w[code:test]
- expect(builds_statuses).to eq %w[pending]
- expect(pipeline.reload.status).to eq 'pending'
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w[code:test staging:deploy]
- expect(builds_statuses).to eq %w[success manual]
- expect(pipeline.reload).to be_manual
- end
- end
-
- context 'when first stage fails' do
- it 'does not take blocking action into account' do
- process_pipeline
-
- expect(builds_names).to eq %w[code:test]
- expect(builds_statuses).to eq %w[pending]
- expect(pipeline.reload.status).to eq 'pending'
-
- fail_running_or_pending
-
- expect(builds_names).to eq %w[code:test production:test]
- expect(builds_statuses).to eq %w[failed pending]
-
- succeed_running_or_pending
-
- expect(builds_statuses).to eq %w[failed success]
- expect(pipeline.reload).to be_failed
- end
- end
-
- context 'when pipeline is promoted sequentially up to the end' do
- before do
- # Users need ability to merge into a branch in order to trigger
- # protected manual actions.
- #
- create(:protected_branch, :developers_can_merge,
- name: 'master', project: project)
- end
-
- it 'properly processes entire pipeline' do
- process_pipeline
-
- expect(builds_names).to eq %w[code:test]
- expect(builds_statuses).to eq %w[pending]
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w[code:test staging:deploy]
- expect(builds_statuses).to eq %w[success manual]
- expect(pipeline.reload).to be_manual
-
- play_manual_action('staging:deploy')
-
- expect(builds_statuses).to eq %w[success pending]
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w[code:test staging:deploy staging:test]
- expect(builds_statuses).to eq %w[success success pending]
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w[code:test staging:deploy staging:test
- production:deploy]
- expect(builds_statuses).to eq %w[success success success manual]
-
- expect(pipeline.reload).to be_manual
- expect(pipeline.reload).to be_blocked
- expect(pipeline.reload).not_to be_active
- expect(pipeline.reload).not_to be_complete
-
- play_manual_action('production:deploy')
-
- expect(builds_statuses).to eq %w[success success success pending]
- expect(pipeline.reload).to be_running
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w[code:test staging:deploy staging:test
- production:deploy production:test]
- expect(builds_statuses).to eq %w[success success success success pending]
- expect(pipeline.reload).to be_running
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w[code:test staging:deploy staging:test
- production:deploy production:test]
- expect(builds_statuses).to eq %w[success success success success success]
- expect(pipeline.reload).to be_success
- end
- end
- end
-
- context 'when second stage has only on_failure jobs', :sidekiq_inline do
- before do
- create_build('check', stage_idx: 0)
- create_build('build', stage_idx: 1, when: 'on_failure')
- create_build('test', stage_idx: 2)
-
- process_pipeline
- end
-
- it 'skips second stage and continues on third stage' do
- expect(all_builds_statuses).to eq(%w[pending created created])
-
- builds.first.success
-
- expect(all_builds_statuses).to eq(%w[success skipped pending])
- end
- end
-
- context 'when failed build in the middle stage is retried', :sidekiq_inline do
- context 'when failed build is the only unsuccessful build in the stage' do
- before do
- create_build('build:1', stage_idx: 0)
- create_build('build:2', stage_idx: 0)
- create_build('test:1', stage_idx: 1)
- create_build('test:2', stage_idx: 1)
- create_build('deploy:1', stage_idx: 2)
- create_build('deploy:2', stage_idx: 2)
- end
-
- it 'does trigger builds in the next stage' do
- expect(process_pipeline).to be_truthy
- expect(builds_names).to eq ['build:1', 'build:2']
-
- succeed_running_or_pending
-
- expect(builds_names).to eq ['build:1', 'build:2', 'test:1', 'test:2']
-
- pipeline.builds.find_by(name: 'test:1').success!
- pipeline.builds.find_by(name: 'test:2').drop!
-
- expect(builds_names).to eq ['build:1', 'build:2', 'test:1', 'test:2']
-
- Ci::Build.retry(pipeline.builds.find_by(name: 'test:2'), user).reset.success!
-
- expect(builds_names).to eq ['build:1', 'build:2', 'test:1', 'test:2',
- 'test:2', 'deploy:1', 'deploy:2']
- end
- end
- end
-
- context 'when builds with auto-retries are configured', :sidekiq_inline do
- before do
- create_build('build:1', stage_idx: 0, user: user, options: { script: 'aa', retry: 2 })
- create_build('test:1', stage_idx: 1, user: user, when: :on_failure)
- create_build('test:2', stage_idx: 1, user: user, options: { script: 'aa', retry: 1 })
- end
-
- it 'automatically retries builds in a valid order' do
- expect(process_pipeline).to be_truthy
-
- fail_running_or_pending
-
- expect(builds_names).to eq %w[build:1 build:1]
- expect(builds_statuses).to eq %w[failed pending]
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w[build:1 build:1 test:2]
- expect(builds_statuses).to eq %w[failed success pending]
-
- succeed_running_or_pending
-
- expect(builds_names).to eq %w[build:1 build:1 test:2]
- expect(builds_statuses).to eq %w[failed success success]
-
- expect(pipeline.reload).to be_success
- end
- end
-
- context 'when pipeline with needs is created', :sidekiq_inline do
- let!(:linux_build) { create_build('linux:build', stage: 'build', stage_idx: 0) }
- let!(:mac_build) { create_build('mac:build', stage: 'build', stage_idx: 0) }
- let!(:linux_rspec) { create_build('linux:rspec', stage: 'test', stage_idx: 1, scheduling_type: :dag) }
- let!(:linux_rubocop) { create_build('linux:rubocop', stage: 'test', stage_idx: 1, scheduling_type: :dag) }
- let!(:mac_rspec) { create_build('mac:rspec', stage: 'test', stage_idx: 1, scheduling_type: :dag) }
- let!(:mac_rubocop) { create_build('mac:rubocop', stage: 'test', stage_idx: 1, scheduling_type: :dag) }
- let!(:deploy) { create_build('deploy', stage: 'deploy', stage_idx: 2) }
-
- let!(:linux_rspec_on_build) { create(:ci_build_need, build: linux_rspec, name: 'linux:build') }
- let!(:linux_rubocop_on_build) { create(:ci_build_need, build: linux_rubocop, name: 'linux:build') }
-
- let!(:mac_rspec_on_build) { create(:ci_build_need, build: mac_rspec, name: 'mac:build') }
- let!(:mac_rubocop_on_build) { create(:ci_build_need, build: mac_rubocop, name: 'mac:build') }
-
- it 'when linux:* finishes first it runs it out of order' do
- expect(process_pipeline).to be_truthy
-
- expect(stages).to eq(%w(pending created created))
- expect(builds.pending).to contain_exactly(linux_build, mac_build)
-
- # we follow the single path of linux
- linux_build.reset.success!
-
- expect(stages).to eq(%w(running pending created))
- expect(builds.success).to contain_exactly(linux_build)
- expect(builds.pending).to contain_exactly(mac_build, linux_rspec, linux_rubocop)
-
- linux_rspec.reset.success!
-
- expect(stages).to eq(%w(running running created))
- expect(builds.success).to contain_exactly(linux_build, linux_rspec)
- expect(builds.pending).to contain_exactly(mac_build, linux_rubocop)
-
- linux_rubocop.reset.success!
-
- expect(stages).to eq(%w(running running created))
- expect(builds.success).to contain_exactly(linux_build, linux_rspec, linux_rubocop)
- expect(builds.pending).to contain_exactly(mac_build)
-
- mac_build.reset.success!
- mac_rspec.reset.success!
- mac_rubocop.reset.success!
-
- expect(stages).to eq(%w(success success pending))
- expect(builds.success).to contain_exactly(
- linux_build, linux_rspec, linux_rubocop, mac_build, mac_rspec, mac_rubocop)
- expect(builds.pending).to contain_exactly(deploy)
- end
-
- context 'when one of the jobs is run on a failure' do
- let!(:linux_notify) { create_build('linux:notify', stage: 'deploy', stage_idx: 2, when: 'on_failure', scheduling_type: :dag) }
-
- let!(:linux_notify_on_build) { create(:ci_build_need, build: linux_notify, name: 'linux:build') }
-
- context 'when another job in build phase fails first' do
- it 'does skip linux:notify' do
- expect(process_pipeline).to be_truthy
-
- mac_build.reset.drop!
- linux_build.reset.success!
-
- expect(linux_notify.reset).to be_skipped
- end
- end
-
- context 'when linux:build job fails first' do
- it 'does run linux:notify' do
- expect(process_pipeline).to be_truthy
-
- linux_build.reset.drop!
-
- expect(linux_notify.reset).to be_pending
- end
- end
- end
-
- context 'when there is a job scheduled with dag but no need (needs: [])' do
- let!(:deploy_pages) { create_build('deploy_pages', stage: 'deploy', stage_idx: 2, scheduling_type: :dag) }
-
- it 'runs deploy_pages without waiting prior stages' do
- expect(process_pipeline).to be_truthy
-
- expect(stages).to eq(%w(pending created pending))
- expect(builds.pending).to contain_exactly(linux_build, mac_build, deploy_pages)
-
- linux_build.reset.success!
- deploy_pages.reset.success!
-
- expect(stages).to eq(%w(running pending running))
- expect(builds.success).to contain_exactly(linux_build, deploy_pages)
- expect(builds.pending).to contain_exactly(mac_build, linux_rspec, linux_rubocop)
-
- linux_rspec.reset.success!
- linux_rubocop.reset.success!
- mac_build.reset.success!
- mac_rspec.reset.success!
- mac_rubocop.reset.success!
-
- expect(stages).to eq(%w(success success running))
- expect(builds.pending).to contain_exactly(deploy)
- end
- end
- end
-
- context 'when a needed job is skipped', :sidekiq_inline do
- let!(:linux_build) { create_build('linux:build', stage: 'build', stage_idx: 0) }
- let!(:linux_rspec) { create_build('linux:rspec', stage: 'test', stage_idx: 1) }
- let!(:deploy) { create_build('deploy', stage: 'deploy', stage_idx: 2, scheduling_type: :dag) }
-
- before do
- create(:ci_build_need, build: deploy, name: 'linux:build')
- end
-
- it 'skips the jobs depending on it' do
- expect(process_pipeline).to be_truthy
-
- expect(stages).to eq(%w(pending created created))
- expect(all_builds.pending).to contain_exactly(linux_build)
-
- linux_build.reset.drop!
-
- expect(stages).to eq(%w(failed skipped skipped))
- expect(all_builds.failed).to contain_exactly(linux_build)
- expect(all_builds.skipped).to contain_exactly(linux_rspec, deploy)
- end
- end
-
- context 'when a needed job is manual', :sidekiq_inline do
- let!(:linux_build) { create_build('linux:build', stage: 'build', stage_idx: 0, when: 'manual', allow_failure: true) }
- let!(:deploy) { create_build('deploy', stage: 'deploy', stage_idx: 1, scheduling_type: :dag) }
-
- before do
- create(:ci_build_need, build: deploy, name: 'linux:build')
- end
-
- it 'makes deploy DAG to be skipped' do
- expect(process_pipeline).to be_truthy
-
- expect(stages).to eq(%w(skipped skipped))
- expect(all_builds.manual).to contain_exactly(linux_build)
- expect(all_builds.skipped).to contain_exactly(deploy)
- end
- end
-
- context 'when a bridge job has parallel:matrix config', :sidekiq_inline do
- let(:parent_config) do
- <<-EOY
- test:
- stage: test
- script: echo test
-
- deploy:
- stage: deploy
- trigger:
- include: .child.yml
- parallel:
- matrix:
- - PROVIDER: ovh
- STACK: [monitoring, app]
- EOY
- end
-
- let(:child_config) do
- <<-EOY
- test:
- stage: test
- script: echo test
- EOY
- end
-
- let(:pipeline) do
- Ci::CreatePipelineService.new(project, user, { ref: 'master' }).execute(:push).payload
- end
-
- before do
- allow_next_instance_of(Repository) do |repository|
- allow(repository)
- .to receive(:blob_data_at)
- .with(an_instance_of(String), '.gitlab-ci.yml')
- .and_return(parent_config)
-
- allow(repository)
- .to receive(:blob_data_at)
- .with(an_instance_of(String), '.child.yml')
- .and_return(child_config)
- end
- end
-
- it 'creates pipeline with bridges, then passes the matrix variables to downstream jobs' do
- expect(all_builds_names).to contain_exactly('test', 'deploy: [ovh, monitoring]', 'deploy: [ovh, app]')
- expect(all_builds_statuses).to contain_exactly('pending', 'created', 'created')
-
- succeed_pending
-
- # bridge jobs directly transition to success
- expect(all_builds_statuses).to contain_exactly('success', 'success', 'success')
-
- bridge1 = all_builds.find_by(name: 'deploy: [ovh, monitoring]')
- bridge2 = all_builds.find_by(name: 'deploy: [ovh, app]')
-
- downstream_job1 = bridge1.downstream_pipeline.processables.first
- downstream_job2 = bridge2.downstream_pipeline.processables.first
-
- expect(downstream_job1.scoped_variables.to_hash).to include('PROVIDER' => 'ovh', 'STACK' => 'monitoring')
- expect(downstream_job2.scoped_variables.to_hash).to include('PROVIDER' => 'ovh', 'STACK' => 'app')
- end
- end
-
- context 'when a bridge job has invalid downstream project', :sidekiq_inline do
- let(:config) do
- <<-EOY
- test:
- stage: test
- script: echo test
-
- deploy:
- stage: deploy
- trigger:
- project: invalid-project
- EOY
- end
-
- let(:pipeline) do
- Ci::CreatePipelineService.new(project, user, { ref: 'master' }).execute(:push).payload
- end
-
- before do
- stub_ci_pipeline_yaml_file(config)
- end
-
- it 'creates a pipeline, then fails the bridge job' do
- expect(all_builds_names).to contain_exactly('test', 'deploy')
- expect(all_builds_statuses).to contain_exactly('pending', 'created')
-
- succeed_pending
-
- expect(all_builds_names).to contain_exactly('test', 'deploy')
- expect(all_builds_statuses).to contain_exactly('success', 'failed')
- end
- end
-
- private
-
- def all_builds
- pipeline.processables.order(:stage_idx, :id)
- end
-
- def builds
- all_builds.where.not(status: [:created, :skipped])
- end
-
- def stages
- pipeline.reset.stages.map(&:status)
- end
-
- def builds_names
- builds.pluck(:name)
- end
-
- def builds_names_and_statuses
- builds.each_with_object({}) do |b, h|
- h[b.name.to_sym] = b.status
- h
- end
- end
-
- def all_builds_names
- all_builds.pluck(:name)
- end
-
- def builds_statuses
- builds.pluck(:status)
- end
-
- def all_builds_statuses
- all_builds.pluck(:status)
- end
-
- def succeed_pending
- builds.pending.each do |build|
- build.reset.success
- end
- end
-
- def succeed_running_or_pending
- pipeline.builds.running_or_pending.each do |build|
- build.reset.success
- end
- end
-
- def fail_running_or_pending
- pipeline.builds.running_or_pending.each do |build|
- build.reset.drop
- end
- end
-
- def cancel_running_or_pending
- pipeline.builds.running_or_pending.each do |build|
- build.reset.cancel
- end
- end
-
- def play_manual_action(name)
- builds.find_by(name: name).play(user)
- end
-
- def enqueue_scheduled(name)
- builds.scheduled.find_by(name: name).enqueue_scheduled
- end
-
- def retry_build(name)
- Ci::Build.retry(builds.find_by(name: name), user)
- end
-
- def manual_actions
- pipeline.manual_actions.reload
- end
-
- def create_build(name, **opts)
- create(:ci_build, :created, pipeline: pipeline, name: name, **with_stage_opts(opts))
- end
-
- def successful_build(name, **opts)
- create(:ci_build, :success, pipeline: pipeline, name: name, **with_stage_opts(opts))
- end
-
- def with_stage_opts(opts)
- { stage: "stage-#{opts[:stage_idx].to_i}" }.merge(opts)
- end
-
- def delayed_options
- { when: 'delayed', options: { script: %w(echo), start_in: '1 minute' } }
- end
-
- def unschedule
- pipeline.builds.scheduled.map(&:unschedule)
- end
-end
diff --git a/spec/services/ci/pipeline_processing/shared_processing_service_tests_with_yaml.rb b/spec/services/ci/pipeline_processing/shared_processing_service_tests_with_yaml.rb
deleted file mode 100644
index b4ad2512593..00000000000
--- a/spec/services/ci/pipeline_processing/shared_processing_service_tests_with_yaml.rb
+++ /dev/null
@@ -1,61 +0,0 @@
-# frozen_string_literal: true
-
-RSpec.shared_context 'Pipeline Processing Service Tests With Yaml' do
- let_it_be(:project) { create(:project, :repository) }
- let_it_be(:user) { project.owner }
-
- where(:test_file_path) do
- Dir.glob(Rails.root.join('spec/services/ci/pipeline_processing/test_cases/*.yml'))
- end
-
- with_them do
- let(:test_file) { YAML.load_file(test_file_path) }
- let(:pipeline) { Ci::CreatePipelineService.new(project, user, ref: 'master').execute(:pipeline).payload }
-
- before do
- stub_ci_pipeline_yaml_file(YAML.dump(test_file['config']))
- end
-
- it 'follows transitions' do
- expect(pipeline).to be_persisted
- Sidekiq::Worker.drain_all # ensure that all async jobs are executed
- check_expectation(test_file.dig('init', 'expect'), "init")
-
- test_file['transitions'].each_with_index do |transition, idx|
- event_on_jobs(transition['event'], transition['jobs'])
- Sidekiq::Worker.drain_all # ensure that all async jobs are executed
- check_expectation(transition['expect'], "transition:#{idx}")
- end
- end
-
- private
-
- def check_expectation(expectation, message)
- expect(current_state.deep_stringify_keys).to eq(expectation), message
- end
-
- def current_state
- # reload pipeline and all relations
- pipeline.reload
-
- {
- pipeline: pipeline.status,
- stages: pipeline.stages.pluck(:name, :status).to_h,
- jobs: pipeline.latest_statuses.pluck(:name, :status).to_h
- }
- end
-
- def event_on_jobs(event, job_names)
- statuses = pipeline.latest_statuses.by_name(job_names).to_a
- expect(statuses.count).to eq(job_names.count) # ensure that we have the same counts
-
- statuses.each do |status|
- if event == 'play'
- status.play(user)
- else
- status.public_send("#{event}!")
- end
- end
- end
- end
-end
diff --git a/spec/services/issues/set_crm_contacts_service_spec.rb b/spec/services/issues/set_crm_contacts_service_spec.rb
index 65b22fe3b35..628f70efad6 100644
--- a/spec/services/issues/set_crm_contacts_service_spec.rb
+++ b/spec/services/issues/set_crm_contacts_service_spec.rb
@@ -22,13 +22,13 @@ RSpec.describe Issues::SetCrmContactsService do
describe '#execute' do
context 'when the user has no permission' do
- let(:params) { { crm_contact_ids: [contacts[1].id, contacts[2].id] } }
+ let(:params) { { replace_ids: [contacts[1].id, contacts[2].id] } }
it 'returns expected error response' do
response = set_crm_contacts
expect(response).to be_error
- expect(response.message).to match_array(['You have insufficient permissions to set customer relations contacts for this issue'])
+ expect(response.message).to eq('You have insufficient permissions to set customer relations contacts for this issue')
end
end
@@ -38,20 +38,20 @@ RSpec.describe Issues::SetCrmContactsService do
end
context 'when the contact does not exist' do
- let(:params) { { crm_contact_ids: [non_existing_record_id] } }
+ let(:params) { { replace_ids: [non_existing_record_id] } }
it 'returns expected error response' do
response = set_crm_contacts
expect(response).to be_error
- expect(response.message).to match_array(["Issue customer relations contacts #{non_existing_record_id}: #{does_not_exist_or_no_permission}"])
+ expect(response.message).to eq("Issue customer relations contacts #{non_existing_record_id}: #{does_not_exist_or_no_permission}")
end
end
context 'when the contact belongs to a different group' do
let(:group2) { create(:group) }
let(:contact) { create(:contact, group: group2) }
- let(:params) { { crm_contact_ids: [contact.id] } }
+ let(:params) { { replace_ids: [contact.id] } }
before do
group2.add_reporter(user)
@@ -61,12 +61,12 @@ RSpec.describe Issues::SetCrmContactsService do
response = set_crm_contacts
expect(response).to be_error
- expect(response.message).to match_array(["Issue customer relations contacts #{contact.id}: #{does_not_exist_or_no_permission}"])
+ expect(response.message).to eq("Issue customer relations contacts #{contact.id}: #{does_not_exist_or_no_permission}")
end
end
context 'replace' do
- let(:params) { { crm_contact_ids: [contacts[1].id, contacts[2].id] } }
+ let(:params) { { replace_ids: [contacts[1].id, contacts[2].id] } }
it 'updates the issue with correct contacts' do
response = set_crm_contacts
@@ -77,7 +77,18 @@ RSpec.describe Issues::SetCrmContactsService do
end
context 'add' do
- let(:params) { { add_crm_contact_ids: [contacts[3].id] } }
+ let(:params) { { add_ids: [contacts[3].id] } }
+
+ it 'updates the issue with correct contacts' do
+ response = set_crm_contacts
+
+ expect(response).to be_success
+ expect(issue.customer_relations_contacts).to match_array([contacts[0], contacts[1], contacts[3]])
+ end
+ end
+
+ context 'add by email' do
+ let(:params) { { add_emails: [contacts[3].email] } }
it 'updates the issue with correct contacts' do
response = set_crm_contacts
@@ -88,7 +99,18 @@ RSpec.describe Issues::SetCrmContactsService do
end
context 'remove' do
- let(:params) { { remove_crm_contact_ids: [contacts[0].id] } }
+ let(:params) { { remove_ids: [contacts[0].id] } }
+
+ it 'updates the issue with correct contacts' do
+ response = set_crm_contacts
+
+ expect(response).to be_success
+ expect(issue.customer_relations_contacts).to match_array([contacts[1]])
+ end
+ end
+
+ context 'remove by email' do
+ let(:params) { { remove_emails: [contacts[0].email] } }
it 'updates the issue with correct contacts' do
response = set_crm_contacts
@@ -100,18 +122,18 @@ RSpec.describe Issues::SetCrmContactsService do
context 'when attempting to add more than 6' do
let(:id) { contacts[0].id }
- let(:params) { { add_crm_contact_ids: [id, id, id, id, id, id, id] } }
+ let(:params) { { add_ids: [id, id, id, id, id, id, id] } }
it 'returns expected error message' do
response = set_crm_contacts
expect(response).to be_error
- expect(response.message).to match_array(['You can only add up to 6 contacts at one time'])
+ expect(response.message).to eq('You can only add up to 6 contacts at one time')
end
end
context 'when trying to remove non-existent contact' do
- let(:params) { { remove_crm_contact_ids: [non_existing_record_id] } }
+ let(:params) { { remove_ids: [non_existing_record_id] } }
it 'returns expected error message' do
response = set_crm_contacts
@@ -122,10 +144,10 @@ RSpec.describe Issues::SetCrmContactsService do
end
context 'when combining params' do
- let(:error_invalid_params) { 'You cannot combine crm_contact_ids with add_crm_contact_ids or remove_crm_contact_ids' }
+ let(:error_invalid_params) { 'You cannot combine replace_ids with add_ids or remove_ids' }
context 'add and remove' do
- let(:params) { { remove_crm_contact_ids: [contacts[1].id], add_crm_contact_ids: [contacts[3].id] } }
+ let(:params) { { remove_ids: [contacts[1].id], add_ids: [contacts[3].id] } }
it 'updates the issue with correct contacts' do
response = set_crm_contacts
@@ -136,27 +158,57 @@ RSpec.describe Issues::SetCrmContactsService do
end
context 'replace and remove' do
- let(:params) { { crm_contact_ids: [contacts[3].id], remove_crm_contact_ids: [contacts[0].id] } }
+ let(:params) { { replace_ids: [contacts[3].id], remove_ids: [contacts[0].id] } }
it 'returns expected error response' do
response = set_crm_contacts
expect(response).to be_error
- expect(response.message).to match_array([error_invalid_params])
+ expect(response.message).to eq(error_invalid_params)
end
end
context 'replace and add' do
- let(:params) { { crm_contact_ids: [contacts[3].id], add_crm_contact_ids: [contacts[1].id] } }
+ let(:params) { { replace_ids: [contacts[3].id], add_ids: [contacts[1].id] } }
it 'returns expected error response' do
response = set_crm_contacts
expect(response).to be_error
- expect(response.message).to match_array([error_invalid_params])
+ expect(response.message).to eq(error_invalid_params)
end
end
end
+
+ context 'when trying to add an existing issue contact' do
+ let(:params) { { add_ids: [contacts[0].id] } }
+
+ it 'does not return an error' do
+ response = set_crm_contacts
+
+ expect(response).to be_success
+ end
+ end
+
+ context 'when trying to add the same contact twice' do
+ let(:params) { { add_ids: [contacts[3].id, contacts[3].id] } }
+
+ it 'does not return an error' do
+ response = set_crm_contacts
+
+ expect(response).to be_success
+ end
+ end
+
+ context 'when trying to remove a contact not attached to the issue' do
+ let(:params) { { remove_ids: [contacts[3].id] } }
+
+ it 'does not return an error' do
+ response = set_crm_contacts
+
+ expect(response).to be_success
+ end
+ end
end
end
end
diff --git a/spec/services/merge_requests/merge_to_ref_service_spec.rb b/spec/services/merge_requests/merge_to_ref_service_spec.rb
index 0a781aee704..19fac3b5095 100644
--- a/spec/services/merge_requests/merge_to_ref_service_spec.rb
+++ b/spec/services/merge_requests/merge_to_ref_service_spec.rb
@@ -150,7 +150,10 @@ RSpec.describe MergeRequests::MergeToRefService do
merge_request.update!(squash: true)
end
- it_behaves_like 'MergeService for target ref'
+ it_behaves_like 'successfully merges to ref with merge method' do
+ let(:first_parent_ref) { 'refs/heads/master' }
+ let(:target_ref) { merge_request.merge_ref_path }
+ end
it 'does not squash before merging' do
expect(MergeRequests::SquashService).not_to receive(:new)
diff --git a/spec/services/merge_requests/squash_service_spec.rb b/spec/services/merge_requests/squash_service_spec.rb
index 09f83624e05..af48e8f5dae 100644
--- a/spec/services/merge_requests/squash_service_spec.rb
+++ b/spec/services/merge_requests/squash_service_spec.rb
@@ -55,18 +55,26 @@ RSpec.describe MergeRequests::SquashService do
expect(merge_request).to receive(:commits_count).at_least(:once).and_return(1)
end
- it 'will skip performing the squash, as the outcome would be the same' do
- expect(merge_request.target_project.repository).not_to receive(:squash)
+ it 'will still perform the squash' do
+ expect(merge_request.target_project.repository).to receive(:squash).and_return('sha')
service.execute
end
- it 'will still perform the squash when a custom squash commit message has been provided' do
- service = described_class.new(project: project, current_user: user, params: { merge_request: merge_request, squash_commit_message: 'A custom commit message' })
+ context 'when squash message matches commit message' do
+ let(:service) { described_class.new(project: project, current_user: user, params: { merge_request: merge_request, squash_commit_message: merge_request.first_commit.safe_message }) }
- expect(merge_request.target_project.repository).to receive(:squash).and_return('sha')
+ it 'returns that commit SHA' do
+ result = service.execute
- service.execute
+ expect(result).to match(status: :success, squash_sha: merge_request.diff_head_sha)
+ end
+
+ it 'does not perform any git actions' do
+ expect(repository).not_to receive(:squash)
+
+ service.execute
+ end
end
end
@@ -113,17 +121,7 @@ RSpec.describe MergeRequests::SquashService do
context 'when there is only one commit in the merge request' do
let(:merge_request) { merge_request_with_one_commit }
- it 'returns that commit SHA' do
- result = service.execute
-
- expect(result).to match(status: :success, squash_sha: merge_request.diff_head_sha)
- end
-
- it 'does not perform any git actions' do
- expect(repository).not_to receive(:popen)
-
- service.execute
- end
+ include_examples 'the squash succeeds'
end
context 'when squashing only new files' do
diff --git a/spec/services/quick_actions/interpret_service_spec.rb b/spec/services/quick_actions/interpret_service_spec.rb
index 611261cd92c..2bdc29a09a0 100644
--- a/spec/services/quick_actions/interpret_service_spec.rb
+++ b/spec/services/quick_actions/interpret_service_spec.rb
@@ -3,7 +3,8 @@
require 'spec_helper'
RSpec.describe QuickActions::InterpretService do
- let_it_be(:public_project) { create(:project, :public) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:public_project) { create(:project, :public, group: group) }
let_it_be(:repository_project) { create(:project, :repository) }
let_it_be(:project) { public_project }
let_it_be(:developer) { create(:user) }
@@ -2233,6 +2234,57 @@ RSpec.describe QuickActions::InterpretService do
end
end
end
+
+ context 'crm_contact commands' do
+ let_it_be(:new_contact) { create(:contact, group: group) }
+ let_it_be(:existing_contact) { create(:contact, group: group) }
+
+ let(:add_command) { service.execute("/add_contacts #{new_contact.email}", issue) }
+ let(:remove_command) { service.execute("/remove_contacts #{existing_contact.email}", issue) }
+
+ before do
+ issue.project.group.add_developer(developer)
+ create(:issue_customer_relations_contact, issue: issue, contact: existing_contact)
+ end
+
+ context 'with feature flag disabled' do
+ before do
+ stub_feature_flags(customer_relations: false)
+ end
+
+ it 'add_contacts command does not add the contact' do
+ add_command
+
+ expect(issue.reload.customer_relations_contacts).to match_array([existing_contact])
+ end
+
+ it 'remove_contacts command does not remove the contact' do
+ remove_command
+
+ expect(issue.reload.customer_relations_contacts).to match_array([existing_contact])
+ end
+ end
+
+ it 'add_contacts command adds the contact' do
+ _, _, message = add_command
+
+ expect(issue.reload.customer_relations_contacts).to match_array([existing_contact, new_contact])
+ expect(message).to eq('One or more contacts were successfully added.')
+ end
+
+ it 'add_contacts command returns the correct error when something goes wrong' do
+ _, _, message = service.execute("/add_contacts #{new_contact.email} #{new_contact.email} #{new_contact.email} #{new_contact.email} #{new_contact.email} #{new_contact.email} #{new_contact.email}", issue)
+
+ expect(message).to eq('You can only add up to 6 contacts at one time')
+ end
+
+ it 'remove_contacts command removes the contact' do
+ _, _, message = remove_command
+
+ expect(issue.reload.customer_relations_contacts).to be_empty
+ expect(message).to eq('One or more contacts were successfully removed.')
+ end
+ end
end
describe '#explain' do
diff --git a/spec/support/helpers/modal_helpers.rb b/spec/support/helpers/modal_helpers.rb
new file mode 100644
index 00000000000..a1f03cc0da5
--- /dev/null
+++ b/spec/support/helpers/modal_helpers.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+module Spec
+ module Support
+ module Helpers
+ module ModalHelpers
+ def within_modal
+ page.within('[role="dialog"]') do
+ yield
+ end
+ end
+
+ def accept_gl_confirm(text = nil, button_text: 'OK')
+ yield if block_given?
+
+ within_modal do
+ unless text.nil?
+ expect(page).to have_content(text)
+ end
+
+ click_button button_text
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb
index 878cbc10a24..6568d51b90e 100644
--- a/spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb
@@ -391,7 +391,7 @@ RSpec.shared_examples 'rejects nuget access with invalid target id' do
context 'with a target id with invalid integers' do
using RSpec::Parameterized::TableSyntax
- let(:target) { OpenStruct.new(id: id) }
+ let(:target) { double(id: id) }
where(:id, :status) do
'/../' | :bad_request
@@ -411,7 +411,7 @@ end
RSpec.shared_examples 'rejects nuget access with unknown target id' do
context 'with an unknown target' do
- let(:target) { OpenStruct.new(id: 1234567890) }
+ let(:target) { double(id: 1234567890) }
context 'as anonymous' do
it_behaves_like 'rejects nuget packages access', :anonymous, :unauthorized
diff --git a/yarn.lock b/yarn.lock
index a4ce69ffa7c..639ad1d9be6 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -1491,10 +1491,10 @@
dom-accessibility-api "^0.5.1"
pretty-format "^26.4.2"
-"@tiptap/core@^2.0.0-beta.138":
- version "2.0.0-beta.138"
- resolved "https://registry.yarnpkg.com/@tiptap/core/-/core-2.0.0-beta.138.tgz#3a73b32b10f07ba2842142552457b52abf8cfc41"
- integrity sha512-Cg3ig6c+NCBILYaVNf5h8vJdsRynhKzy+zQzH/91kLoWzpNV5J6R2sW32Oufuwvr0Kra1+kKKh/WIGpB3Ia4RA==
+"@tiptap/core@^2.0.0-beta.140":
+ version "2.0.0-beta.140"
+ resolved "https://registry.yarnpkg.com/@tiptap/core/-/core-2.0.0-beta.140.tgz#9d6771edbc0591b77a46182e863076496a164dda"
+ integrity sha512-PUgIaYVcTSIeK1dvaGJ91Q/a6EM+PucUMyOCvO0VhU5nFqCkxiqAsBVUqdAomIXthLZya1WgfaKIEbwNEBexJA==
dependencies:
"@types/prosemirror-commands" "^1.0.4"
"@types/prosemirror-keymap" "^1.0.4"
@@ -1509,7 +1509,7 @@
prosemirror-schema-list "^1.1.6"
prosemirror-state "^1.3.4"
prosemirror-transform "^1.3.3"
- prosemirror-view "^1.22.0"
+ prosemirror-view "^1.23.1"
"@tiptap/extension-blockquote@^2.0.0-beta.24":
version "2.0.0-beta.24"
@@ -1521,31 +1521,31 @@
resolved "https://registry.yarnpkg.com/@tiptap/extension-bold/-/extension-bold-2.0.0-beta.24.tgz#a8d1076922580db528cc6988fde08f731dcfe733"
integrity sha512-2VTCtY2JI0wpDwWT0a2fMFkjbgxDpwD3wvtY3/ndh5pyNX0JQCXtJarFzfZZurWvLNQ8QPRRel73182RBYUOHQ==
-"@tiptap/extension-bubble-menu@^2.0.0-beta.49":
- version "2.0.0-beta.49"
- resolved "https://registry.yarnpkg.com/@tiptap/extension-bubble-menu/-/extension-bubble-menu-2.0.0-beta.49.tgz#f9863b1abad5f87d298d4e6527005484137a6166"
- integrity sha512-JbaSG3otBuMKRyTn0OqVscZnwqJ7c+qyKAnoZJit5EK1RS72cTfGWZvvAxaslAM4DeE9avJeudUi/tN5Iafv4A==
+"@tiptap/extension-bubble-menu@^2.0.0-beta.50":
+ version "2.0.0-beta.50"
+ resolved "https://registry.yarnpkg.com/@tiptap/extension-bubble-menu/-/extension-bubble-menu-2.0.0-beta.50.tgz#96c09e71d84473c018fa63dc072fa3732c2b3e88"
+ integrity sha512-UYnIaUTbI1K759z3A44dahp/NyHKjSaRvIptMnAv7q312l5n+czGwBcjZo41YUxjLPhnwGk1Siny1V+b5+4yBA==
dependencies:
prosemirror-state "^1.3.4"
- prosemirror-view "^1.22.0"
- tippy.js "^6.3.6"
+ prosemirror-view "^1.23.1"
+ tippy.js "^6.3.7"
"@tiptap/extension-bullet-list@^2.0.0-beta.23":
version "2.0.0-beta.23"
resolved "https://registry.yarnpkg.com/@tiptap/extension-bullet-list/-/extension-bullet-list-2.0.0-beta.23.tgz#64698c98039ad301c94a9041bbd117e82957be21"
integrity sha512-ReoUiz9f1IX87RX+GRE+fCaLEzNNwmiP4kli3QH8/qrLK3qxvZYr9N31fUeOHecCctUofPSbQB79B39zSo9Ouw==
-"@tiptap/extension-code-block-lowlight@2.0.0-beta.55":
- version "2.0.0-beta.55"
- resolved "https://registry.yarnpkg.com/@tiptap/extension-code-block-lowlight/-/extension-code-block-lowlight-2.0.0-beta.55.tgz#7ea0a9a64c1cf69514b359dcb0dbeb130afa2976"
- integrity sha512-1Ckq4d3Q0EeEXlIX6QcHcHylvCEg2uj/BZ4jvclHc5rLYC1NndiwRoM5wkjj9xZ3WQHDHN405ocgFk+yUvID6g==
+"@tiptap/extension-code-block-lowlight@2.0.0-beta.57":
+ version "2.0.0-beta.57"
+ resolved "https://registry.yarnpkg.com/@tiptap/extension-code-block-lowlight/-/extension-code-block-lowlight-2.0.0-beta.57.tgz#afaa3a59da8d688f4f402d31ad6e5bea6ff87390"
+ integrity sha512-HbUkhJkTiDusLX+qqaJsqCiTKpWR6LPWScKP/Sk7744EnjMmyiqbmNwyqSx538Z8GIS89TKj1vERLUAAAOGCIQ==
dependencies:
"@tiptap/extension-code-block" "^2.0.0-beta.29"
"@types/lowlight" "^0.0.3"
lowlight "^1.20.0"
prosemirror-model "^1.15.0"
prosemirror-state "^1.3.4"
- prosemirror-view "^1.22.0"
+ prosemirror-view "^1.23.1"
"@tiptap/extension-code-block@^2.0.0-beta.29":
version "2.0.0-beta.29"
@@ -1564,22 +1564,22 @@
resolved "https://registry.yarnpkg.com/@tiptap/extension-document/-/extension-document-2.0.0-beta.15.tgz#5d17a0289244a913ab2ef08e8495a1e46950711e"
integrity sha512-ypENC+xUYD5m2t+KOKNYqyXnanXd5fxyIyhR1qeEEwwQwMXGNrO3kCH6O4mIDCpy+/WqHvVay2tV5dVsXnvY8w==
-"@tiptap/extension-dropcursor@^2.0.0-beta.24":
- version "2.0.0-beta.24"
- resolved "https://registry.yarnpkg.com/@tiptap/extension-dropcursor/-/extension-dropcursor-2.0.0-beta.24.tgz#e0263c8d784304cb885aea299bfd5255d3435765"
- integrity sha512-B4bzY84g82VY78kv6BFNSCgO9Sc3dtgkvzFDJ57X2QweYyLkXbYeZxI8SqO7Nva1QRZadBlFyRPm+aP1rLZsew==
+"@tiptap/extension-dropcursor@^2.0.0-beta.25":
+ version "2.0.0-beta.25"
+ resolved "https://registry.yarnpkg.com/@tiptap/extension-dropcursor/-/extension-dropcursor-2.0.0-beta.25.tgz#962f290a200259533a26194daca5a4b4a53e72d3"
+ integrity sha512-GYf5s6dkZtsDy+TEkrQK6kLbfbitG4qnk02D+FlhlJMI/Nnx8rYCRJbwEHDdqrfX7XwZzULMqqqHvzxZYrEeNg==
dependencies:
"@types/prosemirror-dropcursor" "^1.0.3"
- prosemirror-dropcursor "^1.3.5"
+ prosemirror-dropcursor "^1.4.0"
-"@tiptap/extension-floating-menu@^2.0.0-beta.44":
- version "2.0.0-beta.44"
- resolved "https://registry.yarnpkg.com/@tiptap/extension-floating-menu/-/extension-floating-menu-2.0.0-beta.44.tgz#fca9eefd9bcc74fdf035b012b9eb9fbfda331cf1"
- integrity sha512-R8EF6XXlwoiHvCuXV3qGsEKae5u0OYrKHJNeOgau5SANg/ab+pjFzr3Lrt43NFpOKf5rzD3p2OJxnikudceLSg==
+"@tiptap/extension-floating-menu@^2.0.0-beta.45":
+ version "2.0.0-beta.45"
+ resolved "https://registry.yarnpkg.com/@tiptap/extension-floating-menu/-/extension-floating-menu-2.0.0-beta.45.tgz#0ccba4bc376171ff4cb0ad9b6e032bfa69488010"
+ integrity sha512-UjAXhdrcVInCydCoRq+9IOEHQC2lR6BSNACFiTzgzObY7aFaNmMus/9MZ/WZKSN3Rw8Mk4lr8PXFP/3zuNqbYA==
dependencies:
prosemirror-state "^1.3.4"
- prosemirror-view "^1.22.0"
- tippy.js "^6.3.6"
+ prosemirror-view "^1.23.1"
+ tippy.js "^6.3.7"
"@tiptap/extension-gapcursor@^2.0.0-beta.33":
version "2.0.0-beta.33"
@@ -1677,13 +1677,13 @@
resolved "https://registry.yarnpkg.com/@tiptap/extension-table-row/-/extension-table-row-2.0.0-beta.19.tgz#b45e82f29dfcc7405440ba237b069dbb93d1a94a"
integrity sha512-ldEVDpIUX7ZqbViTy4c/RfyNGRv++O/r3A/Ivuon1PykaDDTbPlp5JM89FunAD39cLAbo2HKtweqdmzCMlZsqA==
-"@tiptap/extension-table@^2.0.0-beta.42":
- version "2.0.0-beta.42"
- resolved "https://registry.yarnpkg.com/@tiptap/extension-table/-/extension-table-2.0.0-beta.42.tgz#bd7e2886f9f4e6d6c53fa1a5fdf24e05bd58a4af"
- integrity sha512-M9vL4ZODthTSiSRn4yC/gPfPgn7fgpoIj0qm6LF0HMcZbsyDA7eH7E33Xed93OwdMaJuLeq2qqdo1Sg71AJwpQ==
+"@tiptap/extension-table@^2.0.0-beta.43":
+ version "2.0.0-beta.43"
+ resolved "https://registry.yarnpkg.com/@tiptap/extension-table/-/extension-table-2.0.0-beta.43.tgz#12fc8513f05dd49cba5f1a0d9dd14df9efc2050a"
+ integrity sha512-PALo2WCf/4RpICfJzBvbTaEOf6rpwmQK78jC3tR8kE9Sz6xOSydZmmbl4vovklXCHy3euaW9LCuyiiX+fjJDxw==
dependencies:
prosemirror-tables "^1.1.1"
- prosemirror-view "^1.22.0"
+ prosemirror-view "^1.23.1"
"@tiptap/extension-task-item@^2.0.0-beta.28":
version "2.0.0-beta.28"
@@ -1700,14 +1700,14 @@
resolved "https://registry.yarnpkg.com/@tiptap/extension-text/-/extension-text-2.0.0-beta.15.tgz#f08cff1b78f1c6996464dfba1fef8ec1e107617f"
integrity sha512-S3j2+HyV2gsXZP8Wg/HA+YVXQsZ3nrXgBM9HmGAxB0ESOO50l7LWfip0f3qcw1oRlh5H3iLPkA6/f7clD2/TFA==
-"@tiptap/vue-2@^2.0.0-beta.68":
- version "2.0.0-beta.68"
- resolved "https://registry.yarnpkg.com/@tiptap/vue-2/-/vue-2-2.0.0-beta.68.tgz#9f67bd57c9860d2263ae4f32b169f6307f7d273b"
- integrity sha512-Denvq8TdO5mzOkFLbQHWpmz/90tFehN2eRBRH20lPBQyrHSW3kxLleNVf2UsmZhu9b9d83s2MrFNJp1wAkhCKg==
+"@tiptap/vue-2@^2.0.0-beta.69":
+ version "2.0.0-beta.69"
+ resolved "https://registry.yarnpkg.com/@tiptap/vue-2/-/vue-2-2.0.0-beta.69.tgz#65ed98e966c28c9614d4c5a6218e99397a2e0a26"
+ integrity sha512-Adm+YvhaihW3mpFIBgPH/3qlxG3hY7/g52gM3Kg0f//SNg/bDqK+AMLroZB5Je06ihCqBaRb1hrSD0/gTVMJ1g==
dependencies:
- "@tiptap/extension-bubble-menu" "^2.0.0-beta.49"
- "@tiptap/extension-floating-menu" "^2.0.0-beta.44"
- prosemirror-view "^1.22.0"
+ "@tiptap/extension-bubble-menu" "^2.0.0-beta.50"
+ "@tiptap/extension-floating-menu" "^2.0.0-beta.45"
+ prosemirror-view "^1.23.1"
"@toast-ui/editor@^2.5.2":
version "2.5.2"
@@ -9790,10 +9790,10 @@ prosemirror-commands@^1.1.12, prosemirror-commands@^1.1.4:
prosemirror-state "^1.0.0"
prosemirror-transform "^1.0.0"
-prosemirror-dropcursor@^1.3.2, prosemirror-dropcursor@^1.3.5:
- version "1.3.5"
- resolved "https://registry.yarnpkg.com/prosemirror-dropcursor/-/prosemirror-dropcursor-1.3.5.tgz#d2808c17089df0e441ad66016aecc2b6457c8a1f"
- integrity sha512-tNUwcF2lPAkwKBZPZRtbxpwljnODRNZ3eiYloN1DSUqDjMT1nBZm0nejaEMS1TvNQ+3amibUSAiV4hX+jpASFA==
+prosemirror-dropcursor@^1.3.2, prosemirror-dropcursor@^1.4.0:
+ version "1.4.0"
+ resolved "https://registry.yarnpkg.com/prosemirror-dropcursor/-/prosemirror-dropcursor-1.4.0.tgz#91a859d4ee79c99b1c0ba6ee61c093b195c0d9f0"
+ integrity sha512-6+YwTjmqDwlA/Dm+5wK67ezgqgjA/MhSDgaNxKUzH97SmeuWFXyLeDRxxOPZeSo7yTxcDGUCWTEjmQZsVBuMrQ==
dependencies:
prosemirror-state "^1.0.0"
prosemirror-transform "^1.1.0"
@@ -9899,10 +9899,10 @@ prosemirror-transform@^1.0.0, prosemirror-transform@^1.1.0, prosemirror-transfor
dependencies:
prosemirror-model "^1.0.0"
-prosemirror-view@^1.0.0, prosemirror-view@^1.1.0, prosemirror-view@^1.13.3, prosemirror-view@^1.16.5, prosemirror-view@^1.22.0, prosemirror-view@^1.23.1:
- version "1.23.1"
- resolved "https://registry.yarnpkg.com/prosemirror-view/-/prosemirror-view-1.23.1.tgz#ea84e685003fab655b835bf2fe834dba66d1798b"
- integrity sha512-ZB0GqRqqkGvh7ggk7asFyKl3mqu3M5URBg0tf578kuP326RqL7nbIS0jEix95Vfb/U43J1T8PV6OCWQ5fZPVjg==
+prosemirror-view@^1.0.0, prosemirror-view@^1.1.0, prosemirror-view@^1.13.3, prosemirror-view@^1.16.5, prosemirror-view@^1.23.1, prosemirror-view@^1.23.2:
+ version "1.23.2"
+ resolved "https://registry.yarnpkg.com/prosemirror-view/-/prosemirror-view-1.23.2.tgz#20606ab3faad8a6a5320182256e92a2b96a87d31"
+ integrity sha512-iPgRw6tpcN+KH1yKmSnRmDKsJBVkWLFP6laHcz9rh/n0Ndz7YKKCDldtw6FhHBYoWmZeubbhV/rrQW0VCDG9iw==
dependencies:
prosemirror-model "^1.14.3"
prosemirror-state "^1.0.0"
@@ -11554,7 +11554,7 @@ tiny-emitter@^2.0.0:
resolved "https://registry.yarnpkg.com/tiny-emitter/-/tiny-emitter-2.0.2.tgz#82d27468aca5ade8e5fd1e6d22b57dd43ebdfb7c"
integrity sha512-2NM0auVBGft5tee/OxP4PI3d8WItkDM+fPnaRAVo6xTDI2knbz9eC5ArWGqtGlYqiH3RU5yMpdyTTO7MguC4ow==
-tippy.js@^6.3.6:
+tippy.js@^6.3.7:
version "6.3.7"
resolved "https://registry.yarnpkg.com/tippy.js/-/tippy.js-6.3.7.tgz#8ccfb651d642010ed9a32ff29b0e9e19c5b8c61c"
integrity sha512-E1d3oP2emgJ9dRQZdf3Kkn0qJgI6ZLpyS5z6ZkY1DF3kaQaBsGZsndEpHwx+eC+tYM41HaSNvNtLx8tU57FzTQ==