Welcome to the mirror list, hosted at ThFree Co, Russian Federation.

gitlab.com/gitlab-org/gitlab-foss.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorGitLab Bot <gitlab-bot@gitlab.com>2022-07-20 15:08:44 +0300
committerGitLab Bot <gitlab-bot@gitlab.com>2022-07-20 15:08:44 +0300
commit96add3eb957ee4256910087070e27850dd61cfe9 (patch)
tree882322f48f911aec24c2223d986b40a24a4c486f
parent6078f74ce914ab0ec4a4a6a21318a64b864ba625 (diff)
Add latest changes from gitlab-org/gitlab@master
-rw-r--r--.gitlab/issue_templates/Service Ping reporting and monitoring.md6
-rw-r--r--GITALY_SERVER_VERSION2
-rw-r--r--app/assets/javascripts/issues/list/components/issues_list_app.vue41
-rw-r--r--app/assets/javascripts/issues/list/constants.js19
-rw-r--r--app/assets/javascripts/vue_shared/issuable/list/constants.js7
-rw-r--r--app/assets/javascripts/work_items/constants.js15
-rw-r--r--app/controllers/groups_controller.rb4
-rw-r--r--app/controllers/projects/issues_controller.rb5
-rw-r--r--app/views/projects/protected_branches/shared/_create_protected_branch.html.haml8
-rw-r--r--config/feature_flags/development/use_traversal_ids.yml2
-rw-r--r--danger/roulette/Dangerfile2
-rw-r--r--doc/administration/geo/replication/troubleshooting.md17
-rw-r--r--doc/development/code_review.md2
-rw-r--r--doc/development/service_ping/metrics_dictionary.md6
-rw-r--r--doc/development/service_ping/metrics_instrumentation.md2
-rw-r--r--doc/development/service_ping/review_guidelines.md6
-rw-r--r--doc/development/snowplow/infrastructure.md6
-rw-r--r--doc/development/snowplow/review_guidelines.md2
-rw-r--r--doc/user/profile/index.md2
-rw-r--r--lib/gitlab/background_migration/project_namespaces/backfill_project_namespaces.rb4
-rw-r--r--lib/gitlab/ci/templates/Jobs/SAST-IaC.gitlab-ci.yml2
-rw-r--r--lib/gitlab/ci/templates/Jobs/SAST-IaC.latest.gitlab-ci.yml2
-rw-r--r--lib/gitlab/github_import/importer/events/changed_milestone.rb47
-rw-r--r--lib/gitlab/github_import/importer/issue_event_importer.rb3
-rw-r--r--lib/gitlab/github_import/representation/issue_event.rb3
-rw-r--r--lib/gitlab/version_info.rb4
-rw-r--r--rubocop/cop_todo.rb49
-rw-r--r--rubocop/formatter/todo_formatter.rb48
-rw-r--r--spec/frontend/issues/list/components/issues_list_app_spec.js87
-rw-r--r--spec/lib/gitlab/background_migration/project_namespaces/backfill_project_namespaces_spec.rb298
-rw-r--r--spec/lib/gitlab/github_import/importer/events/changed_milestone_spec.rb64
-rw-r--r--spec/lib/gitlab/github_import/importer/issue_event_importer_spec.rb14
-rw-r--r--spec/lib/gitlab/github_import/representation/issue_event_spec.rb22
-rw-r--r--spec/lib/gitlab/version_info_spec.rb14
-rw-r--r--spec/rubocop/cop_todo_spec.rb124
-rw-r--r--spec/rubocop/formatter/todo_formatter_spec.rb12
-rw-r--r--tooling/danger/product_intelligence.rb2
37 files changed, 672 insertions, 281 deletions
diff --git a/.gitlab/issue_templates/Service Ping reporting and monitoring.md b/.gitlab/issue_templates/Service Ping reporting and monitoring.md
index 1c0d221318b..9a30f71e42b 100644
--- a/.gitlab/issue_templates/Service Ping reporting and monitoring.md
+++ b/.gitlab/issue_templates/Service Ping reporting and monitoring.md
@@ -1,6 +1,6 @@
-<!-- This issue template is used by https://about.gitlab.com/handbook/engineering/development/growth/product-intelligence/ for tracking effort around Service Ping reporting for GitLab.com -->
+<!-- This issue template is used by https://about.gitlab.com/handbook/engineering/development/analytics-section/product-intelligence/ for tracking effort around Service Ping reporting for GitLab.com -->
-The [Product Intelligence group](https://about.gitlab.com/handbook/engineering/development/growth/product-intelligence/) runs manual reporting of ServicePing for GitLab.com on a weekly basis. This issue captures:
+The [Product Intelligence group](https://about.gitlab.com/handbook/engineering/development/analytics/product-intelligence/) runs manual reporting of ServicePing for GitLab.com on a weekly basis. This issue:
- Captures the work required to complete the reporting process.
- Captures the follow-up tasks that are focused on metrics performance verification.
@@ -123,7 +123,7 @@ If you get mentioned, check the failing metric and open an optimization issue.
<!-- Do not edit below this line -->
/confidential
-/label ~"group::product intelligence" ~"devops::growth" ~backend ~"section::growth" ~"Category:Service Ping"
+/label ~"group::product intelligence" ~"devops::analytics" ~backend ~"section::analytics" ~"Category:Service Ping"
/epic https://gitlab.com/groups/gitlab-org/-/epics/6000
/weight 5
/title Monitor and Generate GitLab.com Service Ping
diff --git a/GITALY_SERVER_VERSION b/GITALY_SERVER_VERSION
index 095b6c8b4d6..6194ce8b21f 100644
--- a/GITALY_SERVER_VERSION
+++ b/GITALY_SERVER_VERSION
@@ -1 +1 @@
-3fc66dc23581de48bdbbf1b5a5d5ca9faf5f925b
+1c907781819bf8810e15578f3d4d2b25e3ca1053
diff --git a/app/assets/javascripts/issues/list/components/issues_list_app.vue b/app/assets/javascripts/issues/list/components/issues_list_app.vue
index f567b0f1d68..d6acd700148 100644
--- a/app/assets/javascripts/issues/list/components/issues_list_app.vue
+++ b/app/assets/javascripts/issues/list/components/issues_list_app.vue
@@ -39,13 +39,13 @@ import {
TOKEN_TITLE_TYPE,
} from '~/vue_shared/components/filtered_search_bar/constants';
import IssuableList from '~/vue_shared/issuable/list/components/issuable_list_root.vue';
-import {
- IssuableListTabs,
- IssuableStates,
- IssuableTypes,
-} from '~/vue_shared/issuable/list/constants';
+import { IssuableListTabs, IssuableStates } from '~/vue_shared/issuable/list/constants';
+import glFeatureFlagMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
+import { WORK_ITEM_TYPE_ENUM_TASK } from '~/work_items/constants';
import {
CREATED_DESC,
+ defaultTypeTokenOptions,
+ defaultWorkItemTypes,
i18n,
ISSUE_REFERENCE,
MAX_LIST_SIZE,
@@ -67,6 +67,7 @@ import {
TOKEN_TYPE_ORGANIZATION,
TOKEN_TYPE_RELEASE,
TOKEN_TYPE_TYPE,
+ TYPE_TOKEN_TASK_OPTION,
UPDATED_DESC,
urlSortParams,
} from '../constants';
@@ -107,7 +108,6 @@ const CrmOrganizationToken = () =>
export default {
i18n,
IssuableListTabs,
- IssuableTypes: [IssuableTypes.Issue, IssuableTypes.Incident, IssuableTypes.TestCase],
components: {
CsvImportExportButtons,
GlButton,
@@ -123,6 +123,7 @@ export default {
directives: {
GlTooltip: GlTooltipDirective,
},
+ mixins: [glFeatureFlagMixin()],
inject: [
'autocompleteAwardEmojisPath',
'calendarPath',
@@ -180,9 +181,7 @@ export default {
issues: {
query: getIssuesQuery,
variables() {
- const { types } = this.queryVariables;
-
- return { ...this.queryVariables, types: types ? [types] : this.$options.IssuableTypes };
+ return this.queryVariables;
},
update(data) {
return data[this.namespace]?.issues.nodes ?? [];
@@ -206,9 +205,7 @@ export default {
issuesCounts: {
query: getIssuesCountsQuery,
variables() {
- const { types } = this.queryVariables;
-
- return { ...this.queryVariables, types: types ? [types] : this.$options.IssuableTypes };
+ return this.queryVariables;
},
update(data) {
return data[this.namespace] ?? {};
@@ -240,11 +237,22 @@ export default {
state: this.state,
...this.pageParams,
...this.apiFilterParams,
+ types: this.apiFilterParams.types || this.defaultWorkItemTypes,
};
},
namespace() {
return this.isProject ? ITEM_TYPE.PROJECT : ITEM_TYPE.GROUP;
},
+ defaultWorkItemTypes() {
+ return this.isWorkItemsEnabled
+ ? defaultWorkItemTypes.concat(WORK_ITEM_TYPE_ENUM_TASK)
+ : defaultWorkItemTypes;
+ },
+ typeTokenOptions() {
+ return this.isWorkItemsEnabled
+ ? defaultTypeTokenOptions.concat(TYPE_TOKEN_TASK_OPTION)
+ : defaultTypeTokenOptions;
+ },
hasSearch() {
return (
this.searchQuery ||
@@ -262,6 +270,9 @@ export default {
isOpenTab() {
return this.state === IssuableStates.Opened;
},
+ isWorkItemsEnabled() {
+ return this.glFeatures.workItems;
+ },
showCsvButtons() {
return this.isProject && this.isSignedIn;
},
@@ -340,11 +351,7 @@ export default {
title: TOKEN_TITLE_TYPE,
icon: 'issues',
token: GlFilteredSearchToken,
- options: [
- { icon: 'issue-type-issue', title: 'issue', value: 'issue' },
- { icon: 'issue-type-incident', title: 'incident', value: 'incident' },
- { icon: 'issue-type-test-case', title: 'test_case', value: 'test_case' },
- ],
+ options: this.typeTokenOptions,
},
];
diff --git a/app/assets/javascripts/issues/list/constants.js b/app/assets/javascripts/issues/list/constants.js
index a921eb62e26..38fe4c33792 100644
--- a/app/assets/javascripts/issues/list/constants.js
+++ b/app/assets/javascripts/issues/list/constants.js
@@ -8,6 +8,11 @@ import {
OPERATOR_IS,
OPERATOR_IS_NOT,
} from '~/vue_shared/components/filtered_search_bar/constants';
+import {
+ WORK_ITEM_TYPE_ENUM_INCIDENT,
+ WORK_ITEM_TYPE_ENUM_ISSUE,
+ WORK_ITEM_TYPE_ENUM_TEST_CASE,
+} from '~/work_items/constants';
export const i18n = {
anonymousSearchingMessage: __('You must sign in to search for specific terms.'),
@@ -147,6 +152,20 @@ export const TOKEN_TYPE_WEIGHT = 'weight';
export const TOKEN_TYPE_CONTACT = 'crm_contact';
export const TOKEN_TYPE_ORGANIZATION = 'crm_organization';
+export const TYPE_TOKEN_TASK_OPTION = { icon: 'task-done', title: 'task', value: 'task' };
+
+export const defaultWorkItemTypes = [
+ WORK_ITEM_TYPE_ENUM_ISSUE,
+ WORK_ITEM_TYPE_ENUM_INCIDENT,
+ WORK_ITEM_TYPE_ENUM_TEST_CASE,
+];
+
+export const defaultTypeTokenOptions = [
+ { icon: 'issue-type-issue', title: 'issue', value: 'issue' },
+ { icon: 'issue-type-incident', title: 'incident', value: 'incident' },
+ { icon: 'issue-type-test-case', title: 'test_case', value: 'test_case' },
+];
+
export const filters = {
[TOKEN_TYPE_AUTHOR]: {
[API_PARAM]: {
diff --git a/app/assets/javascripts/vue_shared/issuable/list/constants.js b/app/assets/javascripts/vue_shared/issuable/list/constants.js
index 507f333a34e..f6b864dfde0 100644
--- a/app/assets/javascripts/vue_shared/issuable/list/constants.js
+++ b/app/assets/javascripts/vue_shared/issuable/list/constants.js
@@ -46,13 +46,6 @@ export const AvailableSortOptions = [
},
];
-export const IssuableTypes = {
- Issue: 'ISSUE',
- Incident: 'INCIDENT',
- TestCase: 'TEST_CASE',
- Requirement: 'REQUIREMENT',
-};
-
export const DEFAULT_PAGE_SIZE = 20;
export const DEFAULT_SKELETON_COUNT = 5;
diff --git a/app/assets/javascripts/work_items/constants.js b/app/assets/javascripts/work_items/constants.js
index 2140b418e6d..2b44642877b 100644
--- a/app/assets/javascripts/work_items/constants.js
+++ b/app/assets/javascripts/work_items/constants.js
@@ -8,11 +8,6 @@ export const STATE_EVENT_CLOSE = 'CLOSE';
export const TRACKING_CATEGORY_SHOW = 'workItems:show';
-export const i18n = {
- fetchError: s__('WorkItem|Something went wrong when fetching the work item. Please try again.'),
- updateError: s__('WorkItem|Something went wrong while updating the work item. Please try again.'),
-};
-
export const TASK_TYPE_NAME = 'Task';
export const WIDGET_TYPE_ASSIGNEES = 'ASSIGNEES';
@@ -22,7 +17,15 @@ export const WIDGET_TYPE_WEIGHT = 'WEIGHT';
export const WIDGET_TYPE_HIERARCHY = 'HIERARCHY';
export const WORK_ITEM_VIEWED_STORAGE_KEY = 'gl-show-work-item-banner';
-export const WIDGET_TYPE_TASK_ICON = 'task-done';
+export const WORK_ITEM_TYPE_ENUM_INCIDENT = 'INCIDENT';
+export const WORK_ITEM_TYPE_ENUM_ISSUE = 'ISSUE';
+export const WORK_ITEM_TYPE_ENUM_TASK = 'TASK';
+export const WORK_ITEM_TYPE_ENUM_TEST_CASE = 'TEST_CASE';
+
+export const i18n = {
+ fetchError: s__('WorkItem|Something went wrong when fetching the work item. Please try again.'),
+ updateError: s__('WorkItem|Something went wrong while updating the work item. Please try again.'),
+};
export const WIDGET_ICONS = {
TASK: 'task-done',
diff --git a/app/controllers/groups_controller.rb b/app/controllers/groups_controller.rb
index 327b4832f31..32b187c3260 100644
--- a/app/controllers/groups_controller.rb
+++ b/app/controllers/groups_controller.rb
@@ -34,6 +34,10 @@ class GroupsController < Groups::ApplicationController
before_action :track_experiment_event, only: [:new]
+ before_action only: :issues do
+ push_force_frontend_feature_flag(:work_items, group.work_items_feature_flag_enabled?)
+ end
+
helper_method :captcha_required?
skip_cross_project_access_check :index, :new, :create, :edit, :update,
diff --git a/app/controllers/projects/issues_controller.rb b/app/controllers/projects/issues_controller.rb
index f1c9e2b2653..48f883bcd4b 100644
--- a/app/controllers/projects/issues_controller.rb
+++ b/app/controllers/projects/issues_controller.rb
@@ -44,10 +44,13 @@ class Projects::IssuesController < Projects::ApplicationController
push_frontend_feature_flag(:incident_timeline, project)
end
+ before_action only: [:index, :show] do
+ push_force_frontend_feature_flag(:work_items, project&.work_items_feature_flag_enabled?)
+ end
+
before_action only: :show do
push_frontend_feature_flag(:issue_assignees_widget, project)
push_frontend_feature_flag(:realtime_labels, project)
- push_force_frontend_feature_flag(:work_items, project&.work_items_feature_flag_enabled?)
push_frontend_feature_flag(:work_items_mvc_2)
push_frontend_feature_flag(:work_items_hierarchy, project)
end
diff --git a/app/views/projects/protected_branches/shared/_create_protected_branch.html.haml b/app/views/projects/protected_branches/shared/_create_protected_branch.html.haml
index 3b8294a1dec..35770c32f9f 100644
--- a/app/views/projects/protected_branches/shared/_create_protected_branch.html.haml
+++ b/app/views/projects/protected_branches/shared/_create_protected_branch.html.haml
@@ -1,9 +1,9 @@
= form_for [@project, @protected_branch], html: { class: 'new-protected-branch js-new-protected-branch' } do |f|
%input{ type: 'hidden', name: 'update_section', value: 'js-protected-branches-settings' }
- .card
- .card-header.gl-font-weight-bold
+ = render Pajamas::CardComponent.new(card_options: { class: "gl-mb-5" }) do |c|
+ - c.header do
= s_("ProtectedBranch|Protect a branch")
- .card-body
+ - c.body do
= form_errors(@protected_branch, pajamas_alert: true)
.form-group.row
= f.label :name, s_('ProtectedBranch|Branch:'), class: 'col-sm-12'
@@ -31,5 +31,5 @@
- force_push_link_start = '<a href="%{url}" target="_blank" rel="noopener noreferrer">'.html_safe % { url: force_push_docs_url }
= (s_("ProtectedBranch|Allow all users with push access to %{tag_start}force push%{tag_end}.") % { tag_start: force_push_link_start, tag_end: '</a>' }).html_safe
= render_if_exists 'projects/protected_branches/ee/code_owner_approval_form', f: f
- .card-footer
+ - c.footer do
= f.submit s_('ProtectedBranch|Protect'), class: 'gl-button btn btn-confirm', disabled: true, data: { qa_selector: 'protect_button' }
diff --git a/config/feature_flags/development/use_traversal_ids.yml b/config/feature_flags/development/use_traversal_ids.yml
index ab3a2eaf733..3d566ddc3c0 100644
--- a/config/feature_flags/development/use_traversal_ids.yml
+++ b/config/feature_flags/development/use_traversal_ids.yml
@@ -5,4 +5,4 @@ rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/321948
milestone: '13.11'
type: development
group: group::workspace
-default_enabled: false
+default_enabled: true
diff --git a/danger/roulette/Dangerfile b/danger/roulette/Dangerfile
index 0e6af5792cd..c3f7806069c 100644
--- a/danger/roulette/Dangerfile
+++ b/danger/roulette/Dangerfile
@@ -55,7 +55,7 @@ end
OPTIONAL_REVIEW_TEMPLATE = '%{role} review is optional for %{category}'
NOT_AVAILABLE_TEMPLATES = {
default: 'No %{role} available',
- product_intelligence: group_not_available_template('#g_product_intelligence', '@gitlab-org/growth/product-intelligence/engineers'),
+ product_intelligence: group_not_available_template('#g_product_intelligence', '@gitlab-org/analytics-section/product-intelligence/engineers'),
integrations_be: group_not_available_template('#g_ecosystem_integrations', '@gitlab-org/ecosystem-stage/integrations'),
integrations_fe: group_not_available_template('#g_ecosystem_integrations', '@gitlab-org/ecosystem-stage/integrations')
}.freeze
diff --git a/doc/administration/geo/replication/troubleshooting.md b/doc/administration/geo/replication/troubleshooting.md
index 082ecbbb208..d231a977ba8 100644
--- a/doc/administration/geo/replication/troubleshooting.md
+++ b/doc/administration/geo/replication/troubleshooting.md
@@ -1158,6 +1158,23 @@ requests redirected from the secondary to the primary node do not properly send
Authorization header. This may result in either an infinite `Authorization <-> Redirect`
loop, or Authorization error messages.
+### Error: Net::ReadTimeout when pushing through SSH on a Geo secondary
+
+When you push large repositories through SSH on a Geo secondary site, you may encounter a timeout.
+This is because Rails proxies the push to the primary and has a 60 second default timeout,
+[as described in this Geo issue](https://gitlab.com/gitlab-org/gitlab/-/issues/7405).
+
+Current workarounds are:
+
+- Push through HTTP instead, where Workhorse proxies the request to the primary (or redirects to the primary if Geo proxying is not enabled).
+- Push directly to the primary.
+
+Example log (`gitlab-shell.log`):
+
+```plaintext
+Failed to contact primary https://primary.domain.com/namespace/push_test.git\\nError: Net::ReadTimeout\",\"result\":null}" code=500 method=POST pid=5483 url="http://127.0.0.1:3000/api/v4/geo/proxy_git_push_ssh/push"
+```
+
## Recovering from a partial failover
The partial failover to a secondary Geo *site* may be the result of a temporary/transient issue. Therefore, first attempt to run the promote command again.
diff --git a/doc/development/code_review.md b/doc/development/code_review.md
index 1225260e600..4617bea8ad4 100644
--- a/doc/development/code_review.md
+++ b/doc/development/code_review.md
@@ -144,7 +144,7 @@ with [domain expertise](#domain-experts).
by a [Software Engineer in Test](https://about.gitlab.com/handbook/engineering/quality/#individual-contributors)**.
1. If your merge request only includes end-to-end changes (*4*) **or** if the MR author is a [Software Engineer in Test](https://about.gitlab.com/handbook/engineering/quality/#individual-contributors), it must be **approved by a [Quality maintainer](https://about.gitlab.com/handbook/engineering/projects/#gitlab_maintainers_qa)**
1. If your merge request includes a new or updated [application limit](https://about.gitlab.com/handbook/product/product-processes/#introducing-application-limits), it must be **approved by a [product manager](https://about.gitlab.com/company/team/)**.
-1. If your merge request includes Product Intelligence (telemetry or analytics) changes, it should be reviewed and approved by a [Product Intelligence engineer](https://gitlab.com/gitlab-org/growth/product-intelligence/engineers).
+1. If your merge request includes Product Intelligence (telemetry or analytics) changes, it should be reviewed and approved by a [Product Intelligence engineer](https://gitlab.com/gitlab-org/analytics-section/product-intelligence/engineers).
1. If your merge request includes an addition of, or changes to a [Feature spec](testing_guide/testing_levels.md#frontend-feature-tests), it must be **approved by a [Quality maintainer](https://about.gitlab.com/handbook/engineering/projects/#gitlab_maintainers_qa) or [Quality reviewer](https://about.gitlab.com/handbook/engineering/projects/#gitlab_reviewers_qa)**.
1. If your merge request introduces a new service to GitLab (Puma, Sidekiq, Gitaly are examples), it must be **approved by a [product manager](https://about.gitlab.com/company/team/)**. See the [process for adding a service component to GitLab](adding_service_component.md) for details.
1. If your merge request includes changes related to authentication or authorization, it must be **approved by a [Manage:Authentication and Authorization team member](https://about.gitlab.com/company/team/)**. Check the [code review section on the group page](https://about.gitlab.com/handbook/engineering/development/dev/manage/authentication-and-authorization/#additional-considerations) for more details. Patterns for files known to require review from the team are listed in the in the `Authentication and Authorization` section of the [`CODEOWNERS`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/CODEOWNERS) file, and the team will be listed in the approvers section of all merge requests that modify these files.
diff --git a/doc/development/service_ping/metrics_dictionary.md b/doc/development/service_ping/metrics_dictionary.md
index 2adba5d8095..d063c4c7601 100644
--- a/doc/development/service_ping/metrics_dictionary.md
+++ b/doc/development/service_ping/metrics_dictionary.md
@@ -205,8 +205,8 @@ instance unique identifier.
key_path: uuid
description: GitLab instance unique identifier
product_category: collection
-product_section: growth
-product_stage: growth
+product_section: analytics
+product_stage: analytics
product_group: product_intelligence
value_type: string
status: active
@@ -301,7 +301,7 @@ bundle exec rails generate gitlab:usage_metric_definition:redis_hll issues users
## Metrics Dictionary
-[Metrics Dictionary is a separate application](https://gitlab.com/gitlab-org/growth/product-intelligence/metric-dictionary).
+[Metrics Dictionary is a separate application](https://gitlab.com/gitlab-org/analytics-section/product-intelligence/metric-dictionary).
All metrics available in Service Ping are in the [Metrics Dictionary](https://metrics.gitlab.com/).
diff --git a/doc/development/service_ping/metrics_instrumentation.md b/doc/development/service_ping/metrics_instrumentation.md
index e1c51713f3c..c3cb9d0fff0 100644
--- a/doc/development/service_ping/metrics_instrumentation.md
+++ b/doc/development/service_ping/metrics_instrumentation.md
@@ -29,7 +29,7 @@ A metric definition has the [`instrumentation_class`](metrics_dictionary.md) fie
The defined instrumentation class should inherit one of the existing metric classes: `DatabaseMetric`, `RedisMetric`, `RedisHLLMetric`, `NumbersMetric` or `GenericMetric`.
-The current convention is that a single instrumentation class corresponds to a single metric. On a rare occasions, there are exceptions to that convention like [Redis metrics](#redis-metrics). To use a single instrumentation class for more than one metric, please reach out to one of the `@gitlab-org/growth/product-intelligence/engineers` members to consult about your case.
+The current convention is that a single instrumentation class corresponds to a single metric. On rare occasions, there are exceptions to that convention like [Redis metrics](#redis-metrics). To use a single instrumentation class for more than one metric, please reach out to one of the `@gitlab-org/analytics-section/product-intelligence/engineers` members to consult about your case.
Using the instrumentation classes ensures that metrics can fail safe individually, without breaking the entire
process of Service Ping generation.
diff --git a/doc/development/service_ping/review_guidelines.md b/doc/development/service_ping/review_guidelines.md
index 4ce5b2d577c..1b00858be7e 100644
--- a/doc/development/service_ping/review_guidelines.md
+++ b/doc/development/service_ping/review_guidelines.md
@@ -7,7 +7,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
# Service Ping review guidelines
This page includes introductory material for a
-[Product Intelligence](https://about.gitlab.com/handbook/engineering/development/growth/product-intelligence/)
+[Product Intelligence](https://about.gitlab.com/handbook/engineering/development/analytics/product-intelligence/)
review, and is specific to Service Ping related reviews. For broader advice and
general best practices for code reviews, refer to our [code review guide](../code_review.md).
@@ -42,7 +42,7 @@ are regular backend changes.
- Assign both the `~backend` and `~product intelligence` reviews to another Product Intelligence team member.
- Assign the maintainer review to someone outside of the Product Intelligence group.
- Assign an
- [engineer](https://gitlab.com/groups/gitlab-org/growth/product-intelligence/engineers/-/group_members?with_inherited_permissions=exclude) from the Product Intelligence team for a review.
+ [engineer](https://gitlab.com/groups/gitlab-org/analytics-section/product-intelligence/engineers/-/group_members?with_inherited_permissions=exclude) from the Product Intelligence team for a review.
- Set the correct attributes in the metric's YAML definition:
- `product_section`, `product_stage`, `product_group`, `product_category`
- Provide a clear description of the metric.
@@ -76,7 +76,7 @@ are regular backend changes.
[Danger bot](../dangerbot.md) adds the list of changed Product Intelligence files
and pings the
-[`@gitlab-org/growth/product-intelligence/engineers`](https://gitlab.com/groups/gitlab-org/growth/product-intelligence/engineers/-/group_members?with_inherited_permissions=exclude) group for merge requests
+[`@gitlab-org/analytics-section/product-intelligence/engineers`](https://gitlab.com/groups/gitlab-org/analytics-section/product-intelligence/engineers/-/group_members?with_inherited_permissions=exclude) group for merge requests
that are not drafts.
Any of the Product Intelligence engineers can be assigned for the Product Intelligence review.
diff --git a/doc/development/snowplow/infrastructure.md b/doc/development/snowplow/infrastructure.md
index 758c850e89f..ea4653dc91d 100644
--- a/doc/development/snowplow/infrastructure.md
+++ b/doc/development/snowplow/infrastructure.md
@@ -50,7 +50,7 @@ See [Snowplow technology 101](https://github.com/snowplow/snowplow/#snowplow-tec
### Pseudonymization
-In contrast to a typical Snowplow pipeline, after enrichment, GitLab Snowplow events go through a [pseudonymization service](https://gitlab.com/gitlab-org/growth/product-intelligence/snowplow-pseudonymization) in the form of an AWS Lambda service before they are stored in S3 storage.
+In contrast to a typical Snowplow pipeline, after enrichment, GitLab Snowplow events go through a [pseudonymization service](https://gitlab.com/gitlab-org/analytics-section/product-intelligence/snowplow-pseudonymization) in the form of an AWS Lambda service before they are stored in S3 storage.
#### Why events need to be pseudonymized
@@ -85,7 +85,7 @@ There are several tools that monitor Snowplow events tracking in different stage
- The number of events that successfully reach Snowplow collectors.
- The number of events that failed to reach Snowplow collectors.
- The number of backend events that were sent.
-- [AWS CloudWatch dashboard](https://console.aws.amazon.com/cloudwatch/home?region=us-east-1#dashboards:name=SnowPlow;start=P3D) monitors the state of the events in a processing pipeline. The pipeline starts from Snowplow collectors, goes through to enrichers and pseudonymization, and then up to persistence in an S3 bucket. From S3, the events are imported into the Snowflake Data Warehouse. You must have AWS access rights to view this dashboard. For more information, see [monitoring](https://gitlab.com/gitlab-org/growth/product-intelligence/snowplow-pseudonymization#monitoring) in the Snowplow Events pseudonymization service documentation.
+- [AWS CloudWatch dashboard](https://console.aws.amazon.com/cloudwatch/home?region=us-east-1#dashboards:name=SnowPlow;start=P3D) monitors the state of the events in a processing pipeline. The pipeline starts from Snowplow collectors, goes through to enrichers and pseudonymization, and then up to persistence in an S3 bucket. From S3, the events are imported into the Snowflake Data Warehouse. You must have AWS access rights to view this dashboard. For more information, see [monitoring](https://gitlab.com/gitlab-org/analytics-section/product-intelligence/snowplow-pseudonymization#monitoring) in the Snowplow Events pseudonymization service documentation.
- [Sisense dashboard](https://app.periscopedata.com/app/gitlab/417669/Snowplow-Summary-Dashboard) provides information about the number of good and bad events imported into the Data Warehouse, in addition to the total number of imported Snowplow events.
For more information, see this [video walk-through](https://www.youtube.com/watch?v=NxPS0aKa_oU).
@@ -93,7 +93,7 @@ For more information, see this [video walk-through](https://www.youtube.com/watc
## Related topics
- [Snowplow technology 101](https://github.com/snowplow/snowplow/#snowplow-technology-101)
-- [Snowplow pseudonymization AWS Lambda project](https://gitlab.com/gitlab-org/growth/product-intelligence/snowplow-pseudonymization)
+- [Snowplow pseudonymization AWS Lambda project](https://gitlab.com/gitlab-org/analytics-section/product-intelligence/snowplow-pseudonymization)
- [Product Intelligence Guide](https://about.gitlab.com/handbook/product/product-intelligence-guide/)
- [Data Infrastructure](https://about.gitlab.com/handbook/business-technology/data-team/platform/infrastructure/)
- [Snowplow architecture overview (internal)](https://www.youtube.com/watch?v=eVYJjzspsLU)
diff --git a/doc/development/snowplow/review_guidelines.md b/doc/development/snowplow/review_guidelines.md
index 673166452b7..44de849792c 100644
--- a/doc/development/snowplow/review_guidelines.md
+++ b/doc/development/snowplow/review_guidelines.md
@@ -7,7 +7,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
# Snowplow review guidelines
This page includes introductory material for a
-[Product Intelligence](https://about.gitlab.com/handbook/engineering/development/growth/product-intelligence/)
+[Product Intelligence](https://about.gitlab.com/handbook/engineering/development/analytics/product-intelligence/)
review, and is specific to Snowplow related reviews. For broader advice and
general best practices for code reviews, refer to our [code review guide](../code_review.md).
diff --git a/doc/user/profile/index.md b/doc/user/profile/index.md
index bf696310158..746794974fe 100644
--- a/doc/user/profile/index.md
+++ b/doc/user/profile/index.md
@@ -36,7 +36,7 @@ To change your password:
1. In the **New password** and **Password confirmation** text box, enter your new password.
1. Select **Save password**.
-If you don't know your current password, select the **I forgot my password** link.
+If you don't know your current password, select the **I forgot my password** link. A password reset email is sent to the account's **primary** email address.
## Change your username
diff --git a/lib/gitlab/background_migration/project_namespaces/backfill_project_namespaces.rb b/lib/gitlab/background_migration/project_namespaces/backfill_project_namespaces.rb
index 2b27bad3497..845a3c16bbe 100644
--- a/lib/gitlab/background_migration/project_namespaces/backfill_project_namespaces.rb
+++ b/lib/gitlab/background_migration/project_namespaces/backfill_project_namespaces.rb
@@ -105,9 +105,11 @@ module Gitlab
.joins("INNER JOIN namespaces n2 ON namespaces.parent_id = n2.id")
.select("namespaces.id as project_namespace_id, n2.traversal_ids")
+ # some customers have namespaces.id column type as bigint, which makes array_append(integer[], bigint) to fail
+ # so we just explicitly cast arguments to compatible types
ApplicationRecord.connection.execute <<~SQL
UPDATE namespaces
- SET traversal_ids = array_append(project_namespaces.traversal_ids, project_namespaces.project_namespace_id)
+ SET traversal_ids = array_append(project_namespaces.traversal_ids::bigint[], project_namespaces.project_namespace_id::bigint)
FROM (#{namespaces.to_sql}) as project_namespaces(project_namespace_id, traversal_ids)
WHERE id = project_namespaces.project_namespace_id
SQL
diff --git a/lib/gitlab/ci/templates/Jobs/SAST-IaC.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/SAST-IaC.gitlab-ci.yml
index b6358eb0831..2a11976a7b0 100644
--- a/lib/gitlab/ci/templates/Jobs/SAST-IaC.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Jobs/SAST-IaC.gitlab-ci.yml
@@ -31,7 +31,7 @@ kics-iac-sast:
image:
name: "$SAST_ANALYZER_IMAGE"
variables:
- SAST_ANALYZER_IMAGE_TAG: 2
+ SAST_ANALYZER_IMAGE_TAG: 3
SAST_ANALYZER_IMAGE: "$SECURE_ANALYZERS_PREFIX/kics:$SAST_ANALYZER_IMAGE_TAG$SAST_IMAGE_SUFFIX"
rules:
- if: $SAST_DISABLED
diff --git a/lib/gitlab/ci/templates/Jobs/SAST-IaC.latest.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/SAST-IaC.latest.gitlab-ci.yml
index b6358eb0831..2a11976a7b0 100644
--- a/lib/gitlab/ci/templates/Jobs/SAST-IaC.latest.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Jobs/SAST-IaC.latest.gitlab-ci.yml
@@ -31,7 +31,7 @@ kics-iac-sast:
image:
name: "$SAST_ANALYZER_IMAGE"
variables:
- SAST_ANALYZER_IMAGE_TAG: 2
+ SAST_ANALYZER_IMAGE_TAG: 3
SAST_ANALYZER_IMAGE: "$SECURE_ANALYZERS_PREFIX/kics:$SAST_ANALYZER_IMAGE_TAG$SAST_IMAGE_SUFFIX"
rules:
- if: $SAST_DISABLED
diff --git a/lib/gitlab/github_import/importer/events/changed_milestone.rb b/lib/gitlab/github_import/importer/events/changed_milestone.rb
new file mode 100644
index 00000000000..6f3ab5753de
--- /dev/null
+++ b/lib/gitlab/github_import/importer/events/changed_milestone.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module GithubImport
+ module Importer
+ module Events
+ class ChangedMilestone
+ attr_reader :project, :user_id
+
+ # GitHub API doesn't provide the historical state of an issue for
+ # de/milestoned issue events. So we'll assign the default state to
+ # those events that are imported from GitHub.
+ DEFAULT_STATE = Issue.available_states[:opened]
+
+ def initialize(project, user_id)
+ @project = project
+ @user_id = user_id
+ end
+
+ # issue_event - An instance of `Gitlab::GithubImport::Representation::IssueEvent`.
+ def execute(issue_event)
+ create_event(issue_event)
+ end
+
+ private
+
+ def create_event(issue_event)
+ ResourceMilestoneEvent.create!(
+ issue_id: issue_event.issue_db_id,
+ user_id: user_id,
+ created_at: issue_event.created_at,
+ milestone_id: project.milestones.find_by_title(issue_event.milestone_title)&.id,
+ action: action(issue_event.event),
+ state: DEFAULT_STATE
+ )
+ end
+
+ def action(event_type)
+ return ResourceMilestoneEvent.actions[:remove] if event_type == 'demilestoned'
+
+ ResourceMilestoneEvent.actions[:add]
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/github_import/importer/issue_event_importer.rb b/lib/gitlab/github_import/importer/issue_event_importer.rb
index e451af61ec3..1745e53c871 100644
--- a/lib/gitlab/github_import/importer/issue_event_importer.rb
+++ b/lib/gitlab/github_import/importer/issue_event_importer.rb
@@ -30,6 +30,9 @@ module Gitlab
when 'renamed'
Gitlab::GithubImport::Importer::Events::Renamed.new(project, author_id)
.execute(issue_event)
+ when 'milestoned', 'demilestoned'
+ Gitlab::GithubImport::Importer::Events::ChangedMilestone.new(project, author_id)
+ .execute(issue_event)
when 'cross-referenced'
Gitlab::GithubImport::Importer::Events::CrossReferenced.new(project, author_id)
.execute(issue_event)
diff --git a/lib/gitlab/github_import/representation/issue_event.rb b/lib/gitlab/github_import/representation/issue_event.rb
index 9016338db3b..36646121c9d 100644
--- a/lib/gitlab/github_import/representation/issue_event.rb
+++ b/lib/gitlab/github_import/representation/issue_event.rb
@@ -10,7 +10,7 @@ module Gitlab
attr_reader :attributes
expose_attribute :id, :actor, :event, :commit_id, :label_title, :old_title, :new_title,
- :source, :created_at
+ :milestone_title, :source, :created_at
expose_attribute :issue_db_id # set in SingleEndpointIssueEventsImporter#each_associated
# Builds a event from a GitHub API response.
@@ -27,6 +27,7 @@ module Gitlab
new_title: event.rename && event.rename[:to],
source: event.source,
issue_db_id: event.issue_db_id,
+ milestone_title: event.milestone && event.milestone[:title],
created_at: event.created_at
)
end
diff --git a/lib/gitlab/version_info.rb b/lib/gitlab/version_info.rb
index f967a12b959..b79b94f4d54 100644
--- a/lib/gitlab/version_info.rb
+++ b/lib/gitlab/version_info.rb
@@ -62,6 +62,10 @@ module Gitlab
end
end
+ def to_json(*_args)
+ { major: @major, minor: @minor, patch: @patch }.to_json
+ end
+
def suffix
@suffix ||= @suffix_s.strip.gsub('-', '.pre.').scan(/\d+|[a-z]+/i).map do |s|
/^\d+$/ =~ s ? s.to_i : s
diff --git a/rubocop/cop_todo.rb b/rubocop/cop_todo.rb
new file mode 100644
index 00000000000..42e2f9fbe13
--- /dev/null
+++ b/rubocop/cop_todo.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+module RuboCop
+ class CopTodo
+ attr_accessor :previously_disabled
+
+ attr_reader :cop_name, :files, :offense_count
+
+ def initialize(cop_name)
+ @cop_name = cop_name
+ @files = Set.new
+ @offense_count = 0
+ @cop_class = self.class.find_cop_by_name(cop_name)
+ @previously_disabled = false
+ end
+
+ def record(file, offense_count)
+ @files << file
+ @offense_count += offense_count
+ end
+
+ def autocorrectable?
+ @cop_class&.support_autocorrect?
+ end
+
+ def to_yaml
+ yaml = []
+ yaml << '---'
+ yaml << '# Cop supports --auto-correct.' if autocorrectable?
+ yaml << "#{cop_name}:"
+
+ if previously_disabled
+ yaml << " # Offense count: #{offense_count}"
+ yaml << ' # Temporarily disabled due to too many offenses'
+ yaml << ' Enabled: false'
+ end
+
+ yaml << ' Exclude:'
+ yaml.concat files.sort.map { |file| " - '#{file}'" }
+ yaml << ''
+
+ yaml.join("\n")
+ end
+
+ def self.find_cop_by_name(cop_name)
+ RuboCop::Cop::Registry.global.find_by_cop_name(cop_name)
+ end
+ end
+end
diff --git a/rubocop/formatter/todo_formatter.rb b/rubocop/formatter/todo_formatter.rb
index 662cc1551ff..789d0418f96 100644
--- a/rubocop/formatter/todo_formatter.rb
+++ b/rubocop/formatter/todo_formatter.rb
@@ -5,6 +5,7 @@ require 'rubocop'
require 'yaml'
require_relative '../todo_dir'
+require_relative '../cop_todo'
module RuboCop
module Formatter
@@ -14,26 +15,6 @@ module RuboCop
# For example, this formatter stores offenses for `RSpec/VariableName`
# in `.rubocop_todo/rspec/variable_name.yml`.
class TodoFormatter < BaseFormatter
- class Todo
- attr_reader :cop_name, :files, :offense_count
-
- def initialize(cop_name)
- @cop_name = cop_name
- @files = Set.new
- @offense_count = 0
- @cop_class = RuboCop::Cop::Registry.global.find_by_cop_name(cop_name)
- end
-
- def record(file, offense_count)
- @files << file
- @offense_count += offense_count
- end
-
- def autocorrectable?
- @cop_class&.support_autocorrect?
- end
- end
-
DEFAULT_BASE_DIRECTORY = File.expand_path('../../.rubocop_todo', __dir__)
class << self
@@ -44,7 +25,7 @@ module RuboCop
def initialize(output, _options = {})
@directory = self.class.base_directory
- @todos = Hash.new { |hash, cop_name| hash[cop_name] = Todo.new(cop_name) }
+ @todos = Hash.new { |hash, cop_name| hash[cop_name] = CopTodo.new(cop_name) }
@todo_dir = TodoDir.new(directory)
@config_inspect_todo_dir = load_config_inspect_todo_dir
@config_old_todo_yml = load_config_old_todo_yml
@@ -65,8 +46,8 @@ module RuboCop
def finished(_inspected_files)
@todos.values.sort_by(&:cop_name).each do |todo|
- yaml = to_yaml(todo)
- path = @todo_dir.write(todo.cop_name, yaml)
+ todo.previously_disabled = previously_disabled?(todo)
+ path = @todo_dir.write(todo.cop_name, todo.to_yaml)
output.puts "Written to #{relative_path(path)}\n"
end
@@ -90,27 +71,6 @@ module RuboCop
path.delete_prefix("#{parent}/")
end
- def to_yaml(todo)
- yaml = []
- yaml << '---'
- yaml << '# Cop supports --auto-correct.' if todo.autocorrectable?
- yaml << "#{todo.cop_name}:"
-
- if previously_disabled?(todo)
- yaml << " # Offense count: #{todo.offense_count}"
- yaml << ' # Temporarily disabled due to too many offenses'
- yaml << ' Enabled: false'
- end
-
- yaml << ' Exclude:'
-
- files = todo.files.sort.map { |file| " - '#{file}'" }
- yaml.concat files
- yaml << ''
-
- yaml.join("\n")
- end
-
def check_multiple_configurations!
cop_names = @config_inspect_todo_dir.keys & @config_old_todo_yml.keys
return if cop_names.empty?
diff --git a/spec/frontend/issues/list/components/issues_list_app_spec.js b/spec/frontend/issues/list/components/issues_list_app_spec.js
index 3d3dbfa6853..a39853fd29c 100644
--- a/spec/frontend/issues/list/components/issues_list_app_spec.js
+++ b/spec/frontend/issues/list/components/issues_list_app_spec.js
@@ -52,6 +52,12 @@ import { getSortKey, getSortOptions } from '~/issues/list/utils';
import axios from '~/lib/utils/axios_utils';
import { scrollUp } from '~/lib/utils/scroll_utils';
import { joinPaths } from '~/lib/utils/url_utility';
+import {
+ WORK_ITEM_TYPE_ENUM_INCIDENT,
+ WORK_ITEM_TYPE_ENUM_ISSUE,
+ WORK_ITEM_TYPE_ENUM_TASK,
+ WORK_ITEM_TYPE_ENUM_TEST_CASE,
+} from '~/work_items/constants';
jest.mock('@sentry/browser');
jest.mock('~/flash');
@@ -123,6 +129,7 @@ describe('CE IssuesListApp component', () => {
const mountComponent = ({
provide = {},
data = {},
+ workItems = false,
issuesQueryResponse = mockIssuesQueryResponse,
issuesCountsQueryResponse = mockIssuesCountsQueryResponse,
sortPreferenceMutationResponse = jest.fn().mockResolvedValue(setSortPreferenceMutationResponse),
@@ -141,6 +148,9 @@ describe('CE IssuesListApp component', () => {
apolloProvider: createMockApollo(requestHandlers),
router,
provide: {
+ glFeatures: {
+ workItems,
+ },
...defaultProvide,
...provide,
},
@@ -168,22 +178,6 @@ describe('CE IssuesListApp component', () => {
return waitForPromises();
});
- it('queries list with types `ISSUE` and `INCIDENT', () => {
- const expectedTypes = ['ISSUE', 'INCIDENT', 'TEST_CASE'];
-
- expect(mockIssuesQueryResponse).toHaveBeenCalledWith(
- expect.objectContaining({
- types: expectedTypes,
- }),
- );
-
- expect(mockIssuesCountsQueryResponse).toHaveBeenCalledWith(
- expect.objectContaining({
- types: expectedTypes,
- }),
- );
- });
-
it('renders', () => {
expect(findIssuableList().props()).toMatchObject({
namespace: defaultProvide.fullPath,
@@ -1024,6 +1018,21 @@ describe('CE IssuesListApp component', () => {
});
});
});
+
+ describe('when "page-size-change" event is emitted by IssuableList', () => {
+ it('updates url params with new page size', async () => {
+ wrapper = mountComponent();
+ router.push = jest.fn();
+
+ findIssuableList().vm.$emit('page-size-change', 50);
+ await nextTick();
+
+ expect(router.push).toHaveBeenCalledTimes(1);
+ expect(router.push).toHaveBeenCalledWith({
+ query: expect.objectContaining({ first_page_size: 50 }),
+ });
+ });
+ });
});
describe('public visibility', () => {
@@ -1045,17 +1054,45 @@ describe('CE IssuesListApp component', () => {
});
});
- describe('when "page-size-change" event is emitted by IssuableList', () => {
- it('updates url params with new page size', async () => {
- wrapper = mountComponent();
- router.push = jest.fn();
+ describe('fetching issues', () => {
+ describe('when work_items feature flag is disabled', () => {
+ beforeEach(() => {
+ wrapper = mountComponent({ workItems: false });
+ jest.runOnlyPendingTimers();
+ });
- findIssuableList().vm.$emit('page-size-change', 50);
- await nextTick();
+ it('fetches issue, incident, and test case types', () => {
+ const types = [
+ WORK_ITEM_TYPE_ENUM_ISSUE,
+ WORK_ITEM_TYPE_ENUM_INCIDENT,
+ WORK_ITEM_TYPE_ENUM_TEST_CASE,
+ ];
- expect(router.push).toHaveBeenCalledTimes(1);
- expect(router.push).toHaveBeenCalledWith({
- query: expect.objectContaining({ first_page_size: 50 }),
+ expect(mockIssuesQueryResponse).toHaveBeenCalledWith(expect.objectContaining({ types }));
+ expect(mockIssuesCountsQueryResponse).toHaveBeenCalledWith(
+ expect.objectContaining({ types }),
+ );
+ });
+ });
+
+ describe('when work_items feature flag is enabled', () => {
+ beforeEach(() => {
+ wrapper = mountComponent({ workItems: true });
+ jest.runOnlyPendingTimers();
+ });
+
+ it('fetches issue, incident, test case, and task types', () => {
+ const types = [
+ WORK_ITEM_TYPE_ENUM_ISSUE,
+ WORK_ITEM_TYPE_ENUM_INCIDENT,
+ WORK_ITEM_TYPE_ENUM_TEST_CASE,
+ WORK_ITEM_TYPE_ENUM_TASK,
+ ];
+
+ expect(mockIssuesQueryResponse).toHaveBeenCalledWith(expect.objectContaining({ types }));
+ expect(mockIssuesCountsQueryResponse).toHaveBeenCalledWith(
+ expect.objectContaining({ types }),
+ );
});
});
});
diff --git a/spec/lib/gitlab/background_migration/project_namespaces/backfill_project_namespaces_spec.rb b/spec/lib/gitlab/background_migration/project_namespaces/backfill_project_namespaces_spec.rb
index 2ad561ead87..bff803e2035 100644
--- a/spec/lib/gitlab/background_migration/project_namespaces/backfill_project_namespaces_spec.rb
+++ b/spec/lib/gitlab/background_migration/project_namespaces/backfill_project_namespaces_spec.rb
@@ -5,199 +5,211 @@ require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::ProjectNamespaces::BackfillProjectNamespaces, :migration, schema: 20220326161803 do
include MigrationsHelpers
- context 'when migrating data', :aggregate_failures do
- let(:projects) { table(:projects) }
- let(:namespaces) { table(:namespaces) }
+ RSpec.shared_examples 'backfills project namespaces' do
+ context 'when migrating data', :aggregate_failures do
+ let(:projects) { table(:projects) }
+ let(:namespaces) { table(:namespaces) }
- let(:parent_group1) { namespaces.create!(name: 'parent_group1', path: 'parent_group1', visibility_level: 20, type: 'Group') }
- let(:parent_group2) { namespaces.create!(name: 'test1', path: 'test1', runners_token: 'my-token1', project_creation_level: 1, visibility_level: 20, type: 'Group') }
+ let(:parent_group1) { namespaces.create!(name: 'parent_group1', path: 'parent_group1', visibility_level: 20, type: 'Group') }
+ let(:parent_group2) { namespaces.create!(name: 'test1', path: 'test1', runners_token: 'my-token1', project_creation_level: 1, visibility_level: 20, type: 'Group') }
- let(:parent_group1_project) { projects.create!(name: 'parent_group1_project', path: 'parent_group1_project', namespace_id: parent_group1.id, visibility_level: 20) }
- let(:parent_group2_project) { projects.create!(name: 'parent_group2_project', path: 'parent_group2_project', namespace_id: parent_group2.id, visibility_level: 20) }
+ let(:parent_group1_project) { projects.create!(name: 'parent_group1_project', path: 'parent_group1_project', namespace_id: parent_group1.id, visibility_level: 20) }
+ let(:parent_group2_project) { projects.create!(name: 'parent_group2_project', path: 'parent_group2_project', namespace_id: parent_group2.id, visibility_level: 20) }
- let(:child_nodes_count) { 2 }
- let(:tree_depth) { 3 }
+ let(:child_nodes_count) { 2 }
+ let(:tree_depth) { 3 }
- let(:backfilled_namespace) { nil }
+ let(:backfilled_namespace) { nil }
- before do
- BackfillProjectNamespaces::TreeGenerator.new(namespaces, projects, [parent_group1, parent_group2], child_nodes_count, tree_depth).build_tree
- end
-
- describe '#up' do
- shared_examples 'back-fill project namespaces' do
- it 'back-fills all project namespaces' do
- start_id = ::Project.minimum(:id)
- end_id = ::Project.maximum(:id)
- projects_count = ::Project.count
- batches_count = (projects_count / described_class::SUB_BATCH_SIZE.to_f).ceil
- project_namespaces_count = ::Namespace.where(type: 'Project').count
- migration = described_class.new
-
- expect(projects_count).not_to eq(project_namespaces_count)
- expect(migration).to receive(:batch_insert_namespaces).exactly(batches_count).and_call_original
- expect(migration).to receive(:batch_update_projects).exactly(batches_count).and_call_original
- expect(migration).to receive(:batch_update_project_namespaces_traversal_ids).exactly(batches_count).and_call_original
-
- expect { migration.perform(start_id, end_id, nil, nil, nil, nil, nil, 'up') }.to change(Namespace.where(type: 'Project'), :count)
-
- expect(projects_count).to eq(::Namespace.where(type: 'Project').count)
- check_projects_in_sync_with(Namespace.where(type: 'Project'))
- end
-
- context 'when passing specific group as parameter' do
- let(:backfilled_namespace) { parent_group1 }
-
- it 'back-fills project namespaces for the specified group hierarchy' do
- backfilled_namespace_projects = base_ancestor(backfilled_namespace).first.all_projects
- start_id = backfilled_namespace_projects.minimum(:id)
- end_id = backfilled_namespace_projects.maximum(:id)
- group_projects_count = backfilled_namespace_projects.count
- batches_count = (group_projects_count / described_class::SUB_BATCH_SIZE.to_f).ceil
- project_namespaces_in_hierarchy = project_namespaces_in_hierarchy(base_ancestor(backfilled_namespace))
+ before do
+ BackfillProjectNamespaces::TreeGenerator.new(namespaces, projects, [parent_group1, parent_group2], child_nodes_count, tree_depth).build_tree
+ end
+ describe '#up' do
+ shared_examples 'back-fill project namespaces' do
+ it 'back-fills all project namespaces' do
+ start_id = ::Project.minimum(:id)
+ end_id = ::Project.maximum(:id)
+ projects_count = ::Project.count
+ batches_count = (projects_count / described_class::SUB_BATCH_SIZE.to_f).ceil
+ project_namespaces_count = ::Namespace.where(type: 'Project').count
migration = described_class.new
- expect(project_namespaces_in_hierarchy.count).to eq(0)
+ expect(projects_count).not_to eq(project_namespaces_count)
expect(migration).to receive(:batch_insert_namespaces).exactly(batches_count).and_call_original
expect(migration).to receive(:batch_update_projects).exactly(batches_count).and_call_original
expect(migration).to receive(:batch_update_project_namespaces_traversal_ids).exactly(batches_count).and_call_original
- expect(group_projects_count).to eq(14)
- expect(project_namespaces_in_hierarchy.count).to eq(0)
-
- migration.perform(start_id, end_id, nil, nil, nil, nil, backfilled_namespace.id, 'up')
+ expect { migration.perform(start_id, end_id, nil, nil, nil, nil, nil, 'up') }.to change(Namespace.where(type: 'Project'), :count)
- expect(project_namespaces_in_hierarchy.count).to eq(14)
- check_projects_in_sync_with(project_namespaces_in_hierarchy)
+ expect(projects_count).to eq(::Namespace.where(type: 'Project').count)
+ check_projects_in_sync_with(Namespace.where(type: 'Project'))
end
- end
- context 'when projects already have project namespaces' do
- before do
- hierarchy1_projects = base_ancestor(parent_group1).first.all_projects
- start_id = hierarchy1_projects.minimum(:id)
- end_id = hierarchy1_projects.maximum(:id)
+ context 'when passing specific group as parameter' do
+ let(:backfilled_namespace) { parent_group1 }
- described_class.new.perform(start_id, end_id, nil, nil, nil, nil, parent_group1.id, 'up')
- end
+ it 'back-fills project namespaces for the specified group hierarchy' do
+ backfilled_namespace_projects = base_ancestor(backfilled_namespace).first.all_projects
+ start_id = backfilled_namespace_projects.minimum(:id)
+ end_id = backfilled_namespace_projects.maximum(:id)
+ group_projects_count = backfilled_namespace_projects.count
+ batches_count = (group_projects_count / described_class::SUB_BATCH_SIZE.to_f).ceil
+ project_namespaces_in_hierarchy = project_namespaces_in_hierarchy(base_ancestor(backfilled_namespace))
- it 'does not duplicate project namespaces' do
- # check there are already some project namespaces but not for all
- projects_count = ::Project.count
- start_id = ::Project.minimum(:id)
- end_id = ::Project.maximum(:id)
- batches_count = (projects_count / described_class::SUB_BATCH_SIZE.to_f).ceil
- project_namespaces = ::Namespace.where(type: 'Project')
- migration = described_class.new
+ migration = described_class.new
- expect(project_namespaces_in_hierarchy(base_ancestor(parent_group1)).count).to be >= 14
- expect(project_namespaces_in_hierarchy(base_ancestor(parent_group2)).count).to eq(0)
- expect(projects_count).not_to eq(project_namespaces.count)
+ expect(project_namespaces_in_hierarchy.count).to eq(0)
+ expect(migration).to receive(:batch_insert_namespaces).exactly(batches_count).and_call_original
+ expect(migration).to receive(:batch_update_projects).exactly(batches_count).and_call_original
+ expect(migration).to receive(:batch_update_project_namespaces_traversal_ids).exactly(batches_count).and_call_original
- # run migration again to test we do not generate extra project namespaces
- expect(migration).to receive(:batch_insert_namespaces).exactly(batches_count).and_call_original
- expect(migration).to receive(:batch_update_projects).exactly(batches_count).and_call_original
- expect(migration).to receive(:batch_update_project_namespaces_traversal_ids).exactly(batches_count).and_call_original
+ expect(group_projects_count).to eq(14)
+ expect(project_namespaces_in_hierarchy.count).to eq(0)
- expect { migration.perform(start_id, end_id, nil, nil, nil, nil, nil, 'up') }.to change(project_namespaces, :count).by(14)
+ migration.perform(start_id, end_id, nil, nil, nil, nil, backfilled_namespace.id, 'up')
- expect(projects_count).to eq(project_namespaces.count)
+ expect(project_namespaces_in_hierarchy.count).to eq(14)
+ check_projects_in_sync_with(project_namespaces_in_hierarchy)
+ end
end
- end
- end
- it 'checks no project namespaces exist in the defined hierarchies' do
- hierarchy1_project_namespaces = project_namespaces_in_hierarchy(base_ancestor(parent_group1))
- hierarchy2_project_namespaces = project_namespaces_in_hierarchy(base_ancestor(parent_group2))
- hierarchy1_projects_count = base_ancestor(parent_group1).first.all_projects.count
- hierarchy2_projects_count = base_ancestor(parent_group2).first.all_projects.count
+ context 'when projects already have project namespaces' do
+ before do
+ hierarchy1_projects = base_ancestor(parent_group1).first.all_projects
+ start_id = hierarchy1_projects.minimum(:id)
+ end_id = hierarchy1_projects.maximum(:id)
+
+ described_class.new.perform(start_id, end_id, nil, nil, nil, nil, parent_group1.id, 'up')
+ end
+
+ it 'does not duplicate project namespaces' do
+ # check there are already some project namespaces but not for all
+ projects_count = ::Project.count
+ start_id = ::Project.minimum(:id)
+ end_id = ::Project.maximum(:id)
+ batches_count = (projects_count / described_class::SUB_BATCH_SIZE.to_f).ceil
+ project_namespaces = ::Namespace.where(type: 'Project')
+ migration = described_class.new
+
+ expect(project_namespaces_in_hierarchy(base_ancestor(parent_group1)).count).to be >= 14
+ expect(project_namespaces_in_hierarchy(base_ancestor(parent_group2)).count).to eq(0)
+ expect(projects_count).not_to eq(project_namespaces.count)
+
+ # run migration again to test we do not generate extra project namespaces
+ expect(migration).to receive(:batch_insert_namespaces).exactly(batches_count).and_call_original
+ expect(migration).to receive(:batch_update_projects).exactly(batches_count).and_call_original
+ expect(migration).to receive(:batch_update_project_namespaces_traversal_ids).exactly(batches_count).and_call_original
+
+ expect { migration.perform(start_id, end_id, nil, nil, nil, nil, nil, 'up') }.to change(project_namespaces, :count).by(14)
+
+ expect(projects_count).to eq(project_namespaces.count)
+ end
+ end
+ end
- expect(hierarchy1_project_namespaces).to be_empty
- expect(hierarchy2_project_namespaces).to be_empty
- expect(hierarchy1_projects_count).to eq(14)
- expect(hierarchy2_projects_count).to eq(14)
- end
+ it 'checks no project namespaces exist in the defined hierarchies' do
+ hierarchy1_project_namespaces = project_namespaces_in_hierarchy(base_ancestor(parent_group1))
+ hierarchy2_project_namespaces = project_namespaces_in_hierarchy(base_ancestor(parent_group2))
+ hierarchy1_projects_count = base_ancestor(parent_group1).first.all_projects.count
+ hierarchy2_projects_count = base_ancestor(parent_group2).first.all_projects.count
- context 'back-fill project namespaces in a single batch' do
- it_behaves_like 'back-fill project namespaces'
- end
+ expect(hierarchy1_project_namespaces).to be_empty
+ expect(hierarchy2_project_namespaces).to be_empty
+ expect(hierarchy1_projects_count).to eq(14)
+ expect(hierarchy2_projects_count).to eq(14)
+ end
- context 'back-fill project namespaces in batches' do
- before do
- stub_const("#{described_class.name}::SUB_BATCH_SIZE", 2)
+ context 'back-fill project namespaces in a single batch' do
+ it_behaves_like 'back-fill project namespaces'
end
- it_behaves_like 'back-fill project namespaces'
- end
- end
+ context 'back-fill project namespaces in batches' do
+ before do
+ stub_const("#{described_class.name}::SUB_BATCH_SIZE", 2)
+ end
- describe '#down' do
- before do
- start_id = ::Project.minimum(:id)
- end_id = ::Project.maximum(:id)
- # back-fill first
- described_class.new.perform(start_id, end_id, nil, nil, nil, nil, nil, 'up')
+ it_behaves_like 'back-fill project namespaces'
+ end
end
- shared_examples 'cleanup project namespaces' do
- it 'removes project namespaces' do
- projects_count = ::Project.count
+ describe '#down' do
+ before do
start_id = ::Project.minimum(:id)
end_id = ::Project.maximum(:id)
- migration = described_class.new
- batches_count = (projects_count / described_class::SUB_BATCH_SIZE.to_f).ceil
+ # back-fill first
+ described_class.new.perform(start_id, end_id, nil, nil, nil, nil, nil, 'up')
+ end
- expect(projects_count).to be > 0
- expect(projects_count).to eq(::Namespace.where(type: 'Project').count)
+ shared_examples 'cleanup project namespaces' do
+ it 'removes project namespaces' do
+ projects_count = ::Project.count
+ start_id = ::Project.minimum(:id)
+ end_id = ::Project.maximum(:id)
+ migration = described_class.new
+ batches_count = (projects_count / described_class::SUB_BATCH_SIZE.to_f).ceil
- expect(migration).to receive(:nullify_project_namespaces_in_projects).exactly(batches_count).and_call_original
- expect(migration).to receive(:delete_project_namespace_records).exactly(batches_count).and_call_original
+ expect(projects_count).to be > 0
+ expect(projects_count).to eq(::Namespace.where(type: 'Project').count)
- migration.perform(start_id, end_id, nil, nil, nil, nil, nil, 'down')
+ expect(migration).to receive(:nullify_project_namespaces_in_projects).exactly(batches_count).and_call_original
+ expect(migration).to receive(:delete_project_namespace_records).exactly(batches_count).and_call_original
- expect(::Project.count).to be > 0
- expect(::Namespace.where(type: 'Project').count).to eq(0)
- end
+ migration.perform(start_id, end_id, nil, nil, nil, nil, nil, 'down')
+
+ expect(::Project.count).to be > 0
+ expect(::Namespace.where(type: 'Project').count).to eq(0)
+ end
- context 'when passing specific group as parameter' do
- let(:backfilled_namespace) { parent_group1 }
+ context 'when passing specific group as parameter' do
+ let(:backfilled_namespace) { parent_group1 }
- it 'removes project namespaces only for the specific group hierarchy' do
- backfilled_namespace_projects = base_ancestor(backfilled_namespace).first.all_projects
- start_id = backfilled_namespace_projects.minimum(:id)
- end_id = backfilled_namespace_projects.maximum(:id)
- group_projects_count = backfilled_namespace_projects.count
- batches_count = (group_projects_count / described_class::SUB_BATCH_SIZE.to_f).ceil
- project_namespaces_in_hierarchy = project_namespaces_in_hierarchy(base_ancestor(backfilled_namespace))
- migration = described_class.new
+ it 'removes project namespaces only for the specific group hierarchy' do
+ backfilled_namespace_projects = base_ancestor(backfilled_namespace).first.all_projects
+ start_id = backfilled_namespace_projects.minimum(:id)
+ end_id = backfilled_namespace_projects.maximum(:id)
+ group_projects_count = backfilled_namespace_projects.count
+ batches_count = (group_projects_count / described_class::SUB_BATCH_SIZE.to_f).ceil
+ project_namespaces_in_hierarchy = project_namespaces_in_hierarchy(base_ancestor(backfilled_namespace))
+ migration = described_class.new
- expect(project_namespaces_in_hierarchy.count).to eq(14)
- expect(migration).to receive(:nullify_project_namespaces_in_projects).exactly(batches_count).and_call_original
- expect(migration).to receive(:delete_project_namespace_records).exactly(batches_count).and_call_original
+ expect(project_namespaces_in_hierarchy.count).to eq(14)
+ expect(migration).to receive(:nullify_project_namespaces_in_projects).exactly(batches_count).and_call_original
+ expect(migration).to receive(:delete_project_namespace_records).exactly(batches_count).and_call_original
- migration.perform(start_id, end_id, nil, nil, nil, nil, backfilled_namespace.id, 'down')
+ migration.perform(start_id, end_id, nil, nil, nil, nil, backfilled_namespace.id, 'down')
- expect(::Namespace.where(type: 'Project').count).to be > 0
- expect(project_namespaces_in_hierarchy.count).to eq(0)
+ expect(::Namespace.where(type: 'Project').count).to be > 0
+ expect(project_namespaces_in_hierarchy.count).to eq(0)
+ end
end
end
- end
- context 'cleanup project namespaces in a single batch' do
- it_behaves_like 'cleanup project namespaces'
- end
-
- context 'cleanup project namespaces in batches' do
- before do
- stub_const("#{described_class.name}::SUB_BATCH_SIZE", 2)
+ context 'cleanup project namespaces in a single batch' do
+ it_behaves_like 'cleanup project namespaces'
end
- it_behaves_like 'cleanup project namespaces'
+ context 'cleanup project namespaces in batches' do
+ before do
+ stub_const("#{described_class.name}::SUB_BATCH_SIZE", 2)
+ end
+
+ it_behaves_like 'cleanup project namespaces'
+ end
end
end
end
+ it_behaves_like 'backfills project namespaces'
+
+ context 'when namespaces.id is bigint' do
+ before do
+ namespaces.connection.execute("ALTER TABLE namespaces ALTER COLUMN id TYPE bigint")
+ end
+
+ it_behaves_like 'backfills project namespaces'
+ end
+
def base_ancestor(ancestor)
::Namespace.where(id: ancestor.id)
end
@@ -209,7 +221,7 @@ RSpec.describe Gitlab::BackgroundMigration::ProjectNamespaces::BackfillProjectNa
def check_projects_in_sync_with(namespaces)
project_namespaces_attrs = namespaces.order(:id).pluck(:id, :name, :path, :parent_id, :visibility_level, :shared_runners_enabled)
corresponding_projects_attrs = Project.where(project_namespace_id: project_namespaces_attrs.map(&:first))
- .order(:project_namespace_id).pluck(:project_namespace_id, :name, :path, :namespace_id, :visibility_level, :shared_runners_enabled)
+ .order(:project_namespace_id).pluck(:project_namespace_id, :name, :path, :namespace_id, :visibility_level, :shared_runners_enabled)
expect(project_namespaces_attrs).to eq(corresponding_projects_attrs)
end
diff --git a/spec/lib/gitlab/github_import/importer/events/changed_milestone_spec.rb b/spec/lib/gitlab/github_import/importer/events/changed_milestone_spec.rb
new file mode 100644
index 00000000000..5db708b9049
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/events/changed_milestone_spec.rb
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::Importer::Events::ChangedMilestone do
+ subject(:importer) { described_class.new(project, user.id) }
+
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+
+ let(:issue) { create(:issue, project: project) }
+ let!(:milestone) { create(:milestone, project: project) }
+
+ let(:issue_event) do
+ Gitlab::GithubImport::Representation::IssueEvent.from_json_hash(
+ 'id' => 6501124486,
+ 'actor' => { 'id' => 4, 'login' => 'alice' },
+ 'event' => event_type,
+ 'commit_id' => nil,
+ 'milestone_title' => milestone.title,
+ 'issue_db_id' => issue.id,
+ 'created_at' => '2022-04-26 18:30:53 UTC'
+ )
+ end
+
+ let(:event_attrs) do
+ {
+ user_id: user.id,
+ issue_id: issue.id,
+ milestone_id: milestone.id,
+ state: 'opened',
+ created_at: issue_event.created_at
+ }.stringify_keys
+ end
+
+ shared_examples 'new event' do
+ it 'creates a new milestone event' do
+ expect { importer.execute(issue_event) }.to change { issue.resource_milestone_events.count }
+ .from(0).to(1)
+ expect(issue.resource_milestone_events.last)
+ .to have_attributes(expected_event_attrs)
+ end
+ end
+
+ describe '#execute' do
+ before do
+ allow(Gitlab::Cache::Import::Caching).to receive(:read_integer).and_return(milestone.id)
+ end
+
+ context 'when importing a milestoned event' do
+ let(:event_type) { 'milestoned' }
+ let(:expected_event_attrs) { event_attrs.merge(action: 'add') }
+
+ it_behaves_like 'new event'
+ end
+
+ context 'when importing demilestoned event' do
+ let(:event_type) { 'demilestoned' }
+ let(:expected_event_attrs) { event_attrs.merge(action: 'remove') }
+
+ it_behaves_like 'new event'
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/issue_event_importer_spec.rb b/spec/lib/gitlab/github_import/importer/issue_event_importer_spec.rb
index da32a3b3766..41d777fb466 100644
--- a/spec/lib/gitlab/github_import/importer/issue_event_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/issue_event_importer_spec.rb
@@ -87,6 +87,20 @@ RSpec.describe Gitlab::GithubImport::Importer::IssueEventImporter, :clean_gitlab
Gitlab::GithubImport::Importer::Events::Renamed
end
  # GitHub emits separate 'milestoned' and 'demilestoned' issue events;
  # both are routed to the same ChangedMilestone event importer, which
  # distinguishes them by the event name.
  context "when it's milestoned issue event" do
    let(:event_name) { 'milestoned' }

    it_behaves_like 'triggers specific event importer',
      Gitlab::GithubImport::Importer::Events::ChangedMilestone
  end

  context "when it's demilestoned issue event" do
    let(:event_name) { 'demilestoned' }

    it_behaves_like 'triggers specific event importer',
      Gitlab::GithubImport::Importer::Events::ChangedMilestone
  end
+
context "when it's cross-referenced issue event" do
let(:event_name) { 'cross-referenced' }
diff --git a/spec/lib/gitlab/github_import/representation/issue_event_spec.rb b/spec/lib/gitlab/github_import/representation/issue_event_spec.rb
index 23da8276f64..7382b0e2fff 100644
--- a/spec/lib/gitlab/github_import/representation/issue_event_spec.rb
+++ b/spec/lib/gitlab/github_import/representation/issue_event_spec.rb
@@ -77,6 +77,20 @@ RSpec.describe Gitlab::GithubImport::Representation::IssueEvent do
end
end
+ context 'when milestone data is present' do
+ it 'includes the milestone_title' do
+ expect(issue_event.milestone_title).to eq('milestone title')
+ end
+ end
+
+ context 'when milestone data is empty' do
+ let(:with_milestone) { false }
+
+ it 'does not return such info' do
+ expect(issue_event.milestone_title).to eq nil
+ end
+ end
+
it 'includes the created timestamp' do
expect(issue_event.created_at).to eq('2022-04-26 18:30:53 UTC')
end
@@ -93,7 +107,7 @@ RSpec.describe Gitlab::GithubImport::Representation::IssueEvent do
let(:response) do
event_resource = Struct.new(
:id, :node_id, :url, :actor, :event, :commit_id, :commit_url, :label,
- :rename, :issue_db_id, :created_at, :performed_via_github_app, :source,
+ :rename, :milestone, :source, :issue_db_id, :created_at, :performed_via_github_app,
keyword_init: true
)
user_resource = Struct.new(:id, :login, keyword_init: true)
@@ -106,10 +120,11 @@ RSpec.describe Gitlab::GithubImport::Representation::IssueEvent do
commit_id: '570e7b2abdd848b95f2f578043fc23bd6f6fd24d',
commit_url: 'https://api.github.com/repos/octocat/Hello-World/commits'\
'/570e7b2abdd848b95f2f578043fc23bd6f6fd24d',
+ label: with_label ? { name: 'label title' } : nil,
rename: with_rename ? { from: 'old title', to: 'new title' } : nil,
+ milestone: with_milestone ? { title: 'milestone title' } : nil,
source: { type: 'issue', id: 123456 },
issue_db_id: 100500,
- label: with_label ? { name: 'label title' } : nil,
created_at: '2022-04-26 18:30:53 UTC',
performed_via_github_app: nil
)
@@ -118,6 +133,7 @@ RSpec.describe Gitlab::GithubImport::Representation::IssueEvent do
let(:with_actor) { true }
let(:with_label) { true }
let(:with_rename) { true }
+ let(:with_milestone) { true }
it_behaves_like 'an IssueEvent' do
let(:issue_event) { described_class.from_api_response(response) }
@@ -139,6 +155,7 @@ RSpec.describe Gitlab::GithubImport::Representation::IssueEvent do
'label_title' => (with_label ? 'label title' : nil),
'old_title' => with_rename ? 'old title' : nil,
'new_title' => with_rename ? 'new title' : nil,
+ 'milestone_title' => (with_milestone ? 'milestone title' : nil),
'source' => { 'type' => 'issue', 'id' => 123456 },
"issue_db_id" => 100500,
'created_at' => '2022-04-26 18:30:53 UTC',
@@ -149,6 +166,7 @@ RSpec.describe Gitlab::GithubImport::Representation::IssueEvent do
let(:with_actor) { true }
let(:with_label) { true }
let(:with_rename) { true }
+ let(:with_milestone) { true }
let(:issue_event) { described_class.from_json_hash(hash) }
end
diff --git a/spec/lib/gitlab/version_info_spec.rb b/spec/lib/gitlab/version_info_spec.rb
index 6ed094f11c8..6535c42ad4a 100644
--- a/spec/lib/gitlab/version_info_spec.rb
+++ b/spec/lib/gitlab/version_info_spec.rb
@@ -133,6 +133,20 @@ RSpec.describe Gitlab::VersionInfo do
it { expect(@unknown.to_s).to eq("Unknown") }
end
+ describe '.to_json' do
+ let(:correct_version) do
+ "{\"major\":1,\"minor\":0,\"patch\":1}"
+ end
+
+ let(:unknown_version) do
+ "{\"major\":0,\"minor\":0,\"patch\":0}"
+ end
+
+ it { expect(@v1_0_1.to_json).to eq(correct_version) }
+ it { expect(@v1_0_1_rc2.to_json).to eq(correct_version) }
+ it { expect(@unknown.to_json).to eq(unknown_version) }
+ end
+
describe '.hash' do
it { expect(described_class.parse("1.0.0").hash).to eq(@v1_0_0.hash) }
it { expect(described_class.parse("1.0.0.1").hash).to eq(@v1_0_0.hash) }
diff --git a/spec/rubocop/cop_todo_spec.rb b/spec/rubocop/cop_todo_spec.rb
new file mode 100644
index 00000000000..978df2c01ee
--- /dev/null
+++ b/spec/rubocop/cop_todo_spec.rb
@@ -0,0 +1,124 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require_relative '../../rubocop/cop_todo'
+
RSpec.describe RuboCop::CopTodo do
  let(:cop_name) { 'Cop/Rule' }

  subject(:cop_todo) { described_class.new(cop_name) }

  describe '#initialize' do
    it 'initializes a cop todo' do
      # A fresh todo starts empty: no files, no offenses, not disabled.
      expect(cop_todo).to have_attributes(
        cop_name: cop_name,
        files: be_empty,
        offense_count: 0,
        previously_disabled: false
      )
    end
  end

  describe '#record' do
    it 'records offenses' do
      cop_todo.record('a.rb', 1)
      cop_todo.record('b.rb', 2)

      # Files accumulate and per-file offense counts are summed.
      expect(cop_todo).to have_attributes(
        files: contain_exactly('a.rb', 'b.rb'),
        offense_count: 3
      )
    end
  end

  describe '#autocorrectable?' do
    subject { cop_todo.autocorrectable? }

    context 'when found in rubocop registry' do
      before do
        # Stub the registry lookup so the test does not depend on which
        # cops the installed RuboCop version actually defines.
        fake_cop = double(:cop, support_autocorrect?: autocorrectable) # rubocop:disable RSpec/VerifiedDoubles

        allow(described_class).to receive(:find_cop_by_name)
          .with(cop_name).and_return(fake_cop)
      end

      context 'when autocorrectable' do
        let(:autocorrectable) { true }

        it { is_expected.to be_truthy }
      end

      context 'when not autocorrectable' do
        let(:autocorrectable) { false }

        it { is_expected.to be_falsey }
      end
    end

    context 'when not found in rubocop registry' do
      before do
        # Return nil for this cop only; other lookups fall through to the
        # real implementation via and_call_original.
        allow(described_class).to receive(:find_cop_by_name)
          .with(cop_name).and_return(nil).and_call_original
      end

      it { is_expected.to be_falsey }
    end
  end

  describe '#to_yaml' do
    subject(:yaml) { cop_todo.to_yaml }

    # NOTE(review): the '#' lines inside the heredocs below are part of the
    # expected YAML output, not Ruby comments — do not edit them.
    context 'when autocorrectable' do
      before do
        allow(cop_todo).to receive(:autocorrectable?).and_return(true)
      end

      specify do
        expect(yaml).to eq(<<~YAML)
          ---
          # Cop supports --auto-correct.
          #{cop_name}:
            Exclude:
        YAML
      end
    end

    context 'when previously disabled' do
      specify do
        cop_todo.record('a.rb', 1)
        cop_todo.record('b.rb', 2)
        cop_todo.previously_disabled = true

        expect(yaml).to eq(<<~YAML)
          ---
          #{cop_name}:
            # Offense count: 3
            # Temporarily disabled due to too many offenses
            Enabled: false
            Exclude:
              - 'a.rb'
              - 'b.rb'
        YAML
      end
    end

    context 'with multiple files' do
      before do
        # Recorded deliberately out of order to prove the output sorts.
        cop_todo.record('a.rb', 0)
        cop_todo.record('c.rb', 0)
        cop_todo.record('b.rb', 0)
      end

      it 'sorts excludes alphabetically' do
        expect(yaml).to eq(<<~YAML)
          ---
          #{cop_name}:
            Exclude:
              - 'a.rb'
              - 'b.rb'
              - 'c.rb'
        YAML
      end
    end
  end
end
diff --git a/spec/rubocop/formatter/todo_formatter_spec.rb b/spec/rubocop/formatter/todo_formatter_spec.rb
index fcff028f07d..df56ee45931 100644
--- a/spec/rubocop/formatter/todo_formatter_spec.rb
+++ b/spec/rubocop/formatter/todo_formatter_spec.rb
@@ -261,16 +261,12 @@ RSpec.describe RuboCop::Formatter::TodoFormatter do
double(:offense, cop_name: cop_name)
end
- def stub_rubocop_registry(**cops)
- rubocop_registry = double(:rubocop_registry)
-
- allow(RuboCop::Cop::Registry).to receive(:global).and_return(rubocop_registry)
-
- allow(rubocop_registry).to receive(:find_by_cop_name)
- .with(String).and_return(nil)
+ def stub_rubocop_registry(cops)
+ allow(RuboCop::CopTodo).to receive(:find_cop_by_name)
+ .with(String).and_return(nil).and_call_original
cops.each do |cop_name, attributes|
- allow(rubocop_registry).to receive(:find_by_cop_name)
+ allow(RuboCop::CopTodo).to receive(:find_cop_by_name)
.with(cop_name).and_return(fake_cop(**attributes))
end
end
diff --git a/tooling/danger/product_intelligence.rb b/tooling/danger/product_intelligence.rb
index 0f007e970b4..621a7b509b0 100644
--- a/tooling/danger/product_intelligence.rb
+++ b/tooling/danger/product_intelligence.rb
@@ -7,7 +7,7 @@ module Tooling
APPROVED_LABEL = 'product intelligence::approved'
REVIEW_LABEL = 'product intelligence::review pending'
CHANGED_FILES_MESSAGE = <<~MSG
- For the following files, a review from the [Data team and Product Intelligence team](https://gitlab.com/groups/gitlab-org/growth/product-intelligence/engineers/-/group_members?with_inherited_permissions=exclude) is recommended
+ For the following files, a review from the [Data team and Product Intelligence team](https://gitlab.com/groups/gitlab-org/analytics-section/product-intelligence/engineers/-/group_members?with_inherited_permissions=exclude) is recommended
Please check the ~"product intelligence" [Service Ping guide](https://docs.gitlab.com/ee/development/service_ping/) or the [Snowplow guide](https://docs.gitlab.com/ee/development/snowplow/).
For MR review guidelines, see the [Service Ping review guidelines](https://docs.gitlab.com/ee/development/service_ping/review_guidelines.html) or the [Snowplow review guidelines](https://docs.gitlab.com/ee/development/snowplow/review_guidelines.html).