gitlab.com/gitlab-org/gitlab-foss.git
author    GitLab Bot <gitlab-bot@gitlab.com>  2020-07-08 15:09:33 +0300
committer GitLab Bot <gitlab-bot@gitlab.com>  2020-07-08 15:09:33 +0300
commit    c52b72f5772d52e9fc85bd9f4e8b8497a6278c37 (patch)
tree      bbe0504b4c07a93e24db4a72785a847b2540eef8
parent    21341457a8c422d890a9ec30838b597dea565d62 (diff)
Add latest changes from gitlab-org/gitlab@master
-rw-r--r--  .rubocop_todo.yml | 2
-rw-r--r--  app/assets/javascripts/api.js | 15
-rw-r--r--  app/assets/javascripts/boards/components/board_content.vue | 2
-rw-r--r--  app/assets/javascripts/boards/components/board_form.vue | 5
-rw-r--r--  app/assets/javascripts/boards/components/boards_selector.vue | 5
-rw-r--r--  app/assets/javascripts/boards/components/modal/header.vue | 4
-rw-r--r--  app/assets/javascripts/boards/components/modal/index.vue | 10
-rw-r--r--  app/assets/javascripts/boards/mount_multiple_boards_switcher.js | 2
-rw-r--r--  app/assets/javascripts/milestone_select.js | 151
-rw-r--r--  app/assets/javascripts/static_site_editor/components/edit_area.vue | 19
-rw-r--r--  app/assets/javascripts/static_site_editor/constants.js | 2
-rw-r--r--  app/assets/javascripts/static_site_editor/graphql/resolvers/submit_content_changes.js | 4
-rw-r--r--  app/assets/javascripts/static_site_editor/image_repository.js | 20
-rw-r--r--  app/assets/javascripts/static_site_editor/pages/home.vue | 7
-rw-r--r--  app/assets/javascripts/static_site_editor/services/image_service.js | 9
-rw-r--r--  app/assets/javascripts/static_site_editor/services/submit_content_changes.js | 32
-rw-r--r--  app/assets/javascripts/vue_shared/components/markdown/suggestion_diff.vue | 1
-rw-r--r--  app/assets/javascripts/vue_shared/components/markdown/suggestion_diff_header.vue | 9
-rw-r--r--  app/assets/javascripts/vue_shared/components/rich_content_editor/modals/add_image/add_image_modal.vue | 12
-rw-r--r--  app/assets/javascripts/vue_shared/components/rich_content_editor/rich_content_editor.vue | 13
-rw-r--r--  app/assets/javascripts/vue_shared/components/rich_content_editor/services/image_service.js | 2
-rw-r--r--  app/assets/stylesheets/framework/system_messages.scss | 3
-rw-r--r--  app/assets/stylesheets/pages/boards.scss | 26
-rw-r--r--  app/assets/stylesheets/utilities.scss | 9
-rw-r--r--  app/graphql/resolvers/milestone_resolver.rb | 6
-rw-r--r--  app/helpers/clusters_helper.rb | 5
-rw-r--r--  app/models/clusters/cluster.rb | 8
-rw-r--r--  app/models/concerns/deployment_platform.rb | 22
-rw-r--r--  app/models/packages.rb | 6
-rw-r--r--  app/models/packages/build_info.rb | 6
-rw-r--r--  app/models/packages/composer/metadatum.rb | 14
-rw-r--r--  app/models/packages/conan.rb | 8
-rw-r--r--  app/models/packages/conan/file_metadatum.rb | 32
-rw-r--r--  app/models/packages/conan/metadatum.rb | 41
-rw-r--r--  app/models/packages/dependency.rb | 47
-rw-r--r--  app/models/packages/dependency_link.rb | 19
-rw-r--r--  app/models/packages/go/module.rb | 93
-rw-r--r--  app/models/packages/go/module_version.rb | 115
-rw-r--r--  app/models/packages/maven.rb | 8
-rw-r--r--  app/models/packages/maven/metadatum.rb | 28
-rw-r--r--  app/models/packages/nuget.rb | 8
-rw-r--r--  app/models/packages/nuget/dependency_link_metadatum.rb | 19
-rw-r--r--  app/models/packages/nuget/metadatum.rb | 27
-rw-r--r--  app/models/packages/package.rb | 195
-rw-r--r--  app/models/packages/package_file.rb | 56
-rw-r--r--  app/models/packages/pypi.rb | 8
-rw-r--r--  app/models/packages/pypi/metadatum.rb | 19
-rw-r--r--  app/models/packages/sem_ver.rb | 54
-rw-r--r--  app/models/packages/tag.rb | 18
-rw-r--r--  app/models/project.rb | 24
-rw-r--r--  app/models/prometheus_metric.rb | 1
-rw-r--r--  app/presenters/clusterable_presenter.rb | 14
-rw-r--r--  app/services/clusters/create_service.rb | 11
-rw-r--r--  app/services/metrics/dashboard/clone_dashboard_service.rb | 46
-rw-r--r--  app/services/metrics/dashboard/cluster_dashboard_service.rb | 31
-rw-r--r--  app/services/metrics/dashboard/cluster_metrics_embed_service.rb | 37
-rw-r--r--  app/services/packages/conan/create_package_file_service.rb | 31
-rw-r--r--  app/services/packages/conan/create_package_service.rb | 19
-rw-r--r--  app/services/packages/conan/search_service.rb | 58
-rw-r--r--  app/services/packages/create_dependency_service.rb | 82
-rw-r--r--  app/services/packages/create_package_file_service.rb | 22
-rw-r--r--  app/services/packages/maven/create_package_service.rb | 28
-rw-r--r--  app/services/packages/maven/find_or_create_package_service.rb | 41
-rw-r--r--  app/services/packages/npm/create_package_service.rb | 91
-rw-r--r--  app/services/packages/npm/create_tag_service.rb | 34
-rw-r--r--  app/services/packages/nuget/create_dependency_service.rb | 71
-rw-r--r--  app/services/packages/nuget/create_package_service.rb | 23
-rw-r--r--  app/services/packages/nuget/metadata_extraction_service.rb | 106
-rw-r--r--  app/services/packages/nuget/search_service.rb | 101
-rw-r--r--  app/services/packages/nuget/sync_metadatum_service.rb | 50
-rw-r--r--  app/services/packages/nuget/update_package_from_metadata_service.rb | 125
-rw-r--r--  app/services/packages/pypi/create_package_service.rb | 40
-rw-r--r--  app/services/packages/remove_tag_service.rb | 16
-rw-r--r--  app/services/packages/update_tags_service.rb | 41
-rw-r--r--  app/uploaders/packages/package_file_uploader.rb | 30
-rw-r--r--  app/views/clusters/clusters/_multiple_clusters_message.html.haml | 6
-rw-r--r--  app/views/clusters/clusters/_sidebar.html.haml | 2
-rw-r--r--  app/views/devise/mailer/_confirmation_instructions_secondary.html.haml | 2
-rw-r--r--  app/views/devise/mailer/_confirmation_instructions_secondary.text.erb | 2
-rw-r--r--  app/views/shared/boards/components/sidebar/_milestone.html.haml | 3
-rw-r--r--  app/views/shared/issuable/_sidebar.html.haml | 3
-rw-r--r--  changelogs/unreleased/196066-add-milestone-expired-info.yml | 5
-rw-r--r--  changelogs/unreleased/212229-move-features-to-core-multiple-kubernetes-clusters.yml | 5
-rw-r--r--  changelogs/unreleased/219455-fe-inapplicable-tooltip-message.yml | 5
-rw-r--r--  changelogs/unreleased/226874-fix-pages-url-path.yml | 5
-rw-r--r--  changelogs/unreleased/groups_routing_priority.yml | 5
-rw-r--r--  changelogs/unreleased/rc-enforce_unique_metrics_id_across_project.yml | 5
-rw-r--r--  changelogs/unreleased/remove-group_milestone_descendants.yml | 5
-rw-r--r--  changelogs/unreleased/unconfirm-wrongfully-verified-email-records.yml | 5
-rw-r--r--  config/initializers/1_settings.rb | 10
-rw-r--r--  config/prometheus/cluster_metrics.yml | 91
-rw-r--r--  config/prometheus/queries_cluster_metrics.yml | 65
-rw-r--r--  config/routes.rb | 3
-rw-r--r--  db/migrate/20200629192638_add_uniq_index_on_metric_identifier_and_project_id.rb | 17
-rw-r--r--  db/post_migrate/20200615111857_unconfirm_wrongfully_verified_emails.rb | 31
-rw-r--r--  db/structure.sql | 6
-rw-r--r--  doc/README.md | 4
-rw-r--r--  doc/administration/troubleshooting/elasticsearch.md | 3
-rw-r--r--  doc/api/group_milestones.md | 1
-rw-r--r--  doc/api/milestones.md | 3
-rw-r--r--  doc/ci/variables/README.md | 4
-rw-r--r--  doc/ci/yaml/README.md | 38
-rw-r--r--  doc/development/code_review.md | 7
-rw-r--r--  doc/development/telemetry/usage_ping.md | 4
-rw-r--r--  doc/topics/autodevops/index.md | 4
-rw-r--r--  doc/topics/autodevops/quick_start_guide.md | 2
-rw-r--r--  doc/user/group/clusters/index.md | 9
-rw-r--r--  doc/user/group/roadmap/index.md | 8
-rw-r--r--  doc/user/project/clusters/index.md | 7
-rw-r--r--  lib/api/entities/milestone.rb | 1
-rw-r--r--  lib/api/group_clusters.rb | 16
-rw-r--r--  lib/api/project_clusters.rb | 16
-rw-r--r--  lib/gitlab/background_migration/mailers/unconfirm_mailer.rb | 24
-rw-r--r--  lib/gitlab/background_migration/mailers/views/unconfirm_mailer/unconfirm_notification_email.html.haml | 19
-rw-r--r--  lib/gitlab/background_migration/mailers/views/unconfirm_mailer/unconfirm_notification_email.text.erb | 14
-rw-r--r--  lib/gitlab/background_migration/wrongfully_confirmed_email_unconfirmer.rb | 97
-rw-r--r--  lib/gitlab/danger/teammate.rb | 9
-rw-r--r--  lib/gitlab/database.rb | 8
-rw-r--r--  lib/gitlab/metrics/dashboard/service_selector.rb | 4
-rw-r--r--  lib/gitlab/metrics/dashboard/stages/cluster_endpoint_inserter.rb | 83
-rw-r--r--  lib/gitlab/metrics/dashboard/url.rb | 16
-rw-r--r--  lib/gitlab/usage_data/topology.rb | 34
-rw-r--r--  locale/gitlab.pot | 28
-rw-r--r--  package.json | 4
-rw-r--r--  scripts/review_apps/base-config.yaml | 4
-rw-r--r--  spec/factories/go_module_commits.rb | 82
-rw-r--r--  spec/factories/go_module_versions.rb | 77
-rw-r--r--  spec/factories/go_modules.rb | 13
-rw-r--r--  spec/factories/packages.rb | 355
-rw-r--r--  spec/features/projects/clusters/gcp_spec.rb | 26
-rw-r--r--  spec/features/projects/clusters_spec.rb | 162
-rw-r--r--  spec/features/projects/issues/design_management/user_uploads_designs_spec.rb | 2
-rw-r--r--  spec/fixtures/api/schemas/public_api/v4/milestone.json | 4
-rw-r--r--  spec/fixtures/api/schemas/public_api/v4/milestone_with_stats.json | 4
-rw-r--r--  spec/frontend/api_spec.js | 46
-rw-r--r--  spec/frontend/boards/components/board_form_spec.js | 1
-rw-r--r--  spec/frontend/boards/components/boards_selector_spec.js | 1
-rw-r--r--  spec/frontend/static_site_editor/mock_data.js | 7
-rw-r--r--  spec/frontend/static_site_editor/services/submit_content_changes_spec.js | 48
-rw-r--r--  spec/frontend/vue_shared/components/markdown/suggestion_diff_header_spec.js | 20
-rw-r--r--  spec/frontend/vue_shared/components/rich_content_editor/modals/add_image/add_image_modal_spec.js | 9
-rw-r--r--  spec/frontend/vue_shared/components/rich_content_editor/rich_content_editor_spec.js | 3
-rw-r--r--  spec/frontend_integration/ide/__snapshots__/ide_integration_spec.js.snap | 1
-rw-r--r--  spec/helpers/clusters_helper_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/background_migration/mailers/unconfirm_mailer_spec.rb | 12
-rw-r--r--  spec/lib/gitlab/background_migration/wrongfully_confirmed_email_unconfirmer_spec.rb | 155
-rw-r--r--  spec/lib/gitlab/danger/teammate_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/database_spec.rb | 30
-rw-r--r--  spec/lib/gitlab/metrics/dashboard/service_selector_spec.rb | 40
-rw-r--r--  spec/lib/gitlab/metrics/dashboard/url_spec.rb | 29
-rw-r--r--  spec/lib/gitlab/usage_data/topology_spec.rb | 36
-rw-r--r--  spec/migrations/unconfirm_wrongfully_verified_emails_spec.rb | 21
-rw-r--r--  spec/models/clusters/cluster_spec.rb | 74
-rw-r--r--  spec/models/concerns/deployment_platform_spec.rb | 235
-rw-r--r--  spec/models/packages/composer/metadatum_spec.rb | 14
-rw-r--r--  spec/models/packages/conan/file_metadatum_spec.rb | 106
-rw-r--r--  spec/models/packages/conan/metadatum_spec.rb | 90
-rw-r--r--  spec/models/packages/dependency_link_spec.rb | 56
-rw-r--r--  spec/models/packages/dependency_spec.rb | 113
-rw-r--r--  spec/models/packages/go/module_spec.rb | 59
-rw-r--r--  spec/models/packages/go/module_version_spec.rb | 114
-rw-r--r--  spec/models/packages/maven/metadatum_spec.rb | 40
-rw-r--r--  spec/models/packages/nuget/dependency_link_metadatum_spec.rb | 32
-rw-r--r--  spec/models/packages/nuget/metadatum_spec.rb | 44
-rw-r--r--  spec/models/packages/package_file_spec.rb | 69
-rw-r--r--  spec/models/packages/package_spec.rb | 485
-rw-r--r--  spec/models/packages/pypi/metadatum_spec.rb | 22
-rw-r--r--  spec/models/packages/sem_ver_spec.rb | 42
-rw-r--r--  spec/models/packages/tag_spec.rb | 62
-rw-r--r--  spec/models/project_spec.rb | 83
-rw-r--r--  spec/models/prometheus_metric_spec.rb | 1
-rw-r--r--  spec/presenters/alert_management/prometheus_alert_presenter_spec.rb | 24
-rw-r--r--  spec/presenters/projects/prometheus/alert_presenter_spec.rb | 104
-rw-r--r--  spec/requests/api/graphql/group/milestones_spec.rb | 15
-rw-r--r--  spec/requests/api/graphql/project/alert_management/alert/metrics_dashboard_url_spec.rb | 62
-rw-r--r--  spec/requests/api/graphql/project/alert_management/alerts_spec.rb | 22
-rw-r--r--  spec/requests/api/group_clusters_spec.rb | 48
-rw-r--r--  spec/requests/api/project_clusters_spec.rb | 53
-rw-r--r--  spec/routing/group_routing_spec.rb | 24
-rw-r--r--  spec/routing/project_routing_spec.rb | 4
-rw-r--r--  spec/services/clusters/create_service_spec.rb | 47
-rw-r--r--  spec/services/metrics/dashboard/clone_dashboard_service_spec.rb | 13
-rw-r--r--  spec/services/metrics/dashboard/cluster_dashboard_service_spec.rb | 51
-rw-r--r--  spec/services/metrics/dashboard/cluster_metrics_embed_service_spec.rb | 72
-rw-r--r--  spec/services/projects/prometheus/alerts/notify_service_spec.rb | 35
-rw-r--r--  spec/support/services/clusters/create_service_shared.rb | 9
-rw-r--r--  spec/support/shared_contexts/prometheus/alert_shared_context.rb | 76
-rw-r--r--  yarn.lock | 18
188 files changed, 6145 insertions(+), 616 deletions(-)
diff --git a/.rubocop_todo.yml b/.rubocop_todo.yml
index 64e06cb1240..4211c7ce8b5 100644
--- a/.rubocop_todo.yml
+++ b/.rubocop_todo.yml
@@ -859,7 +859,7 @@ Rails/SaveBang:
- 'ee/spec/models/operations/feature_flag_spec.rb'
- 'ee/spec/models/operations/feature_flags/strategy_spec.rb'
- 'ee/spec/models/operations/feature_flags/user_list_spec.rb'
- - 'ee/spec/models/packages/package_spec.rb'
+ - 'spec/models/packages/package_spec.rb'
- 'ee/spec/models/project_ci_cd_setting_spec.rb'
- 'ee/spec/models/project_services/github_service_spec.rb'
- 'ee/spec/models/project_services/jenkins_service_spec.rb'
diff --git a/app/assets/javascripts/api.js b/app/assets/javascripts/api.js
index 94d155840ea..460611356c0 100644
--- a/app/assets/javascripts/api.js
+++ b/app/assets/javascripts/api.js
@@ -9,6 +9,7 @@ const Api = {
groupsPath: '/api/:version/groups.json',
groupPath: '/api/:version/groups/:id',
groupMembersPath: '/api/:version/groups/:id/members',
+ groupMilestonesPath: '/api/:version/groups/:id/milestones',
subgroupsPath: '/api/:version/groups/:id/subgroups',
namespacesPath: '/api/:version/namespaces.json',
groupProjectsPath: '/api/:version/groups/:id/projects.json',
@@ -98,6 +99,14 @@ const Api = {
return axios.get(url).then(({ data }) => data);
},
+ groupMilestones(groupId, params = {}) {
+ const url = Api.buildUrl(Api.groupMilestonesPath).replace(':id', encodeURIComponent(groupId));
+
+ return axios.get(url, {
+ params,
+ });
+ },
+
// Return namespaces list. Filtered by query
namespaces(query, callback) {
const url = Api.buildUrl(Api.namespacesPath);
@@ -262,10 +271,12 @@ const Api = {
});
},
- projectMilestones(id) {
+ projectMilestones(id, params = {}) {
const url = Api.buildUrl(Api.projectMilestonesPath).replace(':id', encodeURIComponent(id));
- return axios.get(url);
+ return axios.get(url, {
+ params,
+ });
},
mergeRequests(params = {}) {
diff --git a/app/assets/javascripts/boards/components/board_content.vue b/app/assets/javascripts/boards/components/board_content.vue
index f0497ea0b64..6d0235aeaae 100644
--- a/app/assets/javascripts/boards/components/board_content.vue
+++ b/app/assets/javascripts/boards/components/board_content.vue
@@ -54,7 +54,7 @@ export default {
<div>
<div
v-if="!isSwimlanesOn"
- class="boards-list w-100 py-3 px-2 text-nowrap"
+ class="boards-list gl-w-full gl-py-5 gl-px-3 gl-white-space-nowrap"
data-qa-selector="boards_list"
>
<board-column
diff --git a/app/assets/javascripts/boards/components/board_form.vue b/app/assets/javascripts/boards/components/board_form.vue
index fbe221041c1..8ff493ae8b1 100644
--- a/app/assets/javascripts/boards/components/board_form.vue
+++ b/app/assets/javascripts/boards/components/board_form.vue
@@ -25,10 +25,6 @@ export default {
type: Boolean,
required: true,
},
- milestonePath: {
- type: String,
- required: true,
- },
labelsPath: {
type: String,
required: true,
@@ -201,7 +197,6 @@ export default {
:collapse-scope="isNewForm"
:board="board"
:can-admin-board="canAdminBoard"
- :milestone-path="milestonePath"
:labels-path="labelsPath"
:enable-scoped-labels="enableScopedLabels"
:project-id="projectId"
diff --git a/app/assets/javascripts/boards/components/boards_selector.vue b/app/assets/javascripts/boards/components/boards_selector.vue
index 80db9930259..56fb4f1a6c4 100644
--- a/app/assets/javascripts/boards/components/boards_selector.vue
+++ b/app/assets/javascripts/boards/components/boards_selector.vue
@@ -36,10 +36,6 @@ export default {
type: Object,
required: true,
},
- milestonePath: {
- type: String,
- required: true,
- },
throttleDuration: {
type: Number,
default: 200,
@@ -335,7 +331,6 @@ export default {
<board-form
v-if="currentPage"
- :milestone-path="milestonePath"
:labels-path="labelsPath"
:project-id="projectId"
:group-id="groupId"
diff --git a/app/assets/javascripts/boards/components/modal/header.vue b/app/assets/javascripts/boards/components/modal/header.vue
index a42e691dcf3..8b0d836f27a 100644
--- a/app/assets/javascripts/boards/components/modal/header.vue
+++ b/app/assets/javascripts/boards/components/modal/header.vue
@@ -17,10 +17,6 @@ export default {
type: Number,
required: true,
},
- milestonePath: {
- type: String,
- required: true,
- },
labelPath: {
type: String,
required: true,
diff --git a/app/assets/javascripts/boards/components/modal/index.vue b/app/assets/javascripts/boards/components/modal/index.vue
index 20344b66140..fb2d7b6dbc5 100644
--- a/app/assets/javascripts/boards/components/modal/index.vue
+++ b/app/assets/javascripts/boards/components/modal/index.vue
@@ -38,10 +38,6 @@ export default {
type: Number,
required: true,
},
- milestonePath: {
- type: String,
- required: true,
- },
labelPath: {
type: String,
required: true,
@@ -149,11 +145,7 @@ export default {
class="add-issues-modal d-flex position-fixed position-top-0 position-bottom-0 position-left-0 position-right-0 h-100"
>
<div class="add-issues-container d-flex flex-column m-auto rounded">
- <modal-header
- :project-id="projectId"
- :milestone-path="milestonePath"
- :label-path="labelPath"
- />
+ <modal-header :project-id="projectId" :label-path="labelPath" />
<modal-list
v-if="!loading && showList && !filterLoading"
:issue-link-base="issueLinkBase"
diff --git a/app/assets/javascripts/boards/mount_multiple_boards_switcher.js b/app/assets/javascripts/boards/mount_multiple_boards_switcher.js
index 73d37459bfe..51bb72b7657 100644
--- a/app/assets/javascripts/boards/mount_multiple_boards_switcher.js
+++ b/app/assets/javascripts/boards/mount_multiple_boards_switcher.js
@@ -27,7 +27,7 @@ export default () => {
hasMissingBoards: parseBoolean(dataset.hasMissingBoards),
canAdminBoard: parseBoolean(dataset.canAdminBoard),
multipleIssueBoardsAvailable: parseBoolean(dataset.multipleIssueBoardsAvailable),
- projectId: Number(dataset.projectId),
+ projectId: dataset.projectId ? Number(dataset.projectId) : 0,
groupId: Number(dataset.groupId),
scopedIssueBoardFeatureEnabled: parseBoolean(dataset.scopedIssueBoardFeatureEnabled),
weights: JSON.parse(dataset.weights),
diff --git a/app/assets/javascripts/milestone_select.js b/app/assets/javascripts/milestone_select.js
index caa45184bfc..8213f057b0b 100644
--- a/app/assets/javascripts/milestone_select.js
+++ b/app/assets/javascripts/milestone_select.js
@@ -4,10 +4,11 @@
import $ from 'jquery';
import { template, escape } from 'lodash';
-import { __ } from '~/locale';
+import { __, sprintf } from '~/locale';
import '~/gl_dropdown';
+import Api from '~/api';
import axios from './lib/utils/axios_utils';
-import { timeFor } from './lib/utils/datetime_utility';
+import { timeFor, parsePikadayDate, dateInWords } from './lib/utils/datetime_utility';
import ModalStore from './boards/stores/modal_store';
import boardsStore, {
boardStoreIssueSet,
@@ -34,10 +35,10 @@ export default class MilestoneSelect {
$els.each((i, dropdown) => {
let milestoneLinkNoneTemplate,
milestoneLinkTemplate,
+ milestoneExpiredLinkTemplate,
selectedMilestone,
selectedMilestoneDefault;
const $dropdown = $(dropdown);
- const milestonesUrl = $dropdown.data('milestones');
const issueUpdateURL = $dropdown.data('issueUpdate');
const showNo = $dropdown.data('showNo');
const showAny = $dropdown.data('showAny');
@@ -63,58 +64,101 @@ export default class MilestoneSelect {
milestoneLinkTemplate = template(
'<a href="<%- web_url %>" class="bold has-tooltip" data-container="body" title="<%- remaining %>"><%- title %></a>',
);
+ milestoneExpiredLinkTemplate = template(
+ '<a href="<%- web_url %>" class="bold has-tooltip" data-container="body" title="<%- remaining %>"><%- title %> (Past due)</a>',
+ );
milestoneLinkNoneTemplate = `<span class="no-value">${__('None')}</span>`;
}
return $dropdown.glDropdown({
showMenuAbove,
- data: (term, callback) =>
- axios.get(milestonesUrl).then(({ data }) => {
- const extraOptions = [];
- if (showAny) {
- extraOptions.push({
- id: null,
- name: null,
- title: __('Any milestone'),
- });
- }
- if (showNo) {
- extraOptions.push({
- id: -1,
- name: __('No milestone'),
- title: __('No milestone'),
- });
- }
- if (showUpcoming) {
- extraOptions.push({
- id: -2,
- name: '#upcoming',
- title: __('Upcoming'),
- });
- }
- if (showStarted) {
- extraOptions.push({
- id: -3,
- name: '#started',
- title: __('Started'),
- });
- }
- if (extraOptions.length) {
- extraOptions.push({ type: 'divider' });
- }
+ data: (term, callback) => {
+ let contextId = $dropdown.get(0).dataset.projectId;
+ let getMilestones = Api.projectMilestones;
- callback(extraOptions.concat(data));
- if (showMenuAbove) {
- $dropdown.data('glDropdown').positionMenuAbove();
- }
- $(`[data-milestone-id="${escape(selectedMilestone)}"] > a`).addClass('is-active');
- }),
- renderRow: milestone => `
- <li data-milestone-id="${escape(milestone.name)}">
+ if (!contextId) {
+ contextId = $dropdown.get(0).dataset.groupId;
+ getMilestones = Api.groupMilestones;
+ }
+
+ // We don't use $.data() as it caches initial value and never updates!
+ return getMilestones(contextId, { state: 'active' })
+ .then(({ data }) =>
+ data
+ .map(m => ({
+ ...m,
+ // Public API includes `title` instead of `name`.
+ name: m.title,
+ }))
+ .sort((mA, mB) => {
+ // Move all expired milestones to the bottom.
+ if (mA.expired) {
+ return 1;
+ }
+ if (mB.expired) {
+ return -1;
+ }
+ return 0;
+ }),
+ )
+ .then(data => {
+ const extraOptions = [];
+ if (showAny) {
+ extraOptions.push({
+ id: null,
+ name: null,
+ title: __('Any milestone'),
+ });
+ }
+ if (showNo) {
+ extraOptions.push({
+ id: -1,
+ name: __('No milestone'),
+ title: __('No milestone'),
+ });
+ }
+ if (showUpcoming) {
+ extraOptions.push({
+ id: -2,
+ name: '#upcoming',
+ title: __('Upcoming'),
+ });
+ }
+ if (showStarted) {
+ extraOptions.push({
+ id: -3,
+ name: '#started',
+ title: __('Started'),
+ });
+ }
+ if (extraOptions.length) {
+ extraOptions.push({ type: 'divider' });
+ }
+
+ callback(extraOptions.concat(data));
+ if (showMenuAbove) {
+ $dropdown.data('glDropdown').positionMenuAbove();
+ }
+ $(`[data-milestone-id="${selectedMilestone}"] > a`).addClass('is-active');
+ });
+ },
+ renderRow: milestone => {
+ const milestoneName = milestone.title || milestone.name;
+ let milestoneDisplayName = escape(milestoneName);
+
+ if (milestone.expired) {
+ milestoneDisplayName = sprintf(__('%{milestone} (expired)'), {
+ milestone: milestoneDisplayName,
+ });
+ }
+
+ return `
+ <li data-milestone-id="${escape(milestoneName)}">
<a href='#' class='dropdown-menu-milestone-link'>
- ${escape(milestone.title)}
+ ${milestoneDisplayName}
</a>
</li>
- `,
+ `;
+ },
filterable: true,
search: {
fields: ['title'],
@@ -149,7 +193,7 @@ export default class MilestoneSelect {
selectedMilestone = $dropdown[0].dataset.selected || selectedMilestoneDefault;
}
$('a.is-active', $el).removeClass('is-active');
- $(`[data-milestone-id="${escape(selectedMilestone)}"] > a`, $el).addClass('is-active');
+ $(`[data-milestone-id="${selectedMilestone}"] > a`, $el).addClass('is-active');
},
vue: $dropdown.hasClass('js-issue-board-sidebar'),
clicked: clickEvent => {
@@ -237,7 +281,16 @@ export default class MilestoneSelect {
if (data.milestone != null) {
data.milestone.remaining = timeFor(data.milestone.due_date);
data.milestone.name = data.milestone.title;
- $value.html(milestoneLinkTemplate(data.milestone));
+ $value.html(
+ data.milestone.expired
+ ? milestoneExpiredLinkTemplate({
+ ...data.milestone,
+ remaining: sprintf(__('%{due_date} (Past due)'), {
+ due_date: dateInWords(parsePikadayDate(data.milestone.due_date)),
+ }),
+ })
+ : milestoneLinkTemplate(data.milestone),
+ );
return $sidebarCollapsedValue
.attr(
'data-original-title',
diff --git a/app/assets/javascripts/static_site_editor/components/edit_area.vue b/app/assets/javascripts/static_site_editor/components/edit_area.vue
index b052c211542..84a16f327d9 100644
--- a/app/assets/javascripts/static_site_editor/components/edit_area.vue
+++ b/app/assets/javascripts/static_site_editor/components/edit_area.vue
@@ -5,6 +5,8 @@ import EditHeader from './edit_header.vue';
import UnsavedChangesConfirmDialog from './unsaved_changes_confirm_dialog.vue';
import parseSourceFile from '~/static_site_editor/services/parse_source_file';
import { EDITOR_TYPES } from '~/vue_shared/components/rich_content_editor/constants';
+import { DEFAULT_IMAGE_UPLOAD_PATH } from '../constants';
+import imageRepository from '../image_repository';
export default {
components: {
@@ -31,6 +33,12 @@ export default {
required: false,
default: '',
},
+ imageRoot: {
+ type: String,
+ required: false,
+ default: DEFAULT_IMAGE_UPLOAD_PATH,
+ validator: prop => prop.endsWith('/'),
+ },
},
data() {
return {
@@ -40,6 +48,7 @@ export default {
isModified: false,
};
},
+ imageRepository: imageRepository(),
computed: {
editableContent() {
return this.parsedSource.content(this.isWysiwygMode);
@@ -57,8 +66,14 @@ export default {
this.editorMode = mode;
this.$refs.editor.resetInitialValue(this.editableContent);
},
+ onUploadImage({ file, imageUrl }) {
+ this.$options.imageRepository.add(file, imageUrl);
+ },
onSubmit() {
- this.$emit('submit', { content: this.parsedSource.content() });
+ this.$emit('submit', {
+ content: this.parsedSource.content(),
+ images: this.$options.imageRepository.getAll(),
+ });
},
},
};
@@ -70,9 +85,11 @@ export default {
ref="editor"
:content="editableContent"
:initial-edit-type="editorMode"
+ :image-root="imageRoot"
class="mb-9 h-100"
@modeChange="onModeChange"
@input="onInputChange"
+ @uploadImage="onUploadImage"
/>
<unsaved-changes-confirm-dialog :modified="isModified" />
<publish-toolbar
diff --git a/app/assets/javascripts/static_site_editor/constants.js b/app/assets/javascripts/static_site_editor/constants.js
index 947347922f2..49db9ab7ca5 100644
--- a/app/assets/javascripts/static_site_editor/constants.js
+++ b/app/assets/javascripts/static_site_editor/constants.js
@@ -19,3 +19,5 @@ export const DEFAULT_HEADING = s__('StaticSiteEditor|Static site editor');
export const TRACKING_ACTION_CREATE_COMMIT = 'create_commit';
export const TRACKING_ACTION_CREATE_MERGE_REQUEST = 'create_merge_request';
export const TRACKING_ACTION_INITIALIZE_EDITOR = 'initialize_editor';
+
+export const DEFAULT_IMAGE_UPLOAD_PATH = 'source/images/uploads/';
diff --git a/app/assets/javascripts/static_site_editor/graphql/resolvers/submit_content_changes.js b/app/assets/javascripts/static_site_editor/graphql/resolvers/submit_content_changes.js
index 6c4e3a4d973..0cb26f88785 100644
--- a/app/assets/javascripts/static_site_editor/graphql/resolvers/submit_content_changes.js
+++ b/app/assets/javascripts/static_site_editor/graphql/resolvers/submit_content_changes.js
@@ -3,10 +3,10 @@ import savedContentMetaQuery from '../queries/saved_content_meta.query.graphql';
const submitContentChangesResolver = (
_,
- { input: { project: projectId, username, sourcePath, content } },
+ { input: { project: projectId, username, sourcePath, content, images } },
{ cache },
) => {
- return submitContentChanges({ projectId, username, sourcePath, content }).then(
+ return submitContentChanges({ projectId, username, sourcePath, content, images }).then(
savedContentMeta => {
cache.writeQuery({
query: savedContentMetaQuery,
diff --git a/app/assets/javascripts/static_site_editor/image_repository.js b/app/assets/javascripts/static_site_editor/image_repository.js
new file mode 100644
index 00000000000..541d581bda8
--- /dev/null
+++ b/app/assets/javascripts/static_site_editor/image_repository.js
@@ -0,0 +1,20 @@
+import { __ } from '~/locale';
+import Flash from '~/flash';
+import { getBinary } from './services/image_service';
+
+const imageRepository = () => {
+ const images = new Map();
+ const flash = message => new Flash(message);
+
+ const add = (file, url) => {
+ getBinary(file)
+ .then(content => images.set(url, content))
+ .catch(() => flash(__('Something went wrong while inserting your image. Please try again.')));
+ };
+
+ const getAll = () => images;
+
+ return { add, getAll };
+};
+
+export default imageRepository;
diff --git a/app/assets/javascripts/static_site_editor/pages/home.vue b/app/assets/javascripts/static_site_editor/pages/home.vue
index a1314c8a478..156b815e07a 100644
--- a/app/assets/javascripts/static_site_editor/pages/home.vue
+++ b/app/assets/javascripts/static_site_editor/pages/home.vue
@@ -67,11 +67,11 @@ export default {
onDismissError() {
this.submitChangesError = null;
},
- onSubmit({ content }) {
+ onSubmit({ content, images }) {
this.content = content;
- this.submitChanges();
+ this.submitChanges(images);
},
- submitChanges() {
+ submitChanges(images) {
this.isSavingChanges = true;
this.$apollo
@@ -83,6 +83,7 @@ export default {
username: this.appData.username,
sourcePath: this.appData.sourcePath,
content: this.content,
+ images,
},
},
})
diff --git a/app/assets/javascripts/static_site_editor/services/image_service.js b/app/assets/javascripts/static_site_editor/services/image_service.js
new file mode 100644
index 00000000000..edc69d0579a
--- /dev/null
+++ b/app/assets/javascripts/static_site_editor/services/image_service.js
@@ -0,0 +1,9 @@
+// eslint-disable-next-line import/prefer-default-export
+export const getBinary = file => {
+ return new Promise((resolve, reject) => {
+ const reader = new FileReader();
+ reader.readAsDataURL(file);
+ reader.onload = () => resolve(reader.result.split(',')[1]);
+ reader.onerror = error => reject(error);
+ });
+};
diff --git a/app/assets/javascripts/static_site_editor/services/submit_content_changes.js b/app/assets/javascripts/static_site_editor/services/submit_content_changes.js
index fce7c1f918f..da62d3fa4fc 100644
--- a/app/assets/javascripts/static_site_editor/services/submit_content_changes.js
+++ b/app/assets/javascripts/static_site_editor/services/submit_content_changes.js
@@ -21,7 +21,32 @@ const createBranch = (projectId, branch) =>
throw new Error(SUBMIT_CHANGES_BRANCH_ERROR);
});
-const commitContent = (projectId, message, branch, sourcePath, content) => {
+const createImageActions = (images, markdown) => {
+ const actions = [];
+
+ if (!markdown) {
+ return actions;
+ }
+
+ images.forEach((imageContent, filePath) => {
+ const imageExistsInMarkdown = path => new RegExp(`!\\[([^[\\]\\n]*)\\](\\(${path})\\)`); // matches the image markdown syntax: ![<any-string-except-newline>](<path>)
+
+ if (imageExistsInMarkdown(filePath).test(markdown)) {
+ actions.push(
+ convertObjectPropsToSnakeCase({
+ encoding: 'base64',
+ action: 'create',
+ content: imageContent,
+ filePath,
+ }),
+ );
+ }
+ });
+
+ return actions;
+};
+
+const commitContent = (projectId, message, branch, sourcePath, content, images) => {
Tracking.event(document.body.dataset.page, TRACKING_ACTION_CREATE_COMMIT);
return Api.commitMultiple(
@@ -35,6 +60,7 @@ const commitContent = (projectId, message, branch, sourcePath, content) => {
filePath: sourcePath,
content,
}),
+ ...createImageActions(images, content),
],
}),
).catch(() => {
@@ -62,7 +88,7 @@ const createMergeRequest = (
});
};
-const submitContentChanges = ({ username, projectId, sourcePath, content }) => {
+const submitContentChanges = ({ username, projectId, sourcePath, content, images }) => {
const branch = generateBranchName(username);
const mergeRequestTitle = sprintf(s__(`StaticSiteEditor|Update %{sourcePath} file`), {
sourcePath,
@@ -73,7 +99,7 @@ const submitContentChanges = ({ username, projectId, sourcePath, content }) => {
.then(({ data: { web_url: url } }) => {
Object.assign(meta, { branch: { label: branch, url } });
- return commitContent(projectId, mergeRequestTitle, branch, sourcePath, content);
+ return commitContent(projectId, mergeRequestTitle, branch, sourcePath, content, images);
})
.then(({ data: { short_id: label, web_url: url } }) => {
Object.assign(meta, { commit: { label, url } });
diff --git a/app/assets/javascripts/vue_shared/components/markdown/suggestion_diff.vue b/app/assets/javascripts/vue_shared/components/markdown/suggestion_diff.vue
index 6dac448d5de..13c42d35b04 100644
--- a/app/assets/javascripts/vue_shared/components/markdown/suggestion_diff.vue
+++ b/app/assets/javascripts/vue_shared/components/markdown/suggestion_diff.vue
@@ -68,6 +68,7 @@ export default {
:is-applying-batch="suggestion.is_applying_batch"
:batch-suggestions-count="batchSuggestionsCount"
:help-page-path="helpPagePath"
+ :inapplicable-reason="suggestion.inapplicable_reason"
@apply="applySuggestion"
@applyBatch="applySuggestionBatch"
@addToBatch="addSuggestionToBatch"
diff --git a/app/assets/javascripts/vue_shared/components/markdown/suggestion_diff_header.vue b/app/assets/javascripts/vue_shared/components/markdown/suggestion_diff_header.vue
index 54e837882ce..4de80e9b4c2 100644
--- a/app/assets/javascripts/vue_shared/components/markdown/suggestion_diff_header.vue
+++ b/app/assets/javascripts/vue_shared/components/markdown/suggestion_diff_header.vue
@@ -38,6 +38,11 @@ export default {
type: String,
required: true,
},
+ inapplicableReason: {
+ type: String,
+ required: false,
+ default: null,
+ },
},
data() {
return {
@@ -52,9 +57,7 @@ export default {
return this.isApplyingSingle || this.isApplyingBatch;
},
tooltipMessage() {
- return this.canApply
- ? __('This also resolves this thread')
- : __("Can't apply as this line has changed or the suggestion already matches its content.");
+ return this.canApply ? __('This also resolves this thread') : this.inapplicableReason;
},
isDisableButton() {
return this.isApplying || !this.canApply;
diff --git a/app/assets/javascripts/vue_shared/components/rich_content_editor/modals/add_image/add_image_modal.vue b/app/assets/javascripts/vue_shared/components/rich_content_editor/modals/add_image/add_image_modal.vue
index dce5d1778b3..0a444b2295d 100644
--- a/app/assets/javascripts/vue_shared/components/rich_content_editor/modals/add_image/add_image_modal.vue
+++ b/app/assets/javascripts/vue_shared/components/rich_content_editor/modals/add_image/add_image_modal.vue
@@ -16,8 +16,15 @@ export default {
GlTab,
},
mixins: [glFeatureFlagMixin()],
+ props: {
+ imageRoot: {
+ type: String,
+ required: true,
+ },
+ },
data() {
return {
+ file: null,
urlError: null,
imageUrl: null,
description: null,
@@ -38,6 +45,7 @@ export default {
},
methods: {
show() {
+ this.file = null;
this.urlError = null;
this.imageUrl = null;
this.description = null;
@@ -66,7 +74,9 @@ export default {
return;
}
- this.$emit('addImage', { file, altText: altText || file.name });
+ const imageUrl = `${this.imageRoot}${file.name}`;
+
+ this.$emit('addImage', { imageUrl, file, altText: altText || file.name });
},
submitURL(event) {
if (!this.validateUrl()) {
diff --git a/app/assets/javascripts/vue_shared/components/rich_content_editor/rich_content_editor.vue b/app/assets/javascripts/vue_shared/components/rich_content_editor/rich_content_editor.vue
index a32114b6a29..193310d1dc9 100644
--- a/app/assets/javascripts/vue_shared/components/rich_content_editor/rich_content_editor.vue
+++ b/app/assets/javascripts/vue_shared/components/rich_content_editor/rich_content_editor.vue
@@ -19,8 +19,6 @@ import {
getMarkdown,
} from './services/editor_service';
-import { getUrl } from './services/image_service';
-
export default {
components: {
ToastEditor: () =>
@@ -54,6 +52,11 @@ export default {
required: false,
default: EDITOR_PREVIEW_STYLE,
},
+ imageRoot: {
+ type: String,
+ required: true,
+ validator: prop => prop.endsWith('/'),
+ },
},
data() {
return {
@@ -104,10 +107,8 @@ export default {
const image = { imageUrl, altText };
if (file) {
- image.imageUrl = getUrl(file);
- // TODO - persist images locally (local image repository)
+ this.$emit('uploadImage', { file, imageUrl });
// TODO - ensure that the actual repo URL for the image is used in Markdown mode
- // TODO - upload images to the project repository (on submit)
}
addImage(this.editorInstance, image);
@@ -130,6 +131,6 @@ export default {
@change="onContentChanged"
@load="onLoad"
/>
- <add-image-modal ref="addImageModal" @addImage="onAddImage" />
+ <add-image-modal ref="addImageModal" :image-root="imageRoot" @addImage="onAddImage" />
</div>
</template>
diff --git a/app/assets/javascripts/vue_shared/components/rich_content_editor/services/image_service.js b/app/assets/javascripts/vue_shared/components/rich_content_editor/services/image_service.js
deleted file mode 100644
index a66e464e702..00000000000
--- a/app/assets/javascripts/vue_shared/components/rich_content_editor/services/image_service.js
+++ /dev/null
@@ -1,2 +0,0 @@
-// eslint-disable-next-line import/prefer-default-export
-export const getUrl = file => URL.createObjectURL(file);
diff --git a/app/assets/stylesheets/framework/system_messages.scss b/app/assets/stylesheets/framework/system_messages.scss
index 4f66d6bf354..10796f319bf 100644
--- a/app/assets/stylesheets/framework/system_messages.scss
+++ b/app/assets/stylesheets/framework/system_messages.scss
@@ -94,7 +94,8 @@
margin-bottom: 16px;
}
- .boards-list {
+ .boards-list,
+ .board-swimlanes {
height: calc(100vh - #{$header-height + $breadcrumb-min-height + $performance-bar-height + $system-footer-height + $gl-padding-32});
}
}
diff --git a/app/assets/stylesheets/pages/boards.scss b/app/assets/stylesheets/pages/boards.scss
index c1f5b3a3c7b..049660220df 100644
--- a/app/assets/stylesheets/pages/boards.scss
+++ b/app/assets/stylesheets/pages/boards.scss
@@ -45,7 +45,8 @@
}
}
-.boards-list {
+.boards-list,
+.board-swimlanes {
height: calc(100vh - #{$issue-board-list-difference-xs});
overflow-x: scroll;
min-height: 200px;
@@ -576,29 +577,8 @@
}
}
-.board-epics-swimlanes {
+.board-swimlanes {
overflow-x: auto;
- min-height: calc(100vh - #{$issue-board-list-difference-xs});
-
- @include media-breakpoint-only(sm) {
- min-height: calc(100vh - #{$issue-board-list-difference-sm});
- }
-
- @include media-breakpoint-up(md) {
- min-height: calc(100vh - #{$issue-board-list-difference-md});
- }
-
- .with-performance-bar & {
- min-height: calc(100vh - #{$issue-board-list-difference-xs} - #{$performance-bar-height});
-
- @include media-breakpoint-only(sm) {
- min-height: calc(100vh - #{$issue-board-list-difference-sm} - #{$performance-bar-height});
- }
-
- @include media-breakpoint-up(md) {
- min-height: calc(100vh - #{$issue-board-list-difference-md} - #{$performance-bar-height});
- }
- }
}
.board-header-collapsed-info-icon:hover {
diff --git a/app/assets/stylesheets/utilities.scss b/app/assets/stylesheets/utilities.scss
index 94af1df2ccb..8daa622dc7c 100644
--- a/app/assets/stylesheets/utilities.scss
+++ b/app/assets/stylesheets/utilities.scss
@@ -108,12 +108,3 @@
.gl-transition-property-stroke {
transition-property: stroke;
}
-
-// temporary class till giltab-ui one is merged
-.gl-border-t-2 {
- border-top-width: $gl-border-size-2;
-}
-
-.gl-border-b-2 {
- border-bottom-width: $gl-border-size-2;
-}
diff --git a/app/graphql/resolvers/milestone_resolver.rb b/app/graphql/resolvers/milestone_resolver.rb
index 6c6513e0ee4..bcfbc63c31f 100644
--- a/app/graphql/resolvers/milestone_resolver.rb
+++ b/app/graphql/resolvers/milestone_resolver.rb
@@ -52,7 +52,7 @@ module Resolvers
end
def group_parameters(args)
- return { group_ids: parent.id } unless include_descendants?(args)
+ return { group_ids: parent.id } unless args[:include_descendants].present?
{
group_ids: parent.self_and_descendants.public_or_visible_to_user(current_user).select(:id),
@@ -60,10 +60,6 @@ module Resolvers
}
end
- def include_descendants?(args)
- args[:include_descendants].present? && Feature.enabled?(:group_milestone_descendants, parent)
- end
-
def group_projects
GroupProjectsFinder.new(
group: parent,
diff --git a/app/helpers/clusters_helper.rb b/app/helpers/clusters_helper.rb
index 1204f882707..005070cca5c 100644
--- a/app/helpers/clusters_helper.rb
+++ b/app/helpers/clusters_helper.rb
@@ -1,9 +1,8 @@
# frozen_string_literal: true
module ClustersHelper
- # EE overrides this
def has_multiple_clusters?
- false
+ true
end
def create_new_cluster_label(provider: nil)
@@ -95,5 +94,3 @@ module ClustersHelper
can?(user, :admin_cluster, cluster)
end
end
-
-ClustersHelper.prepend_if_ee('EE::ClustersHelper')
diff --git a/app/models/clusters/cluster.rb b/app/models/clusters/cluster.rb
index d4f5d499b65..0c36bf5cf48 100644
--- a/app/models/clusters/cluster.rb
+++ b/app/models/clusters/cluster.rb
@@ -2,6 +2,7 @@
module Clusters
class Cluster < ApplicationRecord
+ prepend HasEnvironmentScope
include Presentable
include Gitlab::Utils::StrongMemoize
include FromUnion
@@ -81,6 +82,7 @@ module Clusters
validate :no_groups, unless: :group_type?
validate :no_projects, unless: :project_type?
validate :unique_management_project_environment_scope
+ validate :unique_environment_scope
after_save :clear_reactive_cache!
@@ -354,6 +356,12 @@ module Clusters
end
end
+ def unique_environment_scope
+ if clusterable.present? && clusterable.clusters.where(environment_scope: environment_scope).where.not(id: id).exists?
+ errors.add(:environment_scope, 'cannot add duplicated environment scope')
+ end
+ end
+
def managed_namespace(environment)
Clusters::KubernetesNamespaceFinder.new(
self,
diff --git a/app/models/concerns/deployment_platform.rb b/app/models/concerns/deployment_platform.rb
index 3b893a56bd6..02f7711e927 100644
--- a/app/models/concerns/deployment_platform.rb
+++ b/app/models/concerns/deployment_platform.rb
@@ -1,7 +1,6 @@
# frozen_string_literal: true
module DeploymentPlatform
- # EE would override this and utilize environment argument
# rubocop:disable Gitlab/ModuleWithInstanceVariables
def deployment_platform(environment: nil)
@deployment_platform ||= {}
@@ -20,16 +19,27 @@ module DeploymentPlatform
find_instance_cluster_platform_kubernetes(environment: environment)
end
- # EE would override this and utilize environment argument
- def find_platform_kubernetes_with_cte(_environment)
- Clusters::ClustersHierarchy.new(self, include_management_project: cluster_management_project_enabled?).base_and_ancestors
+ def find_platform_kubernetes_with_cte(environment)
+ if environment
+ ::Clusters::ClustersHierarchy.new(self, include_management_project: cluster_management_project_enabled?)
+ .base_and_ancestors
+ .enabled
+ .on_environment(environment, relevant_only: true)
+ .first&.platform_kubernetes
+ else
+ Clusters::ClustersHierarchy.new(self, include_management_project: cluster_management_project_enabled?).base_and_ancestors
.enabled.default_environment
.first&.platform_kubernetes
+ end
end
- # EE would override this and utilize environment argument
def find_instance_cluster_platform_kubernetes(environment: nil)
- Clusters::Instance.new.clusters.enabled.default_environment
+ if environment
+ ::Clusters::Instance.new.clusters.enabled.on_environment(environment, relevant_only: true)
.first&.platform_kubernetes
+ else
+ Clusters::Instance.new.clusters.enabled.default_environment
+ .first&.platform_kubernetes
+ end
end
end
diff --git a/app/models/packages.rb b/app/models/packages.rb
new file mode 100644
index 00000000000..e14c9290093
--- /dev/null
+++ b/app/models/packages.rb
@@ -0,0 +1,6 @@
+# frozen_string_literal: true
+module Packages
+ def self.table_name_prefix
+ 'packages_'
+ end
+end
diff --git a/app/models/packages/build_info.rb b/app/models/packages/build_info.rb
new file mode 100644
index 00000000000..df8cf68490e
--- /dev/null
+++ b/app/models/packages/build_info.rb
@@ -0,0 +1,6 @@
+# frozen_string_literal: true
+
+class Packages::BuildInfo < ApplicationRecord
+ belongs_to :package, inverse_of: :build_info
+ belongs_to :pipeline, class_name: 'Ci::Pipeline'
+end
diff --git a/app/models/packages/composer/metadatum.rb b/app/models/packages/composer/metadatum.rb
new file mode 100644
index 00000000000..3026f5ea878
--- /dev/null
+++ b/app/models/packages/composer/metadatum.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+module Packages
+ module Composer
+ class Metadatum < ApplicationRecord
+ self.table_name = 'packages_composer_metadata'
+ self.primary_key = :package_id
+
+ belongs_to :package, -> { where(package_type: :composer) }, inverse_of: :composer_metadatum
+
+ validates :package, :target_sha, :composer_json, presence: true
+ end
+ end
+end
diff --git a/app/models/packages/conan.rb b/app/models/packages/conan.rb
new file mode 100644
index 00000000000..01007c3fa78
--- /dev/null
+++ b/app/models/packages/conan.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+module Packages
+ module Conan
+ def self.table_name_prefix
+ 'packages_conan_'
+ end
+ end
+end
diff --git a/app/models/packages/conan/file_metadatum.rb b/app/models/packages/conan/file_metadatum.rb
new file mode 100644
index 00000000000..e1ef62b3959
--- /dev/null
+++ b/app/models/packages/conan/file_metadatum.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+class Packages::Conan::FileMetadatum < ApplicationRecord
+ belongs_to :package_file, inverse_of: :conan_file_metadatum
+
+ validates :package_file, presence: true
+
+ validates :recipe_revision,
+ presence: true,
+ format: { with: Gitlab::Regex.conan_revision_regex }
+
+ validates :package_revision, absence: true, if: :recipe_file?
+ validates :package_revision, format: { with: Gitlab::Regex.conan_revision_regex }, if: :package_file?
+
+ validates :conan_package_reference, absence: true, if: :recipe_file?
+ validates :conan_package_reference, format: { with: Gitlab::Regex.conan_package_reference_regex }, if: :package_file?
+ validate :conan_package_type
+
+ enum conan_file_type: { recipe_file: 1, package_file: 2 }
+
+ RECIPE_FILES = ::Gitlab::Regex::Packages::CONAN_RECIPE_FILES
+ PACKAGE_FILES = ::Gitlab::Regex::Packages::CONAN_PACKAGE_FILES
+ PACKAGE_BINARY = 'conan_package.tgz'
+
+ private
+
+ def conan_package_type
+ unless package_file&.package&.conan?
+ errors.add(:base, _('Package type must be Conan'))
+ end
+ end
+end
diff --git a/app/models/packages/conan/metadatum.rb b/app/models/packages/conan/metadatum.rb
new file mode 100644
index 00000000000..7ec2641177a
--- /dev/null
+++ b/app/models/packages/conan/metadatum.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+class Packages::Conan::Metadatum < ApplicationRecord
+ belongs_to :package, -> { where(package_type: :conan) }, inverse_of: :conan_metadatum
+
+ validates :package, presence: true
+
+ validates :package_username,
+ presence: true,
+ format: { with: Gitlab::Regex.conan_recipe_component_regex }
+
+ validates :package_channel,
+ presence: true,
+ format: { with: Gitlab::Regex.conan_recipe_component_regex }
+
+ validate :conan_package_type
+
+ def recipe
+ "#{package.name}/#{package.version}@#{package_username}/#{package_channel}"
+ end
+
+ def recipe_path
+ recipe.tr('@', '/')
+ end
+
+ def self.package_username_from(full_path:)
+ full_path.tr('/', '+')
+ end
+
+ def self.full_path_from(package_username:)
+ package_username.tr('+', '/')
+ end
+
+ private
+
+ def conan_package_type
+ unless package&.conan?
+ errors.add(:base, _('Package type must be Conan'))
+ end
+ end
+end
diff --git a/app/models/packages/dependency.rb b/app/models/packages/dependency.rb
new file mode 100644
index 00000000000..51b80934827
--- /dev/null
+++ b/app/models/packages/dependency.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+class Packages::Dependency < ApplicationRecord
+ has_many :dependency_links, class_name: 'Packages::DependencyLink'
+
+ validates :name, :version_pattern, presence: true
+
+ validates :name, uniqueness: { scope: :version_pattern }
+
+ NAME_VERSION_PATTERN_TUPLE_MATCHING = '(name, version_pattern) = (?, ?)'.freeze
+ MAX_STRING_LENGTH = 255.freeze
+ MAX_CHUNKED_QUERIES_COUNT = 10.freeze
+
+ def self.ids_for_package_names_and_version_patterns(names_and_version_patterns = {}, chunk_size = 50, max_rows_limit = 200)
+ names_and_version_patterns.reject! { |key, value| key.size > MAX_STRING_LENGTH || value.size > MAX_STRING_LENGTH }
+ raise ArgumentError, 'Too many names_and_version_patterns' if names_and_version_patterns.size > MAX_CHUNKED_QUERIES_COUNT * chunk_size
+
+ matched_ids = []
+ names_and_version_patterns.each_slice(chunk_size) do |tuples|
+ where_statement = Array.new(tuples.size, NAME_VERSION_PATTERN_TUPLE_MATCHING)
+ .join(' OR ')
+ ids = where(where_statement, *tuples.flatten)
+ .limit(max_rows_limit + 1)
+ .pluck(:id)
+ matched_ids.concat(ids)
+
+ raise ArgumentError, 'Too many Dependencies selected' if matched_ids.size > max_rows_limit
+ end
+
+ matched_ids
+ end
+
+ def self.for_package_names_and_version_patterns(names_and_version_patterns = {}, chunk_size = 50, max_rows_limit = 200)
+ ids = ids_for_package_names_and_version_patterns(names_and_version_patterns, chunk_size, max_rows_limit)
+
+ return none if ids.empty?
+
+ id_in(ids)
+ end
+
+ def self.pluck_ids_and_names
+ pluck(:id, :name)
+ end
+
+ def orphaned?
+ self.dependency_links.empty?
+ end
+end
diff --git a/app/models/packages/dependency_link.rb b/app/models/packages/dependency_link.rb
new file mode 100644
index 00000000000..51018602bdc
--- /dev/null
+++ b/app/models/packages/dependency_link.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+class Packages::DependencyLink < ApplicationRecord
+ belongs_to :package, inverse_of: :dependency_links
+ belongs_to :dependency, inverse_of: :dependency_links, class_name: 'Packages::Dependency'
+ has_one :nuget_metadatum, inverse_of: :dependency_link, class_name: 'Packages::Nuget::DependencyLinkMetadatum'
+
+ validates :package, :dependency, presence: true
+
+ validates :dependency_type,
+ uniqueness: { scope: %i[package_id dependency_id] }
+
+ enum dependency_type: { dependencies: 1, devDependencies: 2, bundleDependencies: 3, peerDependencies: 4 }
+
+ scope :with_dependency_type, ->(dependency_type) { where(dependency_type: dependency_type) }
+ scope :includes_dependency, -> { includes(:dependency) }
+ scope :for_package, ->(package) { where(package_id: package.id) }
+ scope :preload_dependency, -> { preload(:dependency) }
+ scope :preload_nuget_metadatum, -> { preload(:nuget_metadatum) }
+end
diff --git a/app/models/packages/go/module.rb b/app/models/packages/go/module.rb
new file mode 100644
index 00000000000..b38b691ed6c
--- /dev/null
+++ b/app/models/packages/go/module.rb
@@ -0,0 +1,93 @@
+# frozen_string_literal: true
+
+module Packages
+ module Go
+ class Module
+ include Gitlab::Utils::StrongMemoize
+
+ attr_reader :project, :name, :path
+
+ def initialize(project, name, path)
+ @project = project
+ @name = name
+ @path = path
+ end
+
+ def versions
+ strong_memoize(:versions) { Packages::Go::VersionFinder.new(self).execute }
+ end
+
+ def version_by(ref: nil, commit: nil)
+ raise ArgumentError.new 'no filter specified' unless ref || commit
+ raise ArgumentError.new 'ref and commit are mutually exclusive' if ref && commit
+
+ if commit
+ return version_by_sha(commit) if commit.is_a? String
+
+ return version_by_commit(commit)
+ end
+
+ return version_by_name(ref) if ref.is_a? String
+
+ version_by_ref(ref)
+ end
+
+ def path_valid?(major)
+ m = /\/v(\d+)$/i.match(@name)
+
+ case major
+ when 0, 1
+ m.nil?
+ else
+ !m.nil? && m[1].to_i == major
+ end
+ end
+
+ def gomod_valid?(gomod)
+ if Feature.enabled?(:go_proxy_disable_gomod_validation, @project)
+ return gomod&.start_with?("module ")
+ end
+
+ gomod&.split("\n", 2)&.first == "module #{@name}"
+ end
+
+ private
+
+ def version_by_name(name)
+ # avoid a Gitaly call if possible
+ if strong_memoized?(:versions)
+ v = versions.find { |v| v.name == ref }
+ return v if v
+ end
+
+ ref = @project.repository.find_tag(name) || @project.repository.find_branch(name)
+ return unless ref
+
+ version_by_ref(ref)
+ end
+
+ def version_by_ref(ref)
+ # reuse existing versions
+ if strong_memoized?(:versions)
+ v = versions.find { |v| v.ref == ref }
+ return v if v
+ end
+
+ commit = ref.dereferenced_target
+ semver = Packages::SemVer.parse(ref.name, prefixed: true)
+ Packages::Go::ModuleVersion.new(self, :ref, commit, ref: ref, semver: semver)
+ end
+
+ def version_by_sha(sha)
+ commit = @project.commit_by(oid: sha)
+ return unless ref
+
+ version_by_commit(commit)
+ end
+
+ def version_by_commit(commit)
+ Packages::Go::ModuleVersion.new(self, :commit, commit)
+ end
+ end
+ end
+end
diff --git a/app/models/packages/go/module_version.rb b/app/models/packages/go/module_version.rb
new file mode 100644
index 00000000000..a50c78f8e69
--- /dev/null
+++ b/app/models/packages/go/module_version.rb
@@ -0,0 +1,115 @@
+# frozen_string_literal: true
+
+module Packages
+ module Go
+ class ModuleVersion
+ include Gitlab::Utils::StrongMemoize
+
+ VALID_TYPES = %i[ref commit pseudo].freeze
+
+ attr_reader :mod, :type, :ref, :commit
+
+ delegate :major, to: :@semver, allow_nil: true
+ delegate :minor, to: :@semver, allow_nil: true
+ delegate :patch, to: :@semver, allow_nil: true
+ delegate :prerelease, to: :@semver, allow_nil: true
+ delegate :build, to: :@semver, allow_nil: true
+
+ def initialize(mod, type, commit, name: nil, semver: nil, ref: nil)
+ raise ArgumentError.new("invalid type '#{type}'") unless VALID_TYPES.include? type
+ raise ArgumentError.new("mod is required") unless mod
+ raise ArgumentError.new("commit is required") unless commit
+
+ if type == :ref
+ raise ArgumentError.new("ref is required") unless ref
+ elsif type == :pseudo
+ raise ArgumentError.new("name is required") unless name
+ raise ArgumentError.new("semver is required") unless semver
+ end
+
+ @mod = mod
+ @type = type
+ @commit = commit
+ @name = name if name
+ @semver = semver if semver
+ @ref = ref if ref
+ end
+
+ def name
+ @name || @ref&.name
+ end
+
+ def full_name
+ "#{mod.name}@#{name || commit.sha}"
+ end
+
+ def gomod
+ strong_memoize(:gomod) do
+ if strong_memoized?(:blobs)
+ blob_at(@mod.path + '/go.mod')
+ elsif @mod.path.empty?
+ @mod.project.repository.blob_at(@commit.sha, 'go.mod')&.data
+ else
+ @mod.project.repository.blob_at(@commit.sha, @mod.path + '/go.mod')&.data
+ end
+ end
+ end
+
+ def archive
+ suffix_len = @mod.path == '' ? 0 : @mod.path.length + 1
+
+ Zip::OutputStream.write_buffer do |zip|
+ files.each do |file|
+ zip.put_next_entry "#{full_name}/#{file[suffix_len...]}"
+ zip.write blob_at(file)
+ end
+ end
+ end
+
+ def files
+ strong_memoize(:files) do
+ ls_tree.filter { |e| !excluded.any? { |n| e.start_with? n } }
+ end
+ end
+
+ def excluded
+ strong_memoize(:excluded) do
+ ls_tree
+ .filter { |f| f.end_with?('/go.mod') && f != @mod.path + '/go.mod' }
+ .map { |f| f[0..-7] }
+ end
+ end
+
+ def valid?
+ @mod.path_valid?(major) && @mod.gomod_valid?(gomod)
+ end
+
+ private
+
+ def blob_at(path)
+ return if path.nil? || path.empty?
+
+ path = path[1..] if path.start_with? '/'
+
+ blobs.find { |x| x.path == path }&.data
+ end
+
+ def blobs
+ strong_memoize(:blobs) { @mod.project.repository.batch_blobs(files.map { |x| [@commit.sha, x] }) }
+ end
+
+ def ls_tree
+ strong_memoize(:ls_tree) do
+ path =
+ if @mod.path.empty?
+ '.'
+ else
+ @mod.path
+ end
+
+ @mod.project.repository.gitaly_repository_client.search_files_by_name(@commit.sha, path)
+ end
+ end
+ end
+ end
+end
diff --git a/app/models/packages/maven.rb b/app/models/packages/maven.rb
new file mode 100644
index 00000000000..5c1581ce0b7
--- /dev/null
+++ b/app/models/packages/maven.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+module Packages
+ module Maven
+ def self.table_name_prefix
+ 'packages_maven_'
+ end
+ end
+end
diff --git a/app/models/packages/maven/metadatum.rb b/app/models/packages/maven/metadatum.rb
new file mode 100644
index 00000000000..b7f27fb9e06
--- /dev/null
+++ b/app/models/packages/maven/metadatum.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+class Packages::Maven::Metadatum < ApplicationRecord
+ belongs_to :package, -> { where(package_type: :maven) }
+
+ validates :package, presence: true
+
+ validates :path,
+ presence: true,
+ format: { with: Gitlab::Regex.maven_path_regex }
+
+ validates :app_group,
+ presence: true,
+ format: { with: Gitlab::Regex.maven_app_group_regex }
+
+ validates :app_name,
+ presence: true,
+ format: { with: Gitlab::Regex.maven_app_name_regex }
+
+ validate :maven_package_type
+
+ private
+
+ def maven_package_type
+ unless package&.maven?
+ errors.add(:base, _('Package type must be Maven'))
+ end
+ end
+end
diff --git a/app/models/packages/nuget.rb b/app/models/packages/nuget.rb
new file mode 100644
index 00000000000..42c167e9b7f
--- /dev/null
+++ b/app/models/packages/nuget.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+module Packages
+ module Nuget
+ def self.table_name_prefix
+ 'packages_nuget_'
+ end
+ end
+end
diff --git a/app/models/packages/nuget/dependency_link_metadatum.rb b/app/models/packages/nuget/dependency_link_metadatum.rb
new file mode 100644
index 00000000000..b586b55d3f0
--- /dev/null
+++ b/app/models/packages/nuget/dependency_link_metadatum.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+class Packages::Nuget::DependencyLinkMetadatum < ApplicationRecord
+ self.primary_key = :dependency_link_id
+
+ belongs_to :dependency_link, inverse_of: :nuget_metadatum
+
+ validates :dependency_link, :target_framework, presence: true
+
+ validate :ensure_nuget_package_type
+
+ private
+
+ def ensure_nuget_package_type
+ return if dependency_link&.package&.nuget?
+
+ errors.add(:base, _('Package type must be NuGet'))
+ end
+end
diff --git a/app/models/packages/nuget/metadatum.rb b/app/models/packages/nuget/metadatum.rb
new file mode 100644
index 00000000000..1db8c0eddbf
--- /dev/null
+++ b/app/models/packages/nuget/metadatum.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+class Packages::Nuget::Metadatum < ApplicationRecord
+ belongs_to :package, -> { where(package_type: :nuget) }, inverse_of: :nuget_metadatum
+
+ validates :package, presence: true
+ validates :license_url, public_url: { allow_blank: true }
+ validates :project_url, public_url: { allow_blank: true }
+ validates :icon_url, public_url: { allow_blank: true }
+
+ validate :ensure_at_least_one_field_supplied
+ validate :ensure_nuget_package_type
+
+ private
+
+ def ensure_at_least_one_field_supplied
+ return if license_url? || project_url? || icon_url?
+
+ errors.add(:base, _('Nuget metadatum must have at least license_url, project_url or icon_url set'))
+ end
+
+ def ensure_nuget_package_type
+ return if package&.nuget?
+
+ errors.add(:base, _('Package type must be NuGet'))
+ end
+end
diff --git a/app/models/packages/package.rb b/app/models/packages/package.rb
new file mode 100644
index 00000000000..d6633456de4
--- /dev/null
+++ b/app/models/packages/package.rb
@@ -0,0 +1,195 @@
+# frozen_string_literal: true
+class Packages::Package < ApplicationRecord
+ include Sortable
+ include Gitlab::SQL::Pattern
+ include UsageStatistics
+
+ belongs_to :project
+ # package_files must be destroyed by ruby code in order to properly remove carrierwave uploads and update project statistics
+ has_many :package_files, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
+ has_many :dependency_links, inverse_of: :package, class_name: 'Packages::DependencyLink'
+ has_many :tags, inverse_of: :package, class_name: 'Packages::Tag'
+ has_one :conan_metadatum, inverse_of: :package, class_name: 'Packages::Conan::Metadatum'
+ has_one :pypi_metadatum, inverse_of: :package, class_name: 'Packages::Pypi::Metadatum'
+ has_one :maven_metadatum, inverse_of: :package, class_name: 'Packages::Maven::Metadatum'
+ has_one :nuget_metadatum, inverse_of: :package, class_name: 'Packages::Nuget::Metadatum'
+ has_one :composer_metadatum, inverse_of: :package, class_name: 'Packages::Composer::Metadatum'
+ has_one :build_info, inverse_of: :package
+
+ accepts_nested_attributes_for :conan_metadatum
+ accepts_nested_attributes_for :maven_metadatum
+
+ delegate :recipe, :recipe_path, to: :conan_metadatum, prefix: :conan
+
+ validates :project, presence: true
+ validates :name, presence: true
+
+ validates :name, format: { with: Gitlab::Regex.package_name_regex }, unless: :conan?
+
+ validates :name,
+ uniqueness: { scope: %i[project_id version package_type] }, unless: :conan?
+
+ validate :valid_conan_package_recipe, if: :conan?
+ validate :valid_npm_package_name, if: :npm?
+ validate :valid_composer_global_name, if: :composer?
+ validate :package_already_taken, if: :npm?
+ validates :version, format: { with: Gitlab::Regex.semver_regex }, if: -> { npm? || nuget? }
+ validates :name, format: { with: Gitlab::Regex.conan_recipe_component_regex }, if: :conan?
+ validates :version, format: { with: Gitlab::Regex.conan_recipe_component_regex }, if: :conan?
+ validates :version, format: { with: Gitlab::Regex.maven_version_regex }, if: -> { version? && maven? }
+
+ enum package_type: { maven: 1, npm: 2, conan: 3, nuget: 4, pypi: 5, composer: 6 }
+
+ scope :with_name, ->(name) { where(name: name) }
+ scope :with_name_like, ->(name) { where(arel_table[:name].matches(name)) }
+ scope :search_by_name, ->(query) { fuzzy_search(query, [:name], use_minimum_char_limit: false) }
+ scope :with_version, ->(version) { where(version: version) }
+ scope :without_version_like, -> (version) { where.not(arel_table[:version].matches(version)) }
+ scope :with_package_type, ->(package_type) { where(package_type: package_type) }
+
+ scope :with_conan_channel, ->(package_channel) do
+ joins(:conan_metadatum).where(packages_conan_metadata: { package_channel: package_channel })
+ end
+ scope :with_conan_username, ->(package_username) do
+ joins(:conan_metadatum).where(packages_conan_metadata: { package_username: package_username })
+ end
+
+ scope :with_composer_target, -> (target) do
+ includes(:composer_metadatum)
+ .joins(:composer_metadatum)
+ .where(Packages::Composer::Metadatum.table_name => { target_sha: target })
+ end
+ scope :preload_composer, -> { preload(:composer_metadatum) }
+
+ scope :without_nuget_temporary_name, -> { where.not(name: Packages::Nuget::CreatePackageService::TEMPORARY_PACKAGE_NAME) }
+
+ scope :has_version, -> { where.not(version: nil) }
+ scope :processed, -> do
+ where.not(package_type: :nuget).or(
+ where.not(name: Packages::Nuget::CreatePackageService::TEMPORARY_PACKAGE_NAME)
+ )
+ end
+ scope :preload_files, -> { preload(:package_files) }
+ scope :last_of_each_version, -> { where(id: all.select('MAX(id) AS id').group(:version)) }
+ scope :limit_recent, ->(limit) { order_created_desc.limit(limit) }
+ scope :select_distinct_name, -> { select(:name).distinct }
+
+ # Sorting
+ scope :order_created, -> { reorder('created_at ASC') }
+ scope :order_created_desc, -> { reorder('created_at DESC') }
+ scope :order_name, -> { reorder('name ASC') }
+ scope :order_name_desc, -> { reorder('name DESC') }
+ scope :order_version, -> { reorder('version ASC') }
+ scope :order_version_desc, -> { reorder('version DESC') }
+ scope :order_type, -> { reorder('package_type ASC') }
+ scope :order_type_desc, -> { reorder('package_type DESC') }
+ scope :order_project_name, -> { joins(:project).reorder('projects.name ASC') }
+ scope :order_project_name_desc, -> { joins(:project).reorder('projects.name DESC') }
+ scope :order_project_path, -> { joins(:project).reorder('projects.path ASC, id ASC') }
+ scope :order_project_path_desc, -> { joins(:project).reorder('projects.path DESC, id DESC') }
+
+ def self.for_projects(projects)
+ return none unless projects.any?
+
+ where(project_id: projects)
+ end
+
+ def self.only_maven_packages_with_path(path)
+ joins(:maven_metadatum).where(packages_maven_metadata: { path: path })
+ end
+
+ def self.by_name_and_file_name(name, file_name)
+ with_name(name)
+ .joins(:package_files)
+ .where(packages_package_files: { file_name: file_name }).last!
+ end
+
+ def self.by_file_name_and_sha256(file_name, sha256)
+ joins(:package_files)
+ .where(packages_package_files: { file_name: file_name, file_sha256: sha256 }).last!
+ end
+
+ def self.pluck_names
+ pluck(:name)
+ end
+
+ def self.pluck_versions
+ pluck(:version)
+ end
+
+ def self.sort_by_attribute(method)
+ case method.to_s
+ when 'created_asc' then order_created
+ when 'created_at_asc' then order_created
+ when 'name_asc' then order_name
+ when 'name_desc' then order_name_desc
+ when 'version_asc' then order_version
+ when 'version_desc' then order_version_desc
+ when 'type_asc' then order_type
+ when 'type_desc' then order_type_desc
+ when 'project_name_asc' then order_project_name
+ when 'project_name_desc' then order_project_name_desc
+ when 'project_path_asc' then order_project_path
+ when 'project_path_desc' then order_project_path_desc
+ else
+ order_created_desc
+ end
+ end
+
+ def versions
+ project.packages
+ .with_name(name)
+ .where.not(version: version)
+ .with_package_type(package_type)
+ .order(:version)
+ end
+
+ def pipeline
+ build_info&.pipeline
+ end
+
+ def tag_names
+ tags.pluck(:name)
+ end
+
+ private
+
+ def valid_conan_package_recipe
+ recipe_exists = project.packages
+ .conan
+ .includes(:conan_metadatum)
+ .with_name(name)
+ .with_version(version)
+ .with_conan_channel(conan_metadatum.package_channel)
+ .with_conan_username(conan_metadatum.package_username)
+ .id_not_in(id)
+ .exists?
+
+ errors.add(:base, _('Package recipe already exists')) if recipe_exists
+ end
+
+ def valid_composer_global_name
+ # .default_scoped is required here due to a bug in rails that leaks
+ # the scope and adds `self` to the query incorrectly
+ # See https://github.com/rails/rails/pull/35186
+ if Packages::Package.default_scoped.composer.with_name(name).where.not(project_id: project_id).exists?
+ errors.add(:name, 'is already taken by another project')
+ end
+ end
+
+ def valid_npm_package_name
+ return unless project&.root_namespace
+
+ unless name =~ %r{\A@#{project.root_namespace.path}/[^/]+\z}
+ errors.add(:name, 'is not valid')
+ end
+ end
+
+ def package_already_taken
+ return unless project
+
+ if project.package_already_taken?(name)
+ errors.add(:base, _('Package already exists'))
+ end
+ end
+end
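For example, `valid_npm_package_name` ties the npm scope to the project's root namespace path; a sketch with a hypothetical namespace `acme` (illustrative only):

    # For a project whose root namespace path is 'acme':
    '@acme/http-client'  =~ %r{\A@acme/[^/]+\z}   # matches, the name is accepted
    '@other/http-client' =~ %r{\A@acme/[^/]+\z}   # nil, so "is not valid" is added to :name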
diff --git a/app/models/packages/package_file.rb b/app/models/packages/package_file.rb
new file mode 100644
index 00000000000..9b412cd6d6a
--- /dev/null
+++ b/app/models/packages/package_file.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+class Packages::PackageFile < ApplicationRecord
+ include UpdateProjectStatistics
+
+ delegate :project, :project_id, to: :package
+ delegate :conan_file_type, to: :conan_file_metadatum
+
+ belongs_to :package
+
+ has_one :conan_file_metadatum, inverse_of: :package_file, class_name: 'Packages::Conan::FileMetadatum'
+
+ accepts_nested_attributes_for :conan_file_metadatum
+
+ validates :package, presence: true
+ validates :file, presence: true
+ validates :file_name, presence: true
+
+ scope :recent, -> { order(id: :desc) }
+ scope :with_file_name, ->(file_name) { where(file_name: file_name) }
+ scope :with_file_name_like, ->(file_name) { where(arel_table[:file_name].matches(file_name)) }
+ scope :with_files_stored_locally, -> { where(file_store: ::Packages::PackageFileUploader::Store::LOCAL) }
+ scope :preload_conan_file_metadata, -> { preload(:conan_file_metadatum) }
+
+ scope :with_conan_file_type, ->(file_type) do
+ joins(:conan_file_metadatum)
+ .where(packages_conan_file_metadata: { conan_file_type: ::Packages::Conan::FileMetadatum.conan_file_types[file_type] })
+ end
+
+ scope :with_conan_package_reference, ->(conan_package_reference) do
+ joins(:conan_file_metadatum)
+ .where(packages_conan_file_metadata: { conan_package_reference: conan_package_reference })
+ end
+
+ mount_uploader :file, Packages::PackageFileUploader
+
+ after_save :update_file_metadata, if: :saved_change_to_file?
+
+ update_project_statistics project_statistics_name: :packages_size
+
+ def update_file_metadata
+ # The file.object_store is set during `uploader.store!`
+ # which happens after object is inserted/updated
+ self.update_column(:file_store, file.object_store)
+ self.update_column(:size, file.size) unless file.size == self.size
+ end
+
+ def download_path
+ Gitlab::Routing.url_helpers.download_project_package_file_path(project, self)
+ end
+
+ def local?
+ file_store == ::Packages::PackageFileUploader::Store::LOCAL
+ end
+end
+
+Packages::PackageFile.prepend_if_ee('EE::Packages::PackageFileGeo')
diff --git a/app/models/packages/pypi.rb b/app/models/packages/pypi.rb
new file mode 100644
index 00000000000..fc8a55caa31
--- /dev/null
+++ b/app/models/packages/pypi.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+module Packages
+ module Pypi
+ def self.table_name_prefix
+ 'packages_pypi_'
+ end
+ end
+end
diff --git a/app/models/packages/pypi/metadatum.rb b/app/models/packages/pypi/metadatum.rb
new file mode 100644
index 00000000000..7e6456ad964
--- /dev/null
+++ b/app/models/packages/pypi/metadatum.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+class Packages::Pypi::Metadatum < ApplicationRecord
+ self.primary_key = :package_id
+
+ belongs_to :package, -> { where(package_type: :pypi) }, inverse_of: :pypi_metadatum
+
+ validates :package, presence: true
+
+ validate :pypi_package_type
+
+ private
+
+ def pypi_package_type
+ unless package&.pypi?
+      errors.add(:base, _('Package type must be PyPI'))
+ end
+ end
+end
diff --git a/app/models/packages/sem_ver.rb b/app/models/packages/sem_ver.rb
new file mode 100644
index 00000000000..b73d51b08b7
--- /dev/null
+++ b/app/models/packages/sem_ver.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+class Packages::SemVer
+ attr_accessor :major, :minor, :patch, :prerelease, :build
+
+ def initialize(major = 0, minor = 0, patch = 0, prerelease = nil, build = nil, prefixed: false)
+ @major = major
+ @minor = minor
+ @patch = patch
+ @prerelease = prerelease
+ @build = build
+ @prefixed = prefixed
+ end
+
+ def prefixed?
+ @prefixed
+ end
+
+ def ==(other)
+ self.class == other.class &&
+ self.major == other.major &&
+ self.minor == other.minor &&
+ self.patch == other.patch &&
+ self.prerelease == other.prerelease &&
+ self.build == other.build
+ end
+
+ def to_s
+ s = "#{prefixed? ? 'v' : ''}#{major || 0}.#{minor || 0}.#{patch || 0}"
+ s += "-#{prerelease}" if prerelease
+ s += "+#{build}" if build
+
+ s
+ end
+
+ def self.match(str, prefixed: false)
+ return unless str&.start_with?('v') == prefixed
+
+ str = str[1..] if prefixed
+
+ Gitlab::Regex.semver_regex.match(str)
+ end
+
+ def self.match?(str, prefixed: false)
+ !match(str, prefixed: prefixed).nil?
+ end
+
+ def self.parse(str, prefixed: false)
+ m = match str, prefixed: prefixed
+ return unless m
+
+ new(m[1].to_i, m[2].to_i, m[3].to_i, m[4], m[5], prefixed: prefixed)
+ end
+end
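A quick usage sketch of the value object above (illustrative values, assuming Gitlab::Regex.semver_regex accepts the usual MAJOR.MINOR.PATCH[-prerelease][+build] form):

    ver = Packages::SemVer.parse('v1.2.3-beta.1+build.5', prefixed: true)
    ver.major       # => 1
    ver.prerelease  # => "beta.1"
    ver.to_s        # => "v1.2.3-beta.1+build.5"

    # match? only tolerates the leading 'v' when asked for it
    Packages::SemVer.match?('v1.0.0')                  # => false
    Packages::SemVer.match?('v1.0.0', prefixed: true)  # => true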
diff --git a/app/models/packages/tag.rb b/app/models/packages/tag.rb
new file mode 100644
index 00000000000..771d016daed
--- /dev/null
+++ b/app/models/packages/tag.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+class Packages::Tag < ApplicationRecord
+ belongs_to :package, inverse_of: :tags
+
+ validates :package, :name, presence: true
+
+ FOR_PACKAGES_TAGS_LIMIT = 200.freeze
+ NUGET_TAGS_SEPARATOR = ' ' # https://docs.microsoft.com/en-us/nuget/reference/nuspec#tags
+
+ scope :preload_package, -> { preload(:package) }
+ scope :with_name, -> (name) { where(name: name) }
+
+ def self.for_packages(packages)
+ where(package_id: packages.select(:id))
+ .order(updated_at: :desc)
+ .limit(FOR_PACKAGES_TAGS_LIMIT)
+ end
+end
diff --git a/app/models/project.rb b/app/models/project.rb
index 9b1e453216b..d51b5bc7b34 100644
--- a/app/models/project.rb
+++ b/app/models/project.rb
@@ -190,6 +190,10 @@ class Project < ApplicationRecord
has_many :forks, through: :forked_to_members, source: :project, inverse_of: :forked_from_project
has_many :fork_network_projects, through: :fork_network, source: :projects
+ # Packages
+ has_many :packages, class_name: 'Packages::Package'
+ has_many :package_files, through: :packages, class_name: 'Packages::PackageFile'
+
has_one :import_state, autosave: true, class_name: 'ProjectImportState', inverse_of: :project
has_one :import_export_upload, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
has_many :export_jobs, class_name: 'ProjectExportJob'
@@ -1700,10 +1704,10 @@ class Project < ApplicationRecord
def pages_url
url = pages_group_url
- url_path = full_path.partition('/').last.downcase
+ url_path = full_path.partition('/').last
# If the project path is the same as host, we serve it as group page
- return url if url == "#{Settings.pages.protocol}://#{url_path}"
+ return url if url == "#{Settings.pages.protocol}://#{url_path}".downcase
"#{url}/#{url_path}"
end
@@ -2421,6 +2425,22 @@ class Project < ApplicationRecord
end
alias_method :service_desk_enabled?, :service_desk_enabled
+ def root_namespace
+ if namespace.has_parent?
+ namespace.root_ancestor
+ else
+ namespace
+ end
+ end
+
+ def package_already_taken?(package_name)
+ namespace.root_ancestor.all_projects
+ .joins(:packages)
+ .where.not(id: id)
+ .merge(Packages::Package.with_name(package_name))
+ .exists?
+ end
+
private
def find_service(services, name)
diff --git a/app/models/prometheus_metric.rb b/app/models/prometheus_metric.rb
index 571b586056b..bfd23d2a334 100644
--- a/app/models/prometheus_metric.rb
+++ b/app/models/prometheus_metric.rb
@@ -11,6 +11,7 @@ class PrometheusMetric < ApplicationRecord
validates :group, presence: true
validates :y_label, presence: true
validates :unit, presence: true
+ validates :identifier, uniqueness: { scope: :project_id }, allow_nil: true
validates :project, presence: true, unless: :common?
validates :project, absence: true, if: :common?
diff --git a/app/presenters/clusterable_presenter.rb b/app/presenters/clusterable_presenter.rb
index 4eb0e244e54..6d21ae8a4f8 100644
--- a/app/presenters/clusterable_presenter.rb
+++ b/app/presenters/clusterable_presenter.rb
@@ -13,8 +13,7 @@ class ClusterablePresenter < Gitlab::View::Presenter::Delegated
end
def can_add_cluster?
- can?(current_user, :add_cluster, clusterable) &&
- (has_no_clusters? || multiple_clusters_available?)
+ can?(current_user, :add_cluster, clusterable)
end
def can_create_cluster?
@@ -81,17 +80,6 @@ class ClusterablePresenter < Gitlab::View::Presenter::Delegated
def learn_more_link
raise NotImplementedError
end
-
- private
-
- # Overridden on EE module
- def multiple_clusters_available?
- false
- end
-
- def has_no_clusters?
- clusterable.clusters.empty?
- end
end
ClusterablePresenter.prepend_if_ee('EE::ClusterablePresenter')
diff --git a/app/services/clusters/create_service.rb b/app/services/clusters/create_service.rb
index 7b5bf6b32c2..6693a58683f 100644
--- a/app/services/clusters/create_service.rb
+++ b/app/services/clusters/create_service.rb
@@ -19,10 +19,6 @@ module Clusters
cluster = Clusters::Cluster.new(cluster_params)
- unless can_create_cluster?
- cluster.errors.add(:base, _('Instance does not support multiple Kubernetes clusters'))
- end
-
validate_management_project_permissions(cluster)
return cluster if cluster.errors.present?
@@ -55,16 +51,9 @@ module Clusters
end
end
- # EE would override this method
- def can_create_cluster?
- clusterable.clusters.empty?
- end
-
def validate_management_project_permissions(cluster)
Clusters::Management::ValidateManagementProjectPermissionsService.new(current_user)
.execute(cluster, params[:management_project_id])
end
end
end
-
-Clusters::CreateService.prepend_if_ee('EE::Clusters::CreateService')
diff --git a/app/services/metrics/dashboard/clone_dashboard_service.rb b/app/services/metrics/dashboard/clone_dashboard_service.rb
index 739f2b9531a..a6bece391f2 100644
--- a/app/services/metrics/dashboard/clone_dashboard_service.rb
+++ b/app/services/metrics/dashboard/clone_dashboard_service.rb
@@ -10,29 +10,29 @@ module Metrics
ALLOWED_FILE_TYPE = '.yml'
USER_DASHBOARDS_DIR = ::Metrics::Dashboard::CustomDashboardService::DASHBOARD_ROOT
+ SEQUENCES = {
+ ::Metrics::Dashboard::SystemDashboardService::DASHBOARD_PATH => [
+ ::Gitlab::Metrics::Dashboard::Stages::CommonMetricsInserter,
+ ::Gitlab::Metrics::Dashboard::Stages::CustomMetricsInserter,
+ ::Gitlab::Metrics::Dashboard::Stages::Sorter
+ ].freeze,
+
+ ::Metrics::Dashboard::SelfMonitoringDashboardService::DASHBOARD_PATH => [
+ ::Gitlab::Metrics::Dashboard::Stages::CustomMetricsInserter
+ ].freeze,
+
+ ::Metrics::Dashboard::ClusterDashboardService::DASHBOARD_PATH => [
+ ::Gitlab::Metrics::Dashboard::Stages::CommonMetricsInserter,
+ ::Gitlab::Metrics::Dashboard::Stages::Sorter
+ ].freeze
+ }.freeze
steps :check_push_authorized,
- :check_branch_name,
- :check_file_type,
- :check_dashboard_template,
- :create_file,
- :refresh_repository_method_caches
-
- class << self
- def sequences
- @sequences ||= {
- ::Metrics::Dashboard::SystemDashboardService::DASHBOARD_PATH => [
- ::Gitlab::Metrics::Dashboard::Stages::CommonMetricsInserter,
- ::Gitlab::Metrics::Dashboard::Stages::CustomMetricsInserter,
- ::Gitlab::Metrics::Dashboard::Stages::Sorter
- ].freeze,
-
- ::Metrics::Dashboard::SelfMonitoringDashboardService::DASHBOARD_PATH => [
- ::Gitlab::Metrics::Dashboard::Stages::CustomMetricsInserter
- ].freeze
- }.freeze
- end
- end
+ :check_branch_name,
+ :check_file_type,
+ :check_dashboard_template,
+ :create_file,
+ :refresh_repository_method_caches
def execute
execute_steps
@@ -173,10 +173,8 @@ module Metrics
end
def sequence
- self.class.sequences[dashboard_template] || []
+ SEQUENCES[dashboard_template] || []
end
end
end
end
-
-Metrics::Dashboard::CloneDashboardService.prepend_if_ee('EE::Metrics::Dashboard::CloneDashboardService')
diff --git a/app/services/metrics/dashboard/cluster_dashboard_service.rb b/app/services/metrics/dashboard/cluster_dashboard_service.rb
new file mode 100644
index 00000000000..00ac1d6fb23
--- /dev/null
+++ b/app/services/metrics/dashboard/cluster_dashboard_service.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+# Fetches the cluster metrics dashboard and formats the output.
+# Use Gitlab::Metrics::Dashboard::Finder to retrieve dashboards.
+module Metrics
+ module Dashboard
+ class ClusterDashboardService < ::Metrics::Dashboard::PredefinedDashboardService
+ DASHBOARD_PATH = 'config/prometheus/cluster_metrics.yml'
+ DASHBOARD_NAME = 'Cluster'
+
+ SEQUENCE = [
+ STAGES::ClusterEndpointInserter,
+ STAGES::PanelIdsInserter,
+ STAGES::Sorter
+ ].freeze
+
+ class << self
+ def valid_params?(params)
+ # support selecting this service by cluster id via .find
+ # Use super to support selecting this service by dashboard_path via .find_raw
+ (params[:cluster].present? && params[:embedded] != 'true') || super
+ end
+ end
+
+ # Permissions are handled at the controller level
+ def allowed?
+ true
+ end
+ end
+ end
+end
diff --git a/app/services/metrics/dashboard/cluster_metrics_embed_service.rb b/app/services/metrics/dashboard/cluster_metrics_embed_service.rb
new file mode 100644
index 00000000000..6fb39ed3004
--- /dev/null
+++ b/app/services/metrics/dashboard/cluster_metrics_embed_service.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+#
+module Metrics
+ module Dashboard
+ class ClusterMetricsEmbedService < Metrics::Dashboard::DynamicEmbedService
+ class << self
+ def valid_params?(params)
+ [
+ params[:cluster],
+ embedded?(params[:embedded]),
+ params[:group].present?,
+ params[:title].present?,
+ params[:y_label].present?
+ ].all?
+ end
+ end
+
+ private
+
+ # Permissions are handled at the controller level
+ def allowed?
+ true
+ end
+
+ def dashboard_path
+ ::Metrics::Dashboard::ClusterDashboardService::DASHBOARD_PATH
+ end
+
+ def sequence
+ [
+ STAGES::ClusterEndpointInserter,
+ STAGES::PanelIdsInserter
+ ]
+ end
+ end
+ end
+end
diff --git a/app/services/packages/conan/create_package_file_service.rb b/app/services/packages/conan/create_package_file_service.rb
new file mode 100644
index 00000000000..2db5c4e507b
--- /dev/null
+++ b/app/services/packages/conan/create_package_file_service.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+module Packages
+ module Conan
+ class CreatePackageFileService
+ attr_reader :package, :file, :params
+
+ def initialize(package, file, params)
+ @package = package
+ @file = file
+ @params = params
+ end
+
+ def execute
+ package.package_files.create!(
+ file: file,
+ size: params['file.size'],
+ file_name: params[:file_name],
+ file_sha1: params['file.sha1'],
+ file_md5: params['file.md5'],
+ conan_file_metadatum_attributes: {
+ recipe_revision: params[:recipe_revision],
+ package_revision: params[:package_revision],
+ conan_package_reference: params[:conan_package_reference],
+ conan_file_type: params[:conan_file_type]
+ }
+ )
+ end
+ end
+ end
+end
diff --git a/app/services/packages/conan/create_package_service.rb b/app/services/packages/conan/create_package_service.rb
new file mode 100644
index 00000000000..22a0436c5fb
--- /dev/null
+++ b/app/services/packages/conan/create_package_service.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+module Packages
+ module Conan
+ class CreatePackageService < BaseService
+ def execute
+ project.packages.create!(
+ name: params[:package_name],
+ version: params[:package_version],
+ package_type: :conan,
+ conan_metadatum_attributes: {
+ package_username: params[:package_username],
+ package_channel: params[:package_channel]
+ }
+ )
+ end
+ end
+ end
+end
diff --git a/app/services/packages/conan/search_service.rb b/app/services/packages/conan/search_service.rb
new file mode 100644
index 00000000000..4513616bad2
--- /dev/null
+++ b/app/services/packages/conan/search_service.rb
@@ -0,0 +1,58 @@
+# frozen_string_literal: true
+
+module Packages
+ module Conan
+ class SearchService < BaseService
+ include ActiveRecord::Sanitization::ClassMethods
+
+ WILDCARD = '*'
+ RECIPE_SEPARATOR = '@'
+
+ def initialize(user, params)
+ super(nil, user, params)
+ end
+
+ def execute
+ ServiceResponse.success(payload: { results: search_results })
+ end
+
+ private
+
+ def search_results
+ return [] if wildcard_query?
+
+ return search_for_single_package(sanitized_query) if params[:query].include?(RECIPE_SEPARATOR)
+
+ search_packages(build_query)
+ end
+
+ def wildcard_query?
+ params[:query] == WILDCARD
+ end
+
+ def build_query
+ return "#{sanitized_query}%" if params[:query].end_with?(WILDCARD)
+
+ sanitized_query
+ end
+
+ def search_packages(query)
+ ::Packages::Conan::PackageFinder.new(current_user, query: query).execute.map(&:conan_recipe)
+ end
+
+ def search_for_single_package(query)
+ name, version, username, _ = query.split(/[@\/]/)
+ full_path = Packages::Conan::Metadatum.full_path_from(package_username: username)
+ project = Project.find_by_full_path(full_path)
+ return unless current_user.can?(:read_package, project)
+
+ result = project.packages.with_name(name).with_version(version).order_created.last
+ [result&.conan_recipe].compact
+ end
+
+ def sanitized_query
+ @sanitized_query ||= sanitize_sql_like(params[:query].delete(WILDCARD))
+ end
+ end
+ end
+end
diff --git a/app/services/packages/create_dependency_service.rb b/app/services/packages/create_dependency_service.rb
new file mode 100644
index 00000000000..2999885d55d
--- /dev/null
+++ b/app/services/packages/create_dependency_service.rb
@@ -0,0 +1,82 @@
+# frozen_string_literal: true
+module Packages
+ class CreateDependencyService < BaseService
+ attr_reader :package, :dependencies
+
+ def initialize(package, dependencies)
+ @package = package
+ @dependencies = dependencies
+ end
+
+ def execute
+ Packages::DependencyLink.dependency_types.each_key do |type|
+ create_dependency(type)
+ end
+ end
+
+ private
+
+ def create_dependency(type)
+ return unless dependencies[type].is_a?(Hash)
+
+ names_and_version_patterns = dependencies[type]
+ existing_ids, existing_names = find_existing_ids_and_names(names_and_version_patterns)
+ dependencies_to_insert = names_and_version_patterns
+
+ if existing_names.any?
+ dependencies_to_insert = names_and_version_patterns.reject { |k, _| k.in?(existing_names) }
+ end
+
+ ActiveRecord::Base.transaction do
+ inserted_ids = bulk_insert_package_dependencies(dependencies_to_insert)
+ bulk_insert_package_dependency_links(type, (existing_ids + inserted_ids))
+ end
+ end
+
+ def find_existing_ids_and_names(names_and_version_patterns)
+ ids_and_names = Packages::Dependency.for_package_names_and_version_patterns(names_and_version_patterns)
+ .pluck_ids_and_names
+ ids = ids_and_names.map(&:first) || []
+ names = ids_and_names.map(&:second) || []
+ [ids, names]
+ end
+
+ def bulk_insert_package_dependencies(names_and_version_patterns)
+ return [] if names_and_version_patterns.empty?
+
+ rows = names_and_version_patterns.map do |name, version_pattern|
+ {
+ name: name,
+ version_pattern: version_pattern
+ }
+ end
+
+ ids = database.bulk_insert(Packages::Dependency.table_name, rows, return_ids: true, on_conflict: :do_nothing)
+ return ids if ids.size == names_and_version_patterns.size
+
+ Packages::Dependency.uncached do
+        # The bulk_insert statement above does not dirty the query cache. To make
+        # sure that the results are fresh from the database and not from a stale
+        # and potentially wrong cache, this query has to be done with the query
+        # cache disabled.
+ Packages::Dependency.ids_for_package_names_and_version_patterns(names_and_version_patterns)
+ end
+ end
+
+ def bulk_insert_package_dependency_links(type, dependency_ids)
+ rows = dependency_ids.map do |dependency_id|
+ {
+ package_id: package.id,
+ dependency_id: dependency_id,
+ dependency_type: Packages::DependencyLink.dependency_types[type.to_s]
+ }
+ end
+
+ database.bulk_insert(Packages::DependencyLink.table_name, rows)
+ end
+
+ def database
+ ::Gitlab::Database
+ end
+ end
+end
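The service expects `dependencies` keyed by dependency type, each value being a hash of package names to version patterns; a minimal illustrative call (the names, versions and type keys are assumptions, not taken from this diff):

    package = Packages::Package.last
    dependencies = {
      'dependencies'    => { 'express' => '^4.17.0' },
      'devDependencies' => { 'jest' => '~26.0.0' }
    }
    Packages::CreateDependencyService.new(package, dependencies).execute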
diff --git a/app/services/packages/create_package_file_service.rb b/app/services/packages/create_package_file_service.rb
new file mode 100644
index 00000000000..0ebceeee779
--- /dev/null
+++ b/app/services/packages/create_package_file_service.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+module Packages
+ class CreatePackageFileService
+ attr_reader :package, :params
+
+ def initialize(package, params)
+ @package = package
+ @params = params
+ end
+
+ def execute
+ package.package_files.create!(
+ file: params[:file],
+ size: params[:size],
+ file_name: params[:file_name],
+ file_sha1: params[:file_sha1],
+ file_sha256: params[:file_sha256],
+ file_md5: params[:file_md5]
+ )
+ end
+ end
+end
diff --git a/app/services/packages/maven/create_package_service.rb b/app/services/packages/maven/create_package_service.rb
new file mode 100644
index 00000000000..aca5d28ca98
--- /dev/null
+++ b/app/services/packages/maven/create_package_service.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+module Packages
+ module Maven
+ class CreatePackageService < BaseService
+ def execute
+ app_group, _, app_name = params[:name].rpartition('/')
+ app_group.tr!('/', '.')
+
+ package = project.packages.create!(
+ name: params[:name],
+ version: params[:version],
+ package_type: :maven,
+ maven_metadatum_attributes: {
+ path: params[:path],
+ app_group: app_group,
+ app_name: app_name,
+ app_version: params[:version]
+ }
+ )
+
+ build = params[:build]
+ package.create_build_info!(pipeline: build.pipeline) if build.present?
+
+ package
+ end
+ end
+ end
+end
diff --git a/app/services/packages/maven/find_or_create_package_service.rb b/app/services/packages/maven/find_or_create_package_service.rb
new file mode 100644
index 00000000000..50a008843ad
--- /dev/null
+++ b/app/services/packages/maven/find_or_create_package_service.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+module Packages
+ module Maven
+ class FindOrCreatePackageService < BaseService
+ MAVEN_METADATA_FILE = 'maven-metadata.xml'.freeze
+
+ def execute
+ package = ::Packages::Maven::PackageFinder
+ .new(params[:path], current_user, project: project).execute
+
+ unless package
+ if params[:file_name] == MAVEN_METADATA_FILE
+          # Maven uploads several files during `mvn deploy` in the following order:
+          #   - my-company/my-app/1.0-SNAPSHOT/my-app.jar
+          #   - my-company/my-app/1.0-SNAPSHOT/my-app.pom
+          #   - my-company/my-app/1.0-SNAPSHOT/maven-metadata.xml
+          #   - my-company/my-app/maven-metadata.xml
+          #
+          # The last XML file does not include the version in its URL because it
+          # contains information about all versions.
+ package_name, version = params[:path], nil
+ else
+ package_name, _, version = params[:path].rpartition('/')
+ end
+
+ package_params = {
+ name: package_name,
+ path: params[:path],
+ version: version,
+ build: params[:build]
+ }
+
+ package = ::Packages::Maven::CreatePackageService
+ .new(project, current_user, package_params).execute
+ end
+
+ package
+ end
+ end
+ end
+end
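To make the branching above concrete, a sketch of how the two cases split `params[:path]` (illustrative Maven coordinates):

    # Versioned artifact: the last path segment becomes the version
    'my-company/my-app/1.0-SNAPSHOT'.rpartition('/')
    # => ["my-company/my-app", "/", "1.0-SNAPSHOT"]

    # Root maven-metadata.xml: the whole path is the package name and version stays nil
    package_name, version = 'my-company/my-app', nil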
diff --git a/app/services/packages/npm/create_package_service.rb b/app/services/packages/npm/create_package_service.rb
new file mode 100644
index 00000000000..cf927683ce9
--- /dev/null
+++ b/app/services/packages/npm/create_package_service.rb
@@ -0,0 +1,91 @@
+# frozen_string_literal: true
+module Packages
+ module Npm
+ class CreatePackageService < BaseService
+ include Gitlab::Utils::StrongMemoize
+
+ def execute
+ return error('Version is empty.', 400) if version.blank?
+ return error('Package already exists.', 403) if current_package_exists?
+
+ ActiveRecord::Base.transaction { create_package! }
+ end
+
+ private
+
+ def create_package!
+ package = project.packages.create!(
+ name: name,
+ version: version,
+ package_type: 'npm'
+ )
+
+ if build.present?
+ package.create_build_info!(pipeline: build.pipeline)
+ end
+
+ ::Packages::CreatePackageFileService.new(package, file_params).execute
+ ::Packages::CreateDependencyService.new(package, package_dependencies).execute
+ ::Packages::Npm::CreateTagService.new(package, dist_tag).execute
+
+ package
+ end
+
+ def current_package_exists?
+ project.packages
+ .npm
+ .with_name(name)
+ .with_version(version)
+ .exists?
+ end
+
+ def name
+ params[:name]
+ end
+
+ def version
+ strong_memoize(:version) do
+ params[:versions].each_key.first
+ end
+ end
+
+ def version_data
+ params[:versions][version]
+ end
+
+ def build
+ params[:build]
+ end
+
+ def dist_tag
+ params['dist-tags'].each_key.first
+ end
+
+ def package_file_name
+ strong_memoize(:package_file_name) do
+ "#{name}-#{version}.tgz"
+ end
+ end
+
+ def attachment
+ strong_memoize(:attachment) do
+ params['_attachments'][package_file_name]
+ end
+ end
+
+ def file_params
+ {
+ file: CarrierWaveStringFile.new(Base64.decode64(attachment['data'])),
+ size: attachment['length'],
+ file_sha1: version_data[:dist][:shasum],
+ file_name: package_file_name
+ }
+ end
+
+ def package_dependencies
+ _version, versions_data = params[:versions].first
+ versions_data
+ end
+ end
+ end
+end
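The service reads the standard `npm publish` payload; a trimmed, hypothetical shape of the params it works with (all field values are invented):

    params = {
      name: '@acme/http-client',
      versions: {
        '1.0.0' => {
          dist: { shasum: 'aaaa1111...' },
          dependencies: { 'lodash' => '^4.17.0' }
        }
      },
      'dist-tags' => { 'latest' => '1.0.0' },
      '_attachments' => {
        '@acme/http-client-1.0.0.tgz' => { 'data' => Base64.encode64('tarball bytes'), 'length' => 13 }
      }
    }
    # version           => '1.0.0'
    # package_file_name => '@acme/http-client-1.0.0.tgz'
    # dist_tag          => 'latest'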
diff --git a/app/services/packages/npm/create_tag_service.rb b/app/services/packages/npm/create_tag_service.rb
new file mode 100644
index 00000000000..82974d0ca4b
--- /dev/null
+++ b/app/services/packages/npm/create_tag_service.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+module Packages
+ module Npm
+ class CreateTagService
+ include Gitlab::Utils::StrongMemoize
+
+ attr_reader :package, :tag_name
+
+ def initialize(package, tag_name)
+ @package = package
+ @tag_name = tag_name
+ end
+
+ def execute
+ if existing_tag.present?
+ existing_tag.update_column(:package_id, package.id)
+ existing_tag
+ else
+ package.tags.create!(name: tag_name)
+ end
+ end
+
+ private
+
+ def existing_tag
+ strong_memoize(:existing_tag) do
+ Packages::TagsFinder
+ .new(package.project, package.name, package_type: package.package_type)
+ .find_by_name(tag_name)
+ end
+ end
+ end
+ end
+end
diff --git a/app/services/packages/nuget/create_dependency_service.rb b/app/services/packages/nuget/create_dependency_service.rb
new file mode 100644
index 00000000000..2be5db732f6
--- /dev/null
+++ b/app/services/packages/nuget/create_dependency_service.rb
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+module Packages
+ module Nuget
+ class CreateDependencyService < BaseService
+ def initialize(package, dependencies = [])
+ @package = package
+ @dependencies = dependencies
+ end
+
+ def execute
+ return if @dependencies.empty?
+
+ @package.transaction do
+ create_dependency_links
+ create_dependency_link_metadata
+ end
+ end
+
+ private
+
+ def create_dependency_links
+ ::Packages::CreateDependencyService
+ .new(@package, dependencies_for_create_dependency_service)
+ .execute
+ end
+
+ def create_dependency_link_metadata
+ inserted_links = ::Packages::DependencyLink.preload_dependency
+ .for_package(@package)
+
+ return if inserted_links.empty?
+
+ rows = inserted_links.map do |dependency_link|
+ raw_dependency = raw_dependency_for(dependency_link.dependency)
+
+ next if raw_dependency[:target_framework].blank?
+
+ {
+ dependency_link_id: dependency_link.id,
+ target_framework: raw_dependency[:target_framework]
+ }
+ end
+
+ ::Gitlab::Database.bulk_insert(::Packages::Nuget::DependencyLinkMetadatum.table_name, rows.compact)
+ end
+
+ def raw_dependency_for(dependency)
+ name = dependency.name
+ version = dependency.version_pattern.presence
+
+ @dependencies.find do |raw_dependency|
+ raw_dependency[:name] == name && raw_dependency[:version] == version
+ end
+ end
+
+ def dependencies_for_create_dependency_service
+ names_and_versions = @dependencies.map do |dependency|
+ [dependency[:name], version_or_empty_string(dependency[:version])]
+ end.to_h
+
+ { 'dependencies' => names_and_versions }
+ end
+
+ def version_or_empty_string(version)
+ return '' if version.blank?
+
+ version
+ end
+ end
+ end
+end
diff --git a/app/services/packages/nuget/create_package_service.rb b/app/services/packages/nuget/create_package_service.rb
new file mode 100644
index 00000000000..68ad7f028e4
--- /dev/null
+++ b/app/services/packages/nuget/create_package_service.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+module Packages
+ module Nuget
+ class CreatePackageService < BaseService
+ TEMPORARY_PACKAGE_NAME = 'NuGet.Temporary.Package'
+ PACKAGE_VERSION = '0.0.0'
+
+ def execute
+ project.packages.nuget.create!(
+ name: TEMPORARY_PACKAGE_NAME,
+ version: "#{PACKAGE_VERSION}-#{uuid}"
+ )
+ end
+
+ private
+
+ def uuid
+ SecureRandom.uuid
+ end
+ end
+ end
+end
diff --git a/app/services/packages/nuget/metadata_extraction_service.rb b/app/services/packages/nuget/metadata_extraction_service.rb
new file mode 100644
index 00000000000..6fec398fab0
--- /dev/null
+++ b/app/services/packages/nuget/metadata_extraction_service.rb
@@ -0,0 +1,106 @@
+# frozen_string_literal: true
+
+module Packages
+ module Nuget
+ class MetadataExtractionService
+ include Gitlab::Utils::StrongMemoize
+
+ ExtractionError = Class.new(StandardError)
+
+ XPATHS = {
+ package_name: '//xmlns:package/xmlns:metadata/xmlns:id',
+ package_version: '//xmlns:package/xmlns:metadata/xmlns:version',
+ license_url: '//xmlns:package/xmlns:metadata/xmlns:licenseUrl',
+ project_url: '//xmlns:package/xmlns:metadata/xmlns:projectUrl',
+ icon_url: '//xmlns:package/xmlns:metadata/xmlns:iconUrl'
+ }.freeze
+
+ XPATH_DEPENDENCIES = '//xmlns:package/xmlns:metadata/xmlns:dependencies/xmlns:dependency'
+ XPATH_DEPENDENCY_GROUPS = '//xmlns:package/xmlns:metadata/xmlns:dependencies/xmlns:group'
+ XPATH_TAGS = '//xmlns:package/xmlns:metadata/xmlns:tags'
+
+ MAX_FILE_SIZE = 4.megabytes.freeze
+
+ def initialize(package_file_id)
+ @package_file_id = package_file_id
+ end
+
+ def execute
+ raise ExtractionError.new('invalid package file') unless valid_package_file?
+
+ extract_metadata(nuspec_file)
+ end
+
+ private
+
+ def package_file
+ strong_memoize(:package_file) do
+ ::Packages::PackageFile.find_by_id(@package_file_id)
+ end
+ end
+
+ def valid_package_file?
+ package_file &&
+ package_file.package&.nuget? &&
+ package_file.file.size.positive?
+ end
+
+ def extract_metadata(file)
+ doc = Nokogiri::XML(file)
+
+ XPATHS.transform_values { |query| doc.xpath(query).text.presence }
+ .compact
+ .tap do |metadata|
+ metadata[:package_dependencies] = extract_dependencies(doc)
+ metadata[:package_tags] = extract_tags(doc)
+ end
+ end
+
+ def extract_dependencies(doc)
+ dependencies = []
+
+ doc.xpath(XPATH_DEPENDENCIES).each do |node|
+ dependencies << extract_dependency(node)
+ end
+
+ doc.xpath(XPATH_DEPENDENCY_GROUPS).each do |group_node|
+ target_framework = group_node.attr("targetFramework")
+
+ group_node.xpath("xmlns:dependency").each do |node|
+ dependencies << extract_dependency(node).merge(target_framework: target_framework)
+ end
+ end
+
+ dependencies
+ end
+
+ def extract_dependency(node)
+ {
+ name: node.attr('id'),
+ version: node.attr('version')
+ }.compact
+ end
+
+ def extract_tags(doc)
+ tags = doc.xpath(XPATH_TAGS).text
+
+ return [] if tags.blank?
+
+ tags.split(::Packages::Tag::NUGET_TAGS_SEPARATOR)
+ end
+
+ def nuspec_file
+ package_file.file.use_file do |file_path|
+ Zip::File.open(file_path) do |zip_file|
+ entry = zip_file.glob('*.nuspec').first
+
+ raise ExtractionError.new('nuspec file not found') unless entry
+ raise ExtractionError.new('nuspec file too big') if entry.size > MAX_FILE_SIZE
+
+ entry.get_input_stream.read
+ end
+ end
+ end
+ end
+ end
+end
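The hash returned by `execute` follows the XPATHS map plus the dependency and tag extraction; an illustrative result for a hypothetical .nuspec (not a real package):

    Packages::Nuget::MetadataExtractionService.new(package_file.id).execute
    # => {
    #      package_name: 'MyCompany.MyLib',
    #      package_version: '1.2.3',
    #      license_url: 'https://example.com/license',
    #      package_dependencies: [
    #        { name: 'Newtonsoft.Json', version: '12.0.3', target_framework: 'net46' }
    #      ],
    #      package_tags: ['json', 'serializer']
    #    }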
diff --git a/app/services/packages/nuget/search_service.rb b/app/services/packages/nuget/search_service.rb
new file mode 100644
index 00000000000..f7e09e11819
--- /dev/null
+++ b/app/services/packages/nuget/search_service.rb
@@ -0,0 +1,101 @@
+# frozen_string_literal: true
+
+module Packages
+ module Nuget
+ class SearchService < BaseService
+ include Gitlab::Utils::StrongMemoize
+ include ActiveRecord::ConnectionAdapters::Quoting
+
+ MAX_PER_PAGE = 30
+ MAX_VERSIONS_PER_PACKAGE = 10
+ PRE_RELEASE_VERSION_MATCHING_TERM = '%-%'
+
+ DEFAULT_OPTIONS = {
+ include_prerelease_versions: true,
+ per_page: Kaminari.config.default_per_page,
+ padding: 0
+ }.freeze
+
+ def initialize(project, search_term, options = {})
+ @project = project
+ @search_term = search_term
+ @options = DEFAULT_OPTIONS.merge(options)
+
+ raise ArgumentError, 'negative per_page' if per_page.negative?
+ raise ArgumentError, 'negative padding' if padding.negative?
+ end
+
+ def execute
+ OpenStruct.new(
+ total_count: package_names.total_count,
+ results: search_packages
+ )
+ end
+
+ private
+
+ def search_packages
+ # custom query to get package names and versions as expected from the nuget search api
+ # See https://gitlab.com/gitlab-org/gitlab/-/merge_requests/24182#technical-notes
+ # and https://docs.microsoft.com/en-us/nuget/api/search-query-service-resource
+ subquery_name = :partition_subquery
+ arel_table = Arel::Table.new(:partition_subquery)
+ column_names = Packages::Package.column_names.map do |cn|
+ "#{subquery_name}.#{quote_column_name(cn)}"
+ end
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ pkgs = Packages::Package.select(column_names.join(','))
+ .from(package_names_partition, subquery_name)
+ .where(arel_table[:row_number].lteq(MAX_VERSIONS_PER_PACKAGE))
+
+ return pkgs if include_prerelease_versions?
+
+ # we can't use pkgs.without_version_like since we have a custom from
+ pkgs.where.not(arel_table[:version].matches(PRE_RELEASE_VERSION_MATCHING_TERM))
+ end
+
+ def package_names_partition
+ table_name = quote_table_name(Packages::Package.table_name)
+ name_column = "#{table_name}.#{quote_column_name('name')}"
+ created_at_column = "#{table_name}.#{quote_column_name('created_at')}"
+ select_sql = "ROW_NUMBER() OVER (PARTITION BY #{name_column} ORDER BY #{created_at_column} DESC) AS row_number, #{table_name}.*"
+
+ @project.packages
+ .select(select_sql)
+ .nuget
+ .has_version
+ .without_nuget_temporary_name
+ .with_name(package_names)
+ end
+
+ def package_names
+ strong_memoize(:package_names) do
+ pkgs = @project.packages
+ .nuget
+ .has_version
+ .without_nuget_temporary_name
+ .order_name
+ .select_distinct_name
+ pkgs = pkgs.without_version_like(PRE_RELEASE_VERSION_MATCHING_TERM) unless include_prerelease_versions?
+ pkgs = pkgs.search_by_name(@search_term) if @search_term.present?
+ pkgs.page(0) # we're using a padding
+ .per(per_page)
+ .padding(padding)
+ end
+ end
+
+ def include_prerelease_versions?
+ @options[:include_prerelease_versions]
+ end
+
+ def padding
+ @options[:padding]
+ end
+
+ def per_page
+ [@options[:per_page], MAX_PER_PAGE].min
+ end
+ end
+ end
+end
diff --git a/app/services/packages/nuget/sync_metadatum_service.rb b/app/services/packages/nuget/sync_metadatum_service.rb
new file mode 100644
index 00000000000..ca9cc4d5b78
--- /dev/null
+++ b/app/services/packages/nuget/sync_metadatum_service.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+module Packages
+ module Nuget
+ class SyncMetadatumService
+ include Gitlab::Utils::StrongMemoize
+
+ def initialize(package, metadata)
+ @package = package
+ @metadata = metadata
+ end
+
+ def execute
+ if blank_metadata?
+ metadatum.destroy! if metadatum.persisted?
+ else
+ metadatum.update!(
+ license_url: license_url,
+ project_url: project_url,
+ icon_url: icon_url
+ )
+ end
+ end
+
+ private
+
+ def metadatum
+ strong_memoize(:metadatum) do
+ @package.nuget_metadatum || @package.build_nuget_metadatum
+ end
+ end
+
+ def blank_metadata?
+ project_url.blank? && license_url.blank? && icon_url.blank?
+ end
+
+ def project_url
+ @metadata[:project_url]
+ end
+
+ def license_url
+ @metadata[:license_url]
+ end
+
+ def icon_url
+ @metadata[:icon_url]
+ end
+ end
+ end
+end
diff --git a/app/services/packages/nuget/update_package_from_metadata_service.rb b/app/services/packages/nuget/update_package_from_metadata_service.rb
new file mode 100644
index 00000000000..f72b1386985
--- /dev/null
+++ b/app/services/packages/nuget/update_package_from_metadata_service.rb
@@ -0,0 +1,125 @@
+# frozen_string_literal: true
+
+module Packages
+ module Nuget
+ class UpdatePackageFromMetadataService
+ include Gitlab::Utils::StrongMemoize
+ include ExclusiveLeaseGuard
+
+ # used by ExclusiveLeaseGuard
+ DEFAULT_LEASE_TIMEOUT = 1.hour.to_i.freeze
+
+ InvalidMetadataError = Class.new(StandardError)
+
+ def initialize(package_file)
+ @package_file = package_file
+ end
+
+ def execute
+ raise InvalidMetadataError.new('package name and/or package version not found in metadata') unless valid_metadata?
+
+ try_obtain_lease do
+ @package_file.transaction do
+ package = existing_package ? link_to_existing_package : update_linked_package
+
+ update_package(package)
+
+ # Updating file_name updates the path where the file is stored.
+ # We must pass the file again so that CarrierWave can handle the update
+ @package_file.update!(
+ file_name: package_filename,
+ file: @package_file.file
+ )
+ end
+ end
+ end
+
+ private
+
+ def update_package(package)
+ ::Packages::Nuget::SyncMetadatumService
+ .new(package, metadata.slice(:project_url, :license_url, :icon_url))
+ .execute
+ ::Packages::UpdateTagsService
+ .new(package, package_tags)
+ .execute
+ rescue => e
+ raise InvalidMetadataError, e.message
+ end
+
+ def valid_metadata?
+ package_name.present? && package_version.present?
+ end
+
+ def link_to_existing_package
+ package_to_destroy = @package_file.package
+ # Updating package_id updates the path where the file is stored.
+ # We must pass the file again so that CarrierWave can handle the update
+ @package_file.update!(
+ package_id: existing_package.id,
+ file: @package_file.file
+ )
+ package_to_destroy.destroy!
+ existing_package
+ end
+
+ def update_linked_package
+ @package_file.package.update!(
+ name: package_name,
+ version: package_version
+ )
+
+ ::Packages::Nuget::CreateDependencyService.new(@package_file.package, package_dependencies)
+ .execute
+ @package_file.package
+ end
+
+ def existing_package
+ strong_memoize(:existing_package) do
+ @package_file.project.packages
+ .nuget
+ .with_name(package_name)
+ .with_version(package_version)
+ .first
+ end
+ end
+
+ def package_name
+ metadata[:package_name]
+ end
+
+ def package_version
+ metadata[:package_version]
+ end
+
+ def package_dependencies
+ metadata.fetch(:package_dependencies, [])
+ end
+
+ def package_tags
+ metadata.fetch(:package_tags, [])
+ end
+
+ def metadata
+ strong_memoize(:metadata) do
+ ::Packages::Nuget::MetadataExtractionService.new(@package_file.id).execute
+ end
+ end
+
+ def package_filename
+ "#{package_name.downcase}.#{package_version.downcase}.nupkg"
+ end
+
+ # used by ExclusiveLeaseGuard
+ def lease_key
+ package_id = existing_package ? existing_package.id : @package_file.package_id
+ "packages:nuget:update_package_from_metadata_service:package:#{package_id}"
+ end
+
+ # used by ExclusiveLeaseGuard
+ def lease_timeout
+ DEFAULT_LEASE_TIMEOUT
+ end
+ end
+ end
+end
diff --git a/app/services/packages/pypi/create_package_service.rb b/app/services/packages/pypi/create_package_service.rb
new file mode 100644
index 00000000000..1313fc80e33
--- /dev/null
+++ b/app/services/packages/pypi/create_package_service.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+module Packages
+ module Pypi
+ class CreatePackageService < BaseService
+ include ::Gitlab::Utils::StrongMemoize
+
+ def execute
+ ::Packages::Package.transaction do
+ Packages::Pypi::Metadatum.upsert(
+ package_id: created_package.id,
+ required_python: params[:requires_python]
+ )
+
+ ::Packages::CreatePackageFileService.new(created_package, file_params).execute
+ end
+ end
+
+ private
+
+ def created_package
+ strong_memoize(:created_package) do
+ project
+ .packages
+ .pypi
+ .safe_find_or_create_by!(name: params[:name], version: params[:version])
+ end
+ end
+
+ def file_params
+ {
+ file: params[:content],
+ file_name: params[:content].original_filename,
+ file_md5: params[:md5_digest],
+ file_sha256: params[:sha256_digest]
+ }
+ end
+ end
+ end
+end
diff --git a/app/services/packages/remove_tag_service.rb b/app/services/packages/remove_tag_service.rb
new file mode 100644
index 00000000000..465b85506a6
--- /dev/null
+++ b/app/services/packages/remove_tag_service.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+module Packages
+ class RemoveTagService < BaseService
+ attr_reader :package_tag
+
+ def initialize(package_tag)
+ raise ArgumentError, "Package tag must be set" if package_tag.blank?
+
+ @package_tag = package_tag
+ end
+
+ def execute
+ package_tag.delete
+ end
+ end
+end
diff --git a/app/services/packages/update_tags_service.rb b/app/services/packages/update_tags_service.rb
new file mode 100644
index 00000000000..da50cd3479e
--- /dev/null
+++ b/app/services/packages/update_tags_service.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+module Packages
+ class UpdateTagsService
+ include Gitlab::Utils::StrongMemoize
+
+ def initialize(package, tags = [])
+ @package = package
+ @tags = tags
+ end
+
+ def execute
+ return if @tags.empty?
+
+ tags_to_destroy = existing_tags - @tags
+ tags_to_create = @tags - existing_tags
+
+ @package.tags.with_name(tags_to_destroy).delete_all if tags_to_destroy.any?
+ ::Gitlab::Database.bulk_insert(Packages::Tag.table_name, rows(tags_to_create)) if tags_to_create.any?
+ end
+
+ private
+
+ def existing_tags
+ strong_memoize(:existing_tags) do
+ @package.tag_names
+ end
+ end
+
+ def rows(tags)
+ now = Time.zone.now
+ tags.map do |tag|
+ {
+ package_id: @package.id,
+ name: tag,
+ created_at: now,
+ updated_at: now
+ }
+ end
+ end
+ end
+end
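The reconciliation is a plain array difference against the package's current tag names; a small sketch (tag names are made up):

    existing_tags = ['stable', 'beta']   # @package.tag_names
    incoming_tags = ['stable', 'rc']     # @tags passed to the service

    existing_tags - incoming_tags        # => ["beta"]  removed via delete_all
    incoming_tags - existing_tags        # => ["rc"]    bulk inserted as new rows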
diff --git a/app/uploaders/packages/package_file_uploader.rb b/app/uploaders/packages/package_file_uploader.rb
new file mode 100644
index 00000000000..20fcf0a7a32
--- /dev/null
+++ b/app/uploaders/packages/package_file_uploader.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+class Packages::PackageFileUploader < GitlabUploader
+ extend Workhorse::UploadPath
+ include ObjectStorage::Concern
+
+ storage_options Gitlab.config.packages
+
+ after :store, :schedule_background_upload
+
+ alias_method :upload, :model
+
+ def filename
+ model.file_name
+ end
+
+ def store_dir
+ dynamic_segment
+ end
+
+ private
+
+ def dynamic_segment
+ File.join(disk_hash[0..1], disk_hash[2..3], disk_hash,
+ 'packages', model.package.id.to_s, 'files', model.id.to_s)
+ end
+
+ def disk_hash
+ @disk_hash ||= Digest::SHA2.hexdigest(model.package.project_id.to_s)
+ end
+end
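The resulting storage layout nests a SHA2 digest of the project id above the package and file ids; roughly (ids and digest are placeholders):

    disk_hash = Digest::SHA2.hexdigest(project_id.to_s)
    File.join(disk_hash[0..1], disk_hash[2..3], disk_hash,
              'packages', package_id.to_s, 'files', package_file_id.to_s)
    # => "ab/cd/abcd.../packages/<package_id>/files/<package_file_id>"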
diff --git a/app/views/clusters/clusters/_multiple_clusters_message.html.haml b/app/views/clusters/clusters/_multiple_clusters_message.html.haml
new file mode 100644
index 00000000000..da3e128ba32
--- /dev/null
+++ b/app/views/clusters/clusters/_multiple_clusters_message.html.haml
@@ -0,0 +1,6 @@
+- autodevops_help_url = help_page_path('topics/autodevops/index.md', anchor: 'using-multiple-kubernetes-clusters')
+- help_link_start = '<a href="%{url}" target="_blank" rel="noopener noreferrer">'.html_safe
+- help_link_end = '</a>'.html_safe
+
+%p
+ = s_('ClusterIntegration|If you are setting up multiple clusters and are using Auto DevOps, %{help_link_start}read this first%{help_link_end}.').html_safe % { help_link_start: help_link_start % { url: autodevops_help_url }, help_link_end: help_link_end }
diff --git a/app/views/clusters/clusters/_sidebar.html.haml b/app/views/clusters/clusters/_sidebar.html.haml
index 24a74c59b97..31add011bfa 100644
--- a/app/views/clusters/clusters/_sidebar.html.haml
+++ b/app/views/clusters/clusters/_sidebar.html.haml
@@ -5,4 +5,4 @@
%p
= clusterable.learn_more_link
-= render_if_exists 'clusters/multiple_clusters_message'
+= render 'clusters/clusters/multiple_clusters_message'
diff --git a/app/views/devise/mailer/_confirmation_instructions_secondary.html.haml b/app/views/devise/mailer/_confirmation_instructions_secondary.html.haml
index ccc3e734276..f14d50eaf71 100644
--- a/app/views/devise/mailer/_confirmation_instructions_secondary.html.haml
+++ b/app/views/devise/mailer/_confirmation_instructions_secondary.html.haml
@@ -1,5 +1,5 @@
#content
- = email_default_heading("#{sanitize_name(@resource.user.name)}, you've added an additional email!")
+ = email_default_heading("#{sanitize_name(@resource.user.name)}, confirm your email address now!")
%p Click the link below to confirm your email address (#{@resource.email})
#cta
= link_to 'Confirm your email address', confirmation_url(@resource, confirmation_token: @token)
diff --git a/app/views/devise/mailer/_confirmation_instructions_secondary.text.erb b/app/views/devise/mailer/_confirmation_instructions_secondary.text.erb
index a3b28cb0b84..b91498ccfae 100644
--- a/app/views/devise/mailer/_confirmation_instructions_secondary.text.erb
+++ b/app/views/devise/mailer/_confirmation_instructions_secondary.text.erb
@@ -1,4 +1,4 @@
-<%= @resource.user.name %>, you've added an additional email!
+<%= @resource.user.name %>, confirm your email address now!
Use the link below to confirm your email address (<%= @resource.email %>)
diff --git a/app/views/shared/boards/components/sidebar/_milestone.html.haml b/app/views/shared/boards/components/sidebar/_milestone.html.haml
index b15d60002fc..8ba6d62a4ff 100644
--- a/app/views/shared/boards/components/sidebar/_milestone.html.haml
+++ b/app/views/shared/boards/components/sidebar/_milestone.html.haml
@@ -18,7 +18,8 @@
.dropdown
%button.dropdown-menu-toggle.js-milestone-select.js-issue-board-sidebar{ type: "button", data: { toggle: "dropdown", show_no: "true", field_name: "issue[milestone_id]", milestones: milestones_filter_path(format: :json), ability_name: "issue", use_id: "true", default_no: "true" },
":data-selected" => "milestoneTitle",
- ":data-issuable-id" => "issue.iid" }
+ ":data-issuable-id" => "issue.iid",
+ ":data-project-id" => "issue.project_id" }
= _("Milestone")
= icon("chevron-down")
.dropdown-menu.dropdown-select.dropdown-menu-selectable
diff --git a/app/views/shared/issuable/_sidebar.html.haml b/app/views/shared/issuable/_sidebar.html.haml
index 00113b2c2c0..bf39053301a 100644
--- a/app/views/shared/issuable/_sidebar.html.haml
+++ b/app/views/shared/issuable/_sidebar.html.haml
@@ -45,7 +45,8 @@
= link_to _('Edit'), '#', class: 'js-sidebar-dropdown-toggle edit-link float-right', data: { qa_selector: "edit_milestone_link", track_label: "right_sidebar", track_property: "milestone", track_event: "click_edit_button", track_value: "" }
.value.hide-collapsed
- if milestone.present?
- = link_to milestone[:title], milestone[:web_url], class: "bold has-tooltip", title: sidebar_milestone_remaining_days(milestone), data: { container: "body", html: 'true', boundary: 'viewport', qa_selector: 'milestone_link', qa_title: milestone[:title] }
+ - milestone_title = milestone[:expired] ? _("%{milestone_name} (Past due)").html_safe % { milestone_name: milestone[:title] } : milestone[:title]
+ = link_to milestone_title, milestone[:web_url], class: "bold has-tooltip", title: sidebar_milestone_remaining_days(milestone), data: { container: "body", html: 'true', boundary: 'viewport', qa_selector: 'milestone_link', qa_title: milestone[:title] }
- else
%span.no-value
= _('None')
diff --git a/changelogs/unreleased/196066-add-milestone-expired-info.yml b/changelogs/unreleased/196066-add-milestone-expired-info.yml
new file mode 100644
index 00000000000..ef8fd039ef9
--- /dev/null
+++ b/changelogs/unreleased/196066-add-milestone-expired-info.yml
@@ -0,0 +1,5 @@
+---
+title: Show expired milestones at the bottom of the list within dropdown
+merge_request: 35595
+author:
+type: changed
diff --git a/changelogs/unreleased/212229-move-features-to-core-multiple-kubernetes-clusters.yml b/changelogs/unreleased/212229-move-features-to-core-multiple-kubernetes-clusters.yml
new file mode 100644
index 00000000000..9898b6449b6
--- /dev/null
+++ b/changelogs/unreleased/212229-move-features-to-core-multiple-kubernetes-clusters.yml
@@ -0,0 +1,5 @@
+---
+title: 'Multiple Kubernetes clusters now available in GitLab core'
+merge_request: 35094
+author:
+type: changed
diff --git a/changelogs/unreleased/219455-fe-inapplicable-tooltip-message.yml b/changelogs/unreleased/219455-fe-inapplicable-tooltip-message.yml
new file mode 100644
index 00000000000..f1200160eb9
--- /dev/null
+++ b/changelogs/unreleased/219455-fe-inapplicable-tooltip-message.yml
@@ -0,0 +1,5 @@
+---
+title: Add inapplicable reason in MR suggestion Tooltip
+merge_request: 35276
+author:
+type: changed
diff --git a/changelogs/unreleased/226874-fix-pages-url-path.yml b/changelogs/unreleased/226874-fix-pages-url-path.yml
new file mode 100644
index 00000000000..082edf3de80
--- /dev/null
+++ b/changelogs/unreleased/226874-fix-pages-url-path.yml
@@ -0,0 +1,5 @@
+---
+title: Fix Project#pages_url not to downcase url path
+merge_request: 36183
+author:
+type: fixed
diff --git a/changelogs/unreleased/groups_routing_priority.yml b/changelogs/unreleased/groups_routing_priority.yml
new file mode 100644
index 00000000000..042da076018
--- /dev/null
+++ b/changelogs/unreleased/groups_routing_priority.yml
@@ -0,0 +1,5 @@
+---
+title: Fix routing for paths starting with help and projects
+merge_request: 36048
+author:
+type: fixed
diff --git a/changelogs/unreleased/rc-enforce_unique_metrics_id_across_project.yml b/changelogs/unreleased/rc-enforce_unique_metrics_id_across_project.yml
new file mode 100644
index 00000000000..4ad2f1722f4
--- /dev/null
+++ b/changelogs/unreleased/rc-enforce_unique_metrics_id_across_project.yml
@@ -0,0 +1,5 @@
+---
+title: Enforce Prometheus metric uniqueness across project scope
+merge_request: 35566
+author:
+type: fixed
diff --git a/changelogs/unreleased/remove-group_milestone_descendants.yml b/changelogs/unreleased/remove-group_milestone_descendants.yml
new file mode 100644
index 00000000000..770b4ec12de
--- /dev/null
+++ b/changelogs/unreleased/remove-group_milestone_descendants.yml
@@ -0,0 +1,5 @@
+---
+title: Include project and subgroup milestones on Roadmap page
+merge_request: 35973
+author:
+type: added
diff --git a/changelogs/unreleased/unconfirm-wrongfully-verified-email-records.yml b/changelogs/unreleased/unconfirm-wrongfully-verified-email-records.yml
new file mode 100644
index 00000000000..ebf216836f8
--- /dev/null
+++ b/changelogs/unreleased/unconfirm-wrongfully-verified-email-records.yml
@@ -0,0 +1,5 @@
+---
+title: Unconfirm wrongfully verified email addresses and user accounts
+merge_request: 35492
+author:
+type: security
diff --git a/config/initializers/1_settings.rb b/config/initializers/1_settings.rb
index ff7090c3fa3..1a2401c3396 100644
--- a/config/initializers/1_settings.rb
+++ b/config/initializers/1_settings.rb
@@ -347,12 +347,10 @@ Settings.uploads['object_store']['remote_directory'] ||= 'uploads'
#
# Packages
#
-Gitlab.ee do
- Settings['packages'] ||= Settingslogic.new({})
- Settings.packages['enabled'] = true if Settings.packages['enabled'].nil?
- Settings.packages['storage_path'] = Settings.absolute(Settings.packages['storage_path'] || File.join(Settings.shared['path'], "packages"))
- Settings.packages['object_store'] = ObjectStoreSettings.legacy_parse(Settings.packages['object_store'])
-end
+Settings['packages'] ||= Settingslogic.new({})
+Settings.packages['enabled'] = true if Settings.packages['enabled'].nil?
+Settings.packages['storage_path'] = Settings.absolute(Settings.packages['storage_path'] || File.join(Settings.shared['path'], "packages"))
+Settings.packages['object_store'] = ObjectStoreSettings.legacy_parse(Settings.packages['object_store'])
#
# Dependency Proxy
diff --git a/config/prometheus/cluster_metrics.yml b/config/prometheus/cluster_metrics.yml
index f2a41e4c337..1e396f4bbbd 100644
--- a/config/prometheus/cluster_metrics.yml
+++ b/config/prometheus/cluster_metrics.yml
@@ -1,63 +1,40 @@
+dashboard: 'Cluster health'
+priority: 1
+panel_groups:
- group: Cluster Health
- priority: 1
- metrics:
+ priority: 10
+ panels:
- title: "CPU Usage"
+ type: "area-chart"
y_label: "CPU (cores)"
- required_metrics: ['container_cpu_usage_seconds_total']
weight: 1
- queries:
- - query_range: 'avg(sum(rate(container_cpu_usage_seconds_total{id="/"}[15m])) by (job)) without (job)'
- label: Usage (cores)
- unit: "cores"
- appearance:
- line:
- width: 2
- area:
- opacity: 0
- - query_range: 'sum(kube_pod_container_resource_requests_cpu_cores{kubernetes_namespace="gitlab-managed-apps"})'
- label: Requested (cores)
- unit: "cores"
- appearance:
- line:
- width: 2
- area:
- opacity: 0
- - query_range: 'sum(kube_node_status_capacity_cpu_cores{kubernetes_namespace="gitlab-managed-apps"})'
- label: Capacity (cores)
- unit: "cores"
- appearance:
- line:
- type: 'dashed'
- width: 2
- area:
- opacity: 0
- - title: "Memory usage"
+ metrics:
+ - id: cluster_health_cpu_usage
+ query_range: 'avg(sum(rate(container_cpu_usage_seconds_total{id="/"}[15m])) by (job)) without (job)'
+ unit: cores
+ label: Usage (cores)
+ - id: cluster_health_cpu_requested
+ query_range: 'sum(kube_pod_container_resource_requests_cpu_cores{kubernetes_namespace="gitlab-managed-apps"})'
+ unit: cores
+ label: Requested (cores)
+ - id: cluster_health_cpu_capacity
+ query_range: 'sum(kube_node_status_capacity_cpu_cores{kubernetes_namespace="gitlab-managed-apps"})'
+ unit: cores
+ label: Capacity (cores)
+ - title: "Memory Usage"
+ type: "area-chart"
y_label: "Memory (GiB)"
- required_metrics: ['container_memory_usage_bytes']
weight: 1
- queries:
- - query_range: 'avg(sum(container_memory_usage_bytes{id="/"}) by (job)) without (job) / 2^30'
- label: Usage (GiB)
- unit: "GiB"
- appearance:
- line:
- width: 2
- area:
- opacity: 0
- - query_range: 'sum(kube_pod_container_resource_requests_memory_bytes{kubernetes_namespace="gitlab-managed-apps"})/2^30'
- label: Requested (GiB)
- unit: "GiB"
- appearance:
- line:
- width: 2
- area:
- opacity: 0
- - query_range: 'sum(kube_node_status_capacity_memory_bytes{kubernetes_namespace="gitlab-managed-apps"})/2^30'
- label: Capacity (GiB)
- unit: "GiB"
- appearance:
- line:
- type: 'dashed'
- width: 2
- area:
- opacity: 0
+ metrics:
+ - id: cluster_health_memory_usage
+ query_range: 'avg(sum(container_memory_usage_bytes{id="/"}) by (job)) without (job) / 2^30'
+ unit: GiB
+ label: Usage (GiB)
+ - id: cluster_health_memory_requested
+ query_range: 'sum(kube_pod_container_resource_requests_memory_bytes{kubernetes_namespace="gitlab-managed-apps"})/2^30'
+ unit: GiB
+ label: Requested (GiB)
+ - id: cluster_health_memory_capacity
+ query_range: 'sum(kube_node_status_capacity_memory_bytes{kubernetes_namespace="gitlab-managed-apps"})/2^30'
+ unit: GiB
+ label: Capacity (GiB)
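The cluster health file now uses the regular metrics-dashboard schema: a top-level `dashboard`, then `panel_groups` > `panels` > `metrics`, with every metric carrying a stable `id`. A quick way to sanity-check the restructured file is to load it as plain YAML; this is an illustrative sketch only, not GitLab's own dashboard validator.

```ruby
# Illustrative check (plain YAML, not GitLab's dashboard validation service):
# every metric in the restructured dashboard should carry an `id`.
require 'yaml'

dashboard = YAML.safe_load(File.read('config/prometheus/cluster_metrics.yml'))

dashboard['panel_groups'].each do |group|
  group['panels'].each do |panel|
    panel['metrics'].each do |metric|
      raise "metric without id: #{metric.inspect}" unless metric['id']
    end
  end
end
```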
diff --git a/config/prometheus/queries_cluster_metrics.yml b/config/prometheus/queries_cluster_metrics.yml
new file mode 100644
index 00000000000..bec3ba22d83
--- /dev/null
+++ b/config/prometheus/queries_cluster_metrics.yml
@@ -0,0 +1,65 @@
+# Most likely this file can be removed, but until we are sure and have the capacity to tackle that, I've
+# only moved it and added https://gitlab.com/gitlab-org/gitlab/-/issues/225869 to track the work needed to clean up the codebase.
+- group: Cluster Health
+ priority: 1
+ metrics:
+ - title: "CPU Usage"
+ y_label: "CPU (cores)"
+ required_metrics: ['container_cpu_usage_seconds_total']
+ weight: 1
+ queries:
+ - query_range: 'avg(sum(rate(container_cpu_usage_seconds_total{id="/"}[15m])) by (job)) without (job)'
+ label: Usage (cores)
+ unit: "cores"
+ appearance:
+ line:
+ width: 2
+ area:
+ opacity: 0
+ - query_range: 'sum(kube_pod_container_resource_requests_cpu_cores{kubernetes_namespace="gitlab-managed-apps"})'
+ label: Requested (cores)
+ unit: "cores"
+ appearance:
+ line:
+ width: 2
+ area:
+ opacity: 0
+ - query_range: 'sum(kube_node_status_capacity_cpu_cores{kubernetes_namespace="gitlab-managed-apps"})'
+ label: Capacity (cores)
+ unit: "cores"
+ appearance:
+ line:
+ type: 'dashed'
+ width: 2
+ area:
+ opacity: 0
+ - title: "Memory usage"
+ y_label: "Memory (GiB)"
+ required_metrics: ['container_memory_usage_bytes']
+ weight: 1
+ queries:
+ - query_range: 'avg(sum(container_memory_usage_bytes{id="/"}) by (job)) without (job) / 2^30'
+ label: Usage (GiB)
+ unit: "GiB"
+ appearance:
+ line:
+ width: 2
+ area:
+ opacity: 0
+ - query_range: 'sum(kube_pod_container_resource_requests_memory_bytes{kubernetes_namespace="gitlab-managed-apps"})/2^30'
+ label: Requested (GiB)
+ unit: "GiB"
+ appearance:
+ line:
+ width: 2
+ area:
+ opacity: 0
+ - query_range: 'sum(kube_node_status_capacity_memory_bytes{kubernetes_namespace="gitlab-managed-apps"})/2^30'
+ label: Capacity (GiB)
+ unit: "GiB"
+ appearance:
+ line:
+ type: 'dashed'
+ width: 2
+ area:
+ opacity: 0
diff --git a/config/routes.rb b/config/routes.rb
index 73be8643611..9739d8fe0ff 100644
--- a/config/routes.rb
+++ b/config/routes.rb
@@ -242,6 +242,8 @@ Rails.application.routes.draw do
post :preview_markdown
end
+ draw :group
+
resources :projects, only: [:index, :new, :create]
get '/projects/:id' => 'projects#resolve'
@@ -258,7 +260,6 @@ Rails.application.routes.draw do
draw :admin
draw :profile
draw :dashboard
- draw :group
draw :user
draw :project
diff --git a/db/migrate/20200629192638_add_uniq_index_on_metric_identifier_and_project_id.rb b/db/migrate/20200629192638_add_uniq_index_on_metric_identifier_and_project_id.rb
new file mode 100644
index 00000000000..3e77f80b5e8
--- /dev/null
+++ b/db/migrate/20200629192638_add_uniq_index_on_metric_identifier_and_project_id.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+class AddUniqIndexOnMetricIdentifierAndProjectId < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ add_concurrent_index :prometheus_metrics, [:identifier, :project_id], unique: true
+ end
+
+ def down
+ remove_concurrent_index :prometheus_metrics, [:identifier, :project_id]
+ end
+end
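`add_concurrent_index ... unique: true` fails if duplicate `(identifier, project_id)` pairs already exist. A hedged pre-migration check from a Rails console, assuming the standard `PrometheusMetric` model backed by the `prometheus_metrics` table:

```ruby
# Lists (identifier, project_id) pairs that occur more than once and would
# violate the new unique index; an empty hash means the migration is safe.
PrometheusMetric
  .group(:identifier, :project_id)
  .having('COUNT(*) > 1')
  .count
```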
diff --git a/db/post_migrate/20200615111857_unconfirm_wrongfully_verified_emails.rb b/db/post_migrate/20200615111857_unconfirm_wrongfully_verified_emails.rb
new file mode 100644
index 00000000000..12cb79a8d3a
--- /dev/null
+++ b/db/post_migrate/20200615111857_unconfirm_wrongfully_verified_emails.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+class UnconfirmWrongfullyVerifiedEmails < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ INTERVAL = 5.minutes.to_i
+ BATCH_SIZE = 1000
+ MIGRATION = 'WrongfullyConfirmedEmailUnconfirmer'
+ EMAIL_INDEX_NAME = 'tmp_index_for_email_unconfirmation_migration'
+
+ class Email < ActiveRecord::Base
+ include EachBatch
+ end
+
+ def up
+ add_concurrent_index :emails, :id, where: 'confirmed_at IS NOT NULL', name: EMAIL_INDEX_NAME
+
+ queue_background_migration_jobs_by_range_at_intervals(Email,
+ MIGRATION,
+ INTERVAL,
+ batch_size: BATCH_SIZE)
+ end
+
+ def down
+ remove_concurrent_index_by_name(:emails, EMAIL_INDEX_NAME)
+ end
+end
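This post-deployment migration only schedules work: `queue_background_migration_jobs_by_range_at_intervals` walks the `emails` table in batches and enqueues one Sidekiq job per ID range. Roughly, its effect is the sketch below (a simplification, not the helper's actual implementation):

```ruby
# Rough sketch of the scheduling behaviour: one background-migration job per
# batch of 1000 email IDs, each delayed a further INTERVAL (5 minutes).
Email.each_batch(of: BATCH_SIZE) do |batch, index|
  start_id, end_id = batch.pluck(Arel.sql('MIN(id), MAX(id)')).first

  BackgroundMigrationWorker.perform_in(index * INTERVAL, MIGRATION, [start_id, end_id])
end
```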
diff --git a/db/structure.sql b/db/structure.sql
index b3cbb733313..20e27b4350d 100644
--- a/db/structure.sql
+++ b/db/structure.sql
@@ -19929,6 +19929,8 @@ CREATE INDEX index_prometheus_metrics_on_group ON public.prometheus_metrics USIN
CREATE UNIQUE INDEX index_prometheus_metrics_on_identifier ON public.prometheus_metrics USING btree (identifier);
+CREATE UNIQUE INDEX index_prometheus_metrics_on_identifier_and_project_id ON public.prometheus_metrics USING btree (identifier, project_id);
+
CREATE INDEX index_prometheus_metrics_on_project_id ON public.prometheus_metrics USING btree (project_id);
CREATE INDEX index_protected_branch_merge_access ON public.protected_branch_merge_access_levels USING btree (protected_branch_id);
@@ -20493,6 +20495,8 @@ CREATE INDEX tmp_index_ci_pipelines_lock_version ON public.ci_pipelines USING bt
CREATE INDEX tmp_index_ci_stages_lock_version ON public.ci_stages USING btree (id) WHERE (lock_version IS NULL);
+CREATE INDEX tmp_index_for_email_unconfirmation_migration ON public.emails USING btree (id) WHERE (confirmed_at IS NOT NULL);
+
CREATE UNIQUE INDEX unique_merge_request_metrics_by_merge_request_id ON public.merge_request_metrics USING btree (merge_request_id);
CREATE UNIQUE INDEX users_security_dashboard_projects_unique_index ON public.users_security_dashboard_projects USING btree (project_id, user_id);
@@ -23538,6 +23542,7 @@ COPY "schema_migrations" (version) FROM STDIN;
20200610130002
20200613104045
20200615083635
+20200615111857
20200615121217
20200615123055
20200615193524
@@ -23579,6 +23584,7 @@ COPY "schema_migrations" (version) FROM STDIN;
20200625190458
20200626060151
20200626130220
+20200629192638
20200630110826
20200701093859
20200702123805
diff --git a/doc/README.md b/doc/README.md
index 725cefaf802..d40741e0174 100644
--- a/doc/README.md
+++ b/doc/README.md
@@ -316,11 +316,11 @@ The following documentation relates to the DevOps **Configure** stage:
| [GitLab ChatOps](ci/chatops/README.md) | Interact with CI/CD jobs through chat services. |
| [Installing Applications](user/project/clusters/index.md#installing-applications) | Install Helm charts such as Ingress and Prometheus on Kubernetes. |
| [Mattermost slash commands](user/project/integrations/mattermost_slash_commands.md) | Enable and use slash commands from within Mattermost. |
-| [Multiple Kubernetes Clusters](user/project/clusters/index.md#multiple-kubernetes-clusters-premium) **(PREMIUM)** | Associate more than one Kubernetes clusters to your project. |
+| [Multiple Kubernetes Clusters](user/project/clusters/index.md#multiple-kubernetes-clusters) | Associate more than one Kubernetes cluster with your project. |
| [Protected variables](ci/variables/README.md#protect-a-custom-variable) | Restrict variables to protected branches and tags. |
| [Serverless](user/project/clusters/serverless/index.md) | Run serverless workloads on Kubernetes. |
| [Slack slash commands](user/project/integrations/slack_slash_commands.md) | Enable and use slash commands from within Slack. |
-| [Manage your infrastructure with Terraform](user/infrastructure/index.md) | Manage your infrastructure as you run your CI/CD pipeline. |
+| [Manage your infrastructure with Terraform](user/infrastructure/index.md) | Manage your infrastructure as you run your CI/CD pipeline. |
<div align="right">
<a type="button" class="btn btn-default" href="#overview">
diff --git a/doc/administration/troubleshooting/elasticsearch.md b/doc/administration/troubleshooting/elasticsearch.md
index 12b82e4bc48..e13261e3074 100644
--- a/doc/administration/troubleshooting/elasticsearch.md
+++ b/doc/administration/troubleshooting/elasticsearch.md
@@ -261,6 +261,9 @@ Beyond that, you will want to review the error. If it is:
- Specifically from the indexer, this could be a bug/issue and should be escalated to
GitLab support.
- An OS issue, you will want to reach out to your systems administrator.
+- A `Faraday::TimeoutError (execution expired)` error **and** you're using a proxy,
+ [set a custom `gitlab_rails['env']` environment variable, called `no_proxy`](https://docs.gitlab.com/omnibus/settings/environment-variables.html)
+ with the IP address of your Elasticsearch host.
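For Omnibus installations, that `no_proxy` entry goes into `/etc/gitlab/gitlab.rb` followed by `sudo gitlab-ctl reconfigure`; the address below is a placeholder for your Elasticsearch host, and any existing proxy variables in the hash should be kept alongside it.

```ruby
# /etc/gitlab/gitlab.rb -- placeholder IP; keep any existing entries in this hash.
gitlab_rails['env'] = {
  "no_proxy" => "10.0.0.5"
}
```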
### Troubleshooting performance
diff --git a/doc/api/group_milestones.md b/doc/api/group_milestones.md
index e157655a713..d16c0329f72 100644
--- a/doc/api/group_milestones.md
+++ b/doc/api/group_milestones.md
@@ -54,6 +54,7 @@ Example Response:
"state": "active",
"updated_at": "2013-10-02T09:24:18Z",
"created_at": "2013-10-02T09:24:18Z",
+ "expired": false,
"web_url": "https://gitlab.com/groups/gitlab-org/-/milestones/42"
}
]
diff --git a/doc/api/milestones.md b/doc/api/milestones.md
index b5702c7d6e0..d736d00779c 100644
--- a/doc/api/milestones.md
+++ b/doc/api/milestones.md
@@ -51,7 +51,8 @@ Example Response:
"start_date": "2013-11-10",
"state": "active",
"updated_at": "2013-10-02T09:24:18Z",
- "created_at": "2013-10-02T09:24:18Z"
+ "created_at": "2013-10-02T09:24:18Z",
+ "expired": false
}
]
```
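The new `expired` field is returned for every milestone in the REST API. A small sketch of reading it with Ruby's standard library (host, project ID, and token are placeholders):

```ruby
# Placeholder host, project ID, and token: lists milestones and prints the new field.
require 'net/http'
require 'json'

uri = URI('https://gitlab.example.com/api/v4/projects/1/milestones')
request = Net::HTTP::Get.new(uri)
request['PRIVATE-TOKEN'] = '<your_access_token>'

response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: true) { |http| http.request(request) }

JSON.parse(response.body).each do |milestone|
  puts "#{milestone['title']}: expired=#{milestone['expired']}"
end
```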
diff --git a/doc/ci/variables/README.md b/doc/ci/variables/README.md
index a14c2a4f098..5feef02ea2f 100644
--- a/doc/ci/variables/README.md
+++ b/doc/ci/variables/README.md
@@ -893,8 +893,8 @@ if [[ -d "/builds/gitlab-examples/ci-debug-trace/.git" ]]; then
++ CI_SERVER_VERSION_PATCH=0
++ export CI_SERVER_REVISION=f4cc00ae823
++ CI_SERVER_REVISION=f4cc00ae823
-++ export GITLAB_FEATURES=audit_events,burndown_charts,code_owners,contribution_analytics,description_diffs,elastic_search,group_bulk_edit,group_burndown_charts,group_webhooks,issuable_default_templates,issue_weights,jenkins_integration,ldap_group_sync,member_lock,merge_request_approvers,multiple_issue_assignees,multiple_ldap_servers,multiple_merge_request_assignees,protected_refs_for_users,push_rules,related_issues,repository_mirrors,repository_size_limit,scoped_issue_board,usage_quotas,visual_review_app,wip_limits,adjourned_deletion_for_projects_and_groups,admin_audit_log,auditor_user,batch_comments,blocking_merge_requests,board_assignee_lists,board_milestone_lists,ci_cd_projects,cluster_deployments,code_analytics,code_owner_approval_required,commit_committer_check,cross_project_pipelines,custom_file_templates,custom_file_templates_for_namespace,custom_project_templates,custom_prometheus_metrics,cycle_analytics_for_groups,db_load_balancing,default_project_deletion_protection,dependency_proxy,deploy_board,design_management,email_additional_text,extended_audit_events,external_authorization_service_api_management,feature_flags,file_locks,geo,github_project_service_integration,group_allowed_email_domains,group_project_templates,group_saml,issues_analytics,jira_dev_panel_integration,ldap_group_sync_filter,merge_pipelines,merge_request_performance_metrics,merge_trains,metrics_reports,multiple_approval_rules,multiple_clusters,multiple_group_issue_boards,object_storage,operations_dashboard,packages,productivity_analytics,project_aliases,protected_environments,reject_unsigned_commits,required_ci_templates,scoped_labels,service_desk,smartcard_auth,group_timelogs,type_of_work_analytics,unprotection_restrictions,ci_project_subscriptions,container_scanning,dast,dependency_scanning,epics,group_ip_restriction,incident_management,insights,license_management,personal_access_token_expiration_policy,pod_logs,prometheus_alerts,pseudonymizer,report_approver_rules,sast,security_dashboard,tracing,web_ide_terminal
-++ GITLAB_FEATURES=audit_events,burndown_charts,code_owners,contribution_analytics,description_diffs,elastic_search,group_bulk_edit,group_burndown_charts,group_webhooks,issuable_default_templates,issue_weights,jenkins_integration,ldap_group_sync,member_lock,merge_request_approvers,multiple_issue_assignees,multiple_ldap_servers,multiple_merge_request_assignees,protected_refs_for_users,push_rules,related_issues,repository_mirrors,repository_size_limit,scoped_issue_board,usage_quotas,visual_review_app,wip_limits,adjourned_deletion_for_projects_and_groups,admin_audit_log,auditor_user,batch_comments,blocking_merge_requests,board_assignee_lists,board_milestone_lists,ci_cd_projects,cluster_deployments,code_analytics,code_owner_approval_required,commit_committer_check,cross_project_pipelines,custom_file_templates,custom_file_templates_for_namespace,custom_project_templates,custom_prometheus_metrics,cycle_analytics_for_groups,db_load_balancing,default_project_deletion_protection,dependency_proxy,deploy_board,design_management,email_additional_text,extended_audit_events,external_authorization_service_api_management,feature_flags,file_locks,geo,github_project_service_integration,group_allowed_email_domains,group_project_templates,group_saml,issues_analytics,jira_dev_panel_integration,ldap_group_sync_filter,merge_pipelines,merge_request_performance_metrics,merge_trains,metrics_reports,multiple_approval_rules,multiple_clusters,multiple_group_issue_boards,object_storage,operations_dashboard,packages,productivity_analytics,project_aliases,protected_environments,reject_unsigned_commits,required_ci_templates,scoped_labels,service_desk,smartcard_auth,group_timelogs,type_of_work_analytics,unprotection_restrictions,ci_project_subscriptions,cluster_health,container_scanning,dast,dependency_scanning,epics,group_ip_restriction,incident_management,insights,license_management,personal_access_token_expiration_policy,pod_logs,prometheus_alerts,pseudonymizer,report_approver_rules,sast,security_dashboard,tracing,web_ide_terminal
+++ export GITLAB_FEATURES=audit_events,burndown_charts,code_owners,contribution_analytics,description_diffs,elastic_search,group_bulk_edit,group_burndown_charts,group_webhooks,issuable_default_templates,issue_weights,jenkins_integration,ldap_group_sync,member_lock,merge_request_approvers,multiple_issue_assignees,multiple_ldap_servers,multiple_merge_request_assignees,protected_refs_for_users,push_rules,related_issues,repository_mirrors,repository_size_limit,scoped_issue_board,usage_quotas,visual_review_app,wip_limits,adjourned_deletion_for_projects_and_groups,admin_audit_log,auditor_user,batch_comments,blocking_merge_requests,board_assignee_lists,board_milestone_lists,ci_cd_projects,cluster_deployments,code_analytics,code_owner_approval_required,commit_committer_check,cross_project_pipelines,custom_file_templates,custom_file_templates_for_namespace,custom_project_templates,custom_prometheus_metrics,cycle_analytics_for_groups,db_load_balancing,default_project_deletion_protection,dependency_proxy,deploy_board,design_management,email_additional_text,extended_audit_events,external_authorization_service_api_management,feature_flags,file_locks,geo,github_project_service_integration,group_allowed_email_domains,group_project_templates,group_saml,issues_analytics,jira_dev_panel_integration,ldap_group_sync_filter,merge_pipelines,merge_request_performance_metrics,merge_trains,metrics_reports,multiple_approval_rules,multiple_group_issue_boards,object_storage,operations_dashboard,packages,productivity_analytics,project_aliases,protected_environments,reject_unsigned_commits,required_ci_templates,scoped_labels,service_desk,smartcard_auth,group_timelogs,type_of_work_analytics,unprotection_restrictions,ci_project_subscriptions,container_scanning,dast,dependency_scanning,epics,group_ip_restriction,incident_management,insights,license_management,personal_access_token_expiration_policy,pod_logs,prometheus_alerts,pseudonymizer,report_approver_rules,sast,security_dashboard,tracing,web_ide_terminal
+++ GITLAB_FEATURES=audit_events,burndown_charts,code_owners,contribution_analytics,description_diffs,elastic_search,group_bulk_edit,group_burndown_charts,group_webhooks,issuable_default_templates,issue_weights,jenkins_integration,ldap_group_sync,member_lock,merge_request_approvers,multiple_issue_assignees,multiple_ldap_servers,multiple_merge_request_assignees,protected_refs_for_users,push_rules,related_issues,repository_mirrors,repository_size_limit,scoped_issue_board,usage_quotas,visual_review_app,wip_limits,adjourned_deletion_for_projects_and_groups,admin_audit_log,auditor_user,batch_comments,blocking_merge_requests,board_assignee_lists,board_milestone_lists,ci_cd_projects,cluster_deployments,code_analytics,code_owner_approval_required,commit_committer_check,cross_project_pipelines,custom_file_templates,custom_file_templates_for_namespace,custom_project_templates,custom_prometheus_metrics,cycle_analytics_for_groups,db_load_balancing,default_project_deletion_protection,dependency_proxy,deploy_board,design_management,email_additional_text,extended_audit_events,external_authorization_service_api_management,feature_flags,file_locks,geo,github_project_service_integration,group_allowed_email_domains,group_project_templates,group_saml,issues_analytics,jira_dev_panel_integration,ldap_group_sync_filter,merge_pipelines,merge_request_performance_metrics,merge_trains,metrics_reports,multiple_approval_rules,multiple_group_issue_boards,object_storage,operations_dashboard,packages,productivity_analytics,project_aliases,protected_environments,reject_unsigned_commits,required_ci_templates,scoped_labels,service_desk,smartcard_auth,group_timelogs,type_of_work_analytics,unprotection_restrictions,ci_project_subscriptions,cluster_health,container_scanning,dast,dependency_scanning,epics,group_ip_restriction,incident_management,insights,license_management,personal_access_token_expiration_policy,pod_logs,prometheus_alerts,pseudonymizer,report_approver_rules,sast,security_dashboard,tracing,web_ide_terminal
++ export CI_PROJECT_ID=17893
++ CI_PROJECT_ID=17893
++ export CI_PROJECT_NAME=ci-debug-trace
diff --git a/doc/ci/yaml/README.md b/doc/ci/yaml/README.md
index 5b06afc0ab1..9d681a49208 100644
--- a/doc/ci/yaml/README.md
+++ b/doc/ci/yaml/README.md
@@ -298,6 +298,26 @@ determine whether or not a pipeline is created. It currently accepts a single
`rules:` key that operates similarly to [`rules:` defined within jobs](#rules),
enabling dynamic configuration of the pipeline.
+If you are new to GitLab CI/CD and `workflow: rules`, you may find the [`workflow:rules` templates](#workflowrules-templates) useful.
+
+To define your own `workflow: rules`, the configuration options currently available are:
+
+- [`if`](#rulesif): Define a rule.
+- [`when`](#when): May be set to `always` or `never` only. If not provided, the default value is `always`.
+
+The list of `if` rules is evaluated until a single one is matched. If none
+match, the last `when` will be used:
+
+```yaml
+workflow:
+ rules:
+ - if: $CI_COMMIT_REF_NAME =~ /-wip$/
+ when: never
+ - if: $CI_COMMIT_TAG
+ when: never
+ - when: always
+```
+
#### `workflow:rules` templates
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/217732) in GitLab 13.0.
@@ -335,24 +355,6 @@ include:
- template: 'Workflows/MergeRequest-Pipelines.gitlab-ci.yml'
```
-If you prefer to define your own rules, the configuration options currently available are:​
-
-- [`if`](#rulesif): Define a rule.
-- [`when`](#when): May be set to `always` or `never` only. If not provided, the default value is `always`​.
-
-The list of `if` rules is evaluated until a single one is matched. If none
-match, the last `when` will be used:
-
-```yaml
-workflow:
- rules:
- - if: $CI_COMMIT_REF_NAME =~ /-wip$/
- when: never
- - if: $CI_COMMIT_TAG
- when: never
- - when: always
-```
-
### `include`
> - Introduced in [GitLab Premium](https://about.gitlab.com/pricing/) 10.5.
diff --git a/doc/development/code_review.md b/doc/development/code_review.md
index 301bf80641c..fd53ce79534 100644
--- a/doc/development/code_review.md
+++ b/doc/development/code_review.md
@@ -96,16 +96,15 @@ with [domain expertise](#domain-experts).
1. If your merge request includes documentation changes, it must be **approved
by a [Technical writer](https://about.gitlab.com/handbook/engineering/ux/technical-writing/#designated-technical-writers)**, based on
the appropriate [product category](https://about.gitlab.com/handbook/product/product-categories/).
-1. If your merge request includes Quality and non-Quality-related changes (*3*), it must be **approved
+1. If your merge request includes end-to-end **and** non-end-to-end changes (*3*), it must be **approved
by a [Software Engineer in Test](https://about.gitlab.com/handbook/engineering/quality/#individual-contributors)**.
-1. If your merge request includes _only_ Quality-related changes (*3*), it must be **approved
- by a [Quality maintainer](https://about.gitlab.com/handbook/engineering/projects/#gitlab_maintainers_qa)**.
+1. If your merge request only includes end-to-end changes (*3*) **or** if the MR author is a [Software Engineer in Test](https://about.gitlab.com/handbook/engineering/quality/#individual-contributors), it must be **approved by a [Quality maintainer](https://about.gitlab.com/handbook/engineering/projects/#gitlab_maintainers_qa)**.
- (*1*): Please note that specs other than JavaScript specs are considered backend code.
- (*2*): We encourage you to seek guidance from a database maintainer if your merge
request is potentially introducing expensive queries. It is most efficient to comment
on the line of code in question with the SQL queries so they can give their advice.
-- (*3*): Quality-related changes include all files within the `qa` directory.
+- (*3*): End-to-end changes include all files within the `qa` directory.
#### Security requirements
diff --git a/doc/development/telemetry/usage_ping.md b/doc/development/telemetry/usage_ping.md
index 75b5c593070..d6f8824cc26 100644
--- a/doc/development/telemetry/usage_ping.md
+++ b/doc/development/telemetry/usage_ping.md
@@ -701,6 +701,7 @@ appear to be associated to any of the services running, since they all appear to
| `process_memory_rss` | `topology > nodes > node_services` | `enablement` | | | The average Resident Set Size of a service process |
| `process_memory_uss` | `topology > nodes > node_services` | `enablement` | | | The average Unique Set Size of a service process |
| `process_memory_pss` | `topology > nodes > node_services` | `enablement` | | | The average Proportional Set Size of a service process |
+| `server` | `topology > nodes > node_services` | `enablement` | | | The type of web server used (Unicorn or Puma) |
## Example Usage Ping payload
@@ -912,7 +913,8 @@ The following is example content of the Usage Ping payload.
"process_count": 16,
"process_memory_pss": 233349888,
"process_memory_rss": 788220927,
- "process_memory_uss": 195295487
+ "process_memory_uss": 195295487,
+ "server": "puma"
},
{
"name": "sidekiq",
diff --git a/doc/topics/autodevops/index.md b/doc/topics/autodevops/index.md
index c8489d2e832..099fe29631e 100644
--- a/doc/topics/autodevops/index.md
+++ b/doc/topics/autodevops/index.md
@@ -248,11 +248,11 @@ TIP: **Tip:**
Use the [blue-green deployment](../../ci/environments/incremental_rollouts.md#blue-green-deployment) technique
to minimize downtime and risk.
-## Using multiple Kubernetes clusters **(PREMIUM)**
+## Using multiple Kubernetes clusters
When using Auto DevOps, you can deploy different environments to
different Kubernetes clusters, due to the 1:1 connection
-[existing between them](../../user/project/clusters/index.md#multiple-kubernetes-clusters-premium).
+[existing between them](../../user/project/clusters/index.md#multiple-kubernetes-clusters).
The [Deploy Job template](https://gitlab.com/gitlab-org/gitlab/blob/master/lib/gitlab/ci/templates/Jobs/Deploy.gitlab-ci.yml)
used by Auto DevOps currently defines 3 environment names:
diff --git a/doc/topics/autodevops/quick_start_guide.md b/doc/topics/autodevops/quick_start_guide.md
index 7d65dcf7f4e..4f8074f047e 100644
--- a/doc/topics/autodevops/quick_start_guide.md
+++ b/doc/topics/autodevops/quick_start_guide.md
@@ -291,7 +291,7 @@ all within GitLab. Despite its automatic nature, Auto DevOps can also be configu
and customized to fit your workflow. Here are some helpful resources for further reading:
1. [Auto DevOps](index.md)
-1. [Multiple Kubernetes clusters](index.md#using-multiple-kubernetes-clusters-premium) **(PREMIUM)**
+1. [Multiple Kubernetes clusters](index.md#using-multiple-kubernetes-clusters)
1. [Incremental rollout to production](customize.md#incremental-rollout-to-production-premium) **(PREMIUM)**
1. [Disable jobs you don't need with environment variables](customize.md#environment-variables)
1. [Use a static IP for your cluster](../../user/clusters/applications.md#using-a-static-ip)
diff --git a/doc/user/group/clusters/index.md b/doc/user/group/clusters/index.md
index 8dcc08bce46..89e0c4898fb 100644
--- a/doc/user/group/clusters/index.md
+++ b/doc/user/group/clusters/index.md
@@ -38,10 +38,11 @@ the project.
In the case of sub-groups, GitLab uses the cluster of the closest ancestor group
to the project, provided the cluster is not disabled.
-## Multiple Kubernetes clusters **(PREMIUM)**
+## Multiple Kubernetes clusters
-With [GitLab Premium](https://about.gitlab.com/pricing/premium/), you can associate
-more than one Kubernetes cluster to your group, and maintain different clusters
+> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/35094) to GitLab Core in 13.2.
+
+You can associate more than one Kubernetes cluster with your group, and maintain different clusters
for different environments, such as development, staging, and production.
When adding another cluster,
@@ -93,7 +94,7 @@ To clear the cache:
> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/24580) in GitLab 11.8.
Domains at the cluster level permit support for multiple domains
-per [multiple Kubernetes clusters](#multiple-kubernetes-clusters-premium). When specifying a domain,
+per [multiple Kubernetes clusters](#multiple-kubernetes-clusters). When specifying a domain,
this will be automatically set as an environment variable (`KUBE_INGRESS_BASE_DOMAIN`) during
the [Auto DevOps](../../../topics/autodevops/index.md) stages.
diff --git a/doc/user/group/roadmap/index.md b/doc/user/group/roadmap/index.md
index 614ed700cfc..950721503ae 100644
--- a/doc/user/group/roadmap/index.md
+++ b/doc/user/group/roadmap/index.md
@@ -12,11 +12,11 @@ info: To determine the technical writer assigned to the Stage/Group associated w
> - In [GitLab 12.9](https://gitlab.com/gitlab-org/gitlab/-/issues/5164) and later, the epic bars show epics' title, progress, and completed weight percentage.
> - Milestones appear in roadmaps in [GitLab 12.10](https://gitlab.com/gitlab-org/gitlab/-/issues/6802), and later.
> - Feature flag for milestones visible in roadmaps removed in [GitLab 13.0](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/29641).
+> - In [GitLab 13.2](https://gitlab.com/gitlab-org/gitlab/-/issues/214375) and later, the Roadmap also shows milestones in projects in a group.
-Epics and milestones within a group containing **Start date** and/or **Due date**
-can be visualized in a form of a timeline (that is, a Gantt chart). The Roadmap page
-shows such a visualization for all the epics and milestones which are under a group or one of its
-subgroups.
+Epics and milestones within a group containing a start date or due date can be visualized in the form
+of a timeline (that is, a Gantt chart). The Roadmap page shows the epics and milestones in a
+group, one of its subgroups, or a project in one of the groups.
On the epic bars, you can see each epic's title, progress, and completed weight percentage.
When you hover over an epic bar, a popover appears with the epic's title, start date, due date, and
diff --git a/doc/user/project/clusters/index.md b/doc/user/project/clusters/index.md
index 16d78751f40..ff0aaca40b7 100644
--- a/doc/user/project/clusters/index.md
+++ b/doc/user/project/clusters/index.md
@@ -64,11 +64,12 @@ to:
(EKS) using GitLab's UI.
- Add an integration to an existing cluster from any Kubernetes platform.
-### Multiple Kubernetes clusters **(PREMIUM)**
+### Multiple Kubernetes clusters
-> Introduced in [GitLab Premium](https://about.gitlab.com/pricing/) 10.3.
+> - Introduced in [GitLab Premium](https://about.gitlab.com/pricing/) 10.3.
+> - [Moved](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/35094) to GitLab core in 13.2.
-With GitLab Premium, you can associate more than one Kubernetes cluster to your
+You can associate more than one Kubernetes cluster with your
project. That way you can have different clusters for different environments,
like dev, staging, production, and so on.
diff --git a/lib/api/entities/milestone.rb b/lib/api/entities/milestone.rb
index 5a0c222d691..b191210a234 100644
--- a/lib/api/entities/milestone.rb
+++ b/lib/api/entities/milestone.rb
@@ -10,6 +10,7 @@ module API
expose :state, :created_at, :updated_at
expose :due_date
expose :start_date
+ expose :expired?, as: :expired
expose :web_url do |milestone, _options|
Gitlab::UrlBuilder.build(milestone)
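`expose :expired?, as: :expired` relies on standard Grape::Entity behaviour: the predicate method is called on the presented object and serialized under the renamed key. A minimal standalone sketch (not GitLab code; the struct stands in for a milestone):

```ruby
# Minimal illustration of Grape::Entity's `expose ..., as:` renaming.
require 'date'
require 'grape-entity'

class ExampleMilestoneEntity < Grape::Entity
  expose :expired?, as: :expired
end

milestone = Struct.new(:due_date) do
  def expired?
    !due_date.nil? && due_date < Date.today
  end
end.new(Date.new(2020, 1, 1))

ExampleMilestoneEntity.represent(milestone).serializable_hash
# => { expired: true }
```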
diff --git a/lib/api/group_clusters.rb b/lib/api/group_clusters.rb
index c6d10f22bb4..ae41d9f13b8 100644
--- a/lib/api/group_clusters.rb
+++ b/lib/api/group_clusters.rb
@@ -6,18 +6,6 @@ module API
before { authenticate! }
- # EE::API::GroupClusters will
- # override these methods
- helpers do
- params :create_params_ee do
- end
-
- params :update_params_ee do
- end
- end
-
- prepend_if_ee('EE::API::GroupClusters') # rubocop: disable Cop/InjectEnterpriseEditionModule
-
params do
requires :id, type: String, desc: 'The ID of the group'
end
@@ -52,6 +40,7 @@ module API
params do
requires :name, type: String, desc: 'Cluster name'
optional :enabled, type: Boolean, default: true, desc: 'Determines if cluster is active or not, defaults to true'
+        optional :environment_scope, default: '*', type: String, desc: 'The environment scope associated with the cluster'
optional :domain, type: String, desc: 'Cluster base domain'
optional :management_project_id, type: Integer, desc: 'The ID of the management project'
optional :managed, type: Boolean, default: true, desc: 'Determines if GitLab will manage namespaces and service accounts for this cluster, defaults to true'
@@ -62,7 +51,6 @@ module API
optional :namespace, type: String, desc: 'Unique namespace related to Group'
optional :authorization_type, type: String, values: ::Clusters::Platforms::Kubernetes.authorization_types.keys, default: 'rbac', desc: 'Cluster authorization type, defaults to RBAC'
end
- use :create_params_ee
end
post ':id/clusters/user' do
authorize! :add_cluster, user_group
@@ -85,6 +73,7 @@ module API
requires :cluster_id, type: Integer, desc: 'The cluster ID'
optional :name, type: String, desc: 'Cluster name'
optional :domain, type: String, desc: 'Cluster base domain'
+        optional :environment_scope, type: String, desc: 'The environment scope associated with the cluster'
optional :management_project_id, type: Integer, desc: 'The ID of the management project'
optional :platform_kubernetes_attributes, type: Hash, desc: %q(Platform Kubernetes data) do
optional :api_url, type: String, desc: 'URL to access the Kubernetes API'
@@ -92,7 +81,6 @@ module API
optional :ca_cert, type: String, desc: 'TLS certificate (needed if API is using a self-signed TLS certificate)'
optional :namespace, type: String, desc: 'Unique namespace related to Group'
end
- use :update_params_ee
end
put ':id/clusters/:cluster_id' do
authorize! :update_cluster, cluster
diff --git a/lib/api/project_clusters.rb b/lib/api/project_clusters.rb
index e1dfb647fa0..0e5605984e6 100644
--- a/lib/api/project_clusters.rb
+++ b/lib/api/project_clusters.rb
@@ -6,18 +6,6 @@ module API
before { authenticate! }
- # EE::API::ProjectClusters will
- # override these methods
- helpers do
- params :create_params_ee do
- end
-
- params :update_params_ee do
- end
- end
-
- prepend_if_ee('EE::API::ProjectClusters') # rubocop: disable Cop/InjectEnterpriseEditionModule
-
params do
requires :id, type: String, desc: 'The ID of the project'
end
@@ -56,6 +44,7 @@ module API
requires :name, type: String, desc: 'Cluster name'
optional :enabled, type: Boolean, default: true, desc: 'Determines if cluster is active or not, defaults to true'
optional :domain, type: String, desc: 'Cluster base domain'
+        optional :environment_scope, default: '*', type: String, desc: 'The environment scope associated with the cluster'
optional :management_project_id, type: Integer, desc: 'The ID of the management project'
optional :managed, type: Boolean, default: true, desc: 'Determines if GitLab will manage namespaces and service accounts for this cluster, defaults to true'
requires :platform_kubernetes_attributes, type: Hash, desc: %q(Platform Kubernetes data) do
@@ -65,7 +54,6 @@ module API
optional :namespace, type: String, desc: 'Unique namespace related to Project'
optional :authorization_type, type: String, values: ::Clusters::Platforms::Kubernetes.authorization_types.keys, default: 'rbac', desc: 'Cluster authorization type, defaults to RBAC'
end
- use :create_params_ee
end
post ':id/clusters/user' do
authorize! :add_cluster, user_project
@@ -89,6 +77,7 @@ module API
requires :cluster_id, type: Integer, desc: 'The cluster ID'
optional :name, type: String, desc: 'Cluster name'
optional :domain, type: String, desc: 'Cluster base domain'
+        optional :environment_scope, type: String, desc: 'The environment scope associated with the cluster'
optional :management_project_id, type: Integer, desc: 'The ID of the management project'
optional :platform_kubernetes_attributes, type: Hash, desc: %q(Platform Kubernetes data) do
optional :api_url, type: String, desc: 'URL to access the Kubernetes API'
@@ -96,7 +85,6 @@ module API
optional :ca_cert, type: String, desc: 'TLS certificate (needed if API is using a self-signed TLS certificate)'
optional :namespace, type: String, desc: 'Unique namespace related to Project'
end
- use :update_params_ee
end
put ':id/clusters/:cluster_id' do
authorize! :update_cluster, cluster
diff --git a/lib/gitlab/background_migration/mailers/unconfirm_mailer.rb b/lib/gitlab/background_migration/mailers/unconfirm_mailer.rb
new file mode 100644
index 00000000000..c096dae0631
--- /dev/null
+++ b/lib/gitlab/background_migration/mailers/unconfirm_mailer.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+# rubocop:disable Style/Documentation
+
+module Gitlab
+ module BackgroundMigration
+ module Mailers
+ class UnconfirmMailer < ::Notify
+ prepend_view_path(File.join(__dir__, 'views'))
+
+ def unconfirm_notification_email(user)
+ @user = user
+ @verification_from_mail = Gitlab.config.gitlab.email_from
+
+ mail(
+ template_path: 'unconfirm_mailer',
+ template_name: 'unconfirm_notification_email',
+ to: @user.notification_email,
+ subject: subject('GitLab email verification request')
+ )
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/background_migration/mailers/views/unconfirm_mailer/unconfirm_notification_email.html.haml b/lib/gitlab/background_migration/mailers/views/unconfirm_mailer/unconfirm_notification_email.html.haml
new file mode 100644
index 00000000000..d8f7466a1ca
--- /dev/null
+++ b/lib/gitlab/background_migration/mailers/views/unconfirm_mailer/unconfirm_notification_email.html.haml
@@ -0,0 +1,19 @@
+-# haml-lint:disable NoPlainNodes
+%p
+ Dear GitLab user,
+
+%p
+ As part of our commitment to keeping GitLab secure, we have identified and addressed a vulnerability in GitLab that allowed some users to bypass the email verification process in a #{link_to("recent security release", "https://about.gitlab.com/releases/2020/05/27/security-release-13-0-1-released", target: '_blank')}.
+
+%p
+ As a precautionary measure, you will need to re-verify some of your account's email addresses before continuing to use GitLab. Sorry for the inconvenience!
+
+%p
+ We have already sent the re-verification email with a subject line of "Confirmation instructions" from #{@verification_from_mail}. Please feel free to contribute any questions or comments to #{link_to("this issue", "https://gitlab.com/gitlab-com/www-gitlab-com/-/issues/7942", target: '_blank')}.
+
+%p
+  If you are not "#{@user.username}", please #{link_to 'report this to our administrator', new_abuse_report_url(user_id: @user.id)}.
+
+%p
+ Thank you for being a GitLab user!
+-# haml-lint:enable NoPlainNodes
diff --git a/lib/gitlab/background_migration/mailers/views/unconfirm_mailer/unconfirm_notification_email.text.erb b/lib/gitlab/background_migration/mailers/views/unconfirm_mailer/unconfirm_notification_email.text.erb
new file mode 100644
index 00000000000..d20af9b9803
--- /dev/null
+++ b/lib/gitlab/background_migration/mailers/views/unconfirm_mailer/unconfirm_notification_email.text.erb
@@ -0,0 +1,14 @@
+Dear GitLab user,
+
+As part of our commitment to keeping GitLab secure, we have identified and addressed a vulnerability in GitLab that allowed some users to bypass the email verification process in a recent security release.
+
+Security release: https://about.gitlab.com/releases/2020/05/27/security-release-13-0-1-released
+
+As a precautionary measure, you will need to re-verify some of your account's email addresses before continuing to use GitLab. Sorry for the inconvenience!
+
+We have already sent the re-verification email with a subject line of "Confirmation instructions" from <%= @verification_from_mail %>.
+Please feel free to contribute any questions or comments to this issue: https://gitlab.com/gitlab-com/www-gitlab-com/-/issues/7942
+
+If you are not "<%= @user.username %>", please report this to our administrator. Report link: <%= new_abuse_report_url(user_id: @user.id) %>
+
+Thank you for being a GitLab user!
diff --git a/lib/gitlab/background_migration/wrongfully_confirmed_email_unconfirmer.rb b/lib/gitlab/background_migration/wrongfully_confirmed_email_unconfirmer.rb
new file mode 100644
index 00000000000..151fb5853d8
--- /dev/null
+++ b/lib/gitlab/background_migration/wrongfully_confirmed_email_unconfirmer.rb
@@ -0,0 +1,97 @@
+# frozen_string_literal: true
+# rubocop:disable Style/Documentation
+
+module Gitlab
+ module BackgroundMigration
+ class WrongfullyConfirmedEmailUnconfirmer
+ class UserModel < ActiveRecord::Base
+ alias_method :reset, :reload
+
+ self.table_name = 'users'
+
+ scope :active, -> { where(state: 'active', user_type: nil) } # only humans, skip bots
+
+ devise :confirmable
+ end
+
+ class EmailModel < ActiveRecord::Base
+ alias_method :reset, :reload
+
+ self.table_name = 'emails'
+
+ belongs_to :user
+
+ devise :confirmable
+
+ def self.wrongfully_confirmed_emails(start_id, stop_id)
+ joins(:user)
+ .merge(UserModel.active)
+ .where(id: (start_id..stop_id))
+ .where('emails.confirmed_at IS NOT NULL')
+ .where('emails.confirmed_at = users.confirmed_at')
+ .where('emails.email <> users.email')
+ end
+ end
+
+ def perform(start_id, stop_id)
+ email_records = EmailModel
+ .wrongfully_confirmed_emails(start_id, stop_id)
+ .to_a
+
+ user_ids = email_records.map(&:user_id).uniq
+
+ ActiveRecord::Base.transaction do
+ update_email_records(start_id, stop_id)
+ update_user_records(user_ids)
+ end
+
+ # Refind the records with the "real" Email model so devise will notice that the user / email is unconfirmed
+ unconfirmed_email_records = ::Email.where(id: email_records.map(&:id))
+ ActiveRecord::Associations::Preloader.new.preload(unconfirmed_email_records, [:user])
+
+ send_emails(unconfirmed_email_records)
+ end
+
+ private
+
+ def update_email_records(start_id, stop_id)
+ EmailModel.connection.execute <<-SQL
+ WITH md5_strings as (
+ #{email_query_for_update(start_id, stop_id).to_sql}
+ )
+ UPDATE #{EmailModel.connection.quote_table_name(EmailModel.table_name)}
+ SET confirmed_at = NULL,
+ confirmation_token = md5_strings.md5_string,
+ confirmation_sent_at = NOW()
+ FROM md5_strings
+ WHERE id = md5_strings.email_id
+ SQL
+ end
+
+ def update_user_records(user_ids)
+ UserModel
+ .where(id: user_ids)
+ .update_all("confirmed_at = NULL, confirmation_sent_at = NOW(), confirmation_token=md5(users.id::varchar || users.created_at || users.encrypted_password || '#{Integer(Time.now.to_i)}')")
+ end
+
+ def email_query_for_update(start_id, stop_id)
+ EmailModel
+ .wrongfully_confirmed_emails(start_id, stop_id)
+ .select('emails.id as email_id', "md5(emails.id::varchar || emails.created_at || users.encrypted_password || '#{Integer(Time.now.to_i)}') as md5_string")
+ end
+
+ def send_emails(email_records)
+ email_records.each do |email|
+ DeviseMailer.confirmation_instructions(email, email.confirmation_token).deliver_later
+ end
+
+ user_records = email_records.map(&:user).uniq
+
+ user_records.each do |user|
+ DeviseMailer.confirmation_instructions(user, user.confirmation_token).deliver_later
+ Gitlab::BackgroundMigration::Mailers::UnconfirmMailer.unconfirm_notification_email(user).deliver_later
+ end
+ end
+ end
+ end
+end
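Each scheduled job ends up calling `perform(start_id, stop_id)` on this class. For a single stuck or re-run batch it can also be invoked by hand from a Rails console (the ID range below is a placeholder):

```ruby
# Placeholder ID range: unconfirms wrongfully confirmed emails with IDs 1..1000
# and re-sends the confirmation and notification emails for the affected users.
Gitlab::BackgroundMigration::WrongfullyConfirmedEmailUnconfirmer.new.perform(1, 1_000)
```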
diff --git a/lib/gitlab/danger/teammate.rb b/lib/gitlab/danger/teammate.rb
index 3a4d4b1ba46..f7da66e77cd 100644
--- a/lib/gitlab/danger/teammate.rb
+++ b/lib/gitlab/danger/teammate.rb
@@ -72,8 +72,9 @@ module Gitlab
return "same timezone as `@#{author.username}`" if diff.zero?
ahead_or_behind = diff < 0 ? 'behind' : 'ahead'
+ pluralized_hours = pluralize(diff.abs, 'hour', 'hours')
- "#{diff.abs} hours #{ahead_or_behind} `@#{author.username}`"
+ "#{pluralized_hours} #{ahead_or_behind} `@#{author.username}`"
end
def has_capability?(project, category, kind, labels)
@@ -95,6 +96,12 @@ module Gitlab
def capabilities(project)
Array(projects.fetch(project, []))
end
+
+ def pluralize(count, singular, plural)
+ word = count == 1 || count.to_s =~ /^1(\.0+)?$/ ? singular : plural
+
+ "#{count || 0} #{word}"
+ end
end
end
end
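The new helper keeps the timezone message grammatical when the difference is a single hour. A standalone copy for illustration (in the codebase it is a private method on `Gitlab::Danger::Teammate`):

```ruby
# Same logic as the helper above, shown standalone with its expected outputs.
def pluralize(count, singular, plural)
  word = count == 1 || count.to_s =~ /^1(\.0+)?$/ ? singular : plural

  "#{count || 0} #{word}"
end

pluralize(1, 'hour', 'hours')   # => "1 hour"
pluralize(1.0, 'hour', 'hours') # => "1.0 hour"
pluralize(3, 'hour', 'hours')   # => "3 hours"
pluralize(nil, 'hour', 'hours') # => "0 hours"
```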
diff --git a/lib/gitlab/database.rb b/lib/gitlab/database.rb
index 86f17586898..eb50718e770 100644
--- a/lib/gitlab/database.rb
+++ b/lib/gitlab/database.rb
@@ -107,10 +107,6 @@ module Gitlab
version.to_f >= MINIMUM_POSTGRES_VERSION
end
- def self.upsert_supported?
- version.to_f >= 9.5
- end
-
def self.check_postgres_version_and_print_warning
return if Gitlab::Database.postgresql_minimum_supported_version?
return if Gitlab::Runtime.rails_runner?
@@ -221,9 +217,7 @@ module Gitlab
VALUES #{tuples.map { |tuple| "(#{tuple.join(', ')})" }.join(', ')}
EOF
- if upsert_supported? && on_conflict == :do_nothing
- sql = "#{sql} ON CONFLICT DO NOTHING"
- end
+ sql = "#{sql} ON CONFLICT DO NOTHING" if on_conflict == :do_nothing
sql = "#{sql} RETURNING id" if return_ids
diff --git a/lib/gitlab/metrics/dashboard/service_selector.rb b/lib/gitlab/metrics/dashboard/service_selector.rb
index 49682da320c..641c0c76f8f 100644
--- a/lib/gitlab/metrics/dashboard/service_selector.rb
+++ b/lib/gitlab/metrics/dashboard/service_selector.rb
@@ -13,6 +13,8 @@ module Gitlab
include Gitlab::Utils::StrongMemoize
SERVICES = [
+ ::Metrics::Dashboard::ClusterMetricsEmbedService,
+ ::Metrics::Dashboard::ClusterDashboardService,
::Metrics::Dashboard::GitlabAlertEmbedService,
::Metrics::Dashboard::CustomMetricEmbedService,
::Metrics::Dashboard::GrafanaMetricEmbedService,
@@ -51,5 +53,3 @@ module Gitlab
end
end
end
-
-Gitlab::Metrics::Dashboard::ServiceSelector.prepend_if_ee('EE::Gitlab::Metrics::Dashboard::ServiceSelector')
diff --git a/lib/gitlab/metrics/dashboard/stages/cluster_endpoint_inserter.rb b/lib/gitlab/metrics/dashboard/stages/cluster_endpoint_inserter.rb
new file mode 100644
index 00000000000..a12082b704c
--- /dev/null
+++ b/lib/gitlab/metrics/dashboard/stages/cluster_endpoint_inserter.rb
@@ -0,0 +1,83 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Metrics
+ module Dashboard
+ module Stages
+ class ClusterEndpointInserter < BaseStage
+ def transform!
+ verify_params
+
+ for_metrics do |metric|
+ metric[:prometheus_endpoint_path] = endpoint_for_metric(metric)
+ end
+ end
+
+ private
+
+ def admin_url(metric)
+ Gitlab::Routing.url_helpers.prometheus_api_admin_cluster_path(
+ params[:cluster],
+ proxy_path: query_type(metric),
+ query: query_for_metric(metric)
+ )
+ end
+
+ def endpoint_for_metric(metric)
+ case params[:cluster_type]
+ when :admin
+ admin_url(metric)
+ when :group
+ error!(_('Group is required when cluster_type is :group')) unless params[:group]
+ group_url(metric)
+ when :project
+ error!(_('Project is required when cluster_type is :project')) unless project
+ project_url(metric)
+ else
+ error!(_('Unrecognized cluster type'))
+ end
+ end
+
+ def error!(message)
+ raise Errors::DashboardProcessingError.new(message)
+ end
+
+ def group_url(metric)
+ Gitlab::Routing.url_helpers.prometheus_api_group_cluster_path(
+ params[:group],
+ params[:cluster],
+ proxy_path: query_type(metric),
+ query: query_for_metric(metric)
+ )
+ end
+
+ def project_url(metric)
+ Gitlab::Routing.url_helpers.prometheus_api_project_cluster_path(
+ project,
+ params[:cluster],
+ proxy_path: query_type(metric),
+ query: query_for_metric(metric)
+ )
+ end
+
+ def query_type(metric)
+ metric[:query] ? :query : :query_range
+ end
+
+ def query_for_metric(metric)
+ query = metric[query_type(metric)]
+
+ raise Errors::MissingQueryError.new('Each "metric" must define one of :query or :query_range') unless query
+
+ query
+ end
+
+ def verify_params
+ raise Errors::DashboardProcessingError.new(_('Cluster is required for Stages::ClusterEndpointInserter')) unless params[:cluster]
+        raise Errors::DashboardProcessingError.new(_('Cluster type must be specified for Stages::ClusterEndpointInserter')) unless params[:cluster_type]
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/metrics/dashboard/url.rb b/lib/gitlab/metrics/dashboard/url.rb
index 31670a3f533..10a2f3c2397 100644
--- a/lib/gitlab/metrics/dashboard/url.rb
+++ b/lib/gitlab/metrics/dashboard/url.rb
@@ -60,6 +60,22 @@ module Gitlab
Gitlab::Routing.url_helpers.metrics_dashboard_namespace_project_environment_url(*args)
end
+ # Matches dashboard urls for a metric chart embed
+ # for cluster metrics
+ #
+ # EX - https://<host>/<namespace>/<project>/-/clusters/<cluster_id>/?group=Cluster%20Health&title=Memory%20Usage&y_label=Memory%20(GiB)
+ def clusters_regex
+ strong_memoize(:clusters_regex) do
+ regex_for_project_metrics(
+ %r{
+ /clusters
+ /(?<cluster_id>\d+)
+ /?
+ }x
+ )
+ end
+ end
+
private
def regex_for_project_metrics(path_suffix_pattern)
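`clusters_regex` lets metric-chart embeds recognize cluster health dashboard URLs. A standalone sketch of just the cluster path fragment it contributes (the real pattern is wrapped by `regex_for_project_metrics`, which also captures the namespace and project):

```ruby
# Illustration of the cluster fragment only; the full regex also matches
# the /:namespace/:project/-/ prefix and the query string.
cluster_fragment = %r{
  /clusters
  /(?<cluster_id>\d+)
  /?
}x

url = 'https://gitlab.example.com/group/project/-/clusters/42/?group=Cluster%20Health&title=Memory%20Usage'

url.match(cluster_fragment)[:cluster_id] # => "42"
```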
diff --git a/lib/gitlab/usage_data/topology.rb b/lib/gitlab/usage_data/topology.rb
index 8878b85c9b9..5363c5080ef 100644
--- a/lib/gitlab/usage_data/topology.rb
+++ b/lib/gitlab/usage_data/topology.rb
@@ -65,6 +65,7 @@ module Gitlab
# service-level data
by_instance_by_job_by_type_memory = topology_all_service_memory(client)
by_instance_by_job_process_count = topology_all_service_process_count(client)
+ by_instance_by_job_server_types = topology_all_service_server_types(client)
instances = Set.new(by_instance_mem.keys + by_instance_cpus.keys)
instances.map do |instance|
@@ -72,20 +73,22 @@ module Gitlab
node_memory_total_bytes: by_instance_mem[instance],
node_cpus: by_instance_cpus[instance],
node_services:
- topology_node_services(instance, by_instance_by_job_process_count, by_instance_by_job_by_type_memory)
+ topology_node_services(
+ instance, by_instance_by_job_process_count, by_instance_by_job_by_type_memory, by_instance_by_job_server_types
+ )
}.compact
end
end
def topology_node_memory(client)
query_safely('gitlab_usage_ping:node_memory_total_bytes:avg', 'node_memory', fallback: {}) do |query|
- aggregate_by_instance(client, query)
+ aggregate_by_instance(client, one_week_average(query))
end
end
def topology_node_cpus(client)
query_safely('gitlab_usage_ping:node_cpus:count', 'node_cpus', fallback: {}) do |query|
- aggregate_by_instance(client, query)
+ aggregate_by_instance(client, one_week_average(query))
end
end
@@ -100,24 +103,30 @@ module Gitlab
def topology_service_memory_rss(client)
query_safely(
'gitlab_usage_ping:node_service_process_resident_memory_bytes:avg', 'service_rss', fallback: []
- ) { |query| aggregate_by_labels(client, query) }
+ ) { |query| aggregate_by_labels(client, one_week_average(query)) }
end
def topology_service_memory_uss(client)
query_safely(
'gitlab_usage_ping:node_service_process_unique_memory_bytes:avg', 'service_uss', fallback: []
- ) { |query| aggregate_by_labels(client, query) }
+ ) { |query| aggregate_by_labels(client, one_week_average(query)) }
end
def topology_service_memory_pss(client)
query_safely(
'gitlab_usage_ping:node_service_process_proportional_memory_bytes:avg', 'service_pss', fallback: []
- ) { |query| aggregate_by_labels(client, query) }
+ ) { |query| aggregate_by_labels(client, one_week_average(query)) }
end
def topology_all_service_process_count(client)
query_safely(
'gitlab_usage_ping:node_service_process:count', 'service_process_count', fallback: []
+ ) { |query| aggregate_by_labels(client, one_week_average(query)) }
+ end
+
+ def topology_all_service_server_types(client)
+ query_safely(
+ 'gitlab_usage_ping:node_service_app_server_workers:sum', 'service_workers', fallback: []
) { |query| aggregate_by_labels(client, query) }
end
@@ -133,11 +142,12 @@ module Gitlab
fallback
end
- def topology_node_services(instance, all_process_counts, all_process_memory)
+ def topology_node_services(instance, all_process_counts, all_process_memory, all_server_types)
# returns all node service data grouped by service name as the key
instance_service_data =
topology_instance_service_process_count(instance, all_process_counts)
.deep_merge(topology_instance_service_memory(instance, all_process_memory))
+ .deep_merge(topology_instance_service_server_types(instance, all_server_types))
# map to list of hashes where service names become values instead, and remove
# unknown services, since they might not be ours
@@ -173,6 +183,12 @@ module Gitlab
result
end
+ def topology_instance_service_server_types(instance, all_instance_data)
+ topology_data_for_instance(instance, all_instance_data).to_h do |metric, _value|
+ [metric['job'], { server: metric['server'] }]
+ end
+ end
+
def topology_data_for_instance(instance, all_instance_data)
all_instance_data.filter { |metric, _value| metric['instance'] == instance }
end
@@ -186,12 +202,12 @@ module Gitlab
end
def aggregate_by_instance(client, query)
- client.aggregate(one_week_average(query)) { |metric| drop_port(metric['instance']) }
+ client.aggregate(query) { |metric| drop_port(metric['instance']) }
end
# Will retain a composite key that values are mapped to
def aggregate_by_labels(client, query)
- client.aggregate(one_week_average(query)) do |metric|
+ client.aggregate(query) do |metric|
metric['instance'] = drop_port(metric['instance'])
metric
end
diff --git a/locale/gitlab.pot b/locale/gitlab.pot
index 58f2d7845cf..7b29ccf0439 100644
--- a/locale/gitlab.pot
+++ b/locale/gitlab.pot
@@ -259,6 +259,11 @@ msgid_plural "%d tags"
msgstr[0] ""
msgstr[1] ""
+msgid "%d unassigned issue"
+msgid_plural "%d unassigned issues"
+msgstr[0] ""
+msgstr[1] ""
+
msgid "%d unresolved thread"
msgid_plural "%d unresolved threads"
msgstr[0] ""
@@ -353,6 +358,9 @@ msgstr ""
msgid "%{description}- Sentry event: %{errorUrl}- First seen: %{firstSeen}- Last seen: %{lastSeen} %{countLabel}: %{count}%{userCountLabel}: %{userCount}"
msgstr ""
+msgid "%{due_date} (Past due)"
+msgstr ""
+
msgid "%{duration}ms"
msgstr ""
@@ -479,6 +487,12 @@ msgstr ""
msgid "%{mergeLength}/%{usersLength} can merge"
msgstr ""
+msgid "%{milestone_name} (Past due)"
+msgstr ""
+
+msgid "%{milestone} (expired)"
+msgstr ""
+
msgid "%{mrText}, this issue will be closed automatically."
msgstr ""
@@ -4051,9 +4065,6 @@ msgstr ""
msgid "Can't apply as the source branch was deleted."
msgstr ""
-msgid "Can't apply as this line has changed or the suggestion already matches its content."
-msgstr ""
-
msgid "Can't apply this suggestion."
msgstr ""
@@ -12479,9 +12490,6 @@ msgstr ""
msgid "Instance administrators group already exists"
msgstr ""
-msgid "Instance does not support multiple Kubernetes clusters"
-msgstr ""
-
msgid "Instance license"
msgstr ""
@@ -12806,7 +12814,7 @@ msgstr ""
msgid "Issues with comments, merge requests with diffs and comments, labels, milestones, snippets, and other project entities"
msgstr ""
-msgid "Issues with no epics assigned"
+msgid "Issues with no epic assigned"
msgstr ""
msgid "Issues, merge requests, pushes, and comments."
@@ -19924,6 +19932,9 @@ msgstr ""
msgid "SAML for %{group_name}"
msgstr ""
+msgid "SAST Configuration"
+msgstr ""
+
msgid "SHA256"
msgstr ""
@@ -21526,6 +21537,9 @@ msgstr ""
msgid "Something went wrong while initializing the OpenAPI viewer"
msgstr ""
+msgid "Something went wrong while inserting your image. Please try again."
+msgstr ""
+
msgid "Something went wrong while merging this merge request. Please try again."
msgstr ""
diff --git a/package.json b/package.json
index 1f1e4918e5d..fd06989ea27 100644
--- a/package.json
+++ b/package.json
@@ -40,8 +40,8 @@
"@babel/plugin-syntax-import-meta": "^7.10.1",
"@babel/preset-env": "^7.10.1",
"@gitlab/at.js": "1.5.5",
- "@gitlab/svgs": "1.150.0",
- "@gitlab/ui": "17.16.0",
+ "@gitlab/svgs": "1.151.0",
+ "@gitlab/ui": "17.18.1",
"@gitlab/visual-review-tools": "1.6.1",
"@rails/actioncable": "^6.0.3-1",
"@sentry/browser": "^5.10.2",
diff --git a/scripts/review_apps/base-config.yaml b/scripts/review_apps/base-config.yaml
index 9aa518e3bc7..82be2d3a691 100644
--- a/scripts/review_apps/base-config.yaml
+++ b/scripts/review_apps/base-config.yaml
@@ -137,10 +137,10 @@ postgresql:
enabled: false
resources:
requests:
- cpu: 347m
+ cpu: 550m
memory: 250M
limits:
- cpu: 520m
+ cpu: 825m
memory: 375M
prometheus:
install: false
diff --git a/spec/factories/go_module_commits.rb b/spec/factories/go_module_commits.rb
new file mode 100644
index 00000000000..e42ef6696d1
--- /dev/null
+++ b/spec/factories/go_module_commits.rb
@@ -0,0 +1,82 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :go_module_commit, class: 'Commit' do
+ skip_create
+
+ transient do
+ files { { 'foo.txt' => 'content' } }
+ message { 'Message' }
+ project { create(:project, :repository) }
+
+ service do
+ Files::MultiService.new(
+ project,
+ project.owner,
+ commit_message: message,
+ start_branch: project.repository.root_ref || 'master',
+ branch_name: project.repository.root_ref || 'master',
+ actions: files.map do |path, content|
+ { action: :create, file_path: path, content: content }
+ end
+ )
+ end
+
+ tag { nil }
+ tag_message { nil }
+
+ commit do
+ r = service.execute
+
+ raise "operation failed: #{r}" unless r[:status] == :success
+
+ commit = project.repository.commit_by(oid: r[:result])
+
+ if tag
+ r = Tags::CreateService.new(project, project.owner).execute(tag, commit.sha, tag_message)
+
+ raise "operation failed: #{r}" unless r[:status] == :success
+ end
+
+ commit
+ end
+ end
+
+ trait :files do
+ transient do
+ files { raise ArgumentError.new("files is required") }
+ message { 'Add files' }
+ end
+ end
+
+ trait :package do
+ transient do
+ path { raise ArgumentError.new("path is required") }
+ message { 'Add package' }
+ files { { "#{path}/b.go" => "package b\nfunc Bye() { println(\"Goodbye world!\") }\n" } }
+ end
+ end
+
+ trait :module do
+ transient do
+ name { nil }
+ message { 'Add module' }
+ host_prefix { "#{::Gitlab.config.gitlab.host}/#{project.path_with_namespace}" }
+
+ url { name ? "#{host_prefix}/#{name}" : host_prefix }
+      path { name ? "#{name}/" : '' }
+
+ files do
+ {
+ "#{path}go.mod" => "module #{url}\n",
+ "#{path}a.go" => "package a\nfunc Hi() { println(\"Hello world!\") }\n"
+ }
+ end
+ end
+ end
+
+ initialize_with do
+ commit
+ end
+ end
+end
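The :go_module_commit factory above writes real commits through Files::MultiService and optionally tags them, so specs can exercise module resolution against an actual repository. A usage sketch (illustrative arguments only, assuming the GitLab test environment with FactoryBot loaded):

    # Sketch only; paths, tags and contents are illustrative.
    project = create(:project, :repository)

    create(:go_module_commit, :module, project: project)                # go.mod at the repository root
    create(:go_module_commit, :package, project: project, path: 'pkg')  # adds pkg/b.go
    create(:go_module_commit, :files, project: project,
           files: { 'docs/readme.md' => 'content' }, tag: 'v1.0.1')     # tags the new commit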
diff --git a/spec/factories/go_module_versions.rb b/spec/factories/go_module_versions.rb
new file mode 100644
index 00000000000..b0a96197350
--- /dev/null
+++ b/spec/factories/go_module_versions.rb
@@ -0,0 +1,77 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :go_module_version, class: 'Packages::Go::ModuleVersion' do
+ skip_create
+
+ initialize_with do
+ p = attributes[:params]
+ s = Packages::SemVer.parse(p.semver, prefixed: true)
+
+      raise ArgumentError.new("invalid semantic version: '#{p.semver}'") if !s && p.semver
+
+ new(p.mod, p.type, p.commit, name: p.name, semver: s, ref: p.ref)
+ end
+
+ mod { create :go_module }
+ type { :commit }
+ commit { mod.project.repository.head_commit }
+ name { nil }
+ semver { nil }
+ ref { nil }
+
+ params { OpenStruct.new(mod: mod, type: type, commit: commit, name: name, semver: semver, ref: ref) }
+
+ trait :tagged do
+ ref { mod.project.repository.find_tag(name) }
+ commit { ref.dereferenced_target }
+ name do
+ # This provides a sane default value, but in reality the caller should
+ # specify `name:`
+
+ # Find 'latest' semver tag (does not actually use semver precedence rules)
+ mod.project.repository.tags
+ .filter { |t| Packages::SemVer.match?(t.name, prefixed: true) }
+ .map { |t| Packages::SemVer.parse(t.name, prefixed: true) }
+ .max { |a, b| "#{a}" <=> "#{b}" }
+ .to_s
+ end
+
+ params { OpenStruct.new(mod: mod, type: :ref, commit: commit, semver: name, ref: ref) }
+ end
+
+ trait :pseudo do
+ transient do
+ prefix do
+ # This provides a sane default value, but in reality the caller should
+ # specify `prefix:`
+
+ # This does not take into account that `commit` may be before the
+ # latest tag.
+
+ # Find 'latest' semver tag (does not actually use semver precedence rules)
+ v = mod.project.repository.tags
+ .filter { |t| Packages::SemVer.match?(t.name, prefixed: true) }
+ .map { |t| Packages::SemVer.parse(t.name, prefixed: true) }
+ .max { |a, b| "#{a}" <=> "#{b}" }
+
+ # Default if no semver tags exist
+ next 'v0.0.0' unless v
+
+ # Valid pseudo-versions are:
+ # vX.0.0-yyyymmddhhmmss-sha1337beef0, when no earlier tagged commit exists for X
+ # vX.Y.Z-pre.0.yyyymmddhhmmss-sha1337beef0, when most recent prior tag is vX.Y.Z-pre
+ # vX.Y.(Z+1)-0.yyyymmddhhmmss-sha1337beef0, when most recent prior tag is vX.Y.Z
+
+ v = v.with(patch: v.patch + 1) unless v.prerelease
+ "#{v}.0"
+ end
+ end
+
+ type { :pseudo }
+ name { "#{prefix}#{commit.committed_date.strftime('%Y%m%d%H%M%S')}-#{commit.sha[0..11]}" }
+
+ params { OpenStruct.new(mod: mod, type: :pseudo, commit: commit, name: name, semver: name) }
+ end
+ end
+end
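The :go_module_version factory builds in-memory Packages::Go::ModuleVersion objects (skip_create), and the :tagged and :pseudo traits derive defaults from the module repository's tags. A usage sketch (illustrative values, assuming matching tags exist on the test repository):

    # Sketch only; 'v1.0.1' is illustrative and should name an existing tag.
    mod = create(:go_module)

    create(:go_module_version, mod: mod)                           # version for the current HEAD commit
    create(:go_module_version, :tagged, mod: mod, name: 'v1.0.1')  # version for a tagged release
    create(:go_module_version, :pseudo, mod: mod)                  # pseudo-version built from prefix, commit timestamp and short SHA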
diff --git a/spec/factories/go_modules.rb b/spec/factories/go_modules.rb
new file mode 100644
index 00000000000..fdbacf48d3b
--- /dev/null
+++ b/spec/factories/go_modules.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :go_module, class: 'Packages::Go::Module' do
+ initialize_with { new(attributes[:project], attributes[:name], attributes[:path]) }
+ skip_create
+
+ project { create :project, :repository }
+
+ path { '' }
+ name { "#{Settings.build_gitlab_go_url}/#{project.full_path}#{path.empty? ? '' : '/'}#{path}" }
+ end
+end
diff --git a/spec/factories/packages.rb b/spec/factories/packages.rb
new file mode 100644
index 00000000000..562269a67bc
--- /dev/null
+++ b/spec/factories/packages.rb
@@ -0,0 +1,355 @@
+# frozen_string_literal: true
+FactoryBot.define do
+ factory :package, class: 'Packages::Package' do
+ project
+ name { 'my/company/app/my-app' }
+ sequence(:version) { |n| "1.#{n}-SNAPSHOT" }
+ package_type { :maven }
+
+ factory :maven_package do
+ maven_metadatum
+
+ after :build do |package|
+ package.maven_metadatum.path = "#{package.name}/#{package.version}"
+ end
+
+ after :create do |package|
+ create :package_file, :xml, package: package
+ create :package_file, :jar, package: package
+ create :package_file, :pom, package: package
+ end
+ end
+
+ factory :npm_package do
+ sequence(:name) { |n| "@#{project.root_namespace.path}/package-#{n}"}
+ version { '1.0.0' }
+ package_type { :npm }
+
+ after :create do |package|
+ create :package_file, :npm, package: package
+ end
+
+ trait :with_build do
+ after :create do |package|
+ user = package.project.creator
+ pipeline = create(:ci_pipeline, user: user)
+ create(:ci_build, user: user, pipeline: pipeline)
+ create :package_build_info, package: package, pipeline: pipeline
+ end
+ end
+ end
+
+ factory :nuget_package do
+ sequence(:name) { |n| "NugetPackage#{n}"}
+ sequence(:version) { |n| "1.0.#{n}" }
+ package_type { :nuget }
+
+ after :create do |package|
+ create :package_file, :nuget, package: package, file_name: "#{package.name}.#{package.version}.nupkg"
+ end
+
+ trait(:with_metadatum) do
+ after :build do |pkg|
+ pkg.nuget_metadatum = build(:nuget_metadatum)
+ end
+ end
+ end
+
+ factory :pypi_package do
+ pypi_metadatum
+
+ sequence(:name) { |n| "pypi-package-#{n}"}
+ sequence(:version) { |n| "1.0.#{n}" }
+ package_type { :pypi }
+
+ after :create do |package|
+ create :package_file, :pypi, package: package, file_name: "#{package.name}-#{package.version}.tar.gz"
+ end
+ end
+
+ factory :composer_package do
+ sequence(:name) { |n| "composer-package-#{n}"}
+ sequence(:version) { |n| "1.0.#{n}" }
+ package_type { :composer }
+
+ transient do
+ sha { project.repository.find_branch('master').target }
+ json { { name: name, version: version } }
+ end
+
+ trait(:with_metadatum) do
+ after :create do |package, evaluator|
+ create :composer_metadatum, package: package, target_sha: evaluator.sha, composer_json: evaluator.json
+ end
+ end
+ end
+
+ factory :conan_package do
+ conan_metadatum
+
+ transient do
+ without_package_files { false }
+ end
+
+ after :build do |package|
+ package.conan_metadatum.package_username = Packages::Conan::Metadatum.package_username_from(
+ full_path: package.project.full_path
+ )
+ end
+
+ sequence(:name) { |n| "package-#{n}" }
+ version { '1.0.0' }
+ package_type { :conan }
+
+ after :create do |package, evaluator|
+ unless evaluator.without_package_files
+ create :conan_package_file, :conan_recipe_file, package: package
+ create :conan_package_file, :conan_recipe_manifest, package: package
+ create :conan_package_file, :conan_package_info, package: package
+ create :conan_package_file, :conan_package_manifest, package: package
+ create :conan_package_file, :conan_package, package: package
+ end
+ end
+
+ trait(:without_loaded_metadatum) do
+ conan_metadatum { build(:conan_metadatum, package: nil) }
+ end
+ end
+ end
+
+ factory :composer_metadatum, class: 'Packages::Composer::Metadatum' do
+ package { create(:composer_package) }
+
+ target_sha { '123' }
+ composer_json { { name: 'foo' } }
+ end
+
+ factory :package_build_info, class: 'Packages::BuildInfo' do
+ package
+ end
+
+ factory :package_file, class: 'Packages::PackageFile' do
+ package
+
+ file_name { 'somefile.txt' }
+
+ transient do
+ file_fixture { 'spec/fixtures/packages/conan/recipe_files/conanfile.py' }
+ end
+
+ after(:build) do |package_file, evaluator|
+ package_file.file = fixture_file_upload(evaluator.file_fixture)
+ end
+
+ factory :conan_package_file do
+ package { create(:conan_package, without_package_files: true) }
+
+ transient do
+ without_loaded_metadatum { false }
+ end
+
+ trait(:conan_recipe_file) do
+ after :create do |package_file, evaluator|
+ unless evaluator.without_loaded_metadatum
+ create :conan_file_metadatum, :recipe_file, package_file: package_file
+ end
+ end
+
+ file_fixture { 'spec/fixtures/packages/conan/recipe_files/conanfile.py' }
+ file_name { 'conanfile.py' }
+ file_sha1 { 'be93151dc23ac34a82752444556fe79b32c7a1ad' }
+ file_md5 { '12345abcde' }
+ size { 400.kilobytes }
+ end
+
+ trait(:conan_recipe_manifest) do
+ after :create do |package_file, evaluator|
+ unless evaluator.without_loaded_metadatum
+ create :conan_file_metadatum, :recipe_file, package_file: package_file
+ end
+ end
+
+ file_fixture { 'spec/fixtures/packages/conan/recipe_files/conanmanifest.txt' }
+ file_name { 'conanmanifest.txt' }
+ file_sha1 { 'be93151dc23ac34a82752444556fe79b32c7a1ad' }
+ file_md5 { '12345abcde' }
+ size { 400.kilobytes }
+ end
+
+ trait(:conan_package_manifest) do
+ after :create do |package_file, evaluator|
+ unless evaluator.without_loaded_metadatum
+ create :conan_file_metadatum, :package_file, package_file: package_file
+ end
+ end
+
+ file_fixture { 'spec/fixtures/packages/conan/package_files/conanmanifest.txt' }
+ file_name { 'conanmanifest.txt' }
+ file_sha1 { 'be93151dc23ac34a82752444556fe79b32c7a1ad' }
+ file_md5 { '12345abcde' }
+ size { 400.kilobytes }
+ end
+
+ trait(:conan_package_info) do
+ after :create do |package_file, evaluator|
+ unless evaluator.without_loaded_metadatum
+ create :conan_file_metadatum, :package_file, package_file: package_file
+ end
+ end
+
+ file_fixture { 'spec/fixtures/packages/conan/package_files/conaninfo.txt' }
+ file_name { 'conaninfo.txt' }
+ file_sha1 { 'be93151dc23ac34a82752444556fe79b32c7a1ad' }
+ file_md5 { '12345abcde' }
+ size { 400.kilobytes }
+ end
+
+ trait(:conan_package) do
+ after :create do |package_file, evaluator|
+ unless evaluator.without_loaded_metadatum
+ create :conan_file_metadatum, :package_file, package_file: package_file
+ end
+ end
+
+ file_fixture { 'spec/fixtures/packages/conan/package_files/conan_package.tgz' }
+ file_name { 'conan_package.tgz' }
+ file_sha1 { 'be93151dc23ac34a82752444556fe79b32c7a1ad' }
+ file_md5 { '12345abcde' }
+ size { 400.kilobytes }
+ end
+ end
+
+ trait(:jar) do
+ file_fixture { 'spec/fixtures/packages/maven/my-app-1.0-20180724.124855-1.jar' }
+ file_name { 'my-app-1.0-20180724.124855-1.jar' }
+ file_sha1 { '4f0bfa298744d505383fbb57c554d4f5c12d88b3' }
+ size { 100.kilobytes }
+ end
+
+ trait(:pom) do
+ file_fixture { 'spec/fixtures/packages/maven/my-app-1.0-20180724.124855-1.pom' }
+ file_name { 'my-app-1.0-20180724.124855-1.pom' }
+ file_sha1 { '19c975abd49e5102ca6c74a619f21e0cf0351c57' }
+ size { 200.kilobytes }
+ end
+
+ trait(:xml) do
+ file_fixture { 'spec/fixtures/packages/maven/maven-metadata.xml' }
+ file_name { 'maven-metadata.xml' }
+ file_sha1 { '42b1bdc80de64953b6876f5a8c644f20204011b0' }
+ size { 300.kilobytes }
+ end
+
+ trait(:npm) do
+ file_fixture { 'spec/fixtures/packages/npm/foo-1.0.1.tgz' }
+ file_name { 'foo-1.0.1.tgz' }
+ file_sha1 { 'be93151dc23ac34a82752444556fe79b32c7a1ad' }
+ verified_at { Date.current }
+ verification_checksum { '4437b5775e61455588a7e5187a2e5c58c680694260bbe5501c235ec690d17f83' }
+ size { 400.kilobytes }
+ end
+
+ trait(:nuget) do
+ package
+ file_fixture { 'spec/fixtures/packages/nuget/package.nupkg' }
+ file_name { 'package.nupkg' }
+ file_sha1 { '5fe852b2a6abd96c22c11fa1ff2fb19d9ce58b57' }
+ size { 300.kilobytes }
+ end
+
+ trait(:pypi) do
+ package
+ file_fixture { 'spec/fixtures/packages/pypi/sample-project.tar.gz' }
+ file_name { 'sample-project-1.0.0.tar.gz' }
+ file_sha1 { '2c0cfbed075d3fae226f051f0cc771b533e01aff' }
+ file_md5 { '0a7392d24f42f83068fa3767c5310052' }
+ file_sha256 { '440e5e148a25331bbd7991575f7d54933c0ebf6cc735a18ee5066ac1381bb590' }
+ size { 1149.bytes }
+ end
+
+ trait(:object_storage) do
+ file_store { Packages::PackageFileUploader::Store::REMOTE }
+ end
+
+ trait(:checksummed) do
+ verification_checksum { 'abc' }
+ end
+
+ trait(:checksum_failure) do
+ verification_failure { 'Could not calculate the checksum' }
+ end
+
+ factory :package_file_with_file, traits: [:jar]
+ end
+
+ factory :maven_metadatum, class: 'Packages::Maven::Metadatum' do
+ association :package, package_type: :maven
+ path { 'my/company/app/my-app/1.0-SNAPSHOT' }
+ app_group { 'my.company.app' }
+ app_name { 'my-app' }
+ app_version { '1.0-SNAPSHOT' }
+ end
+
+ factory :conan_metadatum, class: 'Packages::Conan::Metadatum' do
+ association :package, factory: [:conan_package, :without_loaded_metadatum], without_package_files: true
+ package_username { 'username' }
+ package_channel { 'stable' }
+ end
+
+ factory :pypi_metadatum, class: 'Packages::Pypi::Metadatum' do
+ association :package, package_type: :pypi
+ required_python { '>=2.7' }
+ end
+
+ factory :nuget_metadatum, class: 'Packages::Nuget::Metadatum' do
+ package { create(:nuget_package) }
+
+ license_url { 'http://www.gitlab.com' }
+ project_url { 'http://www.gitlab.com' }
+ icon_url { 'http://www.gitlab.com' }
+ end
+
+ factory :conan_file_metadatum, class: 'Packages::Conan::FileMetadatum' do
+ package_file { create(:conan_package_file, :conan_recipe_file, without_loaded_metadatum: true) }
+ recipe_revision { '0' }
+ conan_file_type { 'recipe_file' }
+
+ trait(:recipe_file) do
+ conan_file_type { 'recipe_file' }
+ end
+
+ trait(:package_file) do
+ package_file { create(:conan_package_file, :conan_package, without_loaded_metadatum: true) }
+ conan_file_type { 'package_file' }
+ package_revision { '0' }
+ conan_package_reference { '123456789' }
+ end
+ end
+
+ factory :packages_dependency, class: 'Packages::Dependency' do
+ sequence(:name) { |n| "@test/package-#{n}"}
+ sequence(:version_pattern) { |n| "~6.2.#{n}" }
+ end
+
+ factory :packages_dependency_link, class: 'Packages::DependencyLink' do
+ package { create(:nuget_package) }
+ dependency { create(:packages_dependency) }
+ dependency_type { :dependencies }
+
+ trait(:with_nuget_metadatum) do
+ after :build do |link|
+ link.nuget_metadatum = build(:nuget_dependency_link_metadatum)
+ end
+ end
+ end
+
+ factory :nuget_dependency_link_metadatum, class: 'Packages::Nuget::DependencyLinkMetadatum' do
+ dependency_link { create(:packages_dependency_link) }
+ target_framework { '.NETStandard2.0' }
+ end
+
+ factory :packages_tag, class: 'Packages::Tag' do
+ package
+ sequence(:name) { |n| "tag-#{n}"}
+ end
+end
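The package factories above pair each package type with its metadata and fixture-backed package files. A usage sketch (illustrative, assuming the fixture files referenced above exist in the test tree):

    # Sketch only.
    project = create(:project)

    create(:maven_package, project: project)                               # also creates xml, jar and pom package files
    create(:npm_package, :with_build, project: project)                    # links a pipeline through package_build_info
    create(:nuget_package, :with_metadatum, project: project)
    create(:conan_package, project: project, without_package_files: true)  # skips the five Conan file records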
diff --git a/spec/features/projects/clusters/gcp_spec.rb b/spec/features/projects/clusters/gcp_spec.rb
index 3e1006920e7..2e6a366f77a 100644
--- a/spec/features/projects/clusters/gcp_spec.rb
+++ b/spec/features/projects/clusters/gcp_spec.rb
@@ -139,6 +139,19 @@ RSpec.describe 'Gcp Cluster', :js, :do_not_mock_admin_mode do
end
end
+ context 'when a user adds an existing cluster' do
+ before do
+ visit project_clusters_path(project)
+
+ click_link 'Add Kubernetes cluster'
+ click_link 'Add existing cluster'
+ end
+
+ it 'user sees the "Environment scope" field' do
+ expect(page).to have_css('#cluster_environment_scope')
+ end
+ end
+
context 'when user destroys the cluster' do
before do
click_link 'Advanced Settings'
@@ -155,19 +168,6 @@ RSpec.describe 'Gcp Cluster', :js, :do_not_mock_admin_mode do
end
end
- context 'when a user cannot edit the environment scope' do
- before do
- visit project_clusters_path(project)
-
- click_link 'Add Kubernetes cluster'
- click_link 'Add existing cluster'
- end
-
- it 'user does not see the "Environment scope" field' do
- expect(page).not_to have_css('#cluster_environment_scope')
- end
- end
-
context 'when user has not dismissed GCP signup offer' do
before do
visit project_clusters_path(project)
diff --git a/spec/features/projects/clusters_spec.rb b/spec/features/projects/clusters_spec.rb
index 1cf214a5c4e..c56a1ed1711 100644
--- a/spec/features/projects/clusters_spec.rb
+++ b/spec/features/projects/clusters_spec.rb
@@ -25,6 +25,168 @@ RSpec.describe 'Clusters', :js do
end
end
+ context 'when user has a cluster' do
+ before do
+ allow_any_instance_of(Clusters::Cluster).to receive(:retrieve_connection_status).and_return(:connected)
+ end
+
+ context 'when user adds an existing cluster' do
+ before do
+ create(:cluster, :provided_by_user, name: 'default-cluster', environment_scope: '*', projects: [project])
+ visit project_clusters_path(project)
+ end
+
+ it 'user sees an add cluster button' do
+ expect(page).to have_selector('.js-add-cluster:not(.readonly)')
+ end
+
+ context 'when user filled form with environment scope' do
+ before do
+ click_link 'Add Kubernetes cluster'
+ click_link 'Add existing cluster'
+ fill_in 'cluster_name', with: 'staging-cluster'
+ fill_in 'cluster_environment_scope', with: 'staging/*'
+ click_button 'Add Kubernetes cluster'
+ end
+
+ it 'user sees a cluster details page' do
+ expect(page.find_field('cluster[name]').value).to eq('staging-cluster')
+ expect(page.find_field('cluster[environment_scope]').value).to eq('staging/*')
+ end
+ end
+
+ context 'when user updates environment scope' do
+ before do
+ click_link 'default-cluster'
+ fill_in 'cluster_environment_scope', with: 'production/*'
+ within '.js-cluster-integration-form' do
+ click_button 'Save changes'
+ end
+ end
+
+ it 'updates the environment scope' do
+ expect(page.find_field('cluster[environment_scope]').value).to eq('production/*')
+ end
+ end
+
+ context 'when user updates duplicated environment scope' do
+ before do
+ click_link 'Add Kubernetes cluster'
+ click_link 'Add existing cluster'
+ fill_in 'cluster_name', with: 'staging-cluster'
+ fill_in 'cluster_environment_scope', with: '*'
+ fill_in 'cluster_platform_kubernetes_attributes_api_url', with: 'https://0.0.0.0'
+ fill_in 'cluster_platform_kubernetes_attributes_token', with: 'token'
+
+ click_button 'Add Kubernetes cluster'
+ end
+
+      it 'user sees an environment scope validation error' do
+ expect(page).to have_content('cannot add duplicated environment scope')
+ end
+ end
+ end
+
+ context 'when user adds a Google Kubernetes Engine cluster' do
+ before do
+ allow_any_instance_of(Projects::ClustersController)
+ .to receive(:token_in_session).and_return('token')
+ allow_any_instance_of(Projects::ClustersController)
+ .to receive(:expires_at_in_session).and_return(1.hour.since.to_i.to_s)
+
+ allow_any_instance_of(Projects::ClustersController).to receive(:authorize_google_project_billing)
+ allow_any_instance_of(Projects::ClustersController).to receive(:google_project_billing_status).and_return(true)
+
+ allow_any_instance_of(GoogleApi::CloudPlatform::Client)
+ .to receive(:projects_zones_clusters_create) do
+ OpenStruct.new(
+ self_link: 'projects/gcp-project-12345/zones/us-central1-a/operations/ope-123',
+ status: 'RUNNING'
+ )
+ end
+
+ allow(WaitForClusterCreationWorker).to receive(:perform_in).and_return(nil)
+
+ create(:cluster, :provided_by_gcp, name: 'default-cluster', environment_scope: '*', projects: [project])
+ visit project_clusters_path(project)
+ end
+
+    it 'user sees an add cluster button' do
+ expect(page).to have_selector('.js-add-cluster:not(.readonly)')
+ end
+
+ context 'when user filled form with environment scope' do
+ before do
+ click_link 'Add Kubernetes cluster'
+ click_link 'Create new cluster'
+ click_link 'Google GKE'
+
+ sleep 2 # wait for ajax
+ execute_script('document.querySelector(".js-gcp-project-id-dropdown input").setAttribute("type", "text")')
+ execute_script('document.querySelector(".js-gcp-zone-dropdown input").setAttribute("type", "text")')
+ execute_script('document.querySelector(".js-gcp-machine-type-dropdown input").setAttribute("type", "text")')
+ execute_script('document.querySelector(".js-gke-cluster-creation-submit").removeAttribute("disabled")')
+
+ fill_in 'cluster_name', with: 'staging-cluster'
+ fill_in 'cluster_environment_scope', with: 'staging/*'
+ fill_in 'cluster[provider_gcp_attributes][gcp_project_id]', with: 'gcp-project-123'
+ fill_in 'cluster[provider_gcp_attributes][zone]', with: 'us-central1-a'
+ fill_in 'cluster[provider_gcp_attributes][machine_type]', with: 'n1-standard-2'
+ click_button 'Create Kubernetes cluster'
+
+ # The frontend won't show the details until the cluster is
+ # created, and we don't want to make calls out to GCP.
+ provider = Clusters::Cluster.last.provider
+ provider.make_created
+ end
+
+ it 'user sees a cluster details page' do
+ expect(page).to have_content('GitLab Integration')
+ expect(page.find_field('cluster[environment_scope]').value).to eq('staging/*')
+ end
+ end
+
+ context 'when user updates environment scope' do
+ before do
+ click_link 'default-cluster'
+ fill_in 'cluster_environment_scope', with: 'production/*'
+ within ".js-cluster-integration-form" do
+ click_button 'Save changes'
+ end
+ end
+
+ it 'updates the environment scope' do
+ expect(page.find_field('cluster[environment_scope]').value).to eq('production/*')
+ end
+ end
+
+ context 'when user updates duplicated environment scope' do
+ before do
+ click_link 'Add Kubernetes cluster'
+ click_link 'Create new cluster'
+ click_link 'Google GKE'
+
+ sleep 2 # wait for ajax
+ execute_script('document.querySelector(".js-gcp-project-id-dropdown input").setAttribute("type", "text")')
+ execute_script('document.querySelector(".js-gcp-zone-dropdown input").setAttribute("type", "text")')
+ execute_script('document.querySelector(".js-gcp-machine-type-dropdown input").setAttribute("type", "text")')
+ execute_script('document.querySelector(".js-gke-cluster-creation-submit").removeAttribute("disabled")')
+
+ fill_in 'cluster_name', with: 'staging-cluster'
+ fill_in 'cluster_environment_scope', with: '*'
+ fill_in 'cluster[provider_gcp_attributes][gcp_project_id]', with: 'gcp-project-123'
+ fill_in 'cluster[provider_gcp_attributes][zone]', with: 'us-central1-a'
+ fill_in 'cluster[provider_gcp_attributes][machine_type]', with: 'n1-standard-2'
+ click_button 'Create Kubernetes cluster'
+ end
+
+      it 'user sees an environment scope validation error' do
+ expect(page).to have_content('cannot add duplicated environment scope')
+ end
+ end
+ end
+ end
+
context 'when user has a cluster and visits cluster index page' do
let!(:cluster) { create(:cluster, :project, :provided_by_gcp) }
let(:project) { cluster.project }
diff --git a/spec/features/projects/issues/design_management/user_uploads_designs_spec.rb b/spec/features/projects/issues/design_management/user_uploads_designs_spec.rb
index a173d633f2c..b72aae146f4 100644
--- a/spec/features/projects/issues/design_management/user_uploads_designs_spec.rb
+++ b/spec/features/projects/issues/design_management/user_uploads_designs_spec.rb
@@ -65,7 +65,7 @@ RSpec.describe 'User uploads new design', :js do
visit project_issue_path(project, issue)
end
- it 'uploads designs' do
+ it 'uploads designs', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/225616' do
attach_file(:design_file, logo_fixture, make_visible: true)
expect(page).to have_selector('.js-design-list-item', count: 1)
diff --git a/spec/fixtures/api/schemas/public_api/v4/milestone.json b/spec/fixtures/api/schemas/public_api/v4/milestone.json
index 6ca2e88ae91..c8c6a7b6ae1 100644
--- a/spec/fixtures/api/schemas/public_api/v4/milestone.json
+++ b/spec/fixtures/api/schemas/public_api/v4/milestone.json
@@ -12,11 +12,13 @@
"updated_at": { "type": "date" },
"start_date": { "type": "date" },
"due_date": { "type": "date" },
+ "expired": { "type": ["boolean", "null"] },
"web_url": { "type": "string" }
},
"required": [
"id", "iid", "title", "description", "state",
- "state", "created_at", "updated_at", "start_date", "due_date"
+ "state", "created_at", "updated_at", "start_date",
+ "due_date", "expired"
],
"additionalProperties": false
}
diff --git a/spec/fixtures/api/schemas/public_api/v4/milestone_with_stats.json b/spec/fixtures/api/schemas/public_api/v4/milestone_with_stats.json
index e2475545ee9..f008ed7d55f 100644
--- a/spec/fixtures/api/schemas/public_api/v4/milestone_with_stats.json
+++ b/spec/fixtures/api/schemas/public_api/v4/milestone_with_stats.json
@@ -12,6 +12,7 @@
"updated_at": { "type": "date" },
"start_date": { "type": "date" },
"due_date": { "type": "date" },
+ "expired": { "type": ["boolean", "null"] },
"web_url": { "type": "string" },
"issue_stats": {
"required": ["total", "closed"],
@@ -24,7 +25,8 @@
},
"required": [
"id", "iid", "title", "description", "state",
- "state", "created_at", "updated_at", "start_date", "due_date", "issue_stats"
+ "state", "created_at", "updated_at", "start_date",
+ "due_date", "expired", "issue_stats"
],
"additionalProperties": false
}
diff --git a/spec/frontend/api_spec.js b/spec/frontend/api_spec.js
index c1a23d441b3..b3b62cc9b58 100644
--- a/spec/frontend/api_spec.js
+++ b/spec/frontend/api_spec.js
@@ -96,6 +96,29 @@ describe('Api', () => {
});
});
+ describe('groupMilestones', () => {
+ it('fetches group milestones', done => {
+ const groupId = 1;
+ const options = { state: 'active' };
+ const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/groups/1/milestones`;
+ mock.onGet(expectedUrl).reply(200, [
+ {
+ id: 1,
+ title: 'milestone1',
+ state: 'active',
+ },
+ ]);
+
+ Api.groupMilestones(groupId, options)
+ .then(({ data }) => {
+ expect(data.length).toBe(1);
+ expect(data[0].title).toBe('milestone1');
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
+
describe('namespaces', () => {
it('fetches namespaces', done => {
const query = 'dummy query';
@@ -296,6 +319,29 @@ describe('Api', () => {
});
});
+ describe('projectMilestones', () => {
+ it('fetches project milestones', done => {
+ const projectId = 1;
+ const options = { state: 'active' };
+ const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/1/milestones`;
+ mock.onGet(expectedUrl).reply(200, [
+ {
+ id: 1,
+ title: 'milestone1',
+ state: 'active',
+ },
+ ]);
+
+ Api.projectMilestones(projectId, options)
+ .then(({ data }) => {
+ expect(data.length).toBe(1);
+ expect(data[0].title).toBe('milestone1');
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
+
describe('newLabel', () => {
it('creates a new label', done => {
const namespace = 'some namespace';
diff --git a/spec/frontend/boards/components/board_form_spec.js b/spec/frontend/boards/components/board_form_spec.js
index ee427bc2154..a1b656876ed 100644
--- a/spec/frontend/boards/components/board_form_spec.js
+++ b/spec/frontend/boards/components/board_form_spec.js
@@ -10,7 +10,6 @@ describe('board_form.vue', () => {
const propsData = {
canAdminBoard: false,
labelsPath: `${gl.TEST_HOST}/labels/path`,
- milestonePath: `${gl.TEST_HOST}/milestone/path`,
};
const findModal = () => wrapper.find(DeprecatedModal);
diff --git a/spec/frontend/boards/components/boards_selector_spec.js b/spec/frontend/boards/components/boards_selector_spec.js
index b1ae86c2d3f..347ffaf672e 100644
--- a/spec/frontend/boards/components/boards_selector_spec.js
+++ b/spec/frontend/boards/components/boards_selector_spec.js
@@ -81,7 +81,6 @@ describe('BoardsSelector', () => {
assignee_id: null,
labels: [],
},
- milestonePath: `${TEST_HOST}/milestone/path`,
boardBaseUrl: `${TEST_HOST}/board/base/url`,
hasMissingBoards: false,
canAdminBoard: true,
diff --git a/spec/frontend/static_site_editor/mock_data.js b/spec/frontend/static_site_editor/mock_data.js
index 422048a5f69..96de9b73af0 100644
--- a/spec/frontend/static_site_editor/mock_data.js
+++ b/spec/frontend/static_site_editor/mock_data.js
@@ -10,6 +10,8 @@ export const sourceContentBody = `## On this page
- TOC
{:toc .hidden-md .hidden-lg}
+
+![image](path/to/image1.png)
`;
export const sourceContent = `${sourceContentHeader}${sourceContentSpacing}${sourceContentBody}`;
export const sourceContentTitle = 'Handbook';
@@ -48,3 +50,8 @@ export const createMergeRequestResponse = {
};
export const trackingCategory = 'projects:static_site_editor:show';
+
+export const images = new Map([
+ ['path/to/image1.png', 'image1-content'],
+ ['path/to/image2.png', 'image2-content'],
+]);
diff --git a/spec/frontend/static_site_editor/services/submit_content_changes_spec.js b/spec/frontend/static_site_editor/services/submit_content_changes_spec.js
index 3636de3fe70..a9169eb3e16 100644
--- a/spec/frontend/static_site_editor/services/submit_content_changes_spec.js
+++ b/spec/frontend/static_site_editor/services/submit_content_changes_spec.js
@@ -22,6 +22,7 @@ import {
sourcePath,
sourceContent as content,
trackingCategory,
+ images,
} from '../mock_data';
jest.mock('~/static_site_editor/services/generate_branch_name');
@@ -69,7 +70,7 @@ describe('submitContentChanges', () => {
});
it('commits the content changes to the branch when creating branch succeeds', () => {
- return submitContentChanges({ username, projectId, sourcePath, content }).then(() => {
+ return submitContentChanges({ username, projectId, sourcePath, content, images }).then(() => {
expect(Api.commitMultiple).toHaveBeenCalledWith(projectId, {
branch,
commit_message: mergeRequestTitle,
@@ -79,6 +80,35 @@ describe('submitContentChanges', () => {
file_path: sourcePath,
content,
},
+ {
+ action: 'create',
+ content: 'image1-content',
+ encoding: 'base64',
+ file_path: 'path/to/image1.png',
+ },
+ ],
+ });
+ });
+ });
+
+ it('does not commit an image if it has been removed from the content', () => {
+ const contentWithoutImages = '## Content without images';
+ return submitContentChanges({
+ username,
+ projectId,
+ sourcePath,
+ content: contentWithoutImages,
+ images,
+ }).then(() => {
+ expect(Api.commitMultiple).toHaveBeenCalledWith(projectId, {
+ branch,
+ commit_message: mergeRequestTitle,
+ actions: [
+ {
+ action: 'update',
+ file_path: sourcePath,
+ content: contentWithoutImages,
+ },
],
});
});
@@ -87,13 +117,13 @@ describe('submitContentChanges', () => {
it('notifies error when content could not be committed', () => {
Api.commitMultiple.mockRejectedValueOnce();
- return expect(submitContentChanges({ username, projectId })).rejects.toThrow(
+ return expect(submitContentChanges({ username, projectId, images })).rejects.toThrow(
SUBMIT_CHANGES_COMMIT_ERROR,
);
});
it('creates a merge request when committing changes succeeds', () => {
- return submitContentChanges({ username, projectId, sourcePath, content }).then(() => {
+ return submitContentChanges({ username, projectId, sourcePath, content, images }).then(() => {
expect(Api.createProjectMergeRequest).toHaveBeenCalledWith(
projectId,
convertObjectPropsToSnakeCase({
@@ -108,7 +138,7 @@ describe('submitContentChanges', () => {
it('notifies error when merge request could not be created', () => {
Api.createProjectMergeRequest.mockRejectedValueOnce();
- return expect(submitContentChanges({ username, projectId })).rejects.toThrow(
+ return expect(submitContentChanges({ username, projectId, images })).rejects.toThrow(
SUBMIT_CHANGES_MERGE_REQUEST_ERROR,
);
});
@@ -117,9 +147,11 @@ describe('submitContentChanges', () => {
let result;
beforeEach(() => {
- return submitContentChanges({ username, projectId, sourcePath, content }).then(_result => {
- result = _result;
- });
+ return submitContentChanges({ username, projectId, sourcePath, content, images }).then(
+ _result => {
+ result = _result;
+ },
+ );
});
it('returns the branch name', () => {
@@ -147,7 +179,7 @@ describe('submitContentChanges', () => {
describe('sends the correct tracking event', () => {
beforeEach(() => {
- return submitContentChanges({ username, projectId, sourcePath, content });
+ return submitContentChanges({ username, projectId, sourcePath, content, images });
});
it('for committing changes', () => {
diff --git a/spec/frontend/vue_shared/components/markdown/suggestion_diff_header_spec.js b/spec/frontend/vue_shared/components/markdown/suggestion_diff_header_spec.js
index 436e1fd6815..c6e147899e4 100644
--- a/spec/frontend/vue_shared/components/markdown/suggestion_diff_header_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/suggestion_diff_header_spec.js
@@ -69,11 +69,6 @@ describe('Suggestion Diff component', () => {
expect(addToBatchBtn.html().includes('Add suggestion to batch')).toBe(true);
});
- it('renders correct tooltip message for apply button', () => {
- createComponent();
- expect(wrapper.vm.tooltipMessage).toBe('This also resolves this thread');
- });
-
describe('when apply suggestion is clicked', () => {
beforeEach(() => {
createComponent();
@@ -232,11 +227,18 @@ describe('Suggestion Diff component', () => {
expect(findAddToBatchButton().exists()).toBe(false);
expect(findApplyButton().attributes('disabled')).toBe('true');
});
+ });
+
+ describe('tooltip message for apply button', () => {
+ it('renders correct tooltip message when button is applicable', () => {
+ createComponent();
+ expect(wrapper.vm.tooltipMessage).toBe('This also resolves this thread');
+ });
- it('renders correct tooltip message for apply button', () => {
- expect(wrapper.vm.tooltipMessage).toBe(
- "Can't apply as this line has changed or the suggestion already matches its content.",
- );
+ it('renders the inapplicable reason in the tooltip when button is not applicable', () => {
+ const inapplicableReason = 'lorem';
+ createComponent({ canApply: false, inapplicableReason });
+ expect(wrapper.vm.tooltipMessage).toBe(inapplicableReason);
});
});
});
diff --git a/spec/frontend/vue_shared/components/rich_content_editor/modals/add_image/add_image_modal_spec.js b/spec/frontend/vue_shared/components/rich_content_editor/modals/add_image/add_image_modal_spec.js
index 6e2bf21b692..0c2ac53aa52 100644
--- a/spec/frontend/vue_shared/components/rich_content_editor/modals/add_image/add_image_modal_spec.js
+++ b/spec/frontend/vue_shared/components/rich_content_editor/modals/add_image/add_image_modal_spec.js
@@ -6,6 +6,7 @@ import { IMAGE_TABS } from '~/vue_shared/components/rich_content_editor/constant
describe('Add Image Modal', () => {
let wrapper;
+ const propsData = { imageRoot: 'path/to/root/' };
const findModal = () => wrapper.find(GlModal);
const findTabs = () => wrapper.find(GlTabs);
@@ -14,7 +15,10 @@ describe('Add Image Modal', () => {
const findDescriptionInput = () => wrapper.find({ ref: 'descriptionInput' });
beforeEach(() => {
- wrapper = shallowMount(AddImageModal, { provide: { glFeatures: { sseImageUploads: true } } });
+ wrapper = shallowMount(AddImageModal, {
+ provide: { glFeatures: { sseImageUploads: true } },
+ propsData,
+ });
});
describe('when content is loaded', () => {
@@ -44,9 +48,10 @@ describe('Add Image Modal', () => {
it('validates the file', () => {
const preventDefault = jest.fn();
const description = 'some description';
+ const file = { name: 'some_file.png' };
wrapper.vm.$refs.uploadImageTab = { validateFile: jest.fn() };
- wrapper.setData({ description, tabIndex: IMAGE_TABS.UPLOAD_TAB });
+ wrapper.setData({ file, description, tabIndex: IMAGE_TABS.UPLOAD_TAB });
findModal().vm.$emit('ok', { preventDefault });
diff --git a/spec/frontend/vue_shared/components/rich_content_editor/rich_content_editor_spec.js b/spec/frontend/vue_shared/components/rich_content_editor/rich_content_editor_spec.js
index 01f20dd7299..b6ff6aa767c 100644
--- a/spec/frontend/vue_shared/components/rich_content_editor/rich_content_editor_spec.js
+++ b/spec/frontend/vue_shared/components/rich_content_editor/rich_content_editor_spec.js
@@ -28,12 +28,13 @@ describe('Rich Content Editor', () => {
let wrapper;
const content = '## Some Markdown';
+ const imageRoot = 'path/to/root/';
const findEditor = () => wrapper.find({ ref: 'editor' });
const findAddImageModal = () => wrapper.find(AddImageModal);
beforeEach(() => {
wrapper = shallowMount(RichContentEditor, {
- propsData: { content },
+ propsData: { content, imageRoot },
});
});
diff --git a/spec/frontend_integration/ide/__snapshots__/ide_integration_spec.js.snap b/spec/frontend_integration/ide/__snapshots__/ide_integration_spec.js.snap
index fe714924c2b..6beb5eab6db 100644
--- a/spec/frontend_integration/ide/__snapshots__/ide_integration_spec.js.snap
+++ b/spec/frontend_integration/ide/__snapshots__/ide_integration_spec.js.snap
@@ -112,7 +112,6 @@ exports[`WebIDE runs 1`] = `
class="gl-spinner-container"
>
<span
- aria-hidden="true"
aria-label="Loading"
class="align-text-bottom gl-spinner gl-spinner-orange gl-spinner-md"
/>
diff --git a/spec/helpers/clusters_helper_spec.rb b/spec/helpers/clusters_helper_spec.rb
index 2b820cd540c..cebf6235f44 100644
--- a/spec/helpers/clusters_helper_spec.rb
+++ b/spec/helpers/clusters_helper_spec.rb
@@ -101,6 +101,12 @@ RSpec.describe ClustersHelper do
end
end
+ describe '#has_multiple_clusters?' do
+ subject { helper.has_multiple_clusters? }
+
+ it { is_expected.to be_truthy }
+ end
+
describe '#cluster_type_label' do
subject { helper.cluster_type_label(cluster_type) }
diff --git a/spec/lib/gitlab/background_migration/mailers/unconfirm_mailer_spec.rb b/spec/lib/gitlab/background_migration/mailers/unconfirm_mailer_spec.rb
new file mode 100644
index 00000000000..f430009989b
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/mailers/unconfirm_mailer_spec.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::Mailers::UnconfirmMailer do
+ let(:user) { User.new(id: 1111) }
+ let(:subject) { described_class.unconfirm_notification_email(user) }
+
+ it 'contains abuse report url' do
+ expect(subject.body.encoded).to include(Rails.application.routes.url_helpers.new_abuse_report_url(user_id: user.id))
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/wrongfully_confirmed_email_unconfirmer_spec.rb b/spec/lib/gitlab/background_migration/wrongfully_confirmed_email_unconfirmer_spec.rb
new file mode 100644
index 00000000000..7b1b1be1149
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/wrongfully_confirmed_email_unconfirmer_spec.rb
@@ -0,0 +1,155 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::WrongfullyConfirmedEmailUnconfirmer, schema: 20200615111857 do
+ let(:users) { table(:users) }
+ let(:emails) { table(:emails) }
+ let(:confirmed_at_2_days_ago) { 2.days.ago }
+ let(:confirmed_at_3_days_ago) { 3.days.ago }
+ let(:one_year_ago) { 1.year.ago }
+
+ let!(:user_needs_migration_1) { users.create!(name: 'user1', email: 'test1@test.com', state: 'active', projects_limit: 1, confirmed_at: confirmed_at_2_days_ago, confirmation_sent_at: one_year_ago) }
+ let!(:user_needs_migration_2) { users.create!(name: 'user2', email: 'test2@test.com', state: 'active', projects_limit: 1, confirmed_at: confirmed_at_3_days_ago, confirmation_sent_at: one_year_ago) }
+ let!(:user_does_not_need_migration) { users.create!(name: 'user3', email: 'test3@test.com', state: 'active', projects_limit: 1) }
+ let!(:inactive_user) { users.create!(name: 'user4', email: 'test4@test.com', state: 'blocked', projects_limit: 1, confirmed_at: confirmed_at_3_days_ago, confirmation_sent_at: one_year_ago) }
+ let!(:alert_bot_user) { users.create!(name: 'user5', email: 'test5@test.com', state: 'active', user_type: 2, projects_limit: 1, confirmed_at: confirmed_at_3_days_ago, confirmation_sent_at: one_year_ago) }
+
+ let!(:bad_email_1) { emails.create!(user_id: user_needs_migration_1.id, email: 'other1@test.com', confirmed_at: confirmed_at_2_days_ago, confirmation_sent_at: one_year_ago) }
+ let!(:bad_email_2) { emails.create!(user_id: user_needs_migration_2.id, email: 'other2@test.com', confirmed_at: confirmed_at_3_days_ago, confirmation_sent_at: one_year_ago) }
+ let!(:bad_email_3_inactive_user) { emails.create!(user_id: inactive_user.id, email: 'other-inactive@test.com', confirmed_at: confirmed_at_3_days_ago, confirmation_sent_at: one_year_ago) }
+ let!(:bad_email_4_bot_user) { emails.create!(user_id: alert_bot_user.id, email: 'other-bot@test.com', confirmed_at: confirmed_at_3_days_ago, confirmation_sent_at: one_year_ago) }
+
+ let!(:good_email_1) { emails.create!(user_id: user_needs_migration_2.id, email: 'other3@test.com', confirmed_at: confirmed_at_2_days_ago, confirmation_sent_at: one_year_ago) }
+ let!(:good_email_2) { emails.create!(user_id: user_needs_migration_2.id, email: 'other4@test.com', confirmed_at: nil) }
+ let!(:good_email_3) { emails.create!(user_id: user_does_not_need_migration.id, email: 'other5@test.com', confirmed_at: confirmed_at_2_days_ago, confirmation_sent_at: one_year_ago) }
+
+ subject do
+ email_ids = [bad_email_1, bad_email_2, good_email_1, good_email_2, good_email_3].map(&:id)
+
+ described_class.new.perform(email_ids.min, email_ids.max)
+ end
+
+ it 'does not change irrelevant email records' do
+ subject
+
+ expect(good_email_1.reload.confirmed_at).to be_within(1.second).of(confirmed_at_2_days_ago)
+ expect(good_email_2.reload.confirmed_at).to be_nil
+ expect(good_email_3.reload.confirmed_at).to be_within(1.second).of(confirmed_at_2_days_ago)
+
+ expect(bad_email_3_inactive_user.reload.confirmed_at).to be_within(1.second).of(confirmed_at_3_days_ago)
+ expect(bad_email_4_bot_user.reload.confirmed_at).to be_within(1.second).of(confirmed_at_3_days_ago)
+
+ expect(good_email_1.reload.confirmation_sent_at).to be_within(1.second).of(one_year_ago)
+ expect(good_email_2.reload.confirmation_sent_at).to be_nil
+ expect(good_email_3.reload.confirmation_sent_at).to be_within(1.second).of(one_year_ago)
+
+ expect(bad_email_3_inactive_user.reload.confirmation_sent_at).to be_within(1.second).of(one_year_ago)
+ expect(bad_email_4_bot_user.reload.confirmation_sent_at).to be_within(1.second).of(one_year_ago)
+ end
+
+ it 'does not change irrelevant user records' do
+ subject
+
+ expect(user_does_not_need_migration.reload.confirmed_at).to be_nil
+ expect(inactive_user.reload.confirmed_at).to be_within(1.second).of(confirmed_at_3_days_ago)
+ expect(alert_bot_user.reload.confirmed_at).to be_within(1.second).of(confirmed_at_3_days_ago)
+
+ expect(user_does_not_need_migration.reload.confirmation_sent_at).to be_nil
+ expect(inactive_user.reload.confirmation_sent_at).to be_within(1.second).of(one_year_ago)
+ expect(alert_bot_user.reload.confirmation_sent_at).to be_within(1.second).of(one_year_ago)
+ end
+
+ it 'updates confirmation_sent_at column' do
+ subject
+
+ expect(user_needs_migration_1.reload.confirmation_sent_at).to be_within(1.minute).of(Time.now)
+ expect(user_needs_migration_2.reload.confirmation_sent_at).to be_within(1.minute).of(Time.now)
+
+ expect(bad_email_1.reload.confirmation_sent_at).to be_within(1.minute).of(Time.now)
+ expect(bad_email_2.reload.confirmation_sent_at).to be_within(1.minute).of(Time.now)
+ end
+
+ it 'unconfirms bad email records' do
+ subject
+
+ expect(bad_email_1.reload.confirmed_at).to be_nil
+ expect(bad_email_2.reload.confirmed_at).to be_nil
+
+ expect(bad_email_1.reload.confirmation_token).not_to be_nil
+ expect(bad_email_2.reload.confirmation_token).not_to be_nil
+ end
+
+ it 'unconfirms user records' do
+ subject
+
+ expect(user_needs_migration_1.reload.confirmed_at).to be_nil
+ expect(user_needs_migration_2.reload.confirmed_at).to be_nil
+
+ expect(user_needs_migration_1.reload.confirmation_token).not_to be_nil
+ expect(user_needs_migration_2.reload.confirmation_token).not_to be_nil
+ end
+
+ context 'enqueued jobs' do
+ let(:user_1_gid) { User.find(user_needs_migration_1.id).to_gid.to_s }
+ let(:user_2_gid) { User.find(user_needs_migration_2.id).to_gid.to_s }
+
+ let(:email_1_gid) { Email.find(bad_email_1.id).to_gid.to_s }
+ let(:email_2_gid) { Email.find(bad_email_2.id).to_gid.to_s }
+
+ it 'enqueues the email confirmation and the unconfirm notification mailer jobs' do
+ subject
+
+ expect(enqueued_jobs.size).to eq(6)
+
+ expected_job_arguments = [
+ [
+ 'DeviseMailer',
+ 'confirmation_instructions',
+ 'deliver_now',
+ { "_aj_globalid" => email_1_gid },
+ bad_email_1.reload.confirmation_token
+ ],
+ [
+ 'DeviseMailer',
+ 'confirmation_instructions',
+ 'deliver_now',
+ { "_aj_globalid" => email_2_gid },
+ bad_email_2.reload.confirmation_token
+ ],
+ [
+ 'DeviseMailer',
+ 'confirmation_instructions',
+ 'deliver_now',
+ { "_aj_globalid" => user_1_gid },
+ user_needs_migration_1.reload.confirmation_token
+ ],
+ [
+ 'Gitlab::BackgroundMigration::Mailers::UnconfirmMailer',
+ 'unconfirm_notification_email',
+ 'deliver_now',
+ { "_aj_globalid" => user_1_gid }
+ ],
+ [
+ 'DeviseMailer',
+ 'confirmation_instructions',
+ 'deliver_now',
+ { "_aj_globalid" => user_2_gid },
+ user_needs_migration_2.reload.confirmation_token
+ ],
+ [
+ 'Gitlab::BackgroundMigration::Mailers::UnconfirmMailer',
+ 'unconfirm_notification_email',
+ 'deliver_now',
+ { "_aj_globalid" => user_2_gid }
+ ]
+ ]
+
+ all_job_arguments = enqueued_jobs.map { |job| job["arguments"] }
+
+ expected_job_arguments.each do |job_arguments|
+ expect(all_job_arguments).to include(job_arguments)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/danger/teammate_spec.rb b/spec/lib/gitlab/danger/teammate_spec.rb
index 78fd6145154..a0540a9fbf5 100644
--- a/spec/lib/gitlab/danger/teammate_spec.rb
+++ b/spec/lib/gitlab/danger/teammate_spec.rb
@@ -178,6 +178,8 @@ RSpec.describe Gitlab::Danger::Teammate do
-10 | 2 | "12 hours behind `@mario`"
2 | 4 | "2 hours behind `@mario`"
4 | 2 | "2 hours ahead `@mario`"
+ 2 | 3 | "1 hour behind `@mario`"
+ 3 | 2 | "1 hour ahead `@mario`"
2 | 2 | "same timezone as `@mario`"
end
diff --git a/spec/lib/gitlab/database_spec.rb b/spec/lib/gitlab/database_spec.rb
index 17ec2bbfdb1..a6af0da66e3 100644
--- a/spec/lib/gitlab/database_spec.rb
+++ b/spec/lib/gitlab/database_spec.rb
@@ -283,7 +283,6 @@ RSpec.describe Gitlab::Database do
describe '.bulk_insert' do
before do
allow(described_class).to receive(:connection).and_return(connection)
- allow(described_class).to receive(:version).and_return(version)
allow(connection).to receive(:quote_column_name, &:itself)
allow(connection).to receive(:quote, &:itself)
allow(connection).to receive(:execute)
@@ -298,8 +297,6 @@ RSpec.describe Gitlab::Database do
]
end
- let_it_be(:version) { 9.6 }
-
it 'does nothing with empty rows' do
expect(connection).not_to receive(:execute)
@@ -366,28 +363,13 @@ RSpec.describe Gitlab::Database do
expect(ids).to eq([10])
end
- context 'with version >= 9.5' do
- it 'allows setting the upsert to do nothing' do
- expect(connection)
- .to receive(:execute)
- .with(/ON CONFLICT DO NOTHING/)
-
- described_class
- .bulk_insert('test', [{ number: 10 }], on_conflict: :do_nothing)
- end
- end
-
- context 'with version < 9.5' do
- let(:version) { 9.4 }
-
- it 'refuses setting the upsert' do
- expect(connection)
- .not_to receive(:execute)
- .with(/ON CONFLICT/)
+ it 'allows setting the upsert to do nothing' do
+ expect(connection)
+ .to receive(:execute)
+ .with(/ON CONFLICT DO NOTHING/)
- described_class
- .bulk_insert('test', [{ number: 10 }], on_conflict: :do_nothing)
- end
+ described_class
+ .bulk_insert('test', [{ number: 10 }], on_conflict: :do_nothing)
end
end
end
diff --git a/spec/lib/gitlab/metrics/dashboard/service_selector_spec.rb b/spec/lib/gitlab/metrics/dashboard/service_selector_spec.rb
index 2466208e715..f3c8209e0b6 100644
--- a/spec/lib/gitlab/metrics/dashboard/service_selector_spec.rb
+++ b/spec/lib/gitlab/metrics/dashboard/service_selector_spec.rb
@@ -109,6 +109,46 @@ RSpec.describe Gitlab::Metrics::Dashboard::ServiceSelector do
it { is_expected.to be Metrics::Dashboard::TransientEmbedService }
end
+
+ context 'when cluster is provided' do
+ let(:arguments) { { cluster: "some cluster" } }
+
+ it { is_expected.to be Metrics::Dashboard::ClusterDashboardService }
+ end
+
+ context 'when cluster is provided and embedded is not true' do
+ let(:arguments) { { cluster: "some cluster", embedded: 'false' } }
+
+ it { is_expected.to be Metrics::Dashboard::ClusterDashboardService }
+ end
+
+ context 'when cluster dashboard_path is provided' do
+ let(:arguments) { { dashboard_path: ::Metrics::Dashboard::ClusterDashboardService::DASHBOARD_PATH } }
+
+ it { is_expected.to be Metrics::Dashboard::ClusterDashboardService }
+ end
+
+ context 'when cluster is provided and embed params' do
+ let(:arguments) do
+ {
+ cluster: "some cluster",
+ embedded: 'true',
+ cluster_type: 'project',
+ format: :json,
+ group: 'Food metrics',
+ title: 'Pizza Consumption',
+ y_label: 'Slice Count'
+ }
+ end
+
+ it { is_expected.to be Metrics::Dashboard::ClusterMetricsEmbedService }
+ end
+
+ context 'when metrics embed is for an alert' do
+ let(:arguments) { { embedded: true, prometheus_alert_id: 5 } }
+
+ it { is_expected.to be Metrics::Dashboard::GitlabAlertEmbedService }
+ end
end
end
end
diff --git a/spec/lib/gitlab/metrics/dashboard/url_spec.rb b/spec/lib/gitlab/metrics/dashboard/url_spec.rb
index afb65f0b6e4..56556423b05 100644
--- a/spec/lib/gitlab/metrics/dashboard/url_spec.rb
+++ b/spec/lib/gitlab/metrics/dashboard/url_spec.rb
@@ -46,6 +46,35 @@ RSpec.describe Gitlab::Metrics::Dashboard::Url do
end
end
+ describe '#clusters_regex' do
+ let(:url) do
+ Gitlab::Routing.url_helpers.namespace_project_cluster_url(
+ 'foo',
+ 'bar',
+ '1',
+ group: 'Cluster Health',
+ title: 'Memory Usage',
+ y_label: 'Memory 20(GiB)',
+ anchor: 'title'
+ )
+ end
+
+ let(:expected_params) do
+ {
+ 'url' => url,
+ 'namespace' => 'foo',
+ 'project' => 'bar',
+ 'cluster_id' => '1',
+ 'query' => '?group=Cluster+Health&title=Memory+Usage&y_label=Memory+20%28GiB%29',
+ 'anchor' => '#title'
+ }
+ end
+
+ subject { described_class.clusters_regex }
+
+ it_behaves_like 'regex which matches url when expected'
+ end
+
describe '#grafana_regex' do
let(:url) do
namespace_project_grafana_api_metrics_dashboard_url(
diff --git a/spec/lib/gitlab/usage_data/topology_spec.rb b/spec/lib/gitlab/usage_data/topology_spec.rb
index 8bd313de14e..002956d726c 100644
--- a/spec/lib/gitlab/usage_data/topology_spec.rb
+++ b/spec/lib/gitlab/usage_data/topology_spec.rb
@@ -28,7 +28,8 @@ RSpec.describe Gitlab::UsageData::Topology do
receive_node_service_memory_rss_query,
receive_node_service_memory_uss_query,
receive_node_service_memory_pss_query,
- receive_node_service_process_count_query
+ receive_node_service_process_count_query,
+ receive_node_service_app_server_workers_query
)
expect(subject[:topology]).to eq({
@@ -45,7 +46,8 @@ RSpec.describe Gitlab::UsageData::Topology do
process_count: 10,
process_memory_rss: 300,
process_memory_uss: 301,
- process_memory_pss: 302
+ process_memory_pss: 302,
+ server: 'puma'
},
{
name: 'sidekiq',
@@ -68,6 +70,10 @@ RSpec.describe Gitlab::UsageData::Topology do
name: 'redis',
process_count: 1,
process_memory_rss: 402
+ },
+ {
+ name: 'web',
+ server: 'unicorn'
}
]
}
@@ -85,7 +91,8 @@ RSpec.describe Gitlab::UsageData::Topology do
receive_node_service_memory_rss_query(result: []),
receive_node_service_memory_uss_query(result: []),
receive_node_service_memory_pss_query,
- receive_node_service_process_count_query
+ receive_node_service_process_count_query,
+ receive_node_service_app_server_workers_query(result: [])
)
expect(subject[:topology]).to eq({
@@ -94,7 +101,8 @@ RSpec.describe Gitlab::UsageData::Topology do
{ 'app_requests' => 'empty_result' },
{ 'node_memory' => 'empty_result' },
{ 'service_rss' => 'empty_result' },
- { 'service_uss' => 'empty_result' }
+ { 'service_uss' => 'empty_result' },
+ { 'service_workers' => 'empty_result' }
],
nodes: [
{
@@ -145,7 +153,8 @@ RSpec.describe Gitlab::UsageData::Topology do
{ 'service_rss' => 'Gitlab::PrometheusClient::ConnectionError' },
{ 'service_uss' => 'Gitlab::PrometheusClient::ConnectionError' },
{ 'service_pss' => 'Gitlab::PrometheusClient::ConnectionError' },
- { 'service_process_count' => 'Gitlab::PrometheusClient::ConnectionError' }
+ { 'service_process_count' => 'Gitlab::PrometheusClient::ConnectionError' },
+ { 'service_workers' => 'Gitlab::PrometheusClient::ConnectionError' }
],
nodes: []
})
@@ -298,4 +307,21 @@ RSpec.describe Gitlab::UsageData::Topology do
}
])
end
+
+ def receive_node_service_app_server_workers_query(result: nil)
+ receive(:query)
+ .with(/app_server_workers/, an_instance_of(Hash))
+ .and_return(result || [
+ # instance 1
+ {
+ 'metric' => { 'instance' => 'instance1:8080', 'job' => 'gitlab-rails', 'server' => 'puma' },
+ 'value' => [1000, '2']
+ },
+ # instance 2
+ {
+ 'metric' => { 'instance' => 'instance2:8080', 'job' => 'gitlab-rails', 'server' => 'unicorn' },
+ 'value' => [1000, '1']
+ }
+ ])
+ end
end
diff --git a/spec/migrations/unconfirm_wrongfully_verified_emails_spec.rb b/spec/migrations/unconfirm_wrongfully_verified_emails_spec.rb
new file mode 100644
index 00000000000..e93f2cb64de
--- /dev/null
+++ b/spec/migrations/unconfirm_wrongfully_verified_emails_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20200615111857_unconfirm_wrongfully_verified_emails.rb')
+
+RSpec.describe UnconfirmWrongfullyVerifiedEmails do
+ before do
+ user = table(:users).create!(name: 'user1', email: 'test1@test.com', projects_limit: 1)
+ table(:emails).create!(email: 'test2@test.com', user_id: user.id)
+ end
+
+  it 'enqueues WrongfullyConfirmedEmailUnconfirmer job' do
+ Sidekiq::Testing.fake! do
+ migrate!
+
+ jobs = BackgroundMigrationWorker.jobs
+ expect(jobs.size).to eq(1)
+ expect(jobs.first["args"].first).to eq(Gitlab::BackgroundMigration::WrongfullyConfirmedEmailUnconfirmer.name.demodulize)
+ end
+ end
+end
diff --git a/spec/models/clusters/cluster_spec.rb b/spec/models/clusters/cluster_spec.rb
index 0f476e68698..4807957152c 100644
--- a/spec/models/clusters/cluster_spec.rb
+++ b/spec/models/clusters/cluster_spec.rb
@@ -10,6 +10,7 @@ RSpec.describe Clusters::Cluster, :use_clean_rails_memory_store_caching do
subject { build(:cluster) }
+ it { is_expected.to include_module(HasEnvironmentScope) }
it { is_expected.to belong_to(:user) }
it { is_expected.to belong_to(:management_project).class_name('::Project') }
it { is_expected.to have_many(:cluster_projects) }
@@ -289,6 +290,79 @@ RSpec.describe Clusters::Cluster, :use_clean_rails_memory_store_caching do
describe 'validations' do
subject { cluster.valid? }
+ context 'when validates unique_environment_scope' do
+ context 'for a project cluster' do
+ let(:project) { create(:project) }
+
+ before do
+ create(:cluster, projects: [project], environment_scope: 'product/*')
+ end
+
+ context 'when identical environment scope exists in project' do
+ let(:cluster) { build(:cluster, projects: [project], environment_scope: 'product/*') }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when identical environment scope does not exist in project' do
+ let(:cluster) { build(:cluster, projects: [project], environment_scope: '*') }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when identical environment scope exists in different project' do
+ let(:project2) { create(:project) }
+ let(:cluster) { build(:cluster, projects: [project2], environment_scope: 'product/*') }
+
+ it { is_expected.to be_truthy }
+ end
+ end
+
+ context 'for a group cluster' do
+ let(:group) { create(:group) }
+
+ before do
+ create(:cluster, cluster_type: :group_type, groups: [group], environment_scope: 'product/*')
+ end
+
+ context 'when identical environment scope exists in group' do
+ let(:cluster) { build(:cluster, cluster_type: :group_type, groups: [group], environment_scope: 'product/*') }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when identical environment scope does not exist in group' do
+ let(:cluster) { build(:cluster, cluster_type: :group_type, groups: [group], environment_scope: '*') }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when identical environment scope exists in different group' do
+ let(:cluster) { build(:cluster, :group, environment_scope: 'product/*') }
+
+ it { is_expected.to be_truthy }
+ end
+ end
+
+ context 'for an instance cluster' do
+ before do
+ create(:cluster, :instance, environment_scope: 'product/*')
+ end
+
+ context 'identical environment scope exists' do
+ let(:cluster) { build(:cluster, :instance, environment_scope: 'product/*') }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'identical environment scope does not exist' do
+ let(:cluster) { build(:cluster, :instance, environment_scope: '*') }
+
+ it { is_expected.to be_truthy }
+ end
+ end
+ end
+
context 'when validates name' do
context 'when provided by user' do
let!(:cluster) { build(:cluster, :provided_by_user, name: name) }
diff --git a/spec/models/concerns/deployment_platform_spec.rb b/spec/models/concerns/deployment_platform_spec.rb
index b5b7efa0c47..2bb6aa27e21 100644
--- a/spec/models/concerns/deployment_platform_spec.rb
+++ b/spec/models/concerns/deployment_platform_spec.rb
@@ -8,6 +8,241 @@ RSpec.describe DeploymentPlatform do
describe '#deployment_platform' do
subject { project.deployment_platform }
+ context 'multiple clusters' do
+ let(:group) { create(:group) }
+ let(:project) { create(:project, group: group) }
+
+ shared_examples 'matching environment scope' do
+ it 'returns environment specific cluster' do
+ is_expected.to eq(cluster.platform_kubernetes)
+ end
+ end
+
+ shared_examples 'not matching environment scope' do
+ it 'returns default cluster' do
+ is_expected.to eq(default_cluster.platform_kubernetes)
+ end
+ end
+
+ context 'multiple clusters use the same management project' do
+ let(:management_project) { create(:project, group: group) }
+
+ let!(:default_cluster) do
+ create(:cluster_for_group, groups: [group], environment_scope: '*', management_project: management_project)
+ end
+
+ let!(:cluster) do
+ create(:cluster_for_group, groups: [group], environment_scope: 'review/*', management_project: management_project)
+ end
+
+ let(:environment) { 'review/name' }
+
+ subject { management_project.deployment_platform(environment: environment) }
+
+ it_behaves_like 'matching environment scope'
+ end
+
+ context 'when project does not have a cluster but has group clusters' do
+ let!(:default_cluster) do
+ create(:cluster, :provided_by_user,
+ cluster_type: :group_type, groups: [group], environment_scope: '*')
+ end
+
+ let!(:cluster) do
+ create(:cluster, :provided_by_user,
+ cluster_type: :group_type, environment_scope: 'review/*', groups: [group])
+ end
+
+ let(:environment) { 'review/name' }
+
+ subject { project.deployment_platform(environment: environment) }
+
+ context 'when environment scope is exactly matched' do
+ before do
+ cluster.update!(environment_scope: 'review/name')
+ end
+
+ it_behaves_like 'matching environment scope'
+ end
+
+ context 'when environment scope is matched by wildcard' do
+ before do
+ cluster.update!(environment_scope: 'review/*')
+ end
+
+ it_behaves_like 'matching environment scope'
+ end
+
+ context 'when environment scope does not match' do
+ before do
+ cluster.update!(environment_scope: 'review/*/special')
+ end
+
+ it_behaves_like 'not matching environment scope'
+ end
+
+ context 'when group belongs to a parent group' do
+ let(:parent_group) { create(:group) }
+ let(:group) { create(:group, parent: parent_group) }
+
+ context 'when parent_group has a cluster with default scope' do
+ let!(:parent_group_cluster) do
+ create(:cluster, :provided_by_user,
+ cluster_type: :group_type, environment_scope: '*', groups: [parent_group])
+ end
+
+ it_behaves_like 'matching environment scope'
+ end
+
+ context 'when parent_group has a cluster that is an exact match' do
+ let!(:parent_group_cluster) do
+ create(:cluster, :provided_by_user,
+ cluster_type: :group_type, environment_scope: 'review/name', groups: [parent_group])
+ end
+
+ it_behaves_like 'matching environment scope'
+ end
+ end
+ end
+
+ context 'with instance clusters' do
+ let!(:default_cluster) do
+ create(:cluster, :provided_by_user, :instance, environment_scope: '*')
+ end
+
+ let!(:cluster) do
+ create(:cluster, :provided_by_user, :instance, environment_scope: 'review/*')
+ end
+
+ let(:environment) { 'review/name' }
+
+ subject { project.deployment_platform(environment: environment) }
+
+ context 'when environment scope is exactly matched' do
+ before do
+ cluster.update!(environment_scope: 'review/name')
+ end
+
+ it_behaves_like 'matching environment scope'
+ end
+
+ context 'when environment scope is matched by wildcard' do
+ before do
+ cluster.update!(environment_scope: 'review/*')
+ end
+
+ it_behaves_like 'matching environment scope'
+ end
+
+ context 'when environment scope does not match' do
+ before do
+ cluster.update!(environment_scope: 'review/*/special')
+ end
+
+ it_behaves_like 'not matching environment scope'
+ end
+ end
+
+ context 'when environment is specified' do
+ let!(:default_cluster) { create(:cluster, :provided_by_user, projects: [project], environment_scope: '*') }
+ let!(:cluster) { create(:cluster, :provided_by_user, environment_scope: 'review/*', projects: [project]) }
+
+ let!(:group_default_cluster) do
+ create(:cluster, :provided_by_user,
+ cluster_type: :group_type, groups: [group], environment_scope: '*')
+ end
+
+ let(:environment) { 'review/name' }
+
+ subject { project.deployment_platform(environment: environment) }
+
+ context 'when environment scope is exactly matched' do
+ before do
+ cluster.update!(environment_scope: 'review/name')
+ end
+
+ it_behaves_like 'matching environment scope'
+ end
+
+ context 'when environment scope is matched by wildcard' do
+ before do
+ cluster.update!(environment_scope: 'review/*')
+ end
+
+ it_behaves_like 'matching environment scope'
+ end
+
+ context 'when environment scope does not match' do
+ before do
+ cluster.update!(environment_scope: 'review/*/special')
+ end
+
+ it_behaves_like 'not matching environment scope'
+ end
+
+ context 'when environment scope has _' do
+ it 'does not treat it as wildcard' do
+ cluster.update!(environment_scope: 'foo_bar/*')
+
+ is_expected.to eq(default_cluster.platform_kubernetes)
+ end
+
+ context 'when environment name contains an underscore' do
+ let(:environment) { 'foo_bar/test' }
+
+ it 'matches literally for _' do
+ cluster.update!(environment_scope: 'foo_bar/*')
+
+ is_expected.to eq(cluster.platform_kubernetes)
+ end
+ end
+ end
+
+ # The environment name and scope cannot contain % at the moment,
+ # but we're considering relaxing that restriction, and we should
+ # also make sure nothing breaks if such data sneaked in somehow,
+ # as we don't enforce this integrity at the database level.
+ context 'when environment scope has %' do
+ it 'does not treat it as wildcard' do
+ cluster.update_attribute(:environment_scope, '*%*')
+
+ is_expected.to eq(default_cluster.platform_kubernetes)
+ end
+
+ context 'when environment name contains a percent char' do
+ let(:environment) { 'foo%bar/test' }
+
+ it 'matches literally for %' do
+ cluster.update_attribute(:environment_scope, 'foo%bar/*')
+
+ is_expected.to eq(cluster.platform_kubernetes)
+ end
+ end
+ end
+
+ context 'when perfectly matched cluster exists' do
+ let!(:perfectly_matched_cluster) { create(:cluster, :provided_by_user, projects: [project], environment_scope: 'review/name') }
+
+ it 'returns perfectly matched cluster as highest precedence' do
+ is_expected.to eq(perfectly_matched_cluster.platform_kubernetes)
+ end
+ end
+ end
+
+ context 'with multiple clusters and multiple environments' do
+ let!(:cluster_1) { create(:cluster, :provided_by_user, projects: [project], environment_scope: 'staging/*') }
+ let!(:cluster_2) { create(:cluster, :provided_by_user, projects: [project], environment_scope: 'test/*') }
+
+ let(:environment_1) { 'staging/name' }
+ let(:environment_2) { 'test/name' }
+
+ it 'returns the appropriate cluster' do
+ expect(project.deployment_platform(environment: environment_1)).to eq(cluster_1.platform_kubernetes)
+ expect(project.deployment_platform(environment: environment_2)).to eq(cluster_2.platform_kubernetes)
+ end
+ end
+ end
+
context 'with no Kubernetes configuration on CI/CD, no Kubernetes Service' do
it { is_expected.to be_nil }
end
diff --git a/spec/models/packages/composer/metadatum_spec.rb b/spec/models/packages/composer/metadatum_spec.rb
new file mode 100644
index 00000000000..ae53532696b
--- /dev/null
+++ b/spec/models/packages/composer/metadatum_spec.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Packages::Composer::Metadatum, type: :model do
+ describe 'relationships' do
+ it { is_expected.to belong_to(:package) }
+ end
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:package) }
+ it { is_expected.to validate_presence_of(:target_sha) }
+ it { is_expected.to validate_presence_of(:composer_json) }
+ end
+end
diff --git a/spec/models/packages/conan/file_metadatum_spec.rb b/spec/models/packages/conan/file_metadatum_spec.rb
new file mode 100644
index 00000000000..a66a2813196
--- /dev/null
+++ b/spec/models/packages/conan/file_metadatum_spec.rb
@@ -0,0 +1,106 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Conan::FileMetadatum, type: :model do
+ describe 'relationships' do
+ it { is_expected.to belong_to(:package_file) }
+ end
+
+ describe 'validations' do
+ let(:package_file) { create(:conan_package_file, :conan_recipe_file) }
+
+ it { is_expected.to validate_presence_of(:package_file) }
+ it { is_expected.to validate_presence_of(:recipe_revision) }
+
+ describe '#recipe_revision' do
+ it { is_expected.to allow_value("0").for(:recipe_revision) }
+ it { is_expected.not_to allow_value(nil).for(:recipe_revision) }
+ end
+
+ describe '#package_revision_for_package_file' do
+ context 'recipe file' do
+ let(:conan_file_metadatum) { build(:conan_file_metadatum, :recipe_file, package_file: package_file) }
+
+ it 'is valid with empty value' do
+ conan_file_metadatum.package_revision = nil
+
+ expect(conan_file_metadatum).to be_valid
+ end
+
+ it 'is invalid with value' do
+ conan_file_metadatum.package_revision = '0'
+
+ expect(conan_file_metadatum).to be_invalid
+ end
+ end
+
+ context 'package file' do
+ let(:conan_file_metadatum) { build(:conan_file_metadatum, :package_file, package_file: package_file) }
+
+ it 'is valid with default value' do
+ conan_file_metadatum.package_revision = '0'
+
+ expect(conan_file_metadatum).to be_valid
+ end
+
+ it 'is invalid with non-default value' do
+ conan_file_metadatum.package_revision = 'foo'
+
+ expect(conan_file_metadatum).to be_invalid
+ end
+ end
+ end
+
+ describe '#conan_package_reference_for_package_file' do
+ context 'recipe file' do
+ let(:conan_file_metadatum) { build(:conan_file_metadatum, :recipe_file, package_file: package_file) }
+
+ it 'is valid with empty value' do
+ conan_file_metadatum.conan_package_reference = nil
+
+ expect(conan_file_metadatum).to be_valid
+ end
+
+ it 'is invalid with value' do
+ conan_file_metadatum.conan_package_reference = '123456789'
+
+ expect(conan_file_metadatum).to be_invalid
+ end
+ end
+
+ context 'package file' do
+ let(:conan_file_metadatum) { build(:conan_file_metadatum, :package_file, package_file: package_file) }
+
+ it 'is valid with acceptable value' do
+ conan_file_metadatum.conan_package_reference = '123456asdf'
+
+ expect(conan_file_metadatum).to be_valid
+ end
+
+ it 'is invalid with invalid value' do
+ conan_file_metadatum.conan_package_reference = 'foo@bar'
+
+ expect(conan_file_metadatum).to be_invalid
+ end
+
+ it 'is invalid when nil' do
+ conan_file_metadatum.conan_package_reference = nil
+
+ expect(conan_file_metadatum).to be_invalid
+ end
+ end
+ end
+
+ describe '#conan_package_type' do
+ it 'validates package of type conan' do
+ package = build('package')
+ package_file = build('package_file', package: package)
+ conan_file_metadatum = build('conan_file_metadatum', package_file: package_file)
+
+ expect(conan_file_metadatum).not_to be_valid
+ expect(conan_file_metadatum.errors.to_a).to contain_exactly('Package type must be Conan')
+ end
+ end
+ end
+end
diff --git a/spec/models/packages/conan/metadatum_spec.rb b/spec/models/packages/conan/metadatum_spec.rb
new file mode 100644
index 00000000000..112f395818b
--- /dev/null
+++ b/spec/models/packages/conan/metadatum_spec.rb
@@ -0,0 +1,90 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Conan::Metadatum, type: :model do
+ describe 'relationships' do
+ it { is_expected.to belong_to(:package) }
+ end
+
+ describe 'validations' do
+ let(:fifty_one_characters) { 'f_a' * 17 }
+
+ it { is_expected.to validate_presence_of(:package) }
+ it { is_expected.to validate_presence_of(:package_username) }
+ it { is_expected.to validate_presence_of(:package_channel) }
+
+ describe '#package_username' do
+ it { is_expected.to allow_value("my-package+username").for(:package_username) }
+ it { is_expected.to allow_value("my_package.username").for(:package_username) }
+ it { is_expected.to allow_value("_my-package.username123").for(:package_username) }
+ it { is_expected.to allow_value("my").for(:package_username) }
+ it { is_expected.not_to allow_value('+my_package').for(:package_username) }
+ it { is_expected.not_to allow_value('.my_package').for(:package_username) }
+ it { is_expected.not_to allow_value('-my_package').for(:package_username) }
+ it { is_expected.not_to allow_value('m').for(:package_username) }
+ it { is_expected.not_to allow_value(fifty_one_characters).for(:package_username) }
+ it { is_expected.not_to allow_value("my/package").for(:package_username) }
+ it { is_expected.not_to allow_value("my(package)").for(:package_username) }
+ it { is_expected.not_to allow_value("my@package").for(:package_username) }
+ end
+
+ describe '#package_channel' do
+ it { is_expected.to allow_value("beta").for(:package_channel) }
+ it { is_expected.to allow_value("stable+1.0").for(:package_channel) }
+ it { is_expected.to allow_value("my").for(:package_channel) }
+ it { is_expected.to allow_value("my_channel.beta").for(:package_channel) }
+ it { is_expected.to allow_value("_my-channel.beta123").for(:package_channel) }
+ it { is_expected.not_to allow_value('+my_channel').for(:package_channel) }
+ it { is_expected.not_to allow_value('.my_channel').for(:package_channel) }
+ it { is_expected.not_to allow_value('-my_channel').for(:package_channel) }
+ it { is_expected.not_to allow_value('m').for(:package_channel) }
+ it { is_expected.not_to allow_value(fifty_one_characters).for(:package_channel) }
+ it { is_expected.not_to allow_value("my/channel").for(:package_channel) }
+ it { is_expected.not_to allow_value("my(channel)").for(:package_channel) }
+ it { is_expected.not_to allow_value("my@channel").for(:package_channel) }
+ end
+
+ describe '#conan_package_type' do
+ it 'will not allow a package with a different package_type' do
+ package = build('package')
+ conan_metadatum = build('conan_metadatum', package: package)
+
+ expect(conan_metadatum).not_to be_valid
+ expect(conan_metadatum.errors.to_a).to include('Package type must be Conan')
+ end
+ end
+ end
+
+ describe '#recipe' do
+ let(:package) { create(:conan_package) }
+
+ it 'returns the recipe' do
+ expect(package.conan_recipe).to eq("#{package.name}/#{package.version}@#{package.conan_metadatum.package_username}/#{package.conan_metadatum.package_channel}")
+ end
+ end
+
+ describe '#recipe_url' do
+ let(:package) { create(:conan_package) }
+
+ it 'returns the recipe url' do
+ expect(package.conan_recipe_path).to eq("#{package.name}/#{package.version}/#{package.conan_metadatum.package_username}/#{package.conan_metadatum.package_channel}")
+ end
+ end
+
+ describe '.package_username_from' do
+ let(:full_path) { 'foo/bar/baz-buz' }
+
+ it 'returns the username formatted package path' do
+ expect(described_class.package_username_from(full_path: full_path)).to eq('foo+bar+baz-buz')
+ end
+ end
+
+ describe '.full_path_from' do
+ let(:username) { 'foo+bar+baz-buz' }
+
+ it 'returns the username formatted package path' do
+ expect(described_class.full_path_from(package_username: username)).to eq('foo/bar/baz-buz')
+ end
+ end
+end
diff --git a/spec/models/packages/dependency_link_spec.rb b/spec/models/packages/dependency_link_spec.rb
new file mode 100644
index 00000000000..d8fde8f5eb3
--- /dev/null
+++ b/spec/models/packages/dependency_link_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Packages::DependencyLink, type: :model do
+ describe 'relationships' do
+ it { is_expected.to belong_to(:package).inverse_of(:dependency_links) }
+ it { is_expected.to belong_to(:dependency).inverse_of(:dependency_links) }
+ it { is_expected.to have_one(:nuget_metadatum).inverse_of(:dependency_link) }
+ end
+
+ describe 'validations' do
+ subject { create(:packages_dependency_link) }
+
+ it { is_expected.to validate_presence_of(:package) }
+ it { is_expected.to validate_presence_of(:dependency) }
+
+ context 'package_id and package_dependency_id uniqueness for dependency_type' do
+ it 'is not valid' do
+ existing_link = subject
+ link = build(
+ :packages_dependency_link,
+ package: existing_link.package,
+ dependency: existing_link.dependency,
+ dependency_type: existing_link.dependency_type
+ )
+
+ expect(link).not_to be_valid
+ expect(link.errors.to_a).to include("Dependency type has already been taken")
+ end
+ end
+ end
+
+ context 'with multiple links' do
+ let_it_be(:link1) { create(:packages_dependency_link) }
+ let_it_be(:link2) { create(:packages_dependency_link, dependency: link1.dependency, dependency_type: :devDependencies) }
+ let_it_be(:link3) { create(:packages_dependency_link, dependency: link1.dependency, dependency_type: :bundleDependencies) }
+
+ subject { described_class }
+
+ describe '.with_dependency_type' do
+ it 'returns links of the given type' do
+ expect(subject.with_dependency_type(:bundleDependencies)).to eq([link3])
+ end
+ end
+
+ describe '.for_package' do
+ let_it_be(:link1) { create(:packages_dependency_link) }
+ let_it_be(:link2) { create(:packages_dependency_link, dependency: link1.dependency, dependency_type: :devDependencies) }
+ let_it_be(:link3) { create(:packages_dependency_link, dependency: link1.dependency, dependency_type: :bundleDependencies) }
+
+ it 'returns the link for the given package' do
+ expect(subject.for_package(link1.package)).to eq([link1])
+ end
+ end
+ end
+end
diff --git a/spec/models/packages/dependency_spec.rb b/spec/models/packages/dependency_spec.rb
new file mode 100644
index 00000000000..fa6b0fd1848
--- /dev/null
+++ b/spec/models/packages/dependency_spec.rb
@@ -0,0 +1,113 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Packages::Dependency, type: :model do
+ describe 'relationships' do
+ it { is_expected.to have_many(:dependency_links) }
+ end
+
+ describe 'validations' do
+ subject { create(:packages_dependency) }
+
+ it { is_expected.to validate_presence_of(:name) }
+ it { is_expected.to validate_presence_of(:version_pattern) }
+ it { is_expected.to validate_uniqueness_of(:name).scoped_to(:version_pattern) }
+ end
+
+ describe '.ids_for_package_names_and_version_patterns' do
+ let_it_be(:package_dependency1) { create(:packages_dependency, name: 'foo', version_pattern: '~1.0.0') }
+ let_it_be(:package_dependency2) { create(:packages_dependency, name: 'bar', version_pattern: '~2.5.0') }
+ let_it_be(:expected_ids) { [package_dependency1.id, package_dependency2.id] }
+ let(:names_and_version_patterns) { build_names_and_version_patterns(package_dependency1, package_dependency2) }
+ let(:chunk_size) { 50 }
+ let(:rows_limit) { 50 }
+
+ subject { Packages::Dependency.ids_for_package_names_and_version_patterns(names_and_version_patterns, chunk_size, rows_limit) }
+
+ it { is_expected.to match_array(expected_ids) }
+
+ context 'with unknown names' do
+ let(:names_and_version_patterns) { { unknown: '~1.0.0' } }
+
+ it { is_expected.to be_empty }
+ end
+
+ context 'with unknown version patterns' do
+ let(:names_and_version_patterns) { { 'foo' => '~1.0.0beta' } }
+
+ it { is_expected.to be_empty }
+ end
+
+ context 'with a name bigger than column size' do
+ let_it_be(:big_name) { 'a' * (Packages::Dependency::MAX_STRING_LENGTH + 1) }
+ let(:names_and_version_patterns) { build_names_and_version_patterns(package_dependency1, package_dependency2).merge(big_name => '~1.0.0') }
+
+ it { is_expected.to match_array(expected_ids) }
+ end
+
+ context 'with a version pattern bigger than column size' do
+ let_it_be(:big_version_pattern) { 'a' * (Packages::Dependency::MAX_STRING_LENGTH + 1) }
+ let(:names_and_version_patterns) { build_names_and_version_patterns(package_dependency1, package_dependency2).merge('test' => big_version_pattern) }
+
+ it { is_expected.to match_array(expected_ids) }
+ end
+
+ context 'with too big parameter' do
+ let(:size) { (Packages::Dependency::MAX_CHUNKED_QUERIES_COUNT * chunk_size) + 1 }
+ let(:names_and_version_patterns) { Hash[(1..size).map { |v| [v, v] }] }
+
+ it { expect { subject }.to raise_error(ArgumentError, 'Too many names_and_version_patterns') }
+ end
+
+ context 'with parameters size' do
+ let_it_be(:package_dependency3) { create(:packages_dependency, name: 'foo3', version_pattern: '~1.5.3') }
+ let_it_be(:package_dependency4) { create(:packages_dependency, name: 'foo4', version_pattern: '~1.5.4') }
+ let_it_be(:package_dependency5) { create(:packages_dependency, name: 'foo5', version_pattern: '~1.5.5') }
+ let_it_be(:package_dependency6) { create(:packages_dependency, name: 'foo6', version_pattern: '~1.5.6') }
+ let_it_be(:package_dependency7) { create(:packages_dependency, name: 'foo7', version_pattern: '~1.5.7') }
+ let(:expected_ids) { [package_dependency1.id, package_dependency2.id, package_dependency3.id, package_dependency4.id, package_dependency5.id, package_dependency6.id, package_dependency7.id] }
+ let(:names_and_version_patterns) { build_names_and_version_patterns(package_dependency1, package_dependency2, package_dependency3, package_dependency4, package_dependency5, package_dependency6, package_dependency7) }
+
+ context 'above the chunk size' do
+ let(:chunk_size) { 2 }
+
+ it { is_expected.to match_array(expected_ids) }
+ end
+
+ context 'selecting too many rows' do
+ let(:rows_limit) { 2 }
+
+ it { expect { subject }.to raise_error(ArgumentError, 'Too many Dependencies selected') }
+ end
+ end
+ end
+
+ describe '.for_package_names_and_version_patterns' do
+ let_it_be(:package_dependency1) { create(:packages_dependency, name: 'foo', version_pattern: '~1.0.0') }
+ let_it_be(:package_dependency2) { create(:packages_dependency, name: 'bar', version_pattern: '~2.5.0') }
+ let_it_be(:expected_array) { [package_dependency1, package_dependency2] }
+ let(:names_and_version_patterns) { build_names_and_version_patterns(package_dependency1, package_dependency2) }
+
+ subject { Packages::Dependency.for_package_names_and_version_patterns(names_and_version_patterns) }
+
+ it { is_expected.to match_array(expected_array) }
+
+ context 'with unknown names' do
+ let(:names_and_version_patterns) { { unknown: '~1.0.0' } }
+
+ it { is_expected.to be_empty }
+ end
+
+ context 'with unknown version patterns' do
+ let(:names_and_version_patterns) { { 'foo' => '~1.0.0beta' } }
+
+ it { is_expected.to be_empty }
+ end
+ end
+
+ def build_names_and_version_patterns(*package_dependencies)
+ result = Hash.new { |h, dependency| h[dependency.name] = dependency.version_pattern }
+ package_dependencies.each { |dependency| result[dependency] }
+ result
+ end
+end
diff --git a/spec/models/packages/go/module_spec.rb b/spec/models/packages/go/module_spec.rb
new file mode 100644
index 00000000000..03af4cf4b70
--- /dev/null
+++ b/spec/models/packages/go/module_spec.rb
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Go::Module, type: :model do
+ before do
+ stub_feature_flags(go_proxy_disable_gomod_validation: false)
+ end
+
+ describe '#path_valid?' do
+ context 'with root path' do
+ let_it_be(:package) { create(:go_module) }
+
+ context 'with major version 0' do
+ it('returns true') { expect(package.path_valid?(0)).to eq(true) }
+ end
+
+ context 'with major version 1' do
+ it('returns true') { expect(package.path_valid?(1)).to eq(true) }
+ end
+
+ context 'with major version 2' do
+ it('returns false') { expect(package.path_valid?(2)).to eq(false) }
+ end
+ end
+
+ context 'with path ./v2' do
+ let_it_be(:package) { create(:go_module, path: '/v2') }
+
+ context 'with major version 0' do
+ it('returns false') { expect(package.path_valid?(0)).to eq(false) }
+ end
+
+ context 'with major version 1' do
+ it('returns false') { expect(package.path_valid?(1)).to eq(false) }
+ end
+
+ context 'with major version 2' do
+ it('returns true') { expect(package.path_valid?(2)).to eq(true) }
+ end
+ end
+ end
+
+ describe '#gomod_valid?' do
+ let_it_be(:package) { create(:go_module) }
+
+ context 'with good gomod' do
+ it('returns true') { expect(package.gomod_valid?("module #{package.name}")).to eq(true) }
+ end
+
+ context 'with bad gomod' do
+ it('returns false') { expect(package.gomod_valid?("module #{package.name}/v2")).to eq(false) }
+ end
+
+ context 'with empty gomod' do
+ it('returns false') { expect(package.gomod_valid?("")).to eq(false) }
+ end
+ end
+end
diff --git a/spec/models/packages/go/module_version_spec.rb b/spec/models/packages/go/module_version_spec.rb
new file mode 100644
index 00000000000..c4c6a07d9e9
--- /dev/null
+++ b/spec/models/packages/go/module_version_spec.rb
@@ -0,0 +1,114 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Go::ModuleVersion, type: :model do
+ let_it_be(:user) { create :user }
+ let_it_be(:project) { create :project_empty_repo, creator: user, path: 'my-go-lib' }
+ let_it_be(:mod) { create :go_module, project: project }
+
+ before :all do
+ create :go_module_commit, :files, project: project, tag: 'v1.0.0', files: { 'README.md' => 'Hi' }
+ create :go_module_commit, :module, project: project, tag: 'v1.0.1'
+ create :go_module_commit, :package, project: project, tag: 'v1.0.2', path: 'pkg'
+ create :go_module_commit, :module, project: project, tag: 'v1.0.3', name: 'mod'
+ create :go_module_commit, :files, project: project, files: { 'y.go' => "package a\n" }
+ create :go_module_commit, :module, project: project, name: 'v2'
+ create :go_module_commit, :files, project: project, tag: 'v2.0.0', files: { 'v2/x.go' => "package a\n" }
+ end
+
+ shared_examples '#files' do |desc, *entries|
+ it "returns #{desc}" do
+ actual = version.files.map { |x| x }.to_set
+ expect(actual).to eq(entries.to_set)
+ end
+ end
+
+ shared_examples '#archive' do |desc, *entries|
+ it "returns an archive of #{desc}" do
+ expected = entries.map { |e| "#{version.full_name}/#{e}" }.to_set
+
+ actual = Set[]
+ Zip::InputStream.open(StringIO.new(version.archive.string)) do |zip|
+ while (entry = zip.get_next_entry)
+ actual.add(entry.name)
+ end
+ end
+
+ expect(actual).to eq(expected)
+ end
+ end
+
+ describe '#name' do
+ context 'with ref and name specified' do
+ let_it_be(:version) { create :go_module_version, mod: mod, name: 'foobar', commit: project.repository.head_commit, ref: project.repository.find_tag('v1.0.0') }
+ it('returns that name') { expect(version.name).to eq('foobar') }
+ end
+
+ context 'with ref specified and name unspecified' do
+ let_it_be(:version) { create :go_module_version, mod: mod, commit: project.repository.head_commit, ref: project.repository.find_tag('v1.0.0') }
+ it('returns the name of the ref') { expect(version.name).to eq('v1.0.0') }
+ end
+
+ context 'with ref and name unspecified' do
+ let_it_be(:version) { create :go_module_version, mod: mod, commit: project.repository.head_commit }
+ it('returns nil') { expect(version.name).to eq(nil) }
+ end
+ end
+
+ describe '#gomod' do
+ context 'with go.mod missing' do
+ let_it_be(:version) { create :go_module_version, :tagged, mod: mod, name: 'v1.0.0' }
+ it('returns nil') { expect(version.gomod).to eq(nil) }
+ end
+
+ context 'with go.mod present' do
+ let_it_be(:version) { create :go_module_version, :tagged, mod: mod, name: 'v1.0.1' }
+ it('returns the contents of go.mod') { expect(version.gomod).to eq("module #{mod.name}\n") }
+ end
+ end
+
+ describe '#files' do
+ context 'with a root module' do
+ context 'with an empty module path' do
+ let_it_be(:version) { create :go_module_version, :tagged, mod: mod, name: 'v1.0.2' }
+ it_behaves_like '#files', 'all the files', 'README.md', 'go.mod', 'a.go', 'pkg/b.go'
+ end
+ end
+
+ context 'with a root module and a submodule' do
+ context 'with an empty module path' do
+ let_it_be(:version) { create :go_module_version, :tagged, mod: mod, name: 'v1.0.3' }
+ it_behaves_like '#files', 'files excluding the submodule', 'README.md', 'go.mod', 'a.go', 'pkg/b.go'
+ end
+
+ context 'with the submodule\'s path' do
+ let_it_be(:mod) { create :go_module, project: project, path: 'mod' }
+ let_it_be(:version) { create :go_module_version, :tagged, mod: mod, name: 'v1.0.3' }
+ it_behaves_like '#files', 'the submodule\'s files', 'mod/go.mod', 'mod/a.go'
+ end
+ end
+ end
+
+ describe '#archive' do
+ context 'with a root module' do
+ context 'with an empty module path' do
+ let_it_be(:version) { create :go_module_version, :tagged, mod: mod, name: 'v1.0.2' }
+ it_behaves_like '#archive', 'all the files', 'README.md', 'go.mod', 'a.go', 'pkg/b.go'
+ end
+ end
+
+ context 'with a root module and a submodule' do
+ context 'with an empty module path' do
+ let_it_be(:version) { create :go_module_version, :tagged, mod: mod, name: 'v1.0.3' }
+ it_behaves_like '#archive', 'files excluding the submodule', 'README.md', 'go.mod', 'a.go', 'pkg/b.go'
+ end
+
+ context 'with the submodule\'s path' do
+ let_it_be(:mod) { create :go_module, project: project, path: 'mod' }
+ let_it_be(:version) { create :go_module_version, :tagged, mod: mod, name: 'v1.0.3' }
+ it_behaves_like '#archive', 'the submodule\'s files', 'go.mod', 'a.go'
+ end
+ end
+ end
+end
diff --git a/spec/models/packages/maven/metadatum_spec.rb b/spec/models/packages/maven/metadatum_spec.rb
new file mode 100644
index 00000000000..16f6929d710
--- /dev/null
+++ b/spec/models/packages/maven/metadatum_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Packages::Maven::Metadatum, type: :model do
+ describe 'relationships' do
+ it { is_expected.to belong_to(:package) }
+ end
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:package) }
+
+ describe '#app_name' do
+ it { is_expected.to allow_value("my-app").for(:app_name) }
+ it { is_expected.not_to allow_value("my/app").for(:app_name) }
+ it { is_expected.not_to allow_value("my(app)").for(:app_name) }
+ end
+
+ describe '#app_group' do
+ it { is_expected.to allow_value("my.domain.com").for(:app_group) }
+ it { is_expected.not_to allow_value("my/domain/com").for(:app_group) }
+ it { is_expected.not_to allow_value("my(domain)").for(:app_group) }
+ end
+
+ describe '#path' do
+ it { is_expected.to allow_value("my/domain/com/my-app").for(:path) }
+ it { is_expected.to allow_value("my/domain/com/my-app/1.0-SNAPSHOT").for(:path) }
+ it { is_expected.not_to allow_value("my(domain)com.my-app").for(:path) }
+ end
+
+ describe '#maven_package_type' do
+ it 'will not allow a package with a different package_type' do
+ package = build('conan_package')
+ maven_metadatum = build('maven_metadatum', package: package)
+
+ expect(maven_metadatum).not_to be_valid
+ expect(maven_metadatum.errors.to_a).to include('Package type must be Maven')
+ end
+ end
+ end
+end
diff --git a/spec/models/packages/nuget/dependency_link_metadatum_spec.rb b/spec/models/packages/nuget/dependency_link_metadatum_spec.rb
new file mode 100644
index 00000000000..0c03c65028e
--- /dev/null
+++ b/spec/models/packages/nuget/dependency_link_metadatum_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Nuget::DependencyLinkMetadatum, type: :model do
+ describe 'relationships' do
+ it { is_expected.to belong_to(:dependency_link) }
+ end
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:dependency_link) }
+ it { is_expected.to validate_presence_of(:target_framework) }
+
+ describe '#ensure_nuget_package_type' do
+ it 'validates package of type nuget' do
+ package = build('conan_package')
+ dependency_link = build('packages_dependency_link', package: package)
+ nuget_metadatum = build('nuget_dependency_link_metadatum', dependency_link: dependency_link)
+
+ expect(nuget_metadatum).not_to be_valid
+ expect(nuget_metadatum.errors.to_a).to contain_exactly('Package type must be NuGet')
+ end
+
+ it 'validates package of type nuget with nil dependency_link' do
+ nuget_metadatum = build('nuget_dependency_link_metadatum', dependency_link: nil)
+
+ expect(nuget_metadatum).not_to be_valid
+ expect(nuget_metadatum.errors.to_a).to contain_exactly("Dependency link can't be blank", 'Package type must be NuGet')
+ end
+ end
+ end
+end
diff --git a/spec/models/packages/nuget/metadatum_spec.rb b/spec/models/packages/nuget/metadatum_spec.rb
new file mode 100644
index 00000000000..c1bc5429500
--- /dev/null
+++ b/spec/models/packages/nuget/metadatum_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Nuget::Metadatum, type: :model do
+ describe 'relationships' do
+ it { is_expected.to belong_to(:package).inverse_of(:nuget_metadatum) }
+ end
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:package) }
+
+ %i[license_url project_url icon_url].each do |url|
+ describe "##{url}" do
+ it { is_expected.to allow_value('http://sandbox.com').for(url) }
+ it { is_expected.to allow_value('https://sandbox.com').for(url) }
+ it { is_expected.not_to allow_value('123').for(url) }
+ it { is_expected.not_to allow_value('sandbox.com').for(url) }
+ end
+
+ describe '#ensure_at_least_one_field_supplied' do
+ subject { build(:nuget_metadatum) }
+
+ it 'rejects unfilled metadatum' do
+ subject.attributes = { license_url: nil, project_url: nil, icon_url: nil }
+
+ expect(subject).not_to be_valid
+ expect(subject.errors).to contain_exactly('Nuget metadatum must have at least license_url, project_url or icon_url set')
+ end
+ end
+
+ describe '#ensure_nuget_package_type' do
+ subject { build(:nuget_metadatum) }
+
+ it 'rejects if not linked to a nuget package' do
+ subject.package = build(:npm_package)
+
+ expect(subject).not_to be_valid
+ expect(subject.errors).to contain_exactly('Package type must be NuGet')
+ end
+ end
+ end
+ end
+end
diff --git a/spec/models/packages/package_file_spec.rb b/spec/models/packages/package_file_spec.rb
new file mode 100644
index 00000000000..7758ed4a500
--- /dev/null
+++ b/spec/models/packages/package_file_spec.rb
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Packages::PackageFile, type: :model do
+ describe 'relationships' do
+ it { is_expected.to belong_to(:package) }
+ it { is_expected.to have_one(:conan_file_metadatum) }
+ end
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:package) }
+ end
+
+ context 'with package filenames' do
+ let_it_be(:package_file1) { create(:package_file, :xml, file_name: 'FooBar') }
+ let_it_be(:package_file2) { create(:package_file, :xml, file_name: 'ThisIsATest') }
+
+ describe '.with_file_name' do
+ let(:filename) { 'FooBar' }
+
+ subject { described_class.with_file_name(filename) }
+
+ it { is_expected.to match_array([package_file1]) }
+ end
+
+ describe '.with_file_name_like' do
+ let(:filename) { 'foobar' }
+
+ subject { described_class.with_file_name_like(filename) }
+
+ it { is_expected.to match_array([package_file1]) }
+ end
+ end
+
+ it_behaves_like 'UpdateProjectStatistics' do
+ subject { build(:package_file, :jar, size: 42) }
+
+ before do
+ allow_any_instance_of(Packages::PackageFileUploader).to receive(:size).and_return(42)
+ end
+ end
+
+ describe '.with_conan_package_reference' do
+ let_it_be(:non_matching_package_file) { create(:package_file, :nuget) }
+ let_it_be(:metadatum) { create(:conan_file_metadatum, :package_file) }
+ let_it_be(:reference) { metadatum.conan_package_reference }
+
+ it 'returns matching packages' do
+ expect(described_class.with_conan_package_reference(reference))
+ .to eq([metadatum.package_file])
+ end
+ end
+
+ describe '#update_file_metadata callback' do
+ let_it_be(:package_file) { build(:package_file, :nuget, file_store: nil, size: nil) }
+
+ subject { package_file.save! }
+
+ it 'updates metadata columns' do
+ expect(package_file)
+ .to receive(:update_file_metadata)
+ .and_call_original
+
+ expect { subject }
+ .to change { package_file.file_store }.from(nil).to(::Packages::PackageFileUploader::Store::LOCAL)
+ .and change { package_file.size }.from(nil).to(3513)
+ end
+ end
+end
diff --git a/spec/models/packages/package_spec.rb b/spec/models/packages/package_spec.rb
new file mode 100644
index 00000000000..4170bf595f0
--- /dev/null
+++ b/spec/models/packages/package_spec.rb
@@ -0,0 +1,485 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Packages::Package, type: :model do
+ include SortingHelper
+
+ describe 'relationships' do
+ it { is_expected.to belong_to(:project) }
+ it { is_expected.to have_many(:package_files).dependent(:destroy) }
+ it { is_expected.to have_many(:dependency_links).inverse_of(:package) }
+ it { is_expected.to have_many(:tags).inverse_of(:package) }
+ it { is_expected.to have_one(:conan_metadatum).inverse_of(:package) }
+ it { is_expected.to have_one(:maven_metadatum).inverse_of(:package) }
+ it { is_expected.to have_one(:nuget_metadatum).inverse_of(:package) }
+ end
+
+ describe '.with_composer_target' do
+ let!(:package1) { create(:composer_package, :with_metadatum, sha: '123') }
+ let!(:package2) { create(:composer_package, :with_metadatum, sha: '123') }
+ let!(:package3) { create(:composer_package, :with_metadatum, sha: '234') }
+
+ subject { described_class.with_composer_target('123').to_a }
+
+ it 'selects packages with the specified sha' do
+ expect(subject).to include(package1)
+ expect(subject).to include(package2)
+ expect(subject).not_to include(package3)
+ end
+ end
+
+ describe '.sort_by_attribute' do
+ let_it_be(:group) { create(:group, :public) }
+ let_it_be(:project) { create(:project, :public, namespace: group, name: 'project A') }
+ let!(:package1) { create(:npm_package, project: project, version: '3.1.0', name: "@#{project.root_namespace.path}/foo1") }
+ let!(:package2) { create(:nuget_package, project: project, version: '2.0.4') }
+ let(:package3) { create(:maven_package, project: project, version: '1.1.1', name: 'zzz') }
+
+ before do
+ travel_to(1.day.ago) do
+ package3
+ end
+ end
+
+ RSpec.shared_examples 'package sorting by attribute' do |order_by|
+ subject { described_class.where(id: packages.map(&:id)).sort_by_attribute("#{order_by}_#{sort}").to_a }
+
+ context "sorting by #{order_by}" do
+ context 'ascending order' do
+ let(:sort) { 'asc' }
+
+ it { is_expected.to eq packages }
+ end
+
+ context 'descending order' do
+ let(:sort) { 'desc' }
+
+ it { is_expected.to eq packages.reverse }
+ end
+ end
+ end
+
+ it_behaves_like 'package sorting by attribute', 'name' do
+ let(:packages) { [package1, package2, package3] }
+ end
+
+ it_behaves_like 'package sorting by attribute', 'created_at' do
+ let(:packages) { [package3, package1, package2] }
+ end
+
+ it_behaves_like 'package sorting by attribute', 'version' do
+ let(:packages) { [package3, package2, package1] }
+ end
+
+ it_behaves_like 'package sorting by attribute', 'type' do
+ let(:packages) { [package3, package1, package2] }
+ end
+
+ it_behaves_like 'package sorting by attribute', 'project_path' do
+ let(:another_project) { create(:project, :public, namespace: group, name: 'project B') }
+ let!(:package4) { create(:npm_package, project: another_project, version: '3.1.0', name: "@#{project.root_namespace.path}/bar") }
+
+ let(:packages) { [package1, package2, package3, package4] }
+ end
+ end
+
+ describe 'validations' do
+ subject { create(:package) }
+
+ it { is_expected.to validate_presence_of(:project) }
+ it { is_expected.to validate_uniqueness_of(:name).scoped_to(:project_id, :version, :package_type) }
+
+ describe '#name' do
+ it { is_expected.to allow_value("my/domain/com/my-app").for(:name) }
+ it { is_expected.to allow_value("my.app-11.07.2018").for(:name) }
+ it { is_expected.not_to allow_value("my(dom$$$ain)com.my-app").for(:name) }
+
+ context 'conan package' do
+ subject { create(:conan_package) }
+
+ let(:fifty_one_characters) { 'f_b' * 17 }
+
+ it { is_expected.to allow_value('foo+bar').for(:name) }
+ it { is_expected.to allow_value('foo_bar').for(:name) }
+ it { is_expected.to allow_value('foo.bar').for(:name) }
+ it { is_expected.not_to allow_value(fifty_one_characters).for(:name) }
+ it { is_expected.not_to allow_value('+foobar').for(:name) }
+ it { is_expected.not_to allow_value('.foobar').for(:name) }
+ it { is_expected.not_to allow_value('%foo%bar').for(:name) }
+ end
+ end
+
+ describe '#version' do
+ RSpec.shared_examples 'validating version to be SemVer compliant for' do |factory_name|
+ context "for #{factory_name}" do
+ subject { create(factory_name) }
+
+ it { is_expected.to allow_value('1.2.3').for(:version) }
+ it { is_expected.to allow_value('1.2.3-beta').for(:version) }
+ it { is_expected.to allow_value('1.2.3-alpha.3').for(:version) }
+ it { is_expected.not_to allow_value('1').for(:version) }
+ it { is_expected.not_to allow_value('1.2').for(:version) }
+ it { is_expected.not_to allow_value('1./2.3').for(:version) }
+ it { is_expected.not_to allow_value('../../../../../1.2.3').for(:version) }
+ it { is_expected.not_to allow_value('%2e%2e%2f1.2.3').for(:version) }
+ end
+ end
+
+ context 'conan package' do
+ subject { create(:conan_package) }
+
+ let(:fifty_one_characters) { '1.2' * 17 }
+
+ it { is_expected.to allow_value('1.2').for(:version) }
+ it { is_expected.to allow_value('1.2.3-beta').for(:version) }
+ it { is_expected.to allow_value('1.2.3-pre1+build2').for(:version) }
+ it { is_expected.not_to allow_value('1').for(:version) }
+ it { is_expected.not_to allow_value(fifty_one_characters).for(:version) }
+ it { is_expected.not_to allow_value('1./2.3').for(:version) }
+ it { is_expected.not_to allow_value('.1.2.3').for(:version) }
+ it { is_expected.not_to allow_value('+1.2.3').for(:version) }
+ it { is_expected.not_to allow_value('%2e%2e%2f1.2.3').for(:version) }
+ end
+
+ context 'maven package' do
+ subject { create(:maven_package) }
+
+ it { is_expected.to allow_value('0').for(:version) }
+ it { is_expected.to allow_value('1').for(:version) }
+ it { is_expected.to allow_value('10').for(:version) }
+ it { is_expected.to allow_value('1.0').for(:version) }
+ it { is_expected.to allow_value('1.3.350.v20200505-1744').for(:version) }
+ it { is_expected.to allow_value('1.1-beta-2').for(:version) }
+ it { is_expected.to allow_value('1.2-SNAPSHOT').for(:version) }
+ it { is_expected.to allow_value('12.1.2-2-1').for(:version) }
+ it { is_expected.to allow_value('1.2.3..beta').for(:version) }
+ it { is_expected.to allow_value('1.2.3-beta').for(:version) }
+ it { is_expected.to allow_value('10.2.3-beta').for(:version) }
+ it { is_expected.to allow_value('2.0.0.v200706041905-7C78EK9E_EkMNfNOd2d8qq').for(:version) }
+ it { is_expected.to allow_value('1.2-alpha-1-20050205.060708-1').for(:version) }
+ it { is_expected.to allow_value('703220b4e2cea9592caeb9f3013f6b1e5335c293').for(:version) }
+ it { is_expected.to allow_value('RELEASE').for(:version) }
+ it { is_expected.not_to allow_value('..1.2.3').for(:version) }
+ it { is_expected.not_to allow_value(' 1.2.3').for(:version) }
+ it { is_expected.not_to allow_value("1.2.3 \r\t").for(:version) }
+ it { is_expected.not_to allow_value("\r\t 1.2.3").for(:version) }
+ it { is_expected.not_to allow_value('1.2.3-4/../../').for(:version) }
+ it { is_expected.not_to allow_value('1.2.3-4%2e%2e%').for(:version) }
+ it { is_expected.not_to allow_value('../../../../../1.2.3').for(:version) }
+ it { is_expected.not_to allow_value('%2e%2e%2f1.2.3').for(:version) }
+ end
+
+ it_behaves_like 'validating version to be SemVer compliant for', :npm_package
+ it_behaves_like 'validating version to be SemVer compliant for', :nuget_package
+ end
+
+ describe '#package_already_taken' do
+ context 'npm package' do
+ let!(:package) { create(:npm_package) }
+
+ it 'will not allow a package of the same name' do
+ new_package = build(:npm_package, name: package.name)
+
+ expect(new_package).not_to be_valid
+ end
+ end
+
+ context 'maven package' do
+ let!(:package) { create(:maven_package) }
+
+ it 'will allow a package of the same name' do
+ new_package = build(:maven_package, name: package.name)
+
+ expect(new_package).to be_valid
+ end
+ end
+ end
+
+ context "recipe uniqueness for conan packages" do
+ let!(:package) { create('conan_package') }
+
+ it "will allow a conan package with same project, name, version and package_type" do
+ new_package = build('conan_package', project: package.project, name: package.name, version: package.version)
+ new_package.conan_metadatum.package_channel = 'beta'
+ expect(new_package).to be_valid
+ end
+
+ it "will not allow a conan package with same recipe (name, version, metadatum.package_channel, metadatum.package_username, and package_type)" do
+ new_package = build('conan_package', project: package.project, name: package.name, version: package.version)
+ expect(new_package).not_to be_valid
+ expect(new_package.errors.to_a).to include("Package recipe already exists")
+ end
+ end
+
+ Packages::Package.package_types.keys.without('conan').each do |pt|
+ context "project id, name, version and package type uniqueness for package type #{pt}" do
+ let(:package) { create("#{pt}_package") }
+
+ it "will not allow a #{pt} package with same project, name, version and package_type" do
+ new_package = build("#{pt}_package", project: package.project, name: package.name, version: package.version)
+ expect(new_package).not_to be_valid
+ expect(new_package.errors.to_a).to include("Name has already been taken")
+ end
+ end
+ end
+ end
+
+ describe '#destroy' do
+ let(:package) { create(:npm_package) }
+ let(:package_file) { package.package_files.first }
+ let(:project_statistics) { ProjectStatistics.for_project_ids(package.project.id).first }
+
+ it 'affects project statistics' do
+ expect { package.destroy! }
+ .to change { project_statistics.reload.packages_size }
+ .from(package_file.size).to(0)
+ end
+ end
+
+ describe '.by_name_and_file_name' do
+ let!(:package) { create(:npm_package) }
+ let!(:package_file) { package.package_files.first }
+
+ subject { described_class }
+
+ it 'finds a package with correct arguments' do
+ expect(subject.by_name_and_file_name(package.name, package_file.file_name)).to eq(package)
+ end
+
+ it 'will raise error if not found' do
+ expect { subject.by_name_and_file_name('foo', 'foo-5.5.5.tgz') }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+ end
+
+ context 'version scopes' do
+ let!(:package1) { create(:npm_package, version: '1.0.0') }
+ let!(:package2) { create(:npm_package, version: '1.0.1') }
+ let!(:package3) { create(:npm_package, version: '1.0.1') }
+
+ describe '.last_of_each_version' do
+ subject { described_class.last_of_each_version }
+
+ it 'includes only latest package per version' do
+ is_expected.to include(package1, package3)
+ is_expected.not_to include(package2)
+ end
+ end
+
+ describe '.has_version' do
+ subject { described_class.has_version }
+
+ before do
+ create(:maven_metadatum).package.update!(version: nil)
+ end
+
+ it 'includes only packages with version attribute' do
+ is_expected.to match_array([package1, package2, package3])
+ end
+ end
+
+ describe '.with_version' do
+ subject { described_class.with_version('1.0.1') }
+
+ it 'includes only packages with specified version' do
+ is_expected.to match_array([package2, package3])
+ end
+ end
+
+ describe '.without_version_like' do
+ let(:version_pattern) { '%.0.0%' }
+
+ subject { described_class.without_version_like(version_pattern) }
+
+ it 'includes packages without the version pattern' do
+ is_expected.to match_array([package2, package3])
+ end
+ end
+ end
+
+ context 'conan scopes' do
+ let!(:package) { create(:conan_package) }
+
+ describe '.with_conan_channel' do
+ subject { described_class.with_conan_channel('stable') }
+
+ it 'includes only packages with the specified channel' do
+ is_expected.to include(package)
+ end
+ end
+
+ describe '.with_conan_username' do
+ subject do
+ described_class.with_conan_username(
+ Packages::Conan::Metadatum.package_username_from(full_path: package.project.full_path)
+ )
+ end
+
+ it 'includes only packages with the specified username' do
+ is_expected.to match_array([package])
+ end
+ end
+ end
+
+ describe '.without_nuget_temporary_name' do
+ let!(:package1) { create(:nuget_package) }
+ let!(:package2) { create(:nuget_package, name: Packages::Nuget::CreatePackageService::TEMPORARY_PACKAGE_NAME) }
+
+ subject { described_class.without_nuget_temporary_name }
+
+ it 'does not include nuget temporary packages' do
+ expect(subject).to eq([package1])
+ end
+ end
+
+ describe '.processed' do
+ let!(:package1) { create(:nuget_package) }
+ let!(:package2) { create(:npm_package) }
+ let!(:package3) { create(:nuget_package) }
+
+ subject { described_class.processed }
+
+ it { is_expected.to match_array([package1, package2, package3]) }
+
+ context 'with temporary packages' do
+ let!(:package1) { create(:nuget_package, name: Packages::Nuget::CreatePackageService::TEMPORARY_PACKAGE_NAME) }
+
+ it { is_expected.to match_array([package2, package3]) }
+ end
+ end
+
+ describe '.limit_recent' do
+ let!(:package1) { create(:nuget_package) }
+ let!(:package2) { create(:nuget_package) }
+ let!(:package3) { create(:nuget_package) }
+
+ subject { described_class.limit_recent(2) }
+
+ it { is_expected.to match_array([package3, package2]) }
+ end
+
+ context 'with several packages' do
+ let_it_be(:package1) { create(:nuget_package, name: 'FooBar') }
+ let_it_be(:package2) { create(:nuget_package, name: 'foobar') }
+ let_it_be(:package3) { create(:npm_package) }
+ let_it_be(:package4) { create(:npm_package) }
+
+ describe '.pluck_names' do
+ subject { described_class.pluck_names }
+
+ it { is_expected.to match_array([package1, package2, package3, package4].map(&:name)) }
+ end
+
+ describe '.pluck_versions' do
+ subject { described_class.pluck_versions }
+
+ it { is_expected.to match_array([package1, package2, package3, package4].map(&:version)) }
+ end
+
+ describe '.with_name_like' do
+ subject { described_class.with_name_like(name_term) }
+
+ context 'with downcase name' do
+ let(:name_term) { 'foobar' }
+
+ it { is_expected.to match_array([package1, package2]) }
+ end
+
+ context 'with prefix wildcard' do
+ let(:name_term) { '%ar' }
+
+ it { is_expected.to match_array([package1, package2]) }
+ end
+
+ context 'with suffix wildcard' do
+ let(:name_term) { 'foo%' }
+
+ it { is_expected.to match_array([package1, package2]) }
+ end
+
+ context 'with surrounding wildcards' do
+ let(:name_term) { '%ooba%' }
+
+ it { is_expected.to match_array([package1, package2]) }
+ end
+ end
+
+ describe '.search_by_name' do
+ let(:query) { 'oba' }
+
+ subject { described_class.search_by_name(query) }
+
+ it { is_expected.to match_array([package1, package2]) }
+ end
+ end
+
+ describe '.select_distinct_name' do
+ let_it_be(:nuget_package) { create(:nuget_package) }
+ let_it_be(:nuget_packages) { create_list(:nuget_package, 3, name: nuget_package.name, project: nuget_package.project) }
+ let_it_be(:maven_package) { create(:maven_package) }
+ let_it_be(:maven_packages) { create_list(:maven_package, 3, name: maven_package.name, project: maven_package.project) }
+
+ subject { described_class.select_distinct_name }
+
+ it 'returns only distinct names' do
+ packages = subject
+
+ expect(packages.size).to eq(2)
+ expect(packages.pluck(:name)).to match_array([nuget_package.name, maven_package.name])
+ end
+ end
+
+ describe '#versions' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:package) { create(:maven_package, project: project) }
+ let_it_be(:package2) { create(:maven_package, project: project) }
+ let_it_be(:package3) { create(:maven_package, project: project, name: 'foo') }
+
+ it 'returns other package versions of the same package name belonging to the project' do
+ expect(package.versions).to contain_exactly(package2)
+ end
+
+ it 'does not return different packages' do
+ expect(package.versions).not_to include(package3)
+ end
+ end
+
+ describe '#pipeline' do
+ let_it_be(:package) { create(:maven_package) }
+
+ context 'package without pipeline' do
+ it 'returns nil if there is no pipeline' do
+ expect(package.pipeline).to be_nil
+ end
+ end
+
+ context 'package with pipeline' do
+ let_it_be(:pipeline) { create(:ci_pipeline) }
+
+ before do
+ package.create_build_info!(pipeline: pipeline)
+ end
+
+ it 'returns the pipeline' do
+ expect(package.pipeline).to eq(pipeline)
+ end
+ end
+ end
+
+ describe '#tag_names' do
+ let_it_be(:package) { create(:nuget_package) }
+
+ subject { package.tag_names }
+
+ it { is_expected.to eq([]) }
+
+ context 'with tags' do
+ let(:tags) { %w(tag1 tag2 tag3) }
+
+ before do
+ tags.each { |t| create(:packages_tag, name: t, package: package) }
+ end
+
+ it { is_expected.to contain_exactly(*tags) }
+ end
+ end
+end
diff --git a/spec/models/packages/pypi/metadatum_spec.rb b/spec/models/packages/pypi/metadatum_spec.rb
new file mode 100644
index 00000000000..2c9893ef8f3
--- /dev/null
+++ b/spec/models/packages/pypi/metadatum_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Packages::Pypi::Metadatum, type: :model do
+ describe 'relationships' do
+ it { is_expected.to belong_to(:package) }
+ end
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:package) }
+
+ describe '#pypi_package_type' do
+ it 'will not allow a package with a different package_type' do
+ package = build('package')
+ pypi_metadatum = build('pypi_metadatum', package: package)
+
+ expect(pypi_metadatum).not_to be_valid
+ expect(pypi_metadatum.errors.to_a).to include('Package type must be PyPi')
+ end
+ end
+ end
+end
diff --git a/spec/models/packages/sem_ver_spec.rb b/spec/models/packages/sem_ver_spec.rb
new file mode 100644
index 00000000000..419653dca19
--- /dev/null
+++ b/spec/models/packages/sem_ver_spec.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::SemVer, type: :model do
+ shared_examples '#parse with a valid semver' do |str, major, minor, patch, prerelease, build|
+ context "with #{str}" do
+ it "returns #{described_class.new(major, minor, patch, prerelease, build, prefixed: true)} with prefix" do
+ expected = described_class.new(major, minor, patch, prerelease, build, prefixed: true)
+ expect(described_class.parse('v' + str, prefixed: true)).to eq(expected)
+ end
+
+ it "returns #{described_class.new(major, minor, patch, prerelease, build)} without prefix" do
+ expected = described_class.new(major, minor, patch, prerelease, build)
+ expect(described_class.parse(str)).to eq(expected)
+ end
+ end
+ end
+
+ shared_examples '#parse with an invalid semver' do |str|
+ context "with #{str}" do
+ it 'returns nil with prefix' do
+ expect(described_class.parse('v' + str, prefixed: true)).to be_nil
+ end
+
+ it 'returns nil without prefix' do
+ expect(described_class.parse(str)).to be_nil
+ end
+ end
+ end
+
+ describe '#parse' do
+ it_behaves_like '#parse with a valid semver', '1.0.0', 1, 0, 0, nil, nil
+ it_behaves_like '#parse with a valid semver', '1.0.0-pre', 1, 0, 0, 'pre', nil
+ it_behaves_like '#parse with a valid semver', '1.0.0+build', 1, 0, 0, nil, 'build'
+ it_behaves_like '#parse with a valid semver', '1.0.0-pre+build', 1, 0, 0, 'pre', 'build'
+ it_behaves_like '#parse with an invalid semver', '01.0.0'
+ it_behaves_like '#parse with an invalid semver', '0.01.0'
+ it_behaves_like '#parse with an invalid semver', '0.0.01'
+ it_behaves_like '#parse with an invalid semver', '1.0.0asdf'
+ end
+end
diff --git a/spec/models/packages/tag_spec.rb b/spec/models/packages/tag_spec.rb
new file mode 100644
index 00000000000..18ec99c3d51
--- /dev/null
+++ b/spec/models/packages/tag_spec.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Packages::Tag, type: :model do
+ let!(:project) { create(:project) }
+ let!(:package) { create(:npm_package, version: '1.0.2', project: project, updated_at: 3.days.ago) }
+
+ describe 'relationships' do
+ it { is_expected.to belong_to(:package).inverse_of(:tags) }
+ end
+
+ describe 'validations' do
+ subject { create(:packages_tag) }
+
+ it { is_expected.to validate_presence_of(:package) }
+ it { is_expected.to validate_presence_of(:name) }
+ end
+
+ describe '.for_packages' do
+ let(:package2) { create(:package, project: project, updated_at: 2.days.ago) }
+ let(:package3) { create(:package, project: project, updated_at: 1.day.ago) }
+ let!(:tag1) { create(:packages_tag, package: package) }
+ let!(:tag2) { create(:packages_tag, package: package2) }
+ let!(:tag3) { create(:packages_tag, package: package3) }
+
+ subject { described_class.for_packages(project.packages) }
+
+ it { is_expected.to match_array([tag1, tag2, tag3]) }
+
+ context 'with too many tags' do
+ before do
+ stub_const('Packages::Tag::FOR_PACKAGES_TAGS_LIMIT', 2)
+ end
+
+ it { is_expected.to match_array([tag2, tag3]) }
+ end
+ end
+
+ describe '.with_name' do
+ let_it_be(:package) { create(:package) }
+ let_it_be(:tag1) { create(:packages_tag, package: package, name: 'tag1') }
+ let_it_be(:tag2) { create(:packages_tag, package: package, name: 'tag2') }
+ let_it_be(:tag3) { create(:packages_tag, package: package, name: 'tag3') }
+ let(:name) { 'tag1' }
+
+ subject { described_class.with_name(name) }
+
+ it { is_expected.to contain_exactly(tag1) }
+
+ context 'with nil name' do
+ let(:name) { nil }
+
+ it { is_expected.to eq([]) }
+ end
+
+ context 'with multiple names' do
+ let(:name) { %w(tag1 tag3) }
+
+ it { is_expected.to contain_exactly(tag1, tag3) }
+ end
+ end
+end
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index 94b0b55d32c..c1c7892e2da 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -1666,6 +1666,14 @@ RSpec.describe Project do
let(:project_name) { 'Project' }
it { is_expected.to eq("http://group.example.com/project") }
+
+ context 'mixed case path' do
+ before do
+ project.update!(path: 'Project')
+ end
+
+ it { is_expected.to eq("http://group.example.com/Project") }
+ end
end
end
@@ -2905,28 +2913,73 @@ RSpec.describe Project do
subject { project.deployment_variables(environment: environment, kubernetes_namespace: namespace) }
- before do
- expect(project).to receive(:deployment_platform).with(environment: environment)
- .and_return(deployment_platform)
- end
+ context 'when the deployment platform is stubbed' do
+ before do
+ expect(project).to receive(:deployment_platform).with(environment: environment)
+ .and_return(deployment_platform)
+ end
+
+ context 'when project has a deployment platform' do
+ let(:platform_variables) { %w(platform variables) }
+ let(:deployment_platform) { double }
+
+ before do
+ expect(deployment_platform).to receive(:predefined_variables)
+ .with(project: project, environment_name: environment, kubernetes_namespace: namespace)
+ .and_return(platform_variables)
+ end
+
+ it { is_expected.to eq platform_variables }
+ end
- context 'when project has no deployment platform' do
- let(:deployment_platform) { nil }
+ context 'when project has no deployment platform' do
+ let(:deployment_platform) { nil }
- it { is_expected.to eq [] }
+ it { is_expected.to eq [] }
+ end
end
- context 'when project has a deployment platform' do
- let(:platform_variables) { %w(platform variables) }
- let(:deployment_platform) { double }
+    context 'when project has multiple deployment platforms' do
+ let(:project) { create(:project) }
+
+ let!(:default_cluster) do
+ create(:cluster,
+ :not_managed,
+ platform_type: :kubernetes,
+ projects: [project],
+ environment_scope: '*',
+ platform_kubernetes: default_cluster_kubernetes)
+ end
- before do
- expect(deployment_platform).to receive(:predefined_variables)
- .with(project: project, environment_name: environment, kubernetes_namespace: namespace)
- .and_return(platform_variables)
+ let!(:review_env_cluster) do
+ create(:cluster,
+ :not_managed,
+ platform_type: :kubernetes,
+ projects: [project],
+ environment_scope: 'review/*',
+ platform_kubernetes: review_env_cluster_kubernetes)
end
- it { is_expected.to eq platform_variables }
+ let(:default_cluster_kubernetes) { create(:cluster_platform_kubernetes, token: 'default-AAA') }
+ let(:review_env_cluster_kubernetes) { create(:cluster_platform_kubernetes, token: 'review-AAA') }
+
+ context 'when environment name is review/name' do
+ let!(:environment) { create(:environment, project: project, name: 'review/name') }
+
+        it 'returns variables from the review environment scoped cluster' do
+ expect(project.deployment_variables(environment: 'review/name'))
+ .to include(key: 'KUBE_TOKEN', value: 'review-AAA', public: false, masked: true)
+ end
+ end
+
+ context 'when environment name is other' do
+ let!(:environment) { create(:environment, project: project, name: 'staging/name') }
+
+        it 'returns variables from the default cluster' do
+ expect(project.deployment_variables(environment: 'staging/name'))
+ .to include(key: 'KUBE_TOKEN', value: 'default-AAA', public: false, masked: true)
+ end
+ end
end
end
diff --git a/spec/models/prometheus_metric_spec.rb b/spec/models/prometheus_metric_spec.rb
index ed561c8b399..f284102b4a9 100644
--- a/spec/models/prometheus_metric_spec.rb
+++ b/spec/models/prometheus_metric_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe PrometheusMetric do
it { is_expected.to validate_presence_of(:title) }
it { is_expected.to validate_presence_of(:query) }
it { is_expected.to validate_presence_of(:group) }
+ it { is_expected.to validate_uniqueness_of(:identifier).scoped_to(:project_id).allow_nil }
describe 'common metrics' do
using RSpec::Parameterized::TableSyntax
diff --git a/spec/presenters/alert_management/prometheus_alert_presenter_spec.rb b/spec/presenters/alert_management/prometheus_alert_presenter_spec.rb
index 4e6683ee68e..95246914140 100644
--- a/spec/presenters/alert_management/prometheus_alert_presenter_spec.rb
+++ b/spec/presenters/alert_management/prometheus_alert_presenter_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe AlertManagement::PrometheusAlertPresenter do
let_it_be(:project) { create(:project) }
- let_it_be(:prometheus_payload) do
+ let_it_be(:payload) do
{
'annotations' => {
'title' => 'Alert title',
@@ -15,8 +15,8 @@ RSpec.describe AlertManagement::PrometheusAlertPresenter do
'generatorURL' => 'http://8d467bd4607a:9090/graph?g0.expr=vector%281%29&g0.tab=1'
}
end
- let_it_be(:alert) do
- create(:alert_management_alert, :prometheus, project: project, payload: prometheus_payload)
+ let(:alert) do
+ create(:alert_management_alert, :prometheus, project: project, payload: payload)
end
subject(:presenter) { described_class.new(alert) }
@@ -47,8 +47,22 @@ RSpec.describe AlertManagement::PrometheusAlertPresenter do
end
describe '#metrics_dashboard_url' do
- it 'is not defined' do
- expect(presenter.metrics_dashboard_url).to be_nil
+ subject { presenter.metrics_dashboard_url }
+
+ context 'for a non-prometheus alert' do
+ it { is_expected.to be_nil }
+ end
+
+ context 'for a self-managed prometheus alert' do
+ include_context 'self-managed prometheus alert attributes'
+
+ it { is_expected.to eq(dashboard_url_for_alert) }
+ end
+
+ context 'for a gitlab-managed prometheus alert' do
+ include_context 'gitlab-managed prometheus alert attributes'
+
+ it { is_expected.to eq(dashboard_url_for_alert) }
end
end
end
diff --git a/spec/presenters/projects/prometheus/alert_presenter_spec.rb b/spec/presenters/projects/prometheus/alert_presenter_spec.rb
index e8bcbb4378f..89c5438b074 100644
--- a/spec/presenters/projects/prometheus/alert_presenter_spec.rb
+++ b/spec/presenters/projects/prometheus/alert_presenter_spec.rb
@@ -20,38 +20,6 @@ RSpec.describe Projects::Prometheus::AlertPresenter do
end
end
- shared_context 'self-managed prometheus alert with metrics data' do
- let!(:environment) { create(:environment, project: project, name: 'production') }
-
- let(:title) { 'title' }
- let(:y_label) { 'y_label' }
- let(:query) { 'avg(metric) > 1.0' }
- let(:embed_content) do
- {
- panel_groups: [{
- panels: [{
- type: 'line-graph',
- title: title,
- y_label: y_label,
- metrics: [{ query_range: query }]
- }]
- }]
- }
- end
-
- before do
- payload['startsAt'] = starts_at
- payload['generatorURL'] = "http://host?g0.expr=#{CGI.escape(query)}"
-
- payload['labels'] ||= {}
- payload['labels']['gitlab_environment_name'] = 'production'
-
- payload['annotations'] ||= {}
- payload['annotations']['title'] = 'title'
- payload['annotations']['gitlab_y_label'] = 'y_label'
- end
- end
-
describe '#project_full_path' do
subject { presenter.project_full_path }
@@ -214,30 +182,30 @@ RSpec.describe Projects::Prometheus::AlertPresenter do
Timecop.freeze(starts_at) { example.run }
end
+ before do
+ payload.delete('startsAt')
+ end
+
it { is_expected.to eq(expected_markdown) }
end
context 'with a starting time available' do
- before do
- payload['startsAt'] = starts_at
- end
-
it { is_expected.to eq(expected_markdown) }
end
end
context 'for gitlab-managed prometheus alerts' do
- include_context 'gitlab alert'
+ include_context 'gitlab-managed prometheus alert attributes'
- before do
- payload['labels'] = { 'gitlab_alert_id' => metric_id }
+ let(:alert) do
+ create(:alerting_alert, project: project, metric_id: prometheus_metric_id, payload: payload)
end
it_behaves_like 'markdown with metrics embed'
end
context 'for alerts from a self-managed prometheus' do
- include_context 'self-managed prometheus alert with metrics data'
+ include_context 'self-managed prometheus alert attributes'
it_behaves_like 'markdown with metrics embed'
@@ -369,28 +337,6 @@ RSpec.describe Projects::Prometheus::AlertPresenter do
it { is_expected.to eq(expected_link) }
end
-
- describe '#metrics_dashboard_url' do
- let(:starts_at) { '2018-03-12T09:06:00Z' }
- let(:expected_url) do
- metrics_dashboard_project_prometheus_alert_url(
- project,
- metric_id,
- environment_id: gitlab_alert.environment_id,
- embedded: true,
- end: '2018-03-12T09:36:00Z',
- start: '2018-03-12T08:36:00Z'
- )
- end
-
- subject { presenter.metrics_dashboard_url }
-
- before do
- payload['startsAt'] = starts_at
- end
-
- it { is_expected.to eq(expected_url) }
- end
end
context 'without gitlab alert' do
@@ -431,33 +377,29 @@ RSpec.describe Projects::Prometheus::AlertPresenter do
it { is_expected.to eq(expected_link) }
end
+ end
- describe '#metrics_dashboard_url' do
- subject { presenter.metrics_dashboard_url }
+ describe '#metrics_dashboard_url' do
+ subject { presenter.metrics_dashboard_url }
+ context 'for a non-prometheus alert' do
it { is_expected.to be_nil }
end
- end
- context 'with self-managed prometheus alert with metrics data' do
- include_context 'self-managed prometheus alert with metrics data'
+ context 'for a self-managed prometheus alert' do
+ include_context 'self-managed prometheus alert attributes'
- describe '#metrics_dashboard_url' do
- let(:starts_at) { '2018-03-12T09:06:00Z' }
- let(:expected_url) do
- metrics_dashboard_project_environment_url(
- project,
- environment,
- embed_json: embed_content.to_json,
- embedded: true,
- end: '2018-03-12T09:36:00Z',
- start: '2018-03-12T08:36:00Z'
- )
- end
+ let(:prometheus_payload) { payload }
+
+ it { is_expected.to eq(dashboard_url_for_alert) }
+ end
+
+ context 'for a gitlab-managed prometheus alert' do
+ include_context 'gitlab-managed prometheus alert attributes'
- subject { presenter.metrics_dashboard_url }
+ let(:prometheus_payload) { payload }
- it { is_expected.to eq(expected_url) }
+ it { is_expected.to eq(dashboard_url_for_alert) }
end
end
end
diff --git a/spec/requests/api/graphql/group/milestones_spec.rb b/spec/requests/api/graphql/group/milestones_spec.rb
index 1c884335da1..380eaea17f8 100644
--- a/spec/requests/api/graphql/group/milestones_spec.rb
+++ b/spec/requests/api/graphql/group/milestones_spec.rb
@@ -73,21 +73,6 @@ RSpec.describe 'Milestones through GroupQuery' do
submilestone_1.to_global_id.to_s, submilestone_2.to_global_id.to_s
)
end
-
- context 'when group_milestone_descendants is disabled' do
- before do
- stub_feature_flags(group_milestone_descendants: false)
- end
-
- it 'ignores descendant milestones' do
- fetch_milestones(user, args)
-
- expect_array_response(
- milestone_1.to_global_id.to_s, milestone_2.to_global_id.to_s,
- milestone_3.to_global_id.to_s, milestone_4.to_global_id.to_s
- )
- end
- end
end
def fetch_milestones(user = nil, args = {})
diff --git a/spec/requests/api/graphql/project/alert_management/alert/metrics_dashboard_url_spec.rb b/spec/requests/api/graphql/project/alert_management/alert/metrics_dashboard_url_spec.rb
new file mode 100644
index 00000000000..352a94cfc1d
--- /dev/null
+++ b/spec/requests/api/graphql/project/alert_management/alert/metrics_dashboard_url_spec.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'getting Alert Management Alert Metrics Dashboard URL' do
+ include GraphqlHelpers
+
+ let_it_be(:project) { create(:project) }
+ let_it_be(:current_user) { create(:user) }
+
+ let(:fields) do
+ <<~QUERY
+ nodes {
+ iid
+ metricsDashboardUrl
+ }
+ QUERY
+ end
+
+ let(:graphql_query) do
+ graphql_query_for(
+ 'project',
+ { 'fullPath' => project.full_path },
+ query_graphql_field('alertManagementAlerts', {}, fields)
+ )
+ end
+
+ let(:alerts) { graphql_data.dig('project', 'alertManagementAlerts', 'nodes') }
+ let(:first_alert) { alerts.first }
+
+ before do
+ project.add_developer(current_user)
+ end
+
+ context 'with self-managed prometheus payload' do
+ include_context 'self-managed prometheus alert attributes'
+
+ before do
+ create(:alert_management_alert, :prometheus, project: project, payload: payload)
+ end
+
+ it 'includes the correct metrics dashboard url' do
+ post_graphql(graphql_query, current_user: current_user)
+
+ expect(first_alert).to include('metricsDashboardUrl' => dashboard_url_for_alert)
+ end
+ end
+
+ context 'with gitlab-managed prometheus payload' do
+ include_context 'gitlab-managed prometheus alert attributes'
+
+ before do
+ create(:alert_management_alert, :prometheus, project: project, payload: payload, prometheus_alert: prometheus_alert)
+ end
+
+ it 'includes the correct metrics dashboard url' do
+ post_graphql(graphql_query, current_user: current_user)
+
+ expect(first_alert).to include('metricsDashboardUrl' => dashboard_url_for_alert)
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/project/alert_management/alerts_spec.rb b/spec/requests/api/graphql/project/alert_management/alerts_spec.rb
index 7734ba51014..f050c6873f3 100644
--- a/spec/requests/api/graphql/project/alert_management/alerts_spec.rb
+++ b/spec/requests/api/graphql/project/alert_management/alerts_spec.rb
@@ -136,28 +136,6 @@ RSpec.describe 'getting Alert Management Alerts' do
it { expect(alerts.size).to eq(0) }
end
end
-
- context 'with prometheus payload' do
- let_it_be(:gitlab_alert) { create(:prometheus_alert, project: project) }
- let_it_be(:metric_id) { gitlab_alert.prometheus_metric_id }
- let_it_be(:prometheus_payload) { { 'labels' => { 'gitlab_alert_id' => metric_id }, 'startsAt' => '2018-03-12T09:06:00Z' } }
- let_it_be(:self_managed_alert) { create(:alert_management_alert, :prometheus, project: project, payload: prometheus_payload) }
-
- let(:expected_url) do
- Gitlab::Routing.url_helpers.metrics_dashboard_project_prometheus_alert_url(
- project,
- metric_id,
- environment_id: gitlab_alert.environment_id,
- start: '2018-03-12T08:36:00Z',
- end: '2018-03-12T09:36:00Z',
- embedded: true
- )
- end
-
- it 'includes a metrics dashboard url' do
- expect(first_alert).to include('metricsDashboardUrl' => expected_url)
- end
- end
end
end
end
diff --git a/spec/requests/api/group_clusters_spec.rb b/spec/requests/api/group_clusters_spec.rb
index 0e695cc64a2..068af1485e2 100644
--- a/spec/requests/api/group_clusters_spec.rb
+++ b/spec/requests/api/group_clusters_spec.rb
@@ -266,29 +266,51 @@ RSpec.describe API::GroupClusters do
end
end
- context 'when user tries to add multiple clusters' do
+ context 'non-authorized user' do
before do
- create(:cluster, :provided_by_gcp, :group,
- groups: [group])
-
- post api("/groups/#{group.id}/clusters/user", current_user), params: cluster_params
+ post api("/groups/#{group.id}/clusters/user", developer_user), params: cluster_params
end
- it 'responds with 400' do
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response['message']['base'].first).to eq(_('Instance does not support multiple Kubernetes clusters'))
+ it 'responds with 403' do
+ expect(response).to have_gitlab_http_status(:forbidden)
+
+ expect(json_response['message']).to eq('403 Forbidden')
end
end
+ end
- context 'non-authorized user' do
+  describe 'POST /groups/:id/clusters/user with multiple clusters' do
+ let(:api_url) { 'https://kubernetes.example.com' }
+
+ let(:platform_kubernetes_attributes) do
+ {
+ api_url: api_url,
+ token: 'sample-token'
+ }
+ end
+
+ let(:cluster_params) do
+ {
+ name: 'test-cluster',
+ environment_scope: 'test/*',
+ platform_kubernetes_attributes: platform_kubernetes_attributes
+ }
+ end
+
+ context 'when another cluster exists' do
before do
- post api("/groups/#{group.id}/clusters/user", developer_user), params: cluster_params
+ create(:cluster, :provided_by_gcp, :group,
+ groups: [group])
+
+ post api("/groups/#{group.id}/clusters/user", current_user), params: cluster_params
end
- it 'responds with 403' do
- expect(response).to have_gitlab_http_status(:forbidden)
+ it 'responds with 201' do
+ expect(response).to have_gitlab_http_status(:created)
+ end
- expect(json_response['message']).to eq('403 Forbidden')
+      it 'allows multiple clusters to be associated with the group' do
+ expect(group.reload.clusters.count).to eq(2)
end
end
end
diff --git a/spec/requests/api/project_clusters_spec.rb b/spec/requests/api/project_clusters_spec.rb
index f01b3bc9d5f..ff35e380476 100644
--- a/spec/requests/api/project_clusters_spec.rb
+++ b/spec/requests/api/project_clusters_spec.rb
@@ -40,7 +40,7 @@ RSpec.describe API::ProjectClusters do
expect(response).to include_pagination_headers
end
- it 'onlies include authorized clusters' do
+ it 'only includes authorized clusters' do
cluster_ids = json_response.map { |cluster| cluster['id'] }
expect(response).to have_gitlab_http_status(:ok)
@@ -258,29 +258,52 @@ RSpec.describe API::ProjectClusters do
end
end
- context 'when user tries to add multiple clusters' do
+ context 'non-authorized user' do
before do
- create(:cluster, :provided_by_gcp, :project,
- projects: [project])
-
- post api("/projects/#{project.id}/clusters/user", current_user), params: cluster_params
+ post api("/projects/#{project.id}/clusters/user", developer_user), params: cluster_params
end
- it 'responds with 400' do
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response['message']['base'].first)
- .to eq(_('Instance does not support multiple Kubernetes clusters'))
+ it 'responds with 403' do
+ expect(response).to have_gitlab_http_status(:forbidden)
+ expect(json_response['message']).to eq('403 Forbidden')
end
end
+ end
- context 'non-authorized user' do
+ describe 'POST /projects/:id/clusters/user with multiple clusters' do
+ let(:api_url) { 'https://kubernetes.example.com' }
+ let(:namespace) { project.path }
+
+ let(:platform_kubernetes_attributes) do
+ {
+ api_url: api_url,
+ token: 'sample-token',
+ namespace: namespace
+ }
+ end
+
+ let(:cluster_params) do
+ {
+ name: 'test-cluster',
+ environment_scope: 'production/*',
+ platform_kubernetes_attributes: platform_kubernetes_attributes
+ }
+ end
+
+ context 'when another cluster exists' do
before do
- post api("/projects/#{project.id}/clusters/user", developer_user), params: cluster_params
+ create(:cluster, :provided_by_gcp, :project,
+ projects: [project])
+
+ post api("/projects/#{project.id}/clusters/user", current_user), params: cluster_params
end
- it 'responds with 403' do
- expect(response).to have_gitlab_http_status(:forbidden)
- expect(json_response['message']).to eq('403 Forbidden')
+ it 'responds with 201' do
+ expect(response).to have_gitlab_http_status(:created)
+ end
+
+      it 'allows multiple clusters to be associated with the project' do
+ expect(project.reload.clusters.count).to eq(2)
end
end
end
diff --git a/spec/routing/group_routing_spec.rb b/spec/routing/group_routing_spec.rb
index 63193649bc7..f4d5f899519 100644
--- a/spec/routing/group_routing_spec.rb
+++ b/spec/routing/group_routing_spec.rb
@@ -2,8 +2,8 @@
require 'spec_helper'
-RSpec.describe "Groups", "routing" do
- let(:group_path) { 'complex.group-namegit' }
+RSpec.shared_examples 'groups routing' do
+ let(:group_path) { 'projects.abc123' }
let!(:group) { create(:group, path: group_path) }
it "to #show" do
@@ -56,3 +56,23 @@ RSpec.describe "Groups", "routing" do
expect(get('/groups/gitlabhq/-/boards')).to route_to('groups/boards#index', group_id: 'gitlabhq')
end
end
+
+RSpec.describe "Groups", "routing" do
+ context 'complex group path with dot' do
+ include_examples 'groups routing' do
+ let(:group_path) { 'complex.group-namegit' }
+ end
+ end
+
+ context 'group path starting with help' do
+ include_examples 'groups routing' do
+ let(:group_path) { 'help.abc123' }
+ end
+ end
+
+ context 'group path starting with projects' do
+ include_examples 'groups routing' do
+ let(:group_path) { 'projects.abc123' }
+ end
+ end
+end
diff --git a/spec/routing/project_routing_spec.rb b/spec/routing/project_routing_spec.rb
index 07c1c33ecb5..147151a6bb0 100644
--- a/spec/routing/project_routing_spec.rb
+++ b/spec/routing/project_routing_spec.rb
@@ -77,6 +77,10 @@ RSpec.describe 'project routing' do
# DELETE /:id(.:format) projects#destroy
# preview_markdown_project POST /:id/preview_markdown(.:format) projects#preview_markdown
describe ProjectsController, 'routing' do
+ it 'to #index' do
+ expect(get('/projects')).to route_to('projects#index')
+ end
+
it 'to #create' do
expect(post('/projects')).to route_to('projects#create')
end
diff --git a/spec/services/clusters/create_service_spec.rb b/spec/services/clusters/create_service_spec.rb
index d45749b52d0..6e252bee7c0 100644
--- a/spec/services/clusters/create_service_spec.rb
+++ b/spec/services/clusters/create_service_spec.rb
@@ -53,13 +53,54 @@ RSpec.describe Clusters::CreateService do
include_context 'valid cluster create params'
let!(:cluster) { create(:cluster, :provided_by_gcp, :production_environment, projects: [project]) }
- it 'does not create a cluster' do
- expect(ClusterProvisionWorker).not_to receive(:perform_async)
- expect { subject }.to raise_error(ArgumentError).and change { Clusters::Cluster.count }.by(0)
+ it 'creates another cluster' do
+ expect(ClusterProvisionWorker).to receive(:perform_async)
+ expect { subject }.to change { Clusters::Cluster.count }.by(1)
end
end
end
+ context 'when another cluster exists' do
+ let!(:cluster) { create(:cluster, :provided_by_gcp, :production_environment, projects: [project]) }
+
+ context 'when correct params' do
+ let(:params) do
+ {
+ name: 'test-cluster',
+ provider_type: :gcp,
+ provider_gcp_attributes: {
+ gcp_project_id: 'gcp-project',
+ zone: 'us-central1-a',
+ num_nodes: 1,
+ machine_type: 'machine_type-a',
+ legacy_abac: 'true'
+ },
+ clusterable: project
+ }
+ end
+
+ include_examples 'create cluster service success'
+ end
+
+ context 'when invalid params' do
+ let(:params) do
+ {
+ name: 'test-cluster',
+ provider_type: :gcp,
+ provider_gcp_attributes: {
+ gcp_project_id: '!!!!!!!',
+ zone: 'us-central1-a',
+ num_nodes: 1,
+ machine_type: 'machine_type-a'
+ },
+ clusterable: project
+ }
+ end
+
+ include_examples 'create cluster service error'
+ end
+ end
+
context 'when params includes :management_project_id' do
subject(:cluster) { described_class.new(user, params).execute(access_token: access_token) }
diff --git a/spec/services/metrics/dashboard/clone_dashboard_service_spec.rb b/spec/services/metrics/dashboard/clone_dashboard_service_spec.rb
index 04ae50b8eb6..4a226fe386c 100644
--- a/spec/services/metrics/dashboard/clone_dashboard_service_spec.rb
+++ b/spec/services/metrics/dashboard/clone_dashboard_service_spec.rb
@@ -81,7 +81,18 @@ RSpec.describe Metrics::Dashboard::CloneDashboardService, :use_clean_rails_memor
allow(::Gitlab::Metrics::Dashboard::Processor).to receive(:new).and_return(double(process: file_content_hash))
end
- it_behaves_like 'valid dashboard cloning process', ::Metrics::Dashboard::SystemDashboardService::DASHBOARD_PATH, [::Gitlab::Metrics::Dashboard::Stages::CommonMetricsInserter, ::Gitlab::Metrics::Dashboard::Stages::CustomMetricsInserter, ::Gitlab::Metrics::Dashboard::Stages::Sorter]
+ it_behaves_like 'valid dashboard cloning process', ::Metrics::Dashboard::SystemDashboardService::DASHBOARD_PATH,
+ [
+ ::Gitlab::Metrics::Dashboard::Stages::CommonMetricsInserter,
+ ::Gitlab::Metrics::Dashboard::Stages::CustomMetricsInserter,
+ ::Gitlab::Metrics::Dashboard::Stages::Sorter
+ ]
+
+ it_behaves_like 'valid dashboard cloning process', ::Metrics::Dashboard::ClusterDashboardService::DASHBOARD_PATH,
+ [
+ ::Gitlab::Metrics::Dashboard::Stages::CommonMetricsInserter,
+ ::Gitlab::Metrics::Dashboard::Stages::Sorter
+ ]
it_behaves_like 'valid dashboard cloning process',
::Metrics::Dashboard::SelfMonitoringDashboardService::DASHBOARD_PATH,
diff --git a/spec/services/metrics/dashboard/cluster_dashboard_service_spec.rb b/spec/services/metrics/dashboard/cluster_dashboard_service_spec.rb
new file mode 100644
index 00000000000..bf606241efc
--- /dev/null
+++ b/spec/services/metrics/dashboard/cluster_dashboard_service_spec.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Metrics::Dashboard::ClusterDashboardService, :use_clean_rails_memory_store_caching do
+ include MetricsDashboardHelpers
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:cluster_project) { create(:cluster_project) }
+ let_it_be(:cluster) { cluster_project.cluster }
+ let_it_be(:project) { cluster_project.project }
+
+ before do
+ project.add_maintainer(user)
+ end
+
+ describe '.valid_params?' do
+ let(:params) { { cluster: cluster, embedded: 'false' } }
+
+ subject { described_class.valid_params?(params) }
+
+ it { is_expected.to be_truthy }
+
+ context 'with matching dashboard_path' do
+ let(:params) { { dashboard_path: ::Metrics::Dashboard::ClusterDashboardService::DASHBOARD_PATH } }
+
+ it { is_expected.to be_truthy }
+ end
+
+    context 'when the cluster is missing and no dashboard_path is given' do
+ let(:params) { {} }
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ describe '#get_dashboard' do
+ let(:service_params) { [project, user, { cluster: cluster, cluster_type: :project }] }
+ let(:service_call) { described_class.new(*service_params).get_dashboard }
+
+ it_behaves_like 'valid dashboard service response'
+ it_behaves_like 'caches the unprocessed dashboard for subsequent calls'
+
+ context 'when called with a non-system dashboard' do
+ let(:dashboard_path) { 'garbage/dashboard/path' }
+
+      # The cluster dashboard is always returned, regardless of the requested dashboard_path.
+ it_behaves_like 'valid dashboard service response'
+ end
+ end
+end
diff --git a/spec/services/metrics/dashboard/cluster_metrics_embed_service_spec.rb b/spec/services/metrics/dashboard/cluster_metrics_embed_service_spec.rb
new file mode 100644
index 00000000000..e80911d6265
--- /dev/null
+++ b/spec/services/metrics/dashboard/cluster_metrics_embed_service_spec.rb
@@ -0,0 +1,72 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Metrics::Dashboard::ClusterMetricsEmbedService, :use_clean_rails_memory_store_caching do
+ include MetricsDashboardHelpers
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:cluster_project) { create(:cluster_project) }
+ let_it_be(:cluster) { cluster_project.cluster }
+ let_it_be(:project) { cluster_project.project }
+
+ before do
+ project.add_maintainer(user)
+ end
+
+ describe '.valid_params?' do
+ let(:valid_params) { { cluster: 1, embedded: 'true', group: 'hello', title: 'world', y_label: 'countries' } }
+
+ subject { described_class }
+
+ it { expect(subject.valid_params?(valid_params)).to be_truthy }
+
+    context 'when all params are missing' do
+ let(:params) { {} }
+
+ it { expect(subject.valid_params?(params)).to be_falsy }
+ end
+
+ [:cluster, :embedded, :group, :title, :y_label].each do |param_key|
+      it "returns false when #{param_key} is missing" do
+ params = valid_params.except(param_key)
+
+ expect(subject.valid_params?(params)).to be_falsy
+ end
+ end
+ end
+
+ describe '#get_dashboard' do
+ let(:service_params) do
+ [
+ project,
+ user,
+ {
+ cluster: cluster,
+ cluster_type: :project,
+ embedded: 'true',
+ group: 'Cluster Health',
+ title: 'CPU Usage',
+ y_label: 'CPU (cores)'
+ }
+ ]
+ end
+ let(:service_call) { described_class.new(*service_params).get_dashboard }
+ let(:panel_groups) { service_call[:dashboard][:panel_groups] }
+ let(:panel) { panel_groups.first[:panels].first }
+
+ it_behaves_like 'valid embedded dashboard service response'
+ it_behaves_like 'caches the unprocessed dashboard for subsequent calls'
+
+    it 'returns a single panel group containing one panel' do
+ expect(panel_groups.size).to eq 1
+ expect(panel_groups.first[:panels].size).to eq 1
+ end
+
+ it 'returns panel by title and y_label' do
+ expect(panel[:title]).to eq(service_params.last[:title])
+ expect(panel[:y_label]).to eq(service_params.last[:y_label])
+ end
+ end
+end
diff --git a/spec/services/projects/prometheus/alerts/notify_service_spec.rb b/spec/services/projects/prometheus/alerts/notify_service_spec.rb
index 852382dd0d8..557bf216277 100644
--- a/spec/services/projects/prometheus/alerts/notify_service_spec.rb
+++ b/spec/services/projects/prometheus/alerts/notify_service_spec.rb
@@ -102,6 +102,41 @@ RSpec.describe Projects::Prometheus::Alerts::NotifyService do
let(:payload_alert_firing) { payload_raw['alerts'].first }
let(:token) { 'token' }
+ context 'with environment specific clusters' do
+ let(:prd_cluster) do
+ cluster
+ end
+
+ let(:stg_cluster) do
+ create(:cluster, :provided_by_user, projects: [project], enabled: true, environment_scope: 'stg/*')
+ end
+
+ let(:stg_environment) do
+ create(:environment, project: project, name: 'stg/1')
+ end
+
+ let(:alert_firing) do
+ create(:prometheus_alert, project: project, environment: stg_environment)
+ end
+
+ before do
+ create(:clusters_applications_prometheus, :installed,
+ cluster: prd_cluster, alert_manager_token: token)
+ create(:clusters_applications_prometheus, :installed,
+ cluster: stg_cluster, alert_manager_token: nil)
+ end
+
+ context 'without token' do
+ let(:token_input) { nil }
+
+ it_behaves_like 'notifies alerts'
+ end
+
+ context 'with token' do
+ it_behaves_like 'no notifications', http_status: :unauthorized
+ end
+ end
+
context 'with project specific cluster' do
using RSpec::Parameterized::TableSyntax
diff --git a/spec/support/services/clusters/create_service_shared.rb b/spec/support/services/clusters/create_service_shared.rb
index 31aee08baec..f8a58a828ce 100644
--- a/spec/support/services/clusters/create_service_shared.rb
+++ b/spec/support/services/clusters/create_service_shared.rb
@@ -1,6 +1,7 @@
# frozen_string_literal: true
RSpec.shared_context 'valid cluster create params' do
+ let(:clusterable) { Clusters::Instance.new }
let(:params) do
{
name: 'test-cluster',
@@ -11,12 +12,14 @@ RSpec.shared_context 'valid cluster create params' do
num_nodes: 1,
machine_type: 'machine_type-a',
legacy_abac: 'true'
- }
+ },
+ clusterable: clusterable
}
end
end
RSpec.shared_context 'invalid cluster create params' do
+ let(:clusterable) { Clusters::Instance.new }
let(:params) do
{
name: 'test-cluster',
@@ -26,7 +29,9 @@ RSpec.shared_context 'invalid cluster create params' do
zone: 'us-central1-a',
num_nodes: 1,
machine_type: 'machine_type-a'
- }
+ },
+ clusterable: clusterable
}
end
end
diff --git a/spec/support/shared_contexts/prometheus/alert_shared_context.rb b/spec/support/shared_contexts/prometheus/alert_shared_context.rb
new file mode 100644
index 00000000000..330d2c4515f
--- /dev/null
+++ b/spec/support/shared_contexts/prometheus/alert_shared_context.rb
@@ -0,0 +1,76 @@
+# frozen_string_literal: true
+
+# These contexts expect a `project` to be defined.
+# They are intended to be used when creating an alert from the
+# provided `payload`.
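+#
+# Illustrative usage (a sketch mirroring the request specs in this change):
+#
+#   let_it_be(:project) { create(:project) }
+#
+#   include_context 'self-managed prometheus alert attributes'
+#
+#   let(:alert) do
+#     create(:alert_management_alert, :prometheus, project: project, payload: payload)
+#   end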
+RSpec.shared_context 'self-managed prometheus alert attributes' do
+ let_it_be(:environment) { create(:environment, project: project, name: 'production') }
+
+ let(:starts_at) { '2018-03-12T09:06:00Z' }
+ let(:title) { 'title' }
+ let(:y_label) { 'y_label' }
+ let(:query) { 'avg(metric) > 1.0' }
+
+ let(:embed_content) do
+ {
+ panel_groups: [{
+ panels: [{
+ type: 'line-graph',
+ title: title,
+ y_label: y_label,
+ metrics: [{ query_range: query }]
+ }]
+ }]
+ }.to_json
+ end
+
+ let(:payload) do
+ {
+ 'startsAt' => starts_at,
+ 'generatorURL' => "http://host?g0.expr=#{CGI.escape(query)}",
+ 'labels' => {
+ 'gitlab_environment_name' => 'production'
+ },
+ 'annotations' => {
+ 'title' => title,
+ 'gitlab_y_label' => y_label
+ }
+ }
+ end
+
+ let(:dashboard_url_for_alert) do
+ Gitlab::Routing.url_helpers.metrics_dashboard_project_environment_url(
+ project,
+ environment,
+ embed_json: embed_content,
+ embedded: true,
+ end: '2018-03-12T09:36:00Z',
+ start: '2018-03-12T08:36:00Z'
+ )
+ end
+end
+
+RSpec.shared_context 'gitlab-managed prometheus alert attributes' do
+ let_it_be(:prometheus_alert) { create(:prometheus_alert, project: project) }
+ let(:prometheus_metric_id) { prometheus_alert.prometheus_metric_id }
+
+ let(:payload) do
+ {
+ 'startsAt' => '2018-03-12T09:06:00Z',
+ 'labels' => {
+ 'gitlab_alert_id' => prometheus_metric_id
+ }
+ }
+ end
+
+ let(:dashboard_url_for_alert) do
+ Gitlab::Routing.url_helpers.metrics_dashboard_project_prometheus_alert_url(
+ project,
+ prometheus_metric_id,
+ environment_id: prometheus_alert.environment_id,
+ embedded: true,
+ end: '2018-03-12T09:36:00Z',
+ start: '2018-03-12T08:36:00Z'
+ )
+ end
+end
diff --git a/yarn.lock b/yarn.lock
index cf2b1ac3224..524b5692796 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -843,15 +843,15 @@
eslint-plugin-vue "^6.2.1"
vue-eslint-parser "^7.0.0"
-"@gitlab/svgs@1.150.0":
- version "1.150.0"
- resolved "https://registry.yarnpkg.com/@gitlab/svgs/-/svgs-1.150.0.tgz#7dbbdf1b50c4409adf30d23710bbe4515608e245"
- integrity sha512-jfD1EiawNlBM1XNEz7hriPJg2UOX6zE2/lKMIocSpkg9R58VGyIr+oyWOTn2AfknrepsLfnTiGJNveLdRYcy4w==
-
-"@gitlab/ui@17.16.0":
- version "17.16.0"
- resolved "https://registry.yarnpkg.com/@gitlab/ui/-/ui-17.16.0.tgz#6c69016234ab669b619096f3f2ba5328c94864b2"
- integrity sha512-0h/Vvykh7AtgjikOsC86PUQ35P5BWFkHA9aQ/klYwlsjU395C4K/zyEibxsg0fs3jivGyzpQh1pQKwTsZEq/Tw==
+"@gitlab/svgs@1.151.0":
+ version "1.151.0"
+ resolved "https://registry.yarnpkg.com/@gitlab/svgs/-/svgs-1.151.0.tgz#099905295d33eb31033f4a48eb3652da2f686239"
+ integrity sha512-2PTSM8CFhUjeTFKfcq6E/YwPpOVdSVWupf3NhKO/bz/cisSBS5P7aWxaXKIaxy28ySyBKEfKaAT6b4rXTwvVgg==
+
+"@gitlab/ui@17.18.1":
+ version "17.18.1"
+ resolved "https://registry.yarnpkg.com/@gitlab/ui/-/ui-17.18.1.tgz#212b4560310919cc405a157da21a47b75981546c"
+ integrity sha512-1VRPg5YnDuEs7SiDdYrT2kkNUHJhbD0PobnME1QW2bjCFjgbVHc9SvKNq9cbb0ao/SAyCefG3iC/aKJsQVhUmQ==
dependencies:
"@babel/standalone" "^7.0.0"
"@gitlab/vue-toasted" "^1.3.0"