Welcome to mirror list, hosted at ThFree Co, Russian Federation.

gitlab.com/gitlab-org/gitlab-foss.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorGitLab Bot <gitlab-bot@gitlab.com>2021-12-03 15:10:23 +0300
committerGitLab Bot <gitlab-bot@gitlab.com>2021-12-03 15:10:23 +0300
commit5f0d27d131aced1a53e8cbc7db023d9f947f8a1a (patch)
tree7007c07fc37c95638f3e71c1902dcd055db1d8ca
parentcc8ea69201e2e4d020018c43efeb993c44cd8a71 (diff)
Add latest changes from gitlab-org/gitlab@master
-rw-r--r--app/assets/javascripts/crm/components/contacts_root.vue67
-rw-r--r--app/assets/javascripts/crm/components/new_contact_form.vue140
-rw-r--r--app/assets/javascripts/crm/components/queries/create_contact.mutation.graphql10
-rw-r--r--app/assets/javascripts/crm/components/queries/crm_contact_fields.fragment.graphql14
-rw-r--r--app/assets/javascripts/crm/components/queries/get_group_contacts.query.graphql15
-rw-r--r--app/assets/javascripts/crm/contacts_bundle.js20
-rw-r--r--app/assets/javascripts/editor/extensions/example_source_editor_extension.js4
-rw-r--r--app/assets/javascripts/editor/source_editor_instance.js2
-rw-r--r--app/assets/javascripts/pipeline_editor/components/lint/ci_lint_results.vue8
-rw-r--r--app/assets/javascripts/test_utils/simulate_drag.js42
-rw-r--r--app/assets/javascripts/vue_shared/components/notes/system_note.vue2
-rw-r--r--app/controllers/concerns/cycle_analytics_params.rb1
-rw-r--r--app/controllers/groups/crm/contacts_controller.rb17
-rw-r--r--app/controllers/groups/crm/organizations_controller.rb13
-rw-r--r--app/controllers/groups/crm_controller.rb30
-rw-r--r--app/controllers/groups/variables_controller.rb2
-rw-r--r--app/controllers/projects/ci/lints_controller.rb1
-rw-r--r--app/controllers/projects/ci/pipeline_editor_controller.rb2
-rw-r--r--app/controllers/projects/variables_controller.rb2
-rw-r--r--app/finders/group_descendants_finder.rb18
-rw-r--r--app/models/bulk_imports/entity.rb4
-rw-r--r--app/models/bulk_imports/file_transfer/base_config.rb4
-rw-r--r--app/models/bulk_imports/file_transfer/project_config.rb6
-rw-r--r--app/models/ci/pipeline.rb7
-rw-r--r--app/models/commit_status.rb13
-rw-r--r--app/services/audit_event_service.rb11
-rw-r--r--app/services/bulk_imports/uploads_export_service.rb3
-rw-r--r--app/services/ci/expire_pipeline_cache_service.rb23
-rw-r--r--app/services/ci/pipeline_processing/atomic_processing_service.rb4
-rw-r--r--app/services/concerns/audit_event_save_type.rb26
-rw-r--r--app/services/merge_requests/after_create_service.rb6
-rw-r--r--app/views/groups/crm/contacts.html.haml4
-rw-r--r--app/views/groups/crm/contacts/index.html.haml4
-rw-r--r--app/views/groups/crm/organizations/index.html.haml (renamed from app/views/groups/crm/organizations.html.haml)0
-rw-r--r--app/views/projects/blob/show.html.haml4
-rw-r--r--app/workers/expire_job_cache_worker.rb15
-rw-r--r--config/feature_flags/development/expire_job_and_pipeline_cache_synchronously.yml8
-rw-r--r--config/feature_flags/development/linear_group_descendants_finder.yml8
-rw-r--r--config/metrics/settings/20211124061450_snowplow_enabled.yml24
-rw-r--r--config/metrics/settings/20211124085521_snowplow_configured_to_gitlab_collector_hostname.yml24
-rw-r--r--config/routes/group.rb8
-rw-r--r--doc/administration/geo/disaster_recovery/runbooks/planned_failover_multi_node.md12
-rw-r--r--doc/administration/geo/disaster_recovery/runbooks/planned_failover_single_node.md6
-rw-r--r--doc/administration/package_information/index.md10
-rw-r--r--doc/topics/autodevops/upgrading_postgresql.md2
-rw-r--r--doc/update/zero_downtime.md22
-rw-r--r--doc/user/profile/account/two_factor_authentication.md2
-rw-r--r--doc/user/project/issues/csv_import.md2
-rw-r--r--doc/user/project/merge_requests/load_performance_testing.md2
-rw-r--r--lib/api/ci/pipelines.rb2
-rw-r--r--lib/api/lint.rb4
-rw-r--r--lib/bulk_imports/common/pipelines/uploads_pipeline.rb29
-rw-r--r--lib/bulk_imports/groups/pipelines/group_avatar_pipeline.rb49
-rw-r--r--lib/bulk_imports/groups/stage.rb8
-rw-r--r--lib/gitlab/etag_caching/store.rb12
-rw-r--r--lib/gitlab/usage/metrics/instrumentations/snowplow_configured_to_gitlab_collector_metric.rb17
-rw-r--r--lib/gitlab/usage/metrics/instrumentations/snowplow_enabled_metric.rb15
-rw-r--r--lib/gitlab/usage_data.rb4
-rw-r--r--lib/sidebars/groups/menus/customer_relations_menu.rb4
-rw-r--r--locale/gitlab.pot27
-rw-r--r--metrics_server/override_rails_constants.rb (renamed from scripts/override_rails_constants.rb)4
-rw-r--r--metrics_server/settings_overrides.rb2
-rw-r--r--qa/qa/resource/group_deploy_token.rb47
-rw-r--r--qa/qa/specs/features/browser_ui/5_package/package_registry/nuget_repository_spec.rb9
-rw-r--r--scripts/frontend/startup_css/get_startup_css.js4
-rwxr-xr-xscripts/setup-test-env12
-rw-r--r--spec/features/admin/admin_deploy_keys_spec.rb134
-rw-r--r--spec/finders/group_descendants_finder_spec.rb345
-rw-r--r--spec/frontend/crm/contacts_root_spec.js129
-rw-r--r--spec/frontend/crm/mock_data.js29
-rw-r--r--spec/frontend/crm/new_contact_form_spec.js108
-rw-r--r--spec/frontend/editor/helpers.js2
-rw-r--r--spec/frontend/editor/source_editor_instance_spec.js2
-rw-r--r--spec/frontend/pipeline_editor/components/lint/ci_lint_results_spec.js4
-rw-r--r--spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb127
-rw-r--r--spec/lib/bulk_imports/groups/pipelines/group_avatar_pipeline_spec.rb77
-rw-r--r--spec/lib/bulk_imports/groups/stage_spec.rb6
-rw-r--r--spec/lib/gitlab/etag_caching/store_spec.rb14
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/snowplow_configured_to_gitlab_collector_metric_spec.rb22
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/snowplow_enabled_metric_spec.rb22
-rw-r--r--spec/lib/gitlab/usage_data_spec.rb12
-rw-r--r--spec/models/bulk_imports/entity_spec.rb10
-rw-r--r--spec/models/ci/pipeline_spec.rb26
-rw-r--r--spec/models/commit_status_spec.rb35
-rw-r--r--spec/services/bulk_imports/uploads_export_service_spec.rb24
-rw-r--r--spec/services/ci/expire_pipeline_cache_service_spec.rb54
-rw-r--r--spec/services/concerns/audit_event_save_type_spec.rb28
87 files changed, 1503 insertions, 630 deletions
diff --git a/app/assets/javascripts/crm/components/contacts_root.vue b/app/assets/javascripts/crm/components/contacts_root.vue
index 97220a3409d..0242bdab541 100644
--- a/app/assets/javascripts/crm/components/contacts_root.vue
+++ b/app/assets/javascripts/crm/components/contacts_root.vue
@@ -1,22 +1,28 @@
<script>
-import { GlButton, GlLoadingIcon, GlTable, GlTooltipDirective } from '@gitlab/ui';
-import createFlash from '~/flash';
+import { GlAlert, GlButton, GlLoadingIcon, GlTable, GlTooltipDirective } from '@gitlab/ui';
import { s__, __ } from '~/locale';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import getGroupContactsQuery from './queries/get_group_contacts.query.graphql';
+import NewContactForm from './new_contact_form.vue';
export default {
components: {
+ GlAlert,
GlButton,
GlLoadingIcon,
GlTable,
+ NewContactForm,
},
directives: {
GlTooltip: GlTooltipDirective,
},
- inject: ['groupFullPath', 'groupIssuesPath'],
+ inject: ['groupFullPath', 'groupIssuesPath', 'canAdminCrmContact'],
data() {
- return { contacts: [] };
+ return {
+ contacts: [],
+ error: false,
+ errorMessages: [],
+ };
},
apollo: {
contacts: {
@@ -31,12 +37,8 @@ export default {
update(data) {
return this.extractContacts(data);
},
- error(error) {
- createFlash({
- message: __('Something went wrong. Please try again.'),
- error,
- captureError: true,
- });
+ error() {
+ this.error = true;
},
},
},
@@ -44,12 +46,31 @@ export default {
isLoading() {
return this.$apollo.queries.contacts.loading;
},
+ showNewForm() {
+ return this.$route.path.startsWith('/new');
+ },
},
methods: {
extractContacts(data) {
const contacts = data?.group?.contacts?.nodes || [];
return contacts.slice().sort((a, b) => a.firstName.localeCompare(b.firstName));
},
+ displayNewForm() {
+ if (this.showNewForm) return;
+
+ this.$router.push({ path: '/new' });
+ },
+ hideNewForm() {
+ this.$router.replace({ path: '/' });
+ },
+ handleError(errors) {
+ this.error = true;
+ if (errors) this.errorMessages = errors;
+ },
+ dismissError() {
+ this.error = false;
+ this.errorMessages = [];
+ },
},
fields: [
{ key: 'firstName', sortable: true },
@@ -75,15 +96,41 @@ export default {
i18n: {
emptyText: s__('Crm|No contacts found'),
issuesButtonLabel: __('View issues'),
+ title: s__('Crm|Customer Relations Contacts'),
+ newContact: s__('Crm|New contact'),
+ errorText: __('Something went wrong. Please try again.'),
},
};
</script>
<template>
<div>
+ <gl-alert v-if="error" variant="danger" class="gl-mt-6" @dismiss="dismissError">
+ <div v-if="errorMessages.length == 0">{{ $options.i18n.errorText }}</div>
+ <div v-for="(message, index) in errorMessages" :key="index">{{ message }}</div>
+ </gl-alert>
+ <div
+ class="gl-display-flex gl-align-items-baseline gl-flex-direction-row gl-justify-content-space-between gl-mt-6"
+ >
+ <h2 class="gl-font-size-h2 gl-my-0">
+ {{ $options.i18n.title }}
+ </h2>
+ <div class="gl-display-none gl-md-display-flex gl-align-items-center gl-justify-content-end">
+ <gl-button
+ v-if="canAdminCrmContact"
+ variant="confirm"
+ data-testid="new-contact-button"
+ @click="displayNewForm"
+ >
+ {{ $options.i18n.newContact }}
+ </gl-button>
+ </div>
+ </div>
+ <new-contact-form v-if="showNewForm" @close="hideNewForm" @error="handleError" />
<gl-loading-icon v-if="isLoading" class="gl-mt-5" size="lg" />
<gl-table
v-else
+ class="gl-mt-5"
:items="contacts"
:fields="$options.fields"
:empty-text="$options.i18n.emptyText"
diff --git a/app/assets/javascripts/crm/components/new_contact_form.vue b/app/assets/javascripts/crm/components/new_contact_form.vue
new file mode 100644
index 00000000000..77ff82c5af4
--- /dev/null
+++ b/app/assets/javascripts/crm/components/new_contact_form.vue
@@ -0,0 +1,140 @@
+<script>
+import { GlButton, GlFormGroup, GlFormInput } from '@gitlab/ui';
+import { produce } from 'immer';
+import { __, s__ } from '~/locale';
+import { convertToGraphQLId } from '~/graphql_shared/utils';
+import { TYPE_GROUP } from '~/graphql_shared/constants';
+import createContact from './queries/create_contact.mutation.graphql';
+import getGroupContactsQuery from './queries/get_group_contacts.query.graphql';
+
+export default {
+ components: {
+ GlButton,
+ GlFormGroup,
+ GlFormInput,
+ },
+ inject: ['groupFullPath', 'groupId'],
+ data() {
+ return {
+ firstName: '',
+ lastName: '',
+ phone: '',
+ email: '',
+ description: '',
+ submitting: false,
+ };
+ },
+ computed: {
+ invalid() {
+ return this.firstName === '' || this.lastName === '' || this.email === '';
+ },
+ },
+ methods: {
+ save() {
+ this.submitting = true;
+ return this.$apollo
+ .mutate({
+ mutation: createContact,
+ variables: {
+ input: {
+ groupId: convertToGraphQLId(TYPE_GROUP, this.groupId),
+ firstName: this.firstName,
+ lastName: this.lastName,
+ phone: this.phone,
+ email: this.email,
+ description: this.description,
+ },
+ },
+ update: this.updateCache,
+ })
+ .then(({ data }) => {
+ if (data.customerRelationsContactCreate.errors.length === 0) this.close();
+
+ this.submitting = false;
+ })
+ .catch(() => {
+ this.error();
+ this.submitting = false;
+ });
+ },
+ close() {
+ this.$emit('close');
+ },
+ error(errors = null) {
+ this.$emit('error', errors);
+ },
+ updateCache(store, { data: { customerRelationsContactCreate } }) {
+ if (customerRelationsContactCreate.errors.length > 0) {
+ this.error(customerRelationsContactCreate.errors);
+ return;
+ }
+
+ const variables = {
+ groupFullPath: this.groupFullPath,
+ };
+ const sourceData = store.readQuery({
+ query: getGroupContactsQuery,
+ variables,
+ });
+
+ const data = produce(sourceData, (draftState) => {
+ draftState.group.contacts.nodes = [
+ ...sourceData.group.contacts.nodes,
+ customerRelationsContactCreate.contact,
+ ];
+ });
+
+ store.writeQuery({
+ query: getGroupContactsQuery,
+ variables,
+ data,
+ });
+ },
+ },
+ i18n: {
+ buttonLabel: s__('Crm|Create new contact'),
+ cancel: __('Cancel'),
+ firstName: s__('Crm|First name'),
+ lastName: s__('Crm|Last name'),
+ email: s__('Crm|Email'),
+ phone: s__('Crm|Phone number (optional)'),
+ description: s__('Crm|Description (optional)'),
+ },
+};
+</script>
+
+<template>
+ <div class="col-md-4">
+ <form @submit.prevent="save">
+ <gl-form-group :label="$options.i18n.firstName" label-for="contact-first-name">
+ <gl-form-input id="contact-first-name" v-model="firstName" />
+ </gl-form-group>
+ <gl-form-group :label="$options.i18n.lastName" label-for="contact-last-name">
+ <gl-form-input id="contact-last-name" v-model="lastName" />
+ </gl-form-group>
+ <gl-form-group :label="$options.i18n.email" label-for="contact-email">
+ <gl-form-input id="contact-email" v-model="email" />
+ </gl-form-group>
+ <gl-form-group :label="$options.i18n.phone" label-for="contact-phone">
+ <gl-form-input id="contact-phone" v-model="phone" />
+ </gl-form-group>
+ <gl-form-group :label="$options.i18n.description" label-for="contact-description">
+ <gl-form-input id="contact-description" v-model="description" />
+ </gl-form-group>
+ <div class="form-actions">
+ <gl-button
+ variant="confirm"
+ :disabled="invalid"
+ :loading="submitting"
+ data-testid="create-new-contact-button"
+ type="submit"
+ >{{ $options.i18n.buttonLabel }}</gl-button
+ >
+ <gl-button data-testid="cancel-button" @click="close">
+ {{ $options.i18n.cancel }}
+ </gl-button>
+ </div>
+ </form>
+ <div class="gl-pb-5"></div>
+ </div>
+</template>
diff --git a/app/assets/javascripts/crm/components/queries/create_contact.mutation.graphql b/app/assets/javascripts/crm/components/queries/create_contact.mutation.graphql
new file mode 100644
index 00000000000..e0192459609
--- /dev/null
+++ b/app/assets/javascripts/crm/components/queries/create_contact.mutation.graphql
@@ -0,0 +1,10 @@
+#import "./crm_contact_fields.fragment.graphql"
+
+mutation createContact($input: CustomerRelationsContactCreateInput!) {
+ customerRelationsContactCreate(input: $input) {
+ contact {
+ ...ContactFragment
+ }
+ errors
+ }
+}
diff --git a/app/assets/javascripts/crm/components/queries/crm_contact_fields.fragment.graphql b/app/assets/javascripts/crm/components/queries/crm_contact_fields.fragment.graphql
new file mode 100644
index 00000000000..cef4083b446
--- /dev/null
+++ b/app/assets/javascripts/crm/components/queries/crm_contact_fields.fragment.graphql
@@ -0,0 +1,14 @@
+fragment ContactFragment on CustomerRelationsContact {
+ __typename
+ id
+ firstName
+ lastName
+ email
+ phone
+ description
+ organization {
+ __typename
+ id
+ name
+ }
+}
diff --git a/app/assets/javascripts/crm/components/queries/get_group_contacts.query.graphql b/app/assets/javascripts/crm/components/queries/get_group_contacts.query.graphql
index f6acd258585..2a8150e42e3 100644
--- a/app/assets/javascripts/crm/components/queries/get_group_contacts.query.graphql
+++ b/app/assets/javascripts/crm/components/queries/get_group_contacts.query.graphql
@@ -1,21 +1,12 @@
+#import "./crm_contact_fields.fragment.graphql"
+
query contacts($groupFullPath: ID!) {
group(fullPath: $groupFullPath) {
__typename
id
contacts {
nodes {
- __typename
- id
- firstName
- lastName
- email
- phone
- description
- organization {
- __typename
- id
- name
- }
+ ...ContactFragment
}
}
}
diff --git a/app/assets/javascripts/crm/contacts_bundle.js b/app/assets/javascripts/crm/contacts_bundle.js
index b0edd0107b6..6ddc53840cc 100644
--- a/app/assets/javascripts/crm/contacts_bundle.js
+++ b/app/assets/javascripts/crm/contacts_bundle.js
@@ -1,9 +1,11 @@
import Vue from 'vue';
import VueApollo from 'vue-apollo';
+import VueRouter from 'vue-router';
import createDefaultClient from '~/lib/graphql';
import CrmContactsRoot from './components/contacts_root.vue';
Vue.use(VueApollo);
+Vue.use(VueRouter);
export default () => {
const el = document.getElementById('js-crm-contacts-app');
@@ -16,12 +18,26 @@ export default () => {
return false;
}
- const { groupFullPath, groupIssuesPath } = el.dataset;
+ const { basePath, groupFullPath, groupIssuesPath, canAdminCrmContact, groupId } = el.dataset;
+
+ const router = new VueRouter({
+ base: basePath,
+ mode: 'history',
+ routes: [
+ {
+ // eslint-disable-next-line @gitlab/require-i18n-strings
+ name: 'Contacts List',
+ path: '/',
+ component: CrmContactsRoot,
+ },
+ ],
+ });
return new Vue({
el,
+ router,
apolloProvider,
- provide: { groupFullPath, groupIssuesPath },
+ provide: { groupFullPath, groupIssuesPath, canAdminCrmContact, groupId },
render(createElement) {
return createElement(CrmContactsRoot);
},
diff --git a/app/assets/javascripts/editor/extensions/example_source_editor_extension.js b/app/assets/javascripts/editor/extensions/example_source_editor_extension.js
index 119a2aea9eb..33be6cf9e5d 100644
--- a/app/assets/javascripts/editor/extensions/example_source_editor_extension.js
+++ b/app/assets/javascripts/editor/extensions/example_source_editor_extension.js
@@ -16,11 +16,11 @@ export class MyFancyExtension {
* actions, keystrokes, update options, etc.
* Is called only once before the extension gets registered
*
- * @param { Object } [setupOptions] The setupOptions object
* @param { Object } [instance] The Source Editor instance
+ * @param { Object } [setupOptions] The setupOptions object
*/
// eslint-disable-next-line class-methods-use-this,no-unused-vars
- onSetup(setupOptions, instance) {}
+ onSetup(instance, setupOptions) {}
/**
* The first thing called after the extension is
diff --git a/app/assets/javascripts/editor/source_editor_instance.js b/app/assets/javascripts/editor/source_editor_instance.js
index 052a73d7091..fcffdc587be 100644
--- a/app/assets/javascripts/editor/source_editor_instance.js
+++ b/app/assets/javascripts/editor/source_editor_instance.js
@@ -153,7 +153,7 @@ export default class EditorInstance {
const extensionInstance = new EditorExtension(extension);
const { setupOptions, obj: extensionObj } = extensionInstance;
if (extensionObj.onSetup) {
- extensionObj.onSetup(setupOptions, this);
+ extensionObj.onSetup(this, setupOptions);
}
if (extensionsStore) {
this.registerExtension(extensionInstance, extensionsStore);
diff --git a/app/assets/javascripts/pipeline_editor/components/lint/ci_lint_results.vue b/app/assets/javascripts/pipeline_editor/components/lint/ci_lint_results.vue
index 7f6dce05b6e..13e254f138a 100644
--- a/app/assets/javascripts/pipeline_editor/components/lint/ci_lint_results.vue
+++ b/app/assets/javascripts/pipeline_editor/components/lint/ci_lint_results.vue
@@ -1,5 +1,5 @@
<script>
-import { GlAlert, GlLink, GlSprintf, GlTable } from '@gitlab/ui';
+import { GlAlert, GlLink, GlSprintf, GlTableLite } from '@gitlab/ui';
import { __ } from '~/locale';
import CiLintResultsParam from './ci_lint_results_param.vue';
import CiLintResultsValue from './ci_lint_results_value.vue';
@@ -36,7 +36,7 @@ export default {
GlAlert,
GlLink,
GlSprintf,
- GlTable,
+ GlTableLite,
CiLintWarnings,
CiLintResultsValue,
CiLintResultsParam,
@@ -129,7 +129,7 @@ export default {
@dismiss="isWarningDismissed = true"
/>
- <gl-table
+ <gl-table-lite
v-if="shouldShowTable"
:items="jobs"
:fields="$options.fields"
@@ -142,6 +142,6 @@ export default {
<template #cell(value)="{ item }">
<ci-lint-results-value :item="item" :dry-run="dryRun" />
</template>
- </gl-table>
+ </gl-table-lite>
</div>
</template>
diff --git a/app/assets/javascripts/test_utils/simulate_drag.js b/app/assets/javascripts/test_utils/simulate_drag.js
index 321315d531b..4f3f1365f4a 100644
--- a/app/assets/javascripts/test_utils/simulate_drag.js
+++ b/app/assets/javascripts/test_utils/simulate_drag.js
@@ -122,7 +122,6 @@ export default function simulateDrag(options) {
const firstRect = getRect(firstEl);
const lastRect = getRect(lastEl);
- const startTime = new Date().getTime();
const duration = options.duration || 1000;
simulateEvent(fromEl, 'pointerdown', {
@@ -140,8 +139,28 @@ export default function simulateDrag(options) {
toRect.cy = lastRect.y + lastRect.h + 50;
}
- const dragInterval = setInterval(() => {
- const progress = (new Date().getTime() - startTime) / duration;
+ let startTime;
+
+ // Called within dragFn when the drag should finish
+ const finishFn = () => {
+ if (options.ondragend) options.ondragend();
+
+ if (options.performDrop) {
+ simulateEvent(toEl, 'mouseup');
+ }
+
+ window.SIMULATE_DRAG_ACTIVE = 0;
+ };
+
+ const dragFn = (timestamp) => {
+ if (!startTime) {
+ startTime = timestamp;
+ }
+
+ const elapsed = timestamp - startTime;
+
+ // Make sure that progress maxes at 1
+ const progress = Math.min(elapsed / duration, 1);
const x = fromRect.cx + (toRect.cx - fromRect.cx) * progress;
const y = fromRect.cy + (toRect.cy - fromRect.cy + options.extraHeight) * progress;
const overEl = fromEl.ownerDocument.elementFromPoint(x, y);
@@ -152,16 +171,15 @@ export default function simulateDrag(options) {
});
if (progress >= 1) {
- if (options.ondragend) options.ondragend();
-
- if (options.performDrop) {
- simulateEvent(toEl, 'mouseup');
- }
-
- clearInterval(dragInterval);
- window.SIMULATE_DRAG_ACTIVE = 0;
+ // finish on next frame, so we can pause in the correct position for a frame
+ requestAnimationFrame(finishFn);
+ } else {
+ requestAnimationFrame(dragFn);
}
- }, 100);
+ };
+
+ // Start the drag animation
+ requestAnimationFrame(dragFn);
return {
target: fromEl,
diff --git a/app/assets/javascripts/vue_shared/components/notes/system_note.vue b/app/assets/javascripts/vue_shared/components/notes/system_note.vue
index 8877cfa39fb..1963d1aa7fe 100644
--- a/app/assets/javascripts/vue_shared/components/notes/system_note.vue
+++ b/app/assets/javascripts/vue_shared/components/notes/system_note.vue
@@ -141,6 +141,7 @@ export default {
variant="link"
:icon="descriptionVersionToggleIcon"
data-testid="compare-btn"
+ class="gl-vertical-align-text-bottom"
@click="toggleDescriptionVersion"
>{{ __('Compare with previous version') }}</gl-button
>
@@ -149,6 +150,7 @@ export default {
:icon="showLines ? 'chevron-up' : 'chevron-down'"
variant="link"
data-testid="outdated-lines-change-btn"
+ class="gl-vertical-align-text-bottom"
@click="toggleDiff"
>
{{ __('Compare changes') }}
diff --git a/app/controllers/concerns/cycle_analytics_params.rb b/app/controllers/concerns/cycle_analytics_params.rb
index 626093b4588..70bcefe339c 100644
--- a/app/controllers/concerns/cycle_analytics_params.rb
+++ b/app/controllers/concerns/cycle_analytics_params.rb
@@ -23,6 +23,7 @@ module CycleAnalyticsParams
opts[:from] = params[:from] || start_date(params)
opts[:to] = params[:to] if params[:to]
opts[:end_event_filter] = params[:end_event_filter] if params[:end_event_filter]
+ opts[:use_aggregated_data_collector] = params[:use_aggregated_data_collector] if params[:use_aggregated_data_collector]
opts.merge!(params.slice(*::Gitlab::Analytics::CycleAnalytics::RequestParams::FINDER_PARAM_NAMES))
opts.merge!(date_range(params))
end
diff --git a/app/controllers/groups/crm/contacts_controller.rb b/app/controllers/groups/crm/contacts_controller.rb
new file mode 100644
index 00000000000..97904fdd2fd
--- /dev/null
+++ b/app/controllers/groups/crm/contacts_controller.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+class Groups::Crm::ContactsController < Groups::ApplicationController
+ feature_category :team_planning
+
+ before_action :authorize_read_crm_contact!
+
+ def new
+ render action: "index"
+ end
+
+ private
+
+ def authorize_read_crm_contact!
+ render_404 unless can?(current_user, :read_crm_contact, group)
+ end
+end
diff --git a/app/controllers/groups/crm/organizations_controller.rb b/app/controllers/groups/crm/organizations_controller.rb
new file mode 100644
index 00000000000..6f285687e6b
--- /dev/null
+++ b/app/controllers/groups/crm/organizations_controller.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+class Groups::Crm::OrganizationsController < Groups::ApplicationController
+ feature_category :team_planning
+
+ before_action :authorize_read_crm_organization!
+
+ private
+
+ def authorize_read_crm_organization!
+ render_404 unless can?(current_user, :read_crm_organization, group)
+ end
+end
diff --git a/app/controllers/groups/crm_controller.rb b/app/controllers/groups/crm_controller.rb
deleted file mode 100644
index 40661b09be6..00000000000
--- a/app/controllers/groups/crm_controller.rb
+++ /dev/null
@@ -1,30 +0,0 @@
-# frozen_string_literal: true
-
-class Groups::CrmController < Groups::ApplicationController
- feature_category :team_planning
-
- before_action :authorize_read_crm_contact!, only: [:contacts]
- before_action :authorize_read_crm_organization!, only: [:organizations]
-
- def contacts
- respond_to do |format|
- format.html
- end
- end
-
- def organizations
- respond_to do |format|
- format.html
- end
- end
-
- private
-
- def authorize_read_crm_contact!
- render_404 unless can?(current_user, :read_crm_contact, group)
- end
-
- def authorize_read_crm_organization!
- render_404 unless can?(current_user, :read_crm_organization, group)
- end
-end
diff --git a/app/controllers/groups/variables_controller.rb b/app/controllers/groups/variables_controller.rb
index 9dbbd385ea8..1e23db9f32b 100644
--- a/app/controllers/groups/variables_controller.rb
+++ b/app/controllers/groups/variables_controller.rb
@@ -8,6 +8,8 @@ module Groups
feature_category :pipeline_authoring
+ urgency :low, [:show]
+
def show
respond_to do |format|
format.json do
diff --git a/app/controllers/projects/ci/lints_controller.rb b/app/controllers/projects/ci/lints_controller.rb
index 9dc3194df85..7ef5016ac00 100644
--- a/app/controllers/projects/ci/lints_controller.rb
+++ b/app/controllers/projects/ci/lints_controller.rb
@@ -6,6 +6,7 @@ class Projects::Ci::LintsController < Projects::ApplicationController
feature_category :pipeline_authoring
respond_to :json, only: [:create]
+ urgency :low, [:create]
def show
end
diff --git a/app/controllers/projects/ci/pipeline_editor_controller.rb b/app/controllers/projects/ci/pipeline_editor_controller.rb
index 600516f95a2..392a6afc636 100644
--- a/app/controllers/projects/ci/pipeline_editor_controller.rb
+++ b/app/controllers/projects/ci/pipeline_editor_controller.rb
@@ -9,6 +9,8 @@ class Projects::Ci::PipelineEditorController < Projects::ApplicationController
feature_category :pipeline_authoring
+ urgency :low, [:show]
+
def show
end
diff --git a/app/controllers/projects/variables_controller.rb b/app/controllers/projects/variables_controller.rb
index f93c75a203e..e7bccf5a243 100644
--- a/app/controllers/projects/variables_controller.rb
+++ b/app/controllers/projects/variables_controller.rb
@@ -5,6 +5,8 @@ class Projects::VariablesController < Projects::ApplicationController
feature_category :pipeline_authoring
+ urgency :low, [:show, :update]
+
def show
respond_to do |format|
format.json do
diff --git a/app/finders/group_descendants_finder.rb b/app/finders/group_descendants_finder.rb
index 18ccea330af..7974710e67b 100644
--- a/app/finders/group_descendants_finder.rb
+++ b/app/finders/group_descendants_finder.rb
@@ -87,9 +87,13 @@ class GroupDescendantsFinder
visible_to_user = visible_to_user.or(authorized_to_user)
end
- hierarchy_for_parent
- .descendants
- .where(visible_to_user)
+ group_to_query = if Feature.enabled?(:linear_group_descendants_finder, current_user, default_enabled: :yaml)
+ parent_group
+ else
+ hierarchy_for_parent
+ end
+
+ group_to_query.descendants.where(visible_to_user)
# rubocop: enable CodeReuse/Finder
end
# rubocop: enable CodeReuse/ActiveRecord
@@ -155,7 +159,13 @@ class GroupDescendantsFinder
# rubocop: disable CodeReuse/ActiveRecord
def projects_matching_filter
# rubocop: disable CodeReuse/Finder
- projects_nested_in_group = Project.where(namespace_id: hierarchy_for_parent.base_and_descendants.select(:id))
+ objects_in_hierarchy = if Feature.enabled?(:linear_group_descendants_finder, current_user, default_enabled: :yaml)
+ parent_group.self_and_descendants.as_ids
+ else
+ hierarchy_for_parent.base_and_descendants.select(:id)
+ end
+
+ projects_nested_in_group = Project.where(namespace_id: objects_in_hierarchy)
params_with_search = params.merge(search: params[:filter])
ProjectsFinder.new(params: params_with_search,
diff --git a/app/models/bulk_imports/entity.rb b/app/models/bulk_imports/entity.rb
index 33ce4686e27..38b7da76306 100644
--- a/app/models/bulk_imports/entity.rb
+++ b/app/models/bulk_imports/entity.rb
@@ -134,6 +134,10 @@ class BulkImports::Entity < ApplicationRecord
source_type == 'group_entity'
end
+ def update_service
+ "::#{pluralized_name.capitalize}::UpdateService".constantize
+ end
+
private
def validate_parent_is_a_group
diff --git a/app/models/bulk_imports/file_transfer/base_config.rb b/app/models/bulk_imports/file_transfer/base_config.rb
index 4d370315ad5..e735503a47f 100644
--- a/app/models/bulk_imports/file_transfer/base_config.rb
+++ b/app/models/bulk_imports/file_transfer/base_config.rb
@@ -5,6 +5,8 @@ module BulkImports
class BaseConfig
include Gitlab::Utils::StrongMemoize
+ UPLOADS_RELATION = 'uploads'
+
def initialize(portable)
@portable = portable
end
@@ -78,7 +80,7 @@ module BulkImports
end
def file_relations
- []
+ [UPLOADS_RELATION]
end
def skipped_relations
diff --git a/app/models/bulk_imports/file_transfer/project_config.rb b/app/models/bulk_imports/file_transfer/project_config.rb
index 9a0434da08a..fdfb0dd0186 100644
--- a/app/models/bulk_imports/file_transfer/project_config.rb
+++ b/app/models/bulk_imports/file_transfer/project_config.rb
@@ -3,8 +3,6 @@
module BulkImports
module FileTransfer
class ProjectConfig < BaseConfig
- UPLOADS_RELATION = 'uploads'
-
SKIPPED_RELATIONS = %w(
project_members
group_members
@@ -14,10 +12,6 @@ module BulkImports
::Gitlab::ImportExport.config_file
end
- def file_relations
- [UPLOADS_RELATION]
- end
-
def skipped_relations
SKIPPED_RELATIONS
end
diff --git a/app/models/ci/pipeline.rb b/app/models/ci/pipeline.rb
index 2bf33f821ab..a18b760eeb4 100644
--- a/app/models/ci/pipeline.rb
+++ b/app/models/ci/pipeline.rb
@@ -236,7 +236,12 @@ module Ci
pipeline.run_after_commit do
PipelineHooksWorker.perform_async(pipeline.id)
- ExpirePipelineCacheWorker.perform_async(pipeline.id)
+
+ if Feature.enabled?(:expire_job_and_pipeline_cache_synchronously, pipeline.project, default_enabled: :yaml)
+ Ci::ExpirePipelineCacheService.new.execute(pipeline) # rubocop: disable CodeReuse/ServiceClass
+ else
+ ExpirePipelineCacheWorker.perform_async(pipeline.id)
+ end
end
end
diff --git a/app/models/commit_status.rb b/app/models/commit_status.rb
index 1bbcf8837f6..d80b2fe37dc 100644
--- a/app/models/commit_status.rb
+++ b/app/models/commit_status.rb
@@ -188,7 +188,12 @@ class CommitStatus < Ci::ApplicationRecord
commit_status.run_after_commit do
PipelineProcessWorker.perform_async(pipeline_id) unless transition_options[:skip_pipeline_processing]
- ExpireJobCacheWorker.perform_async(id)
+
+ if Feature.enabled?(:expire_job_and_pipeline_cache_synchronously, project, default_enabled: :yaml)
+ expire_etag_cache!
+ else
+ ExpireJobCacheWorker.perform_async(id)
+ end
end
end
@@ -301,6 +306,12 @@ class CommitStatus < Ci::ApplicationRecord
.update_all(retried: true, processed: true)
end
+ def expire_etag_cache!
+ job_path = Gitlab::Routing.url_helpers.project_build_path(project, id, format: :json)
+
+ Gitlab::EtagCaching::Store.new.touch(job_path)
+ end
+
private
def unrecoverable_failure?
diff --git a/app/services/audit_event_service.rb b/app/services/audit_event_service.rb
index 563d4a924fc..1426bf25a00 100644
--- a/app/services/audit_event_service.rb
+++ b/app/services/audit_event_service.rb
@@ -1,6 +1,8 @@
# frozen_string_literal: true
class AuditEventService
+ include AuditEventSaveType
+
# Instantiates a new service
#
# @param [User] author the user who authors the change
@@ -10,13 +12,16 @@ class AuditEventService
# - Group: events are visible at Group and Instance level
# - User: events are visible at Instance level
# @param [Hash] details extra data of audit event
+ # @param [Symbol] save_type the type to save the event
+ # Can be selected from the following, :database, :stream, :database_and_stream .
#
# @return [AuditEventService]
- def initialize(author, entity, details = {})
+ def initialize(author, entity, details = {}, save_type = :database_and_stream)
@author = build_author(author)
@entity = entity
@details = details
@ip_address = resolve_ip_address(@author)
+ @save_type = save_type
end
# Builds the @details attribute for authentication
@@ -133,8 +138,8 @@ class AuditEventService
end
def save_or_track(event)
- event.save!
- stream_event_to_external_destinations(event)
+ event.save! if should_save_database?(@save_type)
+ stream_event_to_external_destinations(event) if should_save_stream?(@save_type)
rescue StandardError => e
Gitlab::ErrorTracking.track_exception(e, audit_event_type: event.class.to_s)
end
diff --git a/app/services/bulk_imports/uploads_export_service.rb b/app/services/bulk_imports/uploads_export_service.rb
index 32cc48c152c..7f5ee7b8624 100644
--- a/app/services/bulk_imports/uploads_export_service.rb
+++ b/app/services/bulk_imports/uploads_export_service.rb
@@ -5,6 +5,7 @@ module BulkImports
include Gitlab::ImportExport::CommandLineUtil
BATCH_SIZE = 100
+ AVATAR_PATH = 'avatar'
def initialize(portable, export_path)
@portable = portable
@@ -34,7 +35,7 @@ module BulkImports
def export_subdir_path(upload)
subdir = if upload.path == avatar_path
- 'avatar'
+ AVATAR_PATH
else
upload.try(:secret).to_s
end
diff --git a/app/services/ci/expire_pipeline_cache_service.rb b/app/services/ci/expire_pipeline_cache_service.rb
index 177c85cebcc..8622b1a5863 100644
--- a/app/services/ci/expire_pipeline_cache_service.rb
+++ b/app/services/ci/expire_pipeline_cache_service.rb
@@ -74,20 +74,25 @@ module Ci
def update_etag_cache(pipeline, store)
project = pipeline.project
- store.touch(project_pipelines_path(project))
- store.touch(commit_pipelines_path(project, pipeline.commit)) unless pipeline.commit.nil?
- store.touch(new_merge_request_pipelines_path(project))
+ etag_paths = [
+ project_pipelines_path(project),
+ new_merge_request_pipelines_path(project),
+ graphql_project_on_demand_scan_counts_path(project)
+ ]
+
+ etag_paths << commit_pipelines_path(project, pipeline.commit) unless pipeline.commit.nil?
+
each_pipelines_merge_request_path(pipeline) do |path|
- store.touch(path)
+ etag_paths << path
end
- pipeline.self_with_upstreams_and_downstreams.each do |relative_pipeline|
- store.touch(project_pipeline_path(relative_pipeline.project, relative_pipeline))
- store.touch(graphql_pipeline_path(relative_pipeline))
- store.touch(graphql_pipeline_sha_path(relative_pipeline.sha))
+ pipeline.self_with_upstreams_and_downstreams.includes(project: [:route, { namespace: :route }]).each do |relative_pipeline| # rubocop: disable CodeReuse/ActiveRecord
+ etag_paths << project_pipeline_path(relative_pipeline.project, relative_pipeline)
+ etag_paths << graphql_pipeline_path(relative_pipeline)
+ etag_paths << graphql_pipeline_sha_path(relative_pipeline.sha)
end
- store.touch(graphql_project_on_demand_scan_counts_path(project))
+ store.touch(*etag_paths)
end
def url_helpers
diff --git a/app/services/ci/pipeline_processing/atomic_processing_service.rb b/app/services/ci/pipeline_processing/atomic_processing_service.rb
index 236d660d829..d8ce063ffb4 100644
--- a/app/services/ci/pipeline_processing/atomic_processing_service.rb
+++ b/app/services/ci/pipeline_processing/atomic_processing_service.rb
@@ -36,6 +36,10 @@ module Ci
update_pipeline!
update_statuses_processed!
+ if Feature.enabled?(:expire_job_and_pipeline_cache_synchronously, pipeline.project, default_enabled: :yaml)
+ Ci::ExpirePipelineCacheService.new.execute(pipeline)
+ end
+
true
end
diff --git a/app/services/concerns/audit_event_save_type.rb b/app/services/concerns/audit_event_save_type.rb
new file mode 100644
index 00000000000..6696e4adae7
--- /dev/null
+++ b/app/services/concerns/audit_event_save_type.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+module AuditEventSaveType
+ SAVE_TYPES = {
+ database: 0b01,
+ stream: 0b10,
+ database_and_stream: 0b11
+ }.freeze
+
+ # def should_save_stream?(type)
+ # def should_save_database?(type)
+ [:database, :stream].each do |type|
+ define_method("should_save_#{type}?") do |param_type|
+ return false unless save_type_valid?(param_type)
+
+ # If the given save type does not include this type, the result of the `&` operation is 0.
+ SAVE_TYPES[param_type] & SAVE_TYPES[type] != 0
+ end
+ end
+
+ private
+
+ def save_type_valid?(type)
+ SAVE_TYPES.key?(type)
+ end
+end
diff --git a/app/services/merge_requests/after_create_service.rb b/app/services/merge_requests/after_create_service.rb
index f120cb26d22..d2c83f82ff8 100644
--- a/app/services/merge_requests/after_create_service.rb
+++ b/app/services/merge_requests/after_create_service.rb
@@ -2,6 +2,8 @@
module MergeRequests
class AfterCreateService < MergeRequests::BaseService
+ include Gitlab::Utils::StrongMemoize
+
def execute(merge_request)
prepare_for_mergeability(merge_request) if early_prepare_for_mergeability?(merge_request)
prepare_merge_request(merge_request)
@@ -48,7 +50,9 @@ module MergeRequests
end
def early_prepare_for_mergeability?(merge_request)
- Feature.enabled?(:early_prepare_for_mergeability, merge_request.target_project)
+ strong_memoize("early_prepare_for_mergeability_#{merge_request.target_project_id}".to_sym) do
+ Feature.enabled?(:early_prepare_for_mergeability, merge_request.target_project)
+ end
end
def mark_as_unchecked(merge_request)
diff --git a/app/views/groups/crm/contacts.html.haml b/app/views/groups/crm/contacts.html.haml
deleted file mode 100644
index 7d0ee5b64b1..00000000000
--- a/app/views/groups/crm/contacts.html.haml
+++ /dev/null
@@ -1,4 +0,0 @@
-- breadcrumb_title _('Customer Relations Contacts')
-- page_title _('Customer Relations Contacts')
-
-#js-crm-contacts-app{ data: { group_full_path: @group.full_path, group_issues_path: issues_group_path(@group) } }
diff --git a/app/views/groups/crm/contacts/index.html.haml b/app/views/groups/crm/contacts/index.html.haml
new file mode 100644
index 00000000000..81293937f77
--- /dev/null
+++ b/app/views/groups/crm/contacts/index.html.haml
@@ -0,0 +1,4 @@
+- breadcrumb_title _('Customer Relations Contacts')
+- page_title _('Customer Relations Contacts')
+
+#js-crm-contacts-app{ data: { group_full_path: @group.full_path, group_issues_path: issues_group_path(@group), group_id: @group.id, can_admin_crm_contact: can?(current_user, :admin_crm_contact, @group).to_s, base_path: group_crm_contacts_path(@group) } }
diff --git a/app/views/groups/crm/organizations.html.haml b/app/views/groups/crm/organizations/index.html.haml
index e83dab9fda6..e83dab9fda6 100644
--- a/app/views/groups/crm/organizations.html.haml
+++ b/app/views/groups/crm/organizations/index.html.haml
diff --git a/app/views/projects/blob/show.html.haml b/app/views/projects/blob/show.html.haml
index 168b240c657..d4e7ee90a84 100644
--- a/app/views/projects/blob/show.html.haml
+++ b/app/views/projects/blob/show.html.haml
@@ -14,8 +14,8 @@
- if can_modify_blob?(@blob)
= render 'projects/blob/remove'
- - title = "Replace #{@blob.name}"
- = render 'projects/blob/upload', title: title, placeholder: title, button_title: 'Replace file', form_path: project_update_blob_path(@project, @id), method: :put
+ - title = _("Replace %{blob_name}") % { blob_name: @blob.name }
+ = render 'projects/blob/upload', title: title, placeholder: title, button_title: _('Replace file'), form_path: project_update_blob_path(@project, @id), method: :put
= render partial: 'pipeline_tour_success' if show_suggest_pipeline_creation_celebration?
= render 'shared/web_ide_path'
diff --git a/app/workers/expire_job_cache_worker.rb b/app/workers/expire_job_cache_worker.rb
index 3c5a7717d70..49f0222e9c9 100644
--- a/app/workers/expire_job_cache_worker.rb
+++ b/app/workers/expire_job_cache_worker.rb
@@ -15,19 +15,10 @@ class ExpireJobCacheWorker # rubocop:disable Scalability/IdempotentWorker
idempotent!
def perform(job_id)
- job = CommitStatus.preload(:pipeline, :project).find_by_id(job_id) # rubocop: disable CodeReuse/ActiveRecord
+ job = CommitStatus.find_by_id(job_id)
return unless job
- pipeline = job.pipeline
- project = job.project
-
- Gitlab::EtagCaching::Store.new.touch(project_job_path(project, job))
- ExpirePipelineCacheWorker.perform_async(pipeline.id)
- end
-
- private
-
- def project_job_path(project, job)
- Gitlab::Routing.url_helpers.project_build_path(project, job.id, format: :json)
+ job.expire_etag_cache!
+ ExpirePipelineCacheWorker.perform_async(job.pipeline_id)
end
end
diff --git a/config/feature_flags/development/expire_job_and_pipeline_cache_synchronously.yml b/config/feature_flags/development/expire_job_and_pipeline_cache_synchronously.yml
new file mode 100644
index 00000000000..dda23cb641a
--- /dev/null
+++ b/config/feature_flags/development/expire_job_and_pipeline_cache_synchronously.yml
@@ -0,0 +1,8 @@
+---
+name: expire_job_and_pipeline_cache_synchronously
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/75611
+rollout_issue_url: https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/1380
+milestone: '14.6'
+type: development
+group: group::project management
+default_enabled: false
diff --git a/config/feature_flags/development/linear_group_descendants_finder.yml b/config/feature_flags/development/linear_group_descendants_finder.yml
new file mode 100644
index 00000000000..12f09c25c85
--- /dev/null
+++ b/config/feature_flags/development/linear_group_descendants_finder.yml
@@ -0,0 +1,8 @@
+---
+name: linear_group_descendants_finder
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/68954
+rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/339440
+milestone: '14.6'
+type: development
+group: group::access
+default_enabled: false
diff --git a/config/metrics/settings/20211124061450_snowplow_enabled.yml b/config/metrics/settings/20211124061450_snowplow_enabled.yml
new file mode 100644
index 00000000000..ae947115704
--- /dev/null
+++ b/config/metrics/settings/20211124061450_snowplow_enabled.yml
@@ -0,0 +1,24 @@
+---
+key_path: settings.snowplow_enabled
+name: snowplow_enabled_gitlab_instance
+description: Whether snowplow is enabled for the GitLab instance
+product_section: growth
+product_stage: growth
+product_group: group::product intelligence
+product_category: product intelligence
+value_type: boolean
+status: active
+milestone: "14.6"
+introduced_by_url: 'https://gitlab.com/gitlab-org/gitlab/-/merge_requests/75184'
+time_frame: none
+data_source: system
+instrumentation_class: SnowplowEnabledMetric
+data_category: optional
+performance_indicator_type: []
+distribution:
+- ce
+- ee
+tier:
+- free
+- premium
+- ultimate
diff --git a/config/metrics/settings/20211124085521_snowplow_configured_to_gitlab_collector_hostname.yml b/config/metrics/settings/20211124085521_snowplow_configured_to_gitlab_collector_hostname.yml
new file mode 100644
index 00000000000..4dff0b2af2f
--- /dev/null
+++ b/config/metrics/settings/20211124085521_snowplow_configured_to_gitlab_collector_hostname.yml
@@ -0,0 +1,24 @@
+---
+key_path: settings.snowplow_configured_to_gitlab_collector
+name: snowplow_configured_to_gitlab_collector
+description: Metric informs whether the currently configured Snowplow collector hostname points towards the GitLab Snowplow collection pipeline.
+product_section: growth
+product_stage: growth
+product_group: group::product intelligence
+product_category: product intelligence
+value_type: boolean
+status: active
+milestone: "14.6"
+introduced_by_url: 'https://gitlab.com/gitlab-org/gitlab/-/merge_requests/75184'
+time_frame: none
+data_source: system
+instrumentation_class: SnowplowConfiguredToGitlabCollectorMetric
+data_category: optional
+performance_indicator_type: []
+distribution:
+- ce
+- ee
+tier:
+- free
+- premium
+- ultimate
diff --git a/config/routes/group.rb b/config/routes/group.rb
index 9a50d580747..b0255f1d8fa 100644
--- a/config/routes/group.rb
+++ b/config/routes/group.rb
@@ -126,11 +126,9 @@ constraints(::Constraints::GroupUrlConstrainer.new) do
end
end
- resources :crm, only: [] do
- collection do
- get 'contacts'
- get 'organizations'
- end
+ namespace :crm do
+ resources :contacts, only: [:index, :new]
+ resources :organizations, only: [:index]
end
end
diff --git a/doc/administration/geo/disaster_recovery/runbooks/planned_failover_multi_node.md b/doc/administration/geo/disaster_recovery/runbooks/planned_failover_multi_node.md
index 82a870aaa33..b207be47aa1 100644
--- a/doc/administration/geo/disaster_recovery/runbooks/planned_failover_multi_node.md
+++ b/doc/administration/geo/disaster_recovery/runbooks/planned_failover_multi_node.md
@@ -60,7 +60,7 @@ What is not covered:
NOTE:
Before following any of those steps, make sure you have `root` access to the
-**secondary** to promote it, since there isn't provided an automated way to
+**secondary** to promote it, because no automated way is provided to
promote a Geo replica and perform a failover.
NOTE:
@@ -122,7 +122,7 @@ follow these steps to avoid unnecessary data loss:
From this point, users are unable to view their data or make changes on the
**primary** site. They are also unable to log in to the **secondary** site.
- However, existing sessions need to work for the remainder of the maintenance period, and
+ However, existing sessions must work for the remainder of the maintenance period, and
so public data is accessible throughout.
1. Verify the **primary** site is blocked to HTTP traffic by visiting it in browser via
@@ -135,7 +135,7 @@ follow these steps to avoid unnecessary data loss:
1. On the **primary** site:
1. On the top bar, select **Menu > Admin**.
1. On the left sidebar, select **Monitoring > Background Jobs**.
- 1. On the Sidekiq dhasboard, select **Cron**.
+ 1. On the Sidekiq dashboard, select **Cron**.
1. Select `Disable All` to disable any non-Geo periodic background jobs.
1. Select `Enable` for the `geo_sidekiq_cron_config_worker` cron job.
This job re-enables several other cron jobs that are essential for planned
@@ -176,7 +176,7 @@ follow these steps to avoid unnecessary data loss:
At this point, your **secondary** site contains an up-to-date copy of everything the
**primary** site has, meaning nothing is lost when you fail over.
-1. In this final step, you need to permanently disable the **primary** site.
+1. In this final step, you must permanently disable the **primary** site.
WARNING:
When the **primary** site goes offline, there may be data saved on the **primary** site
@@ -204,7 +204,7 @@ follow these steps to avoid unnecessary data loss:
```
NOTE:
- (**CentOS only**) In CentOS 6 or older, there is no easy way to prevent GitLab from being
+ (**CentOS only**) In CentOS 6 or older, it is challenging to prevent GitLab from being
started if the machine reboots isn't available (see [Omnibus GitLab issue #3058](https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/3058)).
It may be safest to uninstall the GitLab package completely with `sudo yum remove gitlab-ee`.
@@ -216,7 +216,7 @@ follow these steps to avoid unnecessary data loss:
- If you do not have SSH access to the **primary** site, take the machine offline and
prevent it from rebooting. Since there are many ways you may prefer to accomplish
- this, we avoid a single recommendation. You may need to:
+ this, we avoid a single recommendation. You may have to:
- Reconfigure the load balancers.
- Change DNS records (for example, point the **primary** DNS record to the
diff --git a/doc/administration/geo/disaster_recovery/runbooks/planned_failover_single_node.md b/doc/administration/geo/disaster_recovery/runbooks/planned_failover_single_node.md
index 8a4f2ed4306..5a6f9eb8be7 100644
--- a/doc/administration/geo/disaster_recovery/runbooks/planned_failover_single_node.md
+++ b/doc/administration/geo/disaster_recovery/runbooks/planned_failover_single_node.md
@@ -52,7 +52,7 @@ Before following any of those steps, make sure you have `root` access to the
promote a Geo replica and perform a failover.
NOTE:
-GitLab 13.9 through GitLab 14.3 are affected by a bug in which the Geo secondary site statuses will appear to stop updating and become unhealthy. For more information, see [Geo Admin Area shows 'Unhealthy' after enabling Maintenance Mode](../../replication/troubleshooting.md#geo-admin-area-shows-unhealthy-after-enabling-maintenance-mode).
+GitLab 13.9 through GitLab 14.3 are affected by a bug in which the Geo secondary site statuses appear to stop updating and become unhealthy. For more information, see [Geo Admin Area shows 'Unhealthy' after enabling Maintenance Mode](../../replication/troubleshooting.md#geo-admin-area-shows-unhealthy-after-enabling-maintenance-mode).
On the **secondary** site, navigate to the **Admin Area > Geo** dashboard to
review its status. Replicated objects (shown in green) should be close to 100%,
@@ -73,7 +73,7 @@ A common cause of replication failures is the data being missing on the
**primary** site - you can resolve these failures by restoring the data from backup,
or removing references to the missing data.
-The maintenance window won't end until Geo replication and verification is
+The maintenance window does not end until Geo replication and verification is
completely finished. To keep the window as short as possible, you should
ensure these processes are close to 100% as possible during active use.
@@ -123,7 +123,7 @@ follow these steps to avoid unnecessary data loss:
1. On the Sidekiq dhasboard, select **Cron**.
1. Select `Disable All` to disable any non-Geo periodic background jobs.
1. Select `Enable` for the `geo_sidekiq_cron_config_worker` cron job.
- This job will re-enable several other cron jobs that are essential for planned
+ This job re-enables several other cron jobs that are essential for planned
failover to complete successfully.
1. Finish replicating and verifying all data:
diff --git a/doc/administration/package_information/index.md b/doc/administration/package_information/index.md
index 12f3274ecab..ab4b1edfa30 100644
--- a/doc/administration/package_information/index.md
+++ b/doc/administration/package_information/index.md
@@ -18,7 +18,7 @@ The released package versions are in the format `MAJOR.MINOR.PATCH-EDITION.OMNIB
|-------------------|---------|---------|
| MAJOR.MINOR.PATCH | The GitLab version this corresponds to. | 13.3.0 |
| EDITION | The edition of GitLab this corresponds to. | ee |
-| OMNIBUS_RELEASE | The Omnibus GitLab release. Usually, this will be 0. This is incremented if we need to build a new package without changing the GitLab version. | 0 |
+| OMNIBUS_RELEASE | The Omnibus GitLab release. Usually, this is 0. This is incremented if we need to build a new package without changing the GitLab version. | 0 |
## Licenses
@@ -27,7 +27,7 @@ See [licensing](licensing.md)
## Defaults
The Omnibus GitLab package requires various configuration to get the components
-in working order. If the configuration is not provided, the package will use
+in working order. If the configuration is not provided, the package uses
the default values assumed in the package.
These defaults are noted in the package [defaults document](defaults.md).
@@ -59,8 +59,8 @@ accidental overwrite of user configuration provided in `/etc/gitlab/gitlab.rb`.
New configuration options are noted in the
[`gitlab.rb.template` file](https://gitlab.com/gitlab-org/omnibus-gitlab/raw/master/files/gitlab-config-template/gitlab.rb.template).
-The Omnibus GitLab package also provides convenience command which will
-compare the existing user configuration with the latest version of the
+The Omnibus GitLab package also provides a convenience command which
+compares the existing user configuration with the latest version of the
template contained in the package.
To view a diff between your configuration file and the latest version, run:
@@ -76,7 +76,7 @@ characters on each line.
## Init system detection
-Omnibus GitLab will attempt to query the underlaying system in order to
+Omnibus GitLab attempts to query the underlying system in order to
check which init system it uses.
This manifests itself as a `WARNING` during the `sudo gitlab-ctl reconfigure`
run.
diff --git a/doc/topics/autodevops/upgrading_postgresql.md b/doc/topics/autodevops/upgrading_postgresql.md
index e526ba55465..258195bb89f 100644
--- a/doc/topics/autodevops/upgrading_postgresql.md
+++ b/doc/topics/autodevops/upgrading_postgresql.md
@@ -177,7 +177,7 @@ NOTE:
You can also
[scope](../../ci/environments/index.md#scope-environments-with-specs) the
`AUTO_DEVOPS_POSTGRES_CHANNEL`, `AUTO_DEVOPS_POSTGRES_DELETE_V1` and
-`POSTGRES_VERSION` variables to specific environments, e.g. `staging`.
+`POSTGRES_VERSION` variables to specific environments, for example, `staging`.
1. Set `AUTO_DEVOPS_POSTGRES_CHANNEL` to `2`. This opts into using the
newer 8.2.1-based PostgreSQL, and removes the older 0.7.1-based
diff --git a/doc/update/zero_downtime.md b/doc/update/zero_downtime.md
index b2d5cd6da76..244b558d375 100644
--- a/doc/update/zero_downtime.md
+++ b/doc/update/zero_downtime.md
@@ -29,9 +29,9 @@ If you meet all the requirements above, follow these instructions in order. Ther
| [Geo](#geo-deployment) | GitLab EE with Geo enabled |
| [Multi-node / HA with Geo](#multi-node--ha-deployment-with-geo) | GitLab CE/EE on multiple nodes |
-Each type of deployment will require that you hot reload the `puma` and `sidekiq` processes on all nodes running these
+Each type of deployment requires that you hot reload the `puma` and `sidekiq` processes on all nodes running these
services after you've upgraded. The reason for this is that those processes each load the GitLab Rails application which reads and loads
-the database schema into memory when starting up. Each of these processes will need to be reloaded (or restarted in the case of `sidekiq`)
+the database schema into memory when starting up. Each of these processes needs to be reloaded (or restarted in the case of `sidekiq`)
to re-read any database changes that have been made by post-deployment migrations.
Most of the time you can safely upgrade from a patch release to the next minor
@@ -176,14 +176,14 @@ Upgrades on web (Puma) nodes must be done in a rolling manner, one after
another, ensuring at least one node is always up to serve traffic. This is
required to ensure zero-downtime.
-Puma will enter a blackout period as part of the upgrade, during which they
-continue to accept connections but will mark their respective health check
+Puma enters a blackout period as part of the upgrade, during which nodes
+continue to accept connections but mark their respective health check
endpoints to be unhealthy. On seeing this, the load balancer should disconnect
them gracefully.
-Puma will restart only after completing all the currently processing requests.
+Puma restarts only after completing all the currently processing requests.
This ensures data and service integrity. Once they have restarted, the health
-check end points will be marked healthy.
+check end points are marked healthy.
The nodes must be updated in the following order to update an HA instance using
load balancer to latest GitLab version.
@@ -254,7 +254,7 @@ the application.
Before you update the main application you need to update Praefect.
Out of your Praefect nodes, pick one to be your Praefect deploy node.
-This is where you will install the new Omnibus package first and run
+This is where you install the new Omnibus package first and run
database migrations.
**Praefect deploy node**
@@ -360,7 +360,7 @@ node throughout the process.
- If you're using PgBouncer:
- You'll need to bypass PgBouncer and connect directly to the database master
+ You need to bypass PgBouncer and connect directly to the database master
before running migrations.
Rails uses an advisory lock when attempting to run a migration to prevent
@@ -457,7 +457,7 @@ following command to get address of current Redis primary
```
- If your application node is running a version older than GitLab 12.7.0, you
- will have to run the underlying `redis-cli` command (which `get-redis-master`
+ have to run the underlying `redis-cli` command (which `get-redis-master`
command uses) to fetch information about the primary.
1. Get the address of one of the sentinel nodes specified as
@@ -653,7 +653,7 @@ setting `gitlab_rails['auto_migrate'] = false` in
This section describes the steps required to upgrade a multi-node / HA
deployment with Geo. Some steps must be performed on a particular node. This
-node will be known as the “deploy node” and is noted through the following
+node is known as the “deploy node” and is noted through the following
instructions.
Updates must be performed in the following order:
@@ -737,7 +737,7 @@ sudo touch /etc/gitlab/skip-auto-reconfigure
1. If you're using PgBouncer:
- You'll need to bypass PgBouncer and connect directly to the database master
+ You need to bypass PgBouncer and connect directly to the database master
before running migrations.
Rails uses an advisory lock when attempting to run a migration to prevent
diff --git a/doc/user/profile/account/two_factor_authentication.md b/doc/user/profile/account/two_factor_authentication.md
index 13c2b14a0e4..32e3786f94c 100644
--- a/doc/user/profile/account/two_factor_authentication.md
+++ b/doc/user/profile/account/two_factor_authentication.md
@@ -350,7 +350,7 @@ request, and you're automatically signed in.
### Sign in by using a WebAuthn device
In supported browsers you should be automatically prompted to activate your WebAuthn device
-(e.g. by touching/pressing its button) after entering your credentials.
+(for example, by touching/pressing its button) after entering your credentials.
A message displays, indicating that your device responded to the authentication
request and you're automatically signed in.
diff --git a/doc/user/project/issues/csv_import.md b/doc/user/project/issues/csv_import.md
index 02a4f6a4384..f446fae5a88 100644
--- a/doc/user/project/issues/csv_import.md
+++ b/doc/user/project/issues/csv_import.md
@@ -48,7 +48,7 @@ When importing issues from a CSV file, it must be formatted in a certain way:
- **double-quote character:** The double-quote (`"`) character is used to quote fields,
enabling the use of the column separator within a field (see the third line in the
sample CSV data below). To insert a double-quote (`"`) within a quoted
- field, use two double-quote characters in succession, i.e. `""`.
+ field, use two double-quote characters in succession (`""`).
- **data rows:** After the header row, succeeding rows must follow the same column
order. The issue title is required while the description is optional.
diff --git a/doc/user/project/merge_requests/load_performance_testing.md b/doc/user/project/merge_requests/load_performance_testing.md
index e88edc7b517..7b157aa94d8 100644
--- a/doc/user/project/merge_requests/load_performance_testing.md
+++ b/doc/user/project/merge_requests/load_performance_testing.md
@@ -161,7 +161,7 @@ such as: ``http.get(`${__ENV.ENVIRONMENT_URL}`)``.
For example:
1. In the `review` job:
- 1. Capture the dynamic URL and save it into a `.env` file, e.g. `echo "ENVIRONMENT_URL=$CI_ENVIRONMENT_URL" >> review.env`.
+ 1. Capture the dynamic URL and save it into a `.env` file, for example, `echo "ENVIRONMENT_URL=$CI_ENVIRONMENT_URL" >> review.env`.
1. Set the `.env` file to be a [job artifact](../../../ci/pipelines/job_artifacts.md#job-artifacts).
1. In the `load_performance` job:
1. Set it to depend on the review job, so it inherits the environment file.
diff --git a/lib/api/ci/pipelines.rb b/lib/api/ci/pipelines.rb
index 03b59e7e6ad..4e5d6c264bf 100644
--- a/lib/api/ci/pipelines.rb
+++ b/lib/api/ci/pipelines.rb
@@ -166,7 +166,7 @@ module API
params do
requires :pipeline_id, type: Integer, desc: 'The pipeline ID'
end
- get ':id/pipelines/:pipeline_id/variables', feature_category: :pipeline_authoring do
+ get ':id/pipelines/:pipeline_id/variables', feature_category: :pipeline_authoring, urgency: :low do
authorize! :read_pipeline_variable, pipeline
present pipeline.variables, with: Entities::Ci::Variable
diff --git a/lib/api/lint.rb b/lib/api/lint.rb
index f1e19e9c3c5..299e3cabba3 100644
--- a/lib/api/lint.rb
+++ b/lib/api/lint.rb
@@ -33,7 +33,7 @@ module API
optional :dry_run, type: Boolean, default: false, desc: 'Run pipeline creation simulation, or only do static check.'
optional :include_jobs, type: Boolean, desc: 'Whether or not to include CI jobs in the response'
end
- get ':id/ci/lint' do
+ get ':id/ci/lint', urgency: :low do
authorize! :download_code, user_project
content = user_project.repository.gitlab_ci_yml_for(user_project.commit.id, user_project.ci_config_path_or_default)
@@ -54,7 +54,7 @@ module API
optional :dry_run, type: Boolean, default: false, desc: 'Run pipeline creation simulation, or only do static check.'
optional :include_jobs, type: Boolean, desc: 'Whether or not to include CI jobs in the response'
end
- post ':id/ci/lint' do
+ post ':id/ci/lint', urgency: :low do
authorize! :create_pipeline, user_project
result = Gitlab::Ci::Lint
diff --git a/lib/bulk_imports/common/pipelines/uploads_pipeline.rb b/lib/bulk_imports/common/pipelines/uploads_pipeline.rb
index 15e126e1646..49c16209661 100644
--- a/lib/bulk_imports/common/pipelines/uploads_pipeline.rb
+++ b/lib/bulk_imports/common/pipelines/uploads_pipeline.rb
@@ -8,6 +8,9 @@ module BulkImports
include Gitlab::ImportExport::CommandLineUtil
FILENAME = 'uploads.tar.gz'
+ AVATAR_PATTERN = %r{.*\/#{BulkImports::UploadsExportService::AVATAR_PATH}\/(?<identifier>.*)}.freeze
+
+ AvatarLoadingError = Class.new(StandardError)
def extract(context)
download_service(tmp_dir, context).execute
@@ -18,14 +21,18 @@ module BulkImports
end
def load(context, file_path)
- dynamic_path = FileUploader.extract_dynamic_path(file_path)
+ avatar_path = AVATAR_PATTERN.match(file_path)
+
+ return save_avatar(file_path) if avatar_path
+
+ dynamic_path = file_uploader.extract_dynamic_path(file_path)
return unless dynamic_path
return if File.directory?(file_path)
named_captures = dynamic_path.named_captures.symbolize_keys
- UploadService.new(context.portable, File.open(file_path, 'r'), FileUploader, **named_captures).execute
+ UploadService.new(context.portable, File.open(file_path, 'r'), file_uploader, **named_captures).execute
end
def after_run(_)
@@ -46,6 +53,24 @@ module BulkImports
def tmp_dir
@tmp_dir ||= Dir.mktmpdir('bulk_imports')
end
+
+ def file_uploader
+ @file_uploader ||= if context.entity.group?
+ NamespaceFileUploader
+ else
+ FileUploader
+ end
+ end
+
+ def save_avatar(file_path)
+ File.open(file_path) do |avatar|
+ service = context.entity.update_service.new(portable, current_user, avatar: avatar)
+
+ unless service.execute
+ raise AvatarLoadingError, portable.errors.full_messages.to_sentence
+ end
+ end
+ end
end
end
end
diff --git a/lib/bulk_imports/groups/pipelines/group_avatar_pipeline.rb b/lib/bulk_imports/groups/pipelines/group_avatar_pipeline.rb
deleted file mode 100644
index 6de8bbbc910..00000000000
--- a/lib/bulk_imports/groups/pipelines/group_avatar_pipeline.rb
+++ /dev/null
@@ -1,49 +0,0 @@
-# frozen_string_literal: true
-
-module BulkImports
- module Groups
- module Pipelines
- class GroupAvatarPipeline
- include Pipeline
-
- ALLOWED_AVATAR_DOWNLOAD_TYPES = (AvatarUploader::MIME_WHITELIST + %w(application/octet-stream)).freeze
-
- GroupAvatarLoadingError = Class.new(StandardError)
-
- def extract(context)
- context.extra[:tmpdir] = Dir.mktmpdir
-
- filepath = BulkImports::FileDownloadService.new(
- configuration: context.configuration,
- relative_url: "/groups/#{context.entity.encoded_source_full_path}/avatar",
- dir: context.extra[:tmpdir],
- file_size_limit: Avatarable::MAXIMUM_FILE_SIZE,
- allowed_content_types: ALLOWED_AVATAR_DOWNLOAD_TYPES
- ).execute
-
- BulkImports::Pipeline::ExtractedData.new(data: { filepath: filepath })
- end
-
- def load(context, data)
- return if data.blank?
-
- File.open(data[:filepath]) do |avatar|
- service = ::Groups::UpdateService.new(
- portable,
- current_user,
- avatar: avatar
- )
-
- unless service.execute
- raise GroupAvatarLoadingError, portable.errors.full_messages.first
- end
- end
- end
-
- def after_run(_)
- FileUtils.remove_entry(context.extra[:tmpdir]) if context.extra[:tmpdir].present?
- end
- end
- end
- end
-end
diff --git a/lib/bulk_imports/groups/stage.rb b/lib/bulk_imports/groups/stage.rb
index 6631c212913..1a3babe1679 100644
--- a/lib/bulk_imports/groups/stage.rb
+++ b/lib/bulk_imports/groups/stage.rb
@@ -11,10 +11,6 @@ module BulkImports
pipeline: BulkImports::Groups::Pipelines::GroupPipeline,
stage: 0
},
- avatar: {
- pipeline: BulkImports::Groups::Pipelines::GroupAvatarPipeline,
- stage: 1
- },
subgroups: {
pipeline: BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline,
stage: 1
@@ -39,6 +35,10 @@ module BulkImports
pipeline: BulkImports::Common::Pipelines::BoardsPipeline,
stage: 2
},
+ uploads: {
+ pipeline: BulkImports::Common::Pipelines::UploadsPipeline,
+ stage: 2
+ },
finisher: {
pipeline: BulkImports::Common::Pipelines::EntityFinisher,
stage: 3
diff --git a/lib/gitlab/etag_caching/store.rb b/lib/gitlab/etag_caching/store.rb
index d0d790a7c72..44c6984c09b 100644
--- a/lib/gitlab/etag_caching/store.rb
+++ b/lib/gitlab/etag_caching/store.rb
@@ -12,14 +12,18 @@ module Gitlab
Gitlab::Redis::SharedState.with { |redis| redis.get(redis_shared_state_key(key)) }
end
- def touch(key, only_if_missing: false)
- etag = generate_etag
+ def touch(*keys, only_if_missing: false)
+ etags = keys.map { generate_etag }
Gitlab::Redis::SharedState.with do |redis|
- redis.set(redis_shared_state_key(key), etag, ex: EXPIRY_TIME, nx: only_if_missing)
+ redis.pipelined do
+ keys.each_with_index do |key, i|
+ redis.set(redis_shared_state_key(key), etags[i], ex: EXPIRY_TIME, nx: only_if_missing)
+ end
+ end
end
- etag
+ keys.size > 1 ? etags : etags.first
end
private
diff --git a/lib/gitlab/usage/metrics/instrumentations/snowplow_configured_to_gitlab_collector_metric.rb b/lib/gitlab/usage/metrics/instrumentations/snowplow_configured_to_gitlab_collector_metric.rb
new file mode 100644
index 00000000000..3a92525303b
--- /dev/null
+++ b/lib/gitlab/usage/metrics/instrumentations/snowplow_configured_to_gitlab_collector_metric.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Usage
+ module Metrics
+ module Instrumentations
+ class SnowplowConfiguredToGitlabCollectorMetric < GenericMetric
+ GITLAB_SNOWPLOW_COLLECTOR_HOSTNAME = 'snowplow.trx.gitlab.net'
+
+ def value
+ Gitlab::CurrentSettings.snowplow_collector_hostname == GITLAB_SNOWPLOW_COLLECTOR_HOSTNAME
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/usage/metrics/instrumentations/snowplow_enabled_metric.rb b/lib/gitlab/usage/metrics/instrumentations/snowplow_enabled_metric.rb
new file mode 100644
index 00000000000..5d504c70e73
--- /dev/null
+++ b/lib/gitlab/usage/metrics/instrumentations/snowplow_enabled_metric.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Usage
+ module Metrics
+ module Instrumentations
+ class SnowplowEnabledMetric < GenericMetric
+ def value
+ Gitlab::CurrentSettings.snowplow_enabled?
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/usage_data.rb b/lib/gitlab/usage_data.rb
index a09165d8ff4..0301525ebbd 100644
--- a/lib/gitlab/usage_data.rb
+++ b/lib/gitlab/usage_data.rb
@@ -228,7 +228,9 @@ module Gitlab
operating_system: alt_usage_data(fallback: nil) { operating_system },
gitaly_apdex: alt_usage_data { gitaly_apdex },
collected_data_categories: add_metric('CollectedDataCategoriesMetric', time_frame: 'none'),
- service_ping_features_enabled: add_metric('ServicePingFeaturesMetric', time_frame: 'none')
+ service_ping_features_enabled: add_metric('ServicePingFeaturesMetric', time_frame: 'none'),
+ snowplow_enabled: add_metric('SnowplowEnabledMetric', time_frame: 'none'),
+ snowplow_configured_to_gitlab_collector: add_metric('SnowplowConfiguredToGitlabCollectorMetric', time_frame: 'none')
}
}
end
diff --git a/lib/sidebars/groups/menus/customer_relations_menu.rb b/lib/sidebars/groups/menus/customer_relations_menu.rb
index fdbbd662ad6..002197965d1 100644
--- a/lib/sidebars/groups/menus/customer_relations_menu.rb
+++ b/lib/sidebars/groups/menus/customer_relations_menu.rb
@@ -32,7 +32,7 @@ module Sidebars
def contacts_menu_item
::Sidebars::MenuItem.new(
title: _('Contacts'),
- link: contacts_group_crm_index_path(context.group),
+ link: group_crm_contacts_path(context.group),
active_routes: { path: 'groups/crm#contacts' },
item_id: :crm_contacts
)
@@ -41,7 +41,7 @@ module Sidebars
def organizations_menu_item
::Sidebars::MenuItem.new(
title: _('Organizations'),
- link: organizations_group_crm_index_path(context.group),
+ link: group_crm_organizations_path(context.group),
active_routes: { path: 'groups/crm#organizations' },
item_id: :crm_organizations
)
diff --git a/locale/gitlab.pot b/locale/gitlab.pot
index ed0f647dde1..5e6e61fb5c0 100644
--- a/locale/gitlab.pot
+++ b/locale/gitlab.pot
@@ -10154,12 +10154,36 @@ msgstr ""
msgid "Critical vulnerabilities present"
msgstr ""
+msgid "Crm|Create new contact"
+msgstr ""
+
+msgid "Crm|Customer Relations Contacts"
+msgstr ""
+
+msgid "Crm|Description (optional)"
+msgstr ""
+
+msgid "Crm|Email"
+msgstr ""
+
+msgid "Crm|First name"
+msgstr ""
+
+msgid "Crm|Last name"
+msgstr ""
+
+msgid "Crm|New contact"
+msgstr ""
+
msgid "Crm|No contacts found"
msgstr ""
msgid "Crm|No organizations found"
msgstr ""
+msgid "Crm|Phone number (optional)"
+msgstr ""
+
msgid "Cron Timezone"
msgstr ""
@@ -29196,6 +29220,9 @@ msgstr ""
msgid "Replace"
msgstr ""
+msgid "Replace %{blob_name}"
+msgstr ""
+
msgid "Replace %{name}"
msgstr ""
diff --git a/scripts/override_rails_constants.rb b/metrics_server/override_rails_constants.rb
index 1b255dd0011..76e49edfbb0 100644
--- a/scripts/override_rails_constants.rb
+++ b/metrics_server/override_rails_constants.rb
@@ -3,12 +3,12 @@
require 'active_support/environment_inquirer'
-module Rails # rubocop:disable Gitlab/NamespacedClass
+module Rails
extend self
def env
@env ||= ActiveSupport::EnvironmentInquirer.new(
- ENV["RAILS_ENV"].presence || ENV["RACK_ENV"].presence || "test"
+ ENV["RAILS_ENV"].presence || ENV["RACK_ENV"].presence || "development"
)
end
diff --git a/metrics_server/settings_overrides.rb b/metrics_server/settings_overrides.rb
index 4239f62ad1c..8572b4f86b0 100644
--- a/metrics_server/settings_overrides.rb
+++ b/metrics_server/settings_overrides.rb
@@ -7,7 +7,7 @@
# to all necessary constants. For example, we need Rails.root to
# determine the location of bin/metrics-server.
# Here we make the necessary constants available conditionally.
-require_relative '../scripts/override_rails_constants' unless Object.const_defined?('Rails')
+require_relative 'override_rails_constants' unless Object.const_defined?('Rails')
require_relative '../config/settings'
diff --git a/qa/qa/resource/group_deploy_token.rb b/qa/qa/resource/group_deploy_token.rb
index 410a7e6253f..c1d6be6547a 100644
--- a/qa/qa/resource/group_deploy_token.rb
+++ b/qa/qa/resource/group_deploy_token.rb
@@ -4,18 +4,11 @@ module QA
module Resource
class GroupDeployToken < Base
attr_accessor :name, :expires_at
+ attr_writer :scopes
- attribute :username do
- Page::Group::Settings::Repository.perform do |repository_page|
- repository_page.expand_deploy_tokens(&:token_username)
- end
- end
-
- attribute :password do
- Page::Group::Settings::Repository.perform do |repository_page|
- repository_page.expand_deploy_tokens(&:token_password)
- end
- end
+ attribute :id
+ attribute :token
+ attribute :username
attribute :group do
Group.fabricate! do |resource|
@@ -24,11 +17,33 @@ module QA
end
end
- attribute :project do
- Project.fabricate! do |resource|
- resource.name = 'project-to-deploy'
- resource.description = 'project for adding deploy token test'
- end
+ def fabricate_via_api!
+ super
+ end
+
+ def api_get_path
+ "/groups/#{group.id}/deploy_tokens"
+ end
+
+ def api_post_path
+ api_get_path
+ end
+
+ def api_post_body
+ {
+ name: @name,
+ scopes: @scopes
+ }
+ end
+
+ def api_delete_path
+ "/groups/#{group.id}/deploy_tokens/#{id}"
+ end
+
+ def resource_web_url(resource)
+ super
+ rescue ResourceURLMissingError
+ # this particular resource does not expose a web_url property
end
def fabricate!
diff --git a/qa/qa/specs/features/browser_ui/5_package/package_registry/nuget_repository_spec.rb b/qa/qa/specs/features/browser_ui/5_package/package_registry/nuget_repository_spec.rb
index 0b4825715c1..42d43dac33c 100644
--- a/qa/qa/specs/features/browser_ui/5_package/package_registry/nuget_repository_spec.rb
+++ b/qa/qa/specs/features/browser_ui/5_package/package_registry/nuget_repository_spec.rb
@@ -21,9 +21,14 @@ module QA
end
let(:group_deploy_token) do
- Resource::GroupDeployToken.fabricate_via_browser_ui! do |deploy_token|
+ Resource::GroupDeployToken.fabricate_via_api! do |deploy_token|
deploy_token.name = 'nuget-group-deploy-token'
deploy_token.group = project.group
+ deploy_token.scopes = %w[
+ read_repository
+ read_package_registry
+ write_package_registry
+ ]
end
end
@@ -70,7 +75,7 @@ module QA
when :ci_job_token
'${CI_JOB_TOKEN}'
when :group_deploy_token
- "\"#{group_deploy_token.password}\""
+ "\"#{group_deploy_token.token}\""
end
end
diff --git a/scripts/frontend/startup_css/get_startup_css.js b/scripts/frontend/startup_css/get_startup_css.js
index 10e8371df8c..2c8c3b4e321 100644
--- a/scripts/frontend/startup_css/get_startup_css.js
+++ b/scripts/frontend/startup_css/get_startup_css.js
@@ -29,7 +29,9 @@ const mergePurgeCSSOptions = (...options) =>
const getStartupCSS = async ({ htmlPaths, cssPaths, purgeOptions }) => {
const content = htmlPaths.map((htmlPath) => {
if (!fs.existsSync(htmlPath)) {
- die(`Could not find fixture "${htmlPath}". Have you run the fixtures?`);
+ die(
+ `Could not find fixture "${htmlPath}". Have you run the fixtures? (bundle exec rspec spec/frontend/fixtures/startup_css.rb)`,
+ );
}
const rawHtml = fs.readFileSync(htmlPath);
diff --git a/scripts/setup-test-env b/scripts/setup-test-env
index c955a01d769..a81aaa5cda3 100755
--- a/scripts/setup-test-env
+++ b/scripts/setup-test-env
@@ -13,7 +13,17 @@ require 'active_support/string_inquirer'
ENV['SKIP_RAILS_ENV_IN_RAKE'] = 'true'
-require_relative 'override_rails_constants'
+module Rails
+ extend self
+
+ def root
+ Pathname.new(File.expand_path('..', __dir__))
+ end
+
+ def env
+ @_env ||= ActiveSupport::StringInquirer.new(ENV["RAILS_ENV"] || ENV["RACK_ENV"] || "test")
+ end
+end
ActiveSupport::Dependencies.autoload_paths << 'lib'
diff --git a/spec/features/admin/admin_deploy_keys_spec.rb b/spec/features/admin/admin_deploy_keys_spec.rb
index 53caf0fac33..9b74aa2ac5a 100644
--- a/spec/features/admin/admin_deploy_keys_spec.rb
+++ b/spec/features/admin/admin_deploy_keys_spec.rb
@@ -3,101 +3,125 @@
require 'spec_helper'
RSpec.describe 'admin deploy keys' do
+ include Spec::Support::Helpers::ModalHelpers
+
let_it_be(:admin) { create(:admin) }
let!(:deploy_key) { create(:deploy_key, public: true) }
let!(:another_deploy_key) { create(:another_deploy_key, public: true) }
before do
- stub_feature_flags(admin_deploy_keys_vue: false)
sign_in(admin)
gitlab_enable_admin_mode_sign_in(admin)
end
- it 'show all public deploy keys' do
- visit admin_deploy_keys_path
+ shared_examples 'renders deploy keys correctly' do
+ it 'show all public deploy keys' do
+ visit admin_deploy_keys_path
- page.within(find('[data-testid="deploy-keys-list"]', match: :first)) do
- expect(page).to have_content(deploy_key.title)
- expect(page).to have_content(another_deploy_key.title)
+ page.within(find('[data-testid="deploy-keys-list"]', match: :first)) do
+ expect(page).to have_content(deploy_key.title)
+ expect(page).to have_content(another_deploy_key.title)
+ end
end
- end
- it 'shows all the projects the deploy key has write access' do
- write_key = create(:deploy_keys_project, :write_access, deploy_key: deploy_key)
+ it 'shows all the projects the deploy key has write access' do
+ write_key = create(:deploy_keys_project, :write_access, deploy_key: deploy_key)
- visit admin_deploy_keys_path
+ visit admin_deploy_keys_path
- page.within(find('[data-testid="deploy-keys-list"]', match: :first)) do
- expect(page).to have_content(write_key.project.full_name)
+ page.within(find('[data-testid="deploy-keys-list"]', match: :first)) do
+ expect(page).to have_content(write_key.project.full_name)
+ end
end
- end
- describe 'create a new deploy key' do
- let(:new_ssh_key) { attributes_for(:key)[:key] }
+ describe 'create a new deploy key' do
+ let(:new_ssh_key) { attributes_for(:key)[:key] }
- before do
- visit admin_deploy_keys_path
- click_link 'New deploy key'
- end
+ before do
+ visit admin_deploy_keys_path
+ click_link 'New deploy key'
+ end
- it 'creates a new deploy key' do
- fill_in 'deploy_key_title', with: 'laptop'
- fill_in 'deploy_key_key', with: new_ssh_key
- click_button 'Create'
+ it 'creates a new deploy key' do
+ fill_in 'deploy_key_title', with: 'laptop'
+ fill_in 'deploy_key_key', with: new_ssh_key
+ click_button 'Create'
- expect(current_path).to eq admin_deploy_keys_path
+ expect(current_path).to eq admin_deploy_keys_path
- page.within(find('[data-testid="deploy-keys-list"]', match: :first)) do
- expect(page).to have_content('laptop')
+ page.within(find('[data-testid="deploy-keys-list"]', match: :first)) do
+ expect(page).to have_content('laptop')
+ end
end
end
- end
- describe 'update an existing deploy key' do
- before do
- visit admin_deploy_keys_path
- find('tr', text: deploy_key.title).click_link('Edit')
- end
+ describe 'update an existing deploy key' do
+ before do
+ visit admin_deploy_keys_path
+ page.within('tr', text: deploy_key.title) do
+ click_link(_('Edit deploy key'))
+ end
+ end
- it 'updates an existing deploy key' do
- fill_in 'deploy_key_title', with: 'new-title'
- click_button 'Save changes'
+ it 'updates an existing deploy key' do
+ fill_in 'deploy_key_title', with: 'new-title'
+ click_button 'Save changes'
- expect(current_path).to eq admin_deploy_keys_path
+ expect(current_path).to eq admin_deploy_keys_path
- page.within(find('[data-testid="deploy-keys-list"]', match: :first)) do
- expect(page).to have_content('new-title')
+ page.within(find('[data-testid="deploy-keys-list"]', match: :first)) do
+ expect(page).to have_content('new-title')
+ end
end
end
end
- describe 'remove an existing deploy key' do
- before do
- visit admin_deploy_keys_path
- end
+ context 'when `admin_deploy_keys_vue` feature flag is enabled', :js do
+ it_behaves_like 'renders deploy keys correctly'
- it 'removes an existing deploy key' do
- find('tr', text: deploy_key.title).click_link('Remove')
+ describe 'remove an existing deploy key' do
+ before do
+ visit admin_deploy_keys_path
+ end
- expect(current_path).to eq admin_deploy_keys_path
- page.within(find('[data-testid="deploy-keys-list"]', match: :first)) do
- expect(page).not_to have_content(deploy_key.title)
+ it 'removes an existing deploy key' do
+ accept_gl_confirm('Are you sure you want to delete this deploy key?', button_text: 'Delete') do
+ page.within('tr', text: deploy_key.title) do
+ click_button _('Delete deploy key')
+ end
+ end
+
+ expect(current_path).to eq admin_deploy_keys_path
+ page.within(find('[data-testid="deploy-keys-list"]', match: :first)) do
+ expect(page).not_to have_content(deploy_key.title)
+ end
end
end
end
- context 'when `admin_deploy_keys_vue` feature flag is enabled', :js do
+ context 'when `admin_deploy_keys_vue` feature flag is disabled' do
before do
- stub_feature_flags(admin_deploy_keys_vue: true)
-
- visit admin_deploy_keys_path
+ stub_feature_flags(admin_deploy_keys_vue: false)
end
- it 'renders the Vue app', :aggregate_failures do
- expect(page).to have_content('Public deploy keys')
- expect(page).to have_selector('[data-testid="deploy-keys-list"]')
- expect(page).to have_link('New deploy key', href: new_admin_deploy_key_path)
+ it_behaves_like 'renders deploy keys correctly'
+
+ describe 'remove an existing deploy key' do
+ before do
+ visit admin_deploy_keys_path
+ end
+
+ it 'removes an existing deploy key' do
+ page.within('tr', text: deploy_key.title) do
+ click_link _('Remove deploy key')
+ end
+
+ expect(current_path).to eq admin_deploy_keys_path
+ page.within(find('[data-testid="deploy-keys-list"]', match: :first)) do
+ expect(page).not_to have_content(deploy_key.title)
+ end
+ end
end
end
end
diff --git a/spec/finders/group_descendants_finder_spec.rb b/spec/finders/group_descendants_finder_spec.rb
index 01c6eb05907..f6b87f7eeab 100644
--- a/spec/finders/group_descendants_finder_spec.rb
+++ b/spec/finders/group_descendants_finder_spec.rb
@@ -4,7 +4,12 @@ require 'spec_helper'
RSpec.describe GroupDescendantsFinder do
let_it_be(:user) { create(:user) }
- let_it_be(:group) { create(:group) }
+
+ let_it_be_with_reload(:group) do
+ create(:group).tap do |g|
+ g.add_owner(user)
+ end
+ end
let(:params) { {} }
@@ -12,254 +17,262 @@ RSpec.describe GroupDescendantsFinder do
described_class.new(current_user: user, parent_group: group, params: params)
end
- before do
- group.add_owner(user)
- end
-
- describe '#has_children?' do
- it 'is true when there are projects' do
- create(:project, namespace: group)
-
- expect(finder.has_children?).to be_truthy
- end
-
- context 'when there are subgroups' do
+ shared_examples 'group descentants finder examples' do
+ describe '#has_children?' do
it 'is true when there are projects' do
- create(:group, parent: group)
+ create(:project, namespace: group)
expect(finder.has_children?).to be_truthy
end
- end
- end
- describe '#execute' do
- it 'includes projects' do
- project = create(:project, namespace: group)
+ context 'when there are subgroups' do
+ it 'is true when there are projects' do
+ create(:group, parent: group)
- expect(finder.execute).to contain_exactly(project)
+ expect(finder.has_children?).to be_truthy
+ end
+ end
end
- context 'when archived is `true`' do
- let(:params) { { archived: 'true' } }
-
- it 'includes archived projects' do
- archived_project = create(:project, namespace: group, archived: true)
+ describe '#execute' do
+ it 'includes projects' do
project = create(:project, namespace: group)
- expect(finder.execute).to contain_exactly(archived_project, project)
+ expect(finder.execute).to contain_exactly(project)
end
- end
- context 'when archived is `only`' do
- let(:params) { { archived: 'only' } }
+ context 'when archived is `true`' do
+ let(:params) { { archived: 'true' } }
- it 'includes only archived projects' do
- archived_project = create(:project, namespace: group, archived: true)
- _project = create(:project, namespace: group)
+ it 'includes archived projects' do
+ archived_project = create(:project, namespace: group, archived: true)
+ project = create(:project, namespace: group)
- expect(finder.execute).to contain_exactly(archived_project)
+ expect(finder.execute).to contain_exactly(archived_project, project)
+ end
end
- end
- it 'does not include archived projects' do
- _archived_project = create(:project, :archived, namespace: group)
+ context 'when archived is `only`' do
+ let(:params) { { archived: 'only' } }
- expect(finder.execute).to be_empty
- end
+ it 'includes only archived projects' do
+ archived_project = create(:project, namespace: group, archived: true)
+ _project = create(:project, namespace: group)
- context 'with a filter' do
- let(:params) { { filter: 'test' } }
+ expect(finder.execute).to contain_exactly(archived_project)
+ end
+ end
- it 'includes only projects matching the filter' do
- _other_project = create(:project, namespace: group)
- matching_project = create(:project, namespace: group, name: 'testproject')
+ it 'does not include archived projects' do
+ _archived_project = create(:project, :archived, namespace: group)
- expect(finder.execute).to contain_exactly(matching_project)
+ expect(finder.execute).to be_empty
end
- end
- it 'sorts elements by name as default' do
- project1 = create(:project, namespace: group, name: 'z')
- project2 = create(:project, namespace: group, name: 'a')
+ context 'with a filter' do
+ let(:params) { { filter: 'test' } }
- expect(subject.execute).to eq([project2, project1])
- end
+ it 'includes only projects matching the filter' do
+ _other_project = create(:project, namespace: group)
+ matching_project = create(:project, namespace: group, name: 'testproject')
- context 'sorting by name' do
- let!(:project1) { create(:project, namespace: group, name: 'a', path: 'project-a') }
- let!(:project2) { create(:project, namespace: group, name: 'z', path: 'project-z') }
- let(:params) do
- {
- sort: 'name_asc'
- }
+ expect(finder.execute).to contain_exactly(matching_project)
+ end
end
- it 'sorts elements by name' do
- expect(subject.execute).to eq(
- [
- project1,
- project2
- ]
- )
+ it 'sorts elements by name as default' do
+ project1 = create(:project, namespace: group, name: 'z')
+ project2 = create(:project, namespace: group, name: 'a')
+
+ expect(subject.execute).to match_array([project2, project1])
end
- context 'with nested groups' do
- let!(:subgroup1) { create(:group, parent: group, name: 'a', path: 'sub-a') }
- let!(:subgroup2) { create(:group, parent: group, name: 'z', path: 'sub-z') }
+ context 'sorting by name' do
+ let!(:project1) { create(:project, namespace: group, name: 'a', path: 'project-a') }
+ let!(:project2) { create(:project, namespace: group, name: 'z', path: 'project-z') }
+ let(:params) do
+ {
+ sort: 'name_asc'
+ }
+ end
it 'sorts elements by name' do
expect(subject.execute).to eq(
[
- subgroup1,
- subgroup2,
project1,
project2
]
)
end
- end
- end
-
- it 'does not include projects shared with the group' do
- project = create(:project, namespace: group)
- other_project = create(:project)
- other_project.project_group_links.create!(group: group,
- group_access: Gitlab::Access::MAINTAINER)
- expect(finder.execute).to contain_exactly(project)
- end
- end
+ context 'with nested groups' do
+ let!(:subgroup1) { create(:group, parent: group, name: 'a', path: 'sub-a') }
+ let!(:subgroup2) { create(:group, parent: group, name: 'z', path: 'sub-z') }
+
+ it 'sorts elements by name' do
+ expect(subject.execute).to eq(
+ [
+ subgroup1,
+ subgroup2,
+ project1,
+ project2
+ ]
+ )
+ end
+ end
+ end
- context 'with shared groups' do
- let_it_be(:other_group) { create(:group) }
- let_it_be(:shared_group_link) do
- create(:group_group_link,
- shared_group: group,
- shared_with_group: other_group)
- end
+ it 'does not include projects shared with the group' do
+ project = create(:project, namespace: group)
+ other_project = create(:project)
+ other_project.project_group_links.create!(group: group,
+ group_access: Gitlab::Access::MAINTAINER)
- context 'without common ancestor' do
- it { expect(finder.execute).to be_empty }
+ expect(finder.execute).to contain_exactly(project)
+ end
end
- context 'with common ancestor' do
- let_it_be(:common_ancestor) { create(:group) }
- let_it_be(:other_group) { create(:group, parent: common_ancestor) }
- let_it_be(:group) { create(:group, parent: common_ancestor) }
+ context 'with shared groups' do
+ let_it_be(:other_group) { create(:group) }
+ let_it_be(:shared_group_link) do
+ create(:group_group_link,
+ shared_group: group,
+ shared_with_group: other_group)
+ end
- context 'querying under the common ancestor' do
+ context 'without common ancestor' do
it { expect(finder.execute).to be_empty }
end
- context 'querying the common ancestor' do
- subject(:finder) do
- described_class.new(current_user: user, parent_group: common_ancestor, params: params)
+ context 'with common ancestor' do
+ let_it_be(:common_ancestor) { create(:group) }
+ let_it_be(:other_group) { create(:group, parent: common_ancestor) }
+ let_it_be(:group) { create(:group, parent: common_ancestor) }
+
+ context 'querying under the common ancestor' do
+ it { expect(finder.execute).to be_empty }
end
- it 'contains shared subgroups' do
- expect(finder.execute).to contain_exactly(group, other_group)
+ context 'querying the common ancestor' do
+ subject(:finder) do
+ described_class.new(current_user: user, parent_group: common_ancestor, params: params)
+ end
+
+ it 'contains shared subgroups' do
+ expect(finder.execute).to contain_exactly(group, other_group)
+ end
end
end
end
- end
- context 'with nested groups' do
- let!(:project) { create(:project, namespace: group) }
- let!(:subgroup) { create(:group, :private, parent: group) }
+ context 'with nested groups' do
+ let!(:project) { create(:project, namespace: group) }
+ let!(:subgroup) { create(:group, :private, parent: group) }
- describe '#execute' do
- it 'contains projects and subgroups' do
- expect(finder.execute).to contain_exactly(subgroup, project)
- end
+ describe '#execute' do
+ it 'contains projects and subgroups' do
+ expect(finder.execute).to contain_exactly(subgroup, project)
+ end
- it 'does not include subgroups the user does not have access to' do
- subgroup.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
+ it 'does not include subgroups the user does not have access to' do
+ subgroup.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
- public_subgroup = create(:group, :public, parent: group, path: 'public-group')
- other_subgroup = create(:group, :private, parent: group, path: 'visible-private-group')
- other_user = create(:user)
- other_subgroup.add_developer(other_user)
+ public_subgroup = create(:group, :public, parent: group, path: 'public-group')
+ other_subgroup = create(:group, :private, parent: group, path: 'visible-private-group')
+ other_user = create(:user)
+ other_subgroup.add_developer(other_user)
- finder = described_class.new(current_user: other_user, parent_group: group)
+ finder = described_class.new(current_user: other_user, parent_group: group)
- expect(finder.execute).to contain_exactly(public_subgroup, other_subgroup)
- end
+ expect(finder.execute).to contain_exactly(public_subgroup, other_subgroup)
+ end
- it 'only includes public groups when no user is given' do
- public_subgroup = create(:group, :public, parent: group)
- _private_subgroup = create(:group, :private, parent: group)
+ it 'only includes public groups when no user is given' do
+ public_subgroup = create(:group, :public, parent: group)
+ _private_subgroup = create(:group, :private, parent: group)
- finder = described_class.new(current_user: nil, parent_group: group)
+ finder = described_class.new(current_user: nil, parent_group: group)
- expect(finder.execute).to contain_exactly(public_subgroup)
- end
+ expect(finder.execute).to contain_exactly(public_subgroup)
+ end
- context 'when archived is `true`' do
- let(:params) { { archived: 'true' } }
+ context 'when archived is `true`' do
+ let(:params) { { archived: 'true' } }
- it 'includes archived projects in the count of subgroups' do
- create(:project, namespace: subgroup, archived: true)
+ it 'includes archived projects in the count of subgroups' do
+ create(:project, namespace: subgroup, archived: true)
- expect(finder.execute.first.preloaded_project_count).to eq(1)
+ expect(finder.execute.first.preloaded_project_count).to eq(1)
+ end
end
- end
- context 'with a filter' do
- let(:params) { { filter: 'test' } }
+ context 'with a filter' do
+ let(:params) { { filter: 'test' } }
- it 'contains only matching projects and subgroups' do
- matching_project = create(:project, namespace: group, name: 'Testproject')
- matching_subgroup = create(:group, name: 'testgroup', parent: group)
+ it 'contains only matching projects and subgroups' do
+ matching_project = create(:project, namespace: group, name: 'Testproject')
+ matching_subgroup = create(:group, name: 'testgroup', parent: group)
- expect(finder.execute).to contain_exactly(matching_subgroup, matching_project)
- end
+ expect(finder.execute).to contain_exactly(matching_subgroup, matching_project)
+ end
- it 'does not include subgroups the user does not have access to' do
- _invisible_subgroup = create(:group, :private, parent: group, name: 'test1')
- other_subgroup = create(:group, :private, parent: group, name: 'test2')
- public_subgroup = create(:group, :public, parent: group, name: 'test3')
- other_subsubgroup = create(:group, :private, parent: other_subgroup, name: 'test4')
- other_user = create(:user)
- other_subgroup.add_developer(other_user)
+ it 'does not include subgroups the user does not have access to' do
+ _invisible_subgroup = create(:group, :private, parent: group, name: 'test1')
+ other_subgroup = create(:group, :private, parent: group, name: 'test2')
+ public_subgroup = create(:group, :public, parent: group, name: 'test3')
+ other_subsubgroup = create(:group, :private, parent: other_subgroup, name: 'test4')
+ other_user = create(:user)
+ other_subgroup.add_developer(other_user)
- finder = described_class.new(current_user: other_user,
- parent_group: group,
- params: params)
+ finder = described_class.new(current_user: other_user,
+ parent_group: group,
+ params: params)
- expect(finder.execute).to contain_exactly(other_subgroup, public_subgroup, other_subsubgroup)
- end
+ expect(finder.execute).to contain_exactly(other_subgroup, public_subgroup, other_subsubgroup)
+ end
- context 'with matching children' do
- it 'includes a group that has a subgroup matching the query and its parent' do
- matching_subgroup = create(:group, :private, name: 'testgroup', parent: subgroup)
+ context 'with matching children' do
+ it 'includes a group that has a subgroup matching the query and its parent' do
+ matching_subgroup = create(:group, :private, name: 'testgroup', parent: subgroup)
- expect(finder.execute).to contain_exactly(subgroup, matching_subgroup)
- end
+ expect(finder.execute).to contain_exactly(subgroup, matching_subgroup)
+ end
- it 'includes the parent of a matching project' do
- matching_project = create(:project, namespace: subgroup, name: 'Testproject')
+ it 'includes the parent of a matching project' do
+ matching_project = create(:project, namespace: subgroup, name: 'Testproject')
- expect(finder.execute).to contain_exactly(subgroup, matching_project)
- end
+ expect(finder.execute).to contain_exactly(subgroup, matching_project)
+ end
- context 'with a small page size' do
- let(:params) { { filter: 'test', per_page: 1 } }
+ context 'with a small page size' do
+ let(:params) { { filter: 'test', per_page: 1 } }
- it 'contains all the ancestors of a matching subgroup regardless the page size' do
- subgroup = create(:group, :private, parent: group)
- matching = create(:group, :private, name: 'testgroup', parent: subgroup)
+ it 'contains all the ancestors of a matching subgroup regardless the page size' do
+ subgroup = create(:group, :private, parent: group)
+ matching = create(:group, :private, name: 'testgroup', parent: subgroup)
- expect(finder.execute).to contain_exactly(subgroup, matching)
+ expect(finder.execute).to contain_exactly(subgroup, matching)
+ end
end
- end
- it 'does not include the parent itself' do
- group.update!(name: 'test')
+ it 'does not include the parent itself' do
+ group.update!(name: 'test')
- expect(finder.execute).not_to include(group)
+ expect(finder.execute).not_to include(group)
+ end
end
end
end
end
end
+
+ it_behaves_like 'group descentants finder examples'
+
+ context 'when feature flag :linear_group_descendants_finder is disabled' do
+ before do
+ stub_feature_flags(linear_group_descendants_finder: false)
+ end
+
+ it_behaves_like 'group descentants finder examples'
+ end
end
diff --git a/spec/frontend/crm/contacts_root_spec.js b/spec/frontend/crm/contacts_root_spec.js
index fec1e924da3..c7410d13365 100644
--- a/spec/frontend/crm/contacts_root_spec.js
+++ b/spec/frontend/crm/contacts_root_spec.js
@@ -1,40 +1,62 @@
-import { GlLoadingIcon } from '@gitlab/ui';
+import { GlAlert, GlLoadingIcon } from '@gitlab/ui';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
+import VueRouter from 'vue-router';
import { mountExtended, shallowMountExtended } from 'helpers/vue_test_utils_helper';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import createFlash from '~/flash';
import ContactsRoot from '~/crm/components/contacts_root.vue';
+import NewContactForm from '~/crm/components/new_contact_form.vue';
import getGroupContactsQuery from '~/crm/components/queries/get_group_contacts.query.graphql';
import { getGroupContactsQueryResponse } from './mock_data';
-jest.mock('~/flash');
-
describe('Customer relations contacts root app', () => {
Vue.use(VueApollo);
+ Vue.use(VueRouter);
let wrapper;
let fakeApollo;
+ let router;
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
const findRowByName = (rowName) => wrapper.findAllByRole('row', { name: rowName });
const findIssuesLinks = () => wrapper.findAllByTestId('issues-link');
+ const findNewContactButton = () => wrapper.findByTestId('new-contact-button');
+ const findNewContactForm = () => wrapper.findComponent(NewContactForm);
+ const findError = () => wrapper.findComponent(GlAlert);
const successQueryHandler = jest.fn().mockResolvedValue(getGroupContactsQueryResponse);
+ const basePath = '/groups/flightjs/-/crm/contacts';
+
const mountComponent = ({
queryHandler = successQueryHandler,
mountFunction = shallowMountExtended,
+ canAdminCrmContact = true,
} = {}) => {
fakeApollo = createMockApollo([[getGroupContactsQuery, queryHandler]]);
wrapper = mountFunction(ContactsRoot, {
- provide: { groupFullPath: 'flightjs', groupIssuesPath: '/issues' },
+ router,
+ provide: {
+ groupFullPath: 'flightjs',
+ groupIssuesPath: '/issues',
+ groupId: 26,
+ canAdminCrmContact,
+ },
apolloProvider: fakeApollo,
});
};
+ beforeEach(() => {
+ router = new VueRouter({
+ base: basePath,
+ mode: 'history',
+ routes: [],
+ });
+ });
+
afterEach(() => {
wrapper.destroy();
fakeApollo = null;
+ router = null;
});
it('should render loading spinner', () => {
@@ -43,23 +65,94 @@ describe('Customer relations contacts root app', () => {
expect(findLoadingIcon().exists()).toBe(true);
});
- it('should render error message on reject', async () => {
- mountComponent({ queryHandler: jest.fn().mockRejectedValue('ERROR') });
- await waitForPromises();
+ describe('new contact button', () => {
+ it('should exist when user has permission', () => {
+ mountComponent();
+
+ expect(findNewContactButton().exists()).toBe(true);
+ });
+
+ it('should not exist when user has no permission', () => {
+ mountComponent({ canAdminCrmContact: false });
+
+ expect(findNewContactButton().exists()).toBe(false);
+ });
+ });
+
+ describe('new contact form', () => {
+ it('should not exist by default', async () => {
+ mountComponent();
+ await waitForPromises();
+
+ expect(findNewContactForm().exists()).toBe(false);
+ });
+
+ it('should exist when user clicks new contact button', async () => {
+ mountComponent();
+
+ findNewContactButton().vm.$emit('click');
+ await waitForPromises();
+
+ expect(findNewContactForm().exists()).toBe(true);
+ });
+
+ it('should exist when user navigates directly to /new', async () => {
+ router.replace({ path: '/new' });
+ mountComponent();
+ await waitForPromises();
+
+ expect(findNewContactForm().exists()).toBe(true);
+ });
+
+ it('should not exist when form emits close', async () => {
+ router.replace({ path: '/new' });
+ mountComponent();
+
+ findNewContactForm().vm.$emit('close');
+ await waitForPromises();
+
+ expect(findNewContactForm().exists()).toBe(false);
+ });
+ });
+
+ describe('error', () => {
+ it('should exist on reject', async () => {
+ mountComponent({ queryHandler: jest.fn().mockRejectedValue('ERROR') });
+ await waitForPromises();
- expect(createFlash).toHaveBeenCalled();
+ expect(findError().exists()).toBe(true);
+ });
+
+ it('should exist when new contact form emits error', async () => {
+ router.replace({ path: '/new' });
+ mountComponent();
+
+ findNewContactForm().vm.$emit('error');
+ await waitForPromises();
+
+ expect(findError().exists()).toBe(true);
+ });
});
- it('renders correct results', async () => {
- mountComponent({ mountFunction: mountExtended });
- await waitForPromises();
+ describe('on successful load', () => {
+ it('should not render error', async () => {
+ mountComponent();
+ await waitForPromises();
- expect(findRowByName(/Marty/i)).toHaveLength(1);
- expect(findRowByName(/George/i)).toHaveLength(1);
- expect(findRowByName(/jd@gitlab.com/i)).toHaveLength(1);
+ expect(findError().exists()).toBe(false);
+ });
+
+ it('renders correct results', async () => {
+ mountComponent({ mountFunction: mountExtended });
+ await waitForPromises();
- const issueLink = findIssuesLinks().at(0);
- expect(issueLink.exists()).toBe(true);
- expect(issueLink.attributes('href')).toBe('/issues?scope=all&state=opened&crm_contact_id=16');
+ expect(findRowByName(/Marty/i)).toHaveLength(1);
+ expect(findRowByName(/George/i)).toHaveLength(1);
+ expect(findRowByName(/jd@gitlab.com/i)).toHaveLength(1);
+
+ const issueLink = findIssuesLinks().at(0);
+ expect(issueLink.exists()).toBe(true);
+ expect(issueLink.attributes('href')).toBe('/issues?scope=all&state=opened&crm_contact_id=16');
+ });
});
});
diff --git a/spec/frontend/crm/mock_data.js b/spec/frontend/crm/mock_data.js
index 4197621aaa6..e784ac3764d 100644
--- a/spec/frontend/crm/mock_data.js
+++ b/spec/frontend/crm/mock_data.js
@@ -40,7 +40,6 @@ export const getGroupContactsQueryResponse = {
organization: null,
},
],
- __typename: 'CustomerRelationsContactConnection',
},
},
},
@@ -79,3 +78,31 @@ export const getGroupOrganizationsQueryResponse = {
},
},
};
+
+export const createContactMutationResponse = {
+ data: {
+ customerRelationsContactCreate: {
+ __typeName: 'CustomerRelationsContactCreatePayload',
+ contact: {
+ __typename: 'CustomerRelationsContact',
+ id: 'gid://gitlab/CustomerRelations::Contact/1',
+ firstName: 'A',
+ lastName: 'B',
+ email: 'C',
+ phone: null,
+ description: null,
+ organization: null,
+ },
+ errors: [],
+ },
+ },
+};
+
+export const createContactMutationErrorResponse = {
+ data: {
+ customerRelationsContactCreate: {
+ contact: null,
+ errors: ['Phone is invalid.'],
+ },
+ },
+};
diff --git a/spec/frontend/crm/new_contact_form_spec.js b/spec/frontend/crm/new_contact_form_spec.js
new file mode 100644
index 00000000000..681c0539536
--- /dev/null
+++ b/spec/frontend/crm/new_contact_form_spec.js
@@ -0,0 +1,108 @@
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import NewContactForm from '~/crm/components/new_contact_form.vue';
+import createContactMutation from '~/crm/components/queries/create_contact.mutation.graphql';
+import getGroupContactsQuery from '~/crm/components/queries/get_group_contacts.query.graphql';
+import {
+ createContactMutationErrorResponse,
+ createContactMutationResponse,
+ getGroupContactsQueryResponse,
+} from './mock_data';
+
+describe('Customer relations contacts root app', () => {
+ Vue.use(VueApollo);
+ let wrapper;
+ let fakeApollo;
+ let queryHandler;
+
+ const findCreateNewContactButton = () => wrapper.findByTestId('create-new-contact-button');
+ const findCancelButton = () => wrapper.findByTestId('cancel-button');
+ const findForm = () => wrapper.find('form');
+
+ const mountComponent = ({ mountFunction = shallowMountExtended } = {}) => {
+ fakeApollo = createMockApollo([[createContactMutation, queryHandler]]);
+ fakeApollo.clients.defaultClient.cache.writeQuery({
+ query: getGroupContactsQuery,
+ variables: { groupFullPath: 'flightjs' },
+ data: getGroupContactsQueryResponse.data,
+ });
+ wrapper = mountFunction(NewContactForm, {
+ provide: { groupId: 26, groupFullPath: 'flightjs' },
+ apolloProvider: fakeApollo,
+ });
+ };
+
+ beforeEach(() => {
+ queryHandler = jest.fn().mockResolvedValue(createContactMutationResponse);
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ fakeApollo = null;
+ });
+
+ describe('Create new contact button', () => {
+ it('should be disabled by default', () => {
+ mountComponent();
+
+ expect(findCreateNewContactButton().attributes('disabled')).toBeTruthy();
+ });
+
+ it('should not be disabled when first, last and email have values', async () => {
+ mountComponent();
+
+ wrapper.find('#contact-first-name').vm.$emit('input', 'A');
+ wrapper.find('#contact-last-name').vm.$emit('input', 'B');
+ wrapper.find('#contact-email').vm.$emit('input', 'C');
+ await waitForPromises();
+
+ expect(findCreateNewContactButton().attributes('disabled')).toBeFalsy();
+ });
+ });
+
+ it("should emit 'close' when cancel button is clicked", () => {
+ mountComponent();
+
+ findCancelButton().vm.$emit('click');
+
+ expect(wrapper.emitted().close).toBeTruthy();
+ });
+
+ describe('when query is successful', () => {
+ it("should emit 'close'", async () => {
+ mountComponent();
+
+ findForm().trigger('submit');
+ await waitForPromises();
+
+ expect(wrapper.emitted().close).toBeTruthy();
+ });
+ });
+
+ describe('when query fails', () => {
+ it('should emit error on reject', async () => {
+ queryHandler = jest.fn().mockRejectedValue('ERROR');
+ mountComponent();
+
+ findForm().trigger('submit');
+ await waitForPromises();
+
+ expect(wrapper.emitted().error).toBeTruthy();
+ });
+
+ it('should emit error on error response', async () => {
+ queryHandler = jest.fn().mockResolvedValue(createContactMutationErrorResponse);
+ mountComponent();
+
+ findForm().trigger('submit');
+ await waitForPromises();
+
+ expect(wrapper.emitted().error[0][0]).toEqual(
+ createContactMutationErrorResponse.data.customerRelationsContactCreate.errors,
+ );
+ });
+ });
+});
diff --git a/spec/frontend/editor/helpers.js b/spec/frontend/editor/helpers.js
index c77be4f8c58..e4942c36f6c 100644
--- a/spec/frontend/editor/helpers.js
+++ b/spec/frontend/editor/helpers.js
@@ -31,7 +31,7 @@ export const SEConstExt = () => {
export function SEWithSetupExt() {
return {
- onSetup: (setupOptions = {}, instance) => {
+ onSetup: (instance, setupOptions = {}) => {
if (setupOptions && !Array.isArray(setupOptions)) {
Object.entries(setupOptions).forEach(([key, value]) => {
Object.assign(instance, {
diff --git a/spec/frontend/editor/source_editor_instance_spec.js b/spec/frontend/editor/source_editor_instance_spec.js
index a46eea4c4cd..38844b6cafe 100644
--- a/spec/frontend/editor/source_editor_instance_spec.js
+++ b/spec/frontend/editor/source_editor_instance_spec.js
@@ -424,7 +424,7 @@ describe('Source Editor Instance', () => {
definition: MyFullExtWithCallbacks,
setupOptions: defSetupOptions,
});
- expect(onSetup).toHaveBeenCalledWith(defSetupOptions, seInstance);
+ expect(onSetup).toHaveBeenCalledWith(seInstance, defSetupOptions);
expect(onUse).toHaveBeenCalledWith(seInstance);
});
diff --git a/spec/frontend/pipeline_editor/components/lint/ci_lint_results_spec.js b/spec/frontend/pipeline_editor/components/lint/ci_lint_results_spec.js
index 5fc0880b09e..ae19ed9ab02 100644
--- a/spec/frontend/pipeline_editor/components/lint/ci_lint_results_spec.js
+++ b/spec/frontend/pipeline_editor/components/lint/ci_lint_results_spec.js
@@ -1,4 +1,4 @@
-import { GlTable, GlLink } from '@gitlab/ui';
+import { GlTableLite, GlLink } from '@gitlab/ui';
import { shallowMount, mount } from '@vue/test-utils';
import { capitalizeFirstCharacter } from '~/lib/utils/text_utility';
import CiLintResults from '~/pipeline_editor/components/lint/ci_lint_results.vue';
@@ -24,7 +24,7 @@ describe('CI Lint Results', () => {
});
};
- const findTable = () => wrapper.find(GlTable);
+ const findTable = () => wrapper.find(GlTableLite);
const findByTestId = (selector) => () => wrapper.find(`[data-testid="ci-lint-${selector}"]`);
const findAllByTestId = (selector) => () =>
wrapper.findAll(`[data-testid="ci-lint-${selector}"]`);
diff --git a/spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb b/spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb
index a3cc866a406..0f6238e10dc 100644
--- a/spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb
@@ -5,11 +5,12 @@ require 'spec_helper'
RSpec.describe BulkImports::Common::Pipelines::UploadsPipeline do
let_it_be(:tmpdir) { Dir.mktmpdir }
let_it_be(:project) { create(:project) }
- let_it_be(:entity) { create(:bulk_import_entity, :project_entity, project: project, source_full_path: 'test') }
- let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
- let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
- let_it_be(:uploads_dir_path) { File.join(tmpdir, '72a497a02fe3ee09edae2ed06d390038') }
- let_it_be(:upload_file_path) { File.join(uploads_dir_path, 'upload.txt')}
+ let_it_be(:group) { create(:group) }
+
+ let(:uploads_dir_path) { File.join(tmpdir, '72a497a02fe3ee09edae2ed06d390038') }
+ let(:upload_file_path) { File.join(uploads_dir_path, 'upload.txt')}
+ let(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let(:context) { BulkImports::Pipeline::Context.new(tracker) }
subject(:pipeline) { described_class.new(context) }
@@ -24,57 +25,101 @@ RSpec.describe BulkImports::Common::Pipelines::UploadsPipeline do
FileUtils.remove_entry(tmpdir) if Dir.exist?(tmpdir)
end
- describe '#run' do
- it 'imports uploads into destination portable and removes tmpdir' do
- allow(Dir).to receive(:mktmpdir).with('bulk_imports').and_return(tmpdir)
- allow(pipeline).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [upload_file_path]))
+ shared_examples 'uploads import' do
+ describe '#run' do
+ before do
+ allow(Dir).to receive(:mktmpdir).with('bulk_imports').and_return(tmpdir)
+ allow(pipeline).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [upload_file_path]))
+ end
- pipeline.run
+ it 'imports uploads into destination portable and removes tmpdir' do
+ pipeline.run
- expect(project.uploads.map { |u| u.retrieve_uploader.filename }).to include('upload.txt')
+ expect(portable.uploads.map { |u| u.retrieve_uploader.filename }).to include('upload.txt')
- expect(Dir.exist?(tmpdir)).to eq(false)
- end
- end
+ expect(Dir.exist?(tmpdir)).to eq(false)
+ end
- describe '#extract' do
- it 'downloads & extracts upload paths' do
- allow(Dir).to receive(:mktmpdir).and_return(tmpdir)
- expect(pipeline).to receive(:untar_zxf)
- file_download_service = instance_double("BulkImports::FileDownloadService")
+ context 'when importing avatar' do
+ let(:uploads_dir_path) { File.join(tmpdir, 'avatar') }
- expect(BulkImports::FileDownloadService)
- .to receive(:new)
- .with(
- configuration: context.configuration,
- relative_url: "/projects/test/export_relations/download?relation=uploads",
- dir: tmpdir,
- filename: 'uploads.tar.gz')
- .and_return(file_download_service)
+ it 'imports avatar' do
+ FileUtils.touch(File.join(uploads_dir_path, 'avatar.png'))
- expect(file_download_service).to receive(:execute)
+ expect_next_instance_of(entity.update_service) do |service|
+ expect(service).to receive(:execute)
+ end
- extracted_data = pipeline.extract(context)
+ pipeline.run
+ end
- expect(extracted_data.data).to contain_exactly(uploads_dir_path, upload_file_path)
- end
- end
+ context 'when something goes wrong' do
+ it 'raises exception' do
+ allow_next_instance_of(entity.update_service) do |service|
+ allow(service).to receive(:execute).and_return(nil)
+ end
+
+ pipeline.run
- describe '#load' do
- it 'creates a file upload' do
- expect { pipeline.load(context, upload_file_path) }.to change { project.uploads.count }.by(1)
+ expect(entity.failures.first.exception_class).to include('AvatarLoadingError')
+ end
+ end
+ end
end
- context 'when dynamic path is nil' do
- it 'returns' do
- expect { pipeline.load(context, File.join(tmpdir, 'test')) }.not_to change { project.uploads.count }
+ describe '#extract' do
+ it 'downloads & extracts upload paths' do
+ allow(Dir).to receive(:mktmpdir).and_return(tmpdir)
+ expect(pipeline).to receive(:untar_zxf)
+ file_download_service = instance_double("BulkImports::FileDownloadService")
+
+ expect(BulkImports::FileDownloadService)
+ .to receive(:new)
+ .with(
+ configuration: context.configuration,
+ relative_url: "/#{entity.pluralized_name}/test/export_relations/download?relation=uploads",
+ dir: tmpdir,
+ filename: 'uploads.tar.gz')
+ .and_return(file_download_service)
+
+ expect(file_download_service).to receive(:execute)
+
+ extracted_data = pipeline.extract(context)
+
+ expect(extracted_data.data).to contain_exactly(uploads_dir_path, upload_file_path)
end
end
- context 'when path is a directory' do
- it 'returns' do
- expect { pipeline.load(context, uploads_dir_path) }.not_to change { project.uploads.count }
+ describe '#load' do
+ it 'creates a file upload' do
+ expect { pipeline.load(context, upload_file_path) }.to change { portable.uploads.count }.by(1)
+ end
+
+ context 'when dynamic path is nil' do
+ it 'returns' do
+ expect { pipeline.load(context, File.join(tmpdir, 'test')) }.not_to change { portable.uploads.count }
+ end
+ end
+
+ context 'when path is a directory' do
+ it 'returns' do
+ expect { pipeline.load(context, uploads_dir_path) }.not_to change { portable.uploads.count }
+ end
end
end
end
+
+ context 'when importing to group' do
+ let(:portable) { group }
+ let(:entity) { create(:bulk_import_entity, :group_entity, group: group, source_full_path: 'test') }
+
+ include_examples 'uploads import'
+ end
+
+ context 'when importing to project' do
+ let(:portable) { project }
+ let(:entity) { create(:bulk_import_entity, :project_entity, project: project, source_full_path: 'test') }
+
+ include_examples 'uploads import'
+ end
end
diff --git a/spec/lib/bulk_imports/groups/pipelines/group_avatar_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/group_avatar_pipeline_spec.rb
deleted file mode 100644
index c68284aa580..00000000000
--- a/spec/lib/bulk_imports/groups/pipelines/group_avatar_pipeline_spec.rb
+++ /dev/null
@@ -1,77 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe BulkImports::Groups::Pipelines::GroupAvatarPipeline do
- let_it_be(:user) { create(:user) }
- let_it_be(:group) { create(:group) }
- let_it_be(:bulk_import) { create(:bulk_import, user: user) }
-
- let_it_be(:entity) do
- create(
- :bulk_import_entity,
- group: group,
- bulk_import: bulk_import,
- source_full_path: 'source/full/path',
- destination_name: 'My Destination Group',
- destination_namespace: group.full_path
- )
- end
-
- let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
- let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
-
- subject { described_class.new(context) }
-
- describe '#run' do
- it 'updates the group avatar' do
- avatar_path = 'spec/fixtures/dk.png'
- stub_file_download(
- avatar_path,
- configuration: context.configuration,
- relative_url: "/groups/source%2Ffull%2Fpath/avatar",
- dir: an_instance_of(String),
- file_size_limit: Avatarable::MAXIMUM_FILE_SIZE,
- allowed_content_types: described_class::ALLOWED_AVATAR_DOWNLOAD_TYPES
- )
-
- expect { subject.run }.to change(context.group, :avatar)
-
- expect(context.group.avatar.filename).to eq(File.basename(avatar_path))
- end
-
- it 'raises an error when the avatar upload fails' do
- avatar_path = 'spec/fixtures/aosp_manifest.xml'
- stub_file_download(
- avatar_path,
- configuration: context.configuration,
- relative_url: "/groups/source%2Ffull%2Fpath/avatar",
- dir: an_instance_of(String),
- file_size_limit: Avatarable::MAXIMUM_FILE_SIZE,
- allowed_content_types: described_class::ALLOWED_AVATAR_DOWNLOAD_TYPES
- )
-
- expect_next_instance_of(Gitlab::Import::Logger) do |logger|
- expect(logger).to receive(:error)
- .with(
- bulk_import_id: context.bulk_import.id,
- bulk_import_entity_id: context.entity.id,
- bulk_import_entity_type: context.entity.source_type,
- context_extra: context.extra,
- exception_class: "BulkImports::Groups::Pipelines::GroupAvatarPipeline::GroupAvatarLoadingError",
- exception_message: "Avatar file format is not supported. Please try one of the following supported formats: image/png, image/jpeg, image/gif, image/bmp, image/tiff, image/vnd.microsoft.icon",
- pipeline_class: "BulkImports::Groups::Pipelines::GroupAvatarPipeline",
- pipeline_step: :loader
- )
- end
-
- expect { subject.run }.to change(BulkImports::Failure, :count)
- end
- end
-
- def stub_file_download(filepath = 'file/path.png', **params)
- expect_next_instance_of(BulkImports::FileDownloadService, params.presence) do |downloader|
- expect(downloader).to receive(:execute).and_return(filepath)
- end
- end
-end
diff --git a/spec/lib/bulk_imports/groups/stage_spec.rb b/spec/lib/bulk_imports/groups/stage_spec.rb
index a7acd661282..55a8e40f480 100644
--- a/spec/lib/bulk_imports/groups/stage_spec.rb
+++ b/spec/lib/bulk_imports/groups/stage_spec.rb
@@ -8,13 +8,13 @@ RSpec.describe BulkImports::Groups::Stage do
let(:pipelines) do
[
[0, BulkImports::Groups::Pipelines::GroupPipeline],
- [1, BulkImports::Groups::Pipelines::GroupAvatarPipeline],
[1, BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline],
[1, BulkImports::Groups::Pipelines::MembersPipeline],
[1, BulkImports::Common::Pipelines::LabelsPipeline],
[1, BulkImports::Common::Pipelines::MilestonesPipeline],
[1, BulkImports::Common::Pipelines::BadgesPipeline],
- [2, BulkImports::Common::Pipelines::BoardsPipeline]
+ [2, BulkImports::Common::Pipelines::BoardsPipeline],
+ [2, BulkImports::Common::Pipelines::UploadsPipeline]
]
end
@@ -24,7 +24,7 @@ RSpec.describe BulkImports::Groups::Stage do
describe '.pipelines' do
it 'list all the pipelines with their stage number, ordered by stage' do
- expect(described_class.new(bulk_import).pipelines & pipelines).to eq(pipelines)
+ expect(described_class.new(bulk_import).pipelines & pipelines).to contain_exactly(*pipelines)
expect(described_class.new(bulk_import).pipelines.last.last).to eq(BulkImports::Common::Pipelines::EntityFinisher)
end
diff --git a/spec/lib/gitlab/etag_caching/store_spec.rb b/spec/lib/gitlab/etag_caching/store_spec.rb
index 46195e64715..6188a3fc8b3 100644
--- a/spec/lib/gitlab/etag_caching/store_spec.rb
+++ b/spec/lib/gitlab/etag_caching/store_spec.rb
@@ -80,5 +80,19 @@ RSpec.describe Gitlab::EtagCaching::Store, :clean_gitlab_redis_shared_state do
expect(store.get(key)).to eq(etag)
end
end
+
+ context 'with multiple keys' do
+ let(:keys) { ['/my-group/my-project/builds/234.json', '/api/graphql:pipelines/id/5'] }
+
+ it 'stores and returns multiple values' do
+ etags = store.touch(*keys)
+
+ expect(etags.size).to eq(keys.size)
+
+ keys.each_with_index do |key, i|
+ expect(store.get(key)).to eq(etags[i])
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/snowplow_configured_to_gitlab_collector_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/snowplow_configured_to_gitlab_collector_metric_spec.rb
new file mode 100644
index 00000000000..c9bc101374f
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/snowplow_configured_to_gitlab_collector_metric_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::SnowplowConfiguredToGitlabCollectorMetric do
+ using RSpec::Parameterized::TableSyntax
+
+ context 'for collector_hostname option' do
+ where(:collector_hostname, :expected_value) do
+ 'snowplow.trx.gitlab.net' | true
+ 'foo.bar.something.net' | false
+ end
+
+ with_them do
+ before do
+ stub_application_setting(snowplow_collector_hostname: collector_hostname)
+ end
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: 'none' }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/snowplow_enabled_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/snowplow_enabled_metric_spec.rb
new file mode 100644
index 00000000000..1e0cdd650fa
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/snowplow_enabled_metric_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::SnowplowEnabledMetric do
+ using RSpec::Parameterized::TableSyntax
+
+ context 'for snowplow enabled option' do
+ where(:snowplow_enabled, :expected_value) do
+ true | true
+ false | false
+ end
+
+ with_them do
+ before do
+ stub_application_setting(snowplow_enabled: snowplow_enabled)
+ end
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: 'none' }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index f6d3047609e..c3c35279fe8 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -1045,6 +1045,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
describe ".system_usage_data_settings" do
let(:prometheus_client) { double(Gitlab::PrometheusClient) }
+ let(:snowplow_gitlab_host?) { Gitlab::CurrentSettings.snowplow_collector_hostname == 'snowplow.trx.gitlab.net' }
before do
allow(described_class).to receive(:operating_system).and_return('ubuntu-20.04')
@@ -1089,6 +1090,17 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
it 'gathers user_cap_feature_enabled' do
expect(subject[:settings][:user_cap_feature_enabled]).to eq(Gitlab::CurrentSettings.new_user_signups_cap)
end
+
+ context 'snowplow stats' do
+ before do
+ stub_feature_flags(usage_data_instrumentation: false)
+ end
+
+ it 'gathers snowplow stats' do
+ expect(subject[:settings][:snowplow_enabled]).to eq(Gitlab::CurrentSettings.snowplow_enabled?)
+ expect(subject[:settings][:snowplow_configured_to_gitlab_collector]).to eq(snowplow_gitlab_host?)
+ end
+ end
end
end
diff --git a/spec/models/bulk_imports/entity_spec.rb b/spec/models/bulk_imports/entity_spec.rb
index 90cb0185b9c..e5bbac62dcc 100644
--- a/spec/models/bulk_imports/entity_spec.rb
+++ b/spec/models/bulk_imports/entity_spec.rb
@@ -298,4 +298,14 @@ RSpec.describe BulkImports::Entity, type: :model do
expect(entity.wikis_url_path).to eq("/groups/#{entity.encoded_source_full_path}/wikis")
end
end
+
+ describe '#update_service' do
+ it 'returns correct update service class' do
+ group_entity = build(:bulk_import_entity)
+ project_entity = build(:bulk_import_entity, :project_entity)
+
+ expect(group_entity.update_service).to eq(::Groups::UpdateService)
+ expect(project_entity.update_service).to eq(::Projects::UpdateService)
+ end
+ end
end
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index 841608b96fe..3b6d756a738 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -1503,10 +1503,30 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
describe 'pipeline caching' do
- it 'performs ExpirePipelinesCacheWorker' do
- expect(ExpirePipelineCacheWorker).to receive(:perform_async).with(pipeline.id)
+ context 'when expire_job_and_pipeline_cache_synchronously is enabled' do
+ before do
+ stub_feature_flags(expire_job_and_pipeline_cache_synchronously: true)
+ end
- pipeline.cancel
+ it 'executes Ci::ExpirePipelineCacheService' do
+ expect_next_instance_of(Ci::ExpirePipelineCacheService) do |service|
+ expect(service).to receive(:execute).with(pipeline)
+ end
+
+ pipeline.cancel
+ end
+ end
+
+ context 'when expire_job_and_pipeline_cache_synchronously is disabled' do
+ before do
+ stub_feature_flags(expire_job_and_pipeline_cache_synchronously: false)
+ end
+
+ it 'performs ExpirePipelinesCacheWorker' do
+ expect(ExpirePipelineCacheWorker).to receive(:perform_async).with(pipeline.id)
+
+ pipeline.cancel
+ end
end
end
diff --git a/spec/models/commit_status_spec.rb b/spec/models/commit_status_spec.rb
index a4d4d0a58ff..d675b0e7221 100644
--- a/spec/models/commit_status_spec.rb
+++ b/spec/models/commit_status_spec.rb
@@ -46,10 +46,28 @@ RSpec.describe CommitStatus do
describe 'status state machine' do
let!(:commit_status) { create(:commit_status, :running, project: project) }
- it 'invalidates the cache after a transition' do
- expect(ExpireJobCacheWorker).to receive(:perform_async).with(commit_status.id)
+ context 'when expire_job_and_pipeline_cache_synchronously is enabled' do
+ before do
+ stub_feature_flags(expire_job_and_pipeline_cache_synchronously: true)
+ end
+
+ it 'invalidates the cache after a transition' do
+ expect(commit_status).to receive(:expire_etag_cache!)
+
+ commit_status.success!
+ end
+ end
+
+ context 'when expire_job_and_pipeline_cache_synchronously is disabled' do
+ before do
+ stub_feature_flags(expire_job_and_pipeline_cache_synchronously: false)
+ end
+
+ it 'invalidates the cache after a transition' do
+ expect(ExpireJobCacheWorker).to receive(:perform_async).with(commit_status.id)
- commit_status.success!
+ commit_status.success!
+ end
end
describe 'transitioning to running' do
@@ -949,4 +967,15 @@ RSpec.describe CommitStatus do
described_class.bulk_insert_tags!(statuses, tag_list_by_build)
end
end
+
+ describe '#expire_etag_cache!' do
+ it 'expires the etag cache' do
+ expect_next_instance_of(Gitlab::EtagCaching::Store) do |etag_store|
+ job_path = Gitlab::Routing.url_helpers.project_build_path(project, commit_status.id, format: :json)
+ expect(etag_store).to receive(:touch).with(job_path)
+ end
+
+ commit_status.expire_etag_cache!
+ end
+ end
end
diff --git a/spec/services/bulk_imports/uploads_export_service_spec.rb b/spec/services/bulk_imports/uploads_export_service_spec.rb
new file mode 100644
index 00000000000..39bcacfdc5e
--- /dev/null
+++ b/spec/services/bulk_imports/uploads_export_service_spec.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::UploadsExportService do
+ let_it_be(:project) { create(:project, avatar: fixture_file_upload('spec/fixtures/rails_sample.png', 'image/png')) }
+ let_it_be(:upload) { create(:upload, :with_file, :issuable_upload, uploader: FileUploader, model: project) }
+ let_it_be(:export_path) { Dir.mktmpdir }
+
+ subject(:service) { described_class.new(project, export_path) }
+
+ after do
+ FileUtils.remove_entry(export_path) if Dir.exist?(export_path)
+ end
+
+ describe '#execute' do
+ it 'exports project uploads and avatar' do
+ subject.execute
+
+ expect(File.exist?(File.join(export_path, 'avatar', 'rails_sample.png'))).to eq(true)
+ expect(File.exist?(File.join(export_path, upload.secret, upload.retrieve_uploader.filename))).to eq(true)
+ end
+ end
+end
diff --git a/spec/services/ci/expire_pipeline_cache_service_spec.rb b/spec/services/ci/expire_pipeline_cache_service_spec.rb
index bc975938365..8cfe756faf3 100644
--- a/spec/services/ci/expire_pipeline_cache_service_spec.rb
+++ b/spec/services/ci/expire_pipeline_cache_service_spec.rb
@@ -18,14 +18,14 @@ RSpec.describe Ci::ExpirePipelineCacheService do
graphql_pipeline_sha_path = "/api/graphql:pipelines/sha/#{pipeline.sha}"
graphql_project_on_demand_scan_counts_path = "/api/graphql:on_demand_scan/counts/#{project.full_path}"
- expect_next_instance_of(Gitlab::EtagCaching::Store) do |store|
- expect(store).to receive(:touch).with(pipelines_path)
- expect(store).to receive(:touch).with(new_mr_pipelines_path)
- expect(store).to receive(:touch).with(pipeline_path)
- expect(store).to receive(:touch).with(graphql_pipeline_path)
- expect(store).to receive(:touch).with(graphql_pipeline_sha_path)
- expect(store).to receive(:touch).with(graphql_project_on_demand_scan_counts_path)
- end
+ expect_touched_etag_caching_paths(
+ pipelines_path,
+ new_mr_pipelines_path,
+ pipeline_path,
+ graphql_pipeline_path,
+ graphql_pipeline_sha_path,
+ graphql_project_on_demand_scan_counts_path
+ )
subject.execute(pipeline)
end
@@ -37,9 +37,10 @@ RSpec.describe Ci::ExpirePipelineCacheService do
merge_request_pipelines_path = "/#{project.full_path}/-/merge_requests/#{merge_request.iid}/pipelines.json"
merge_request_widget_path = "/#{project.full_path}/-/merge_requests/#{merge_request.iid}/cached_widget.json"
- allow_any_instance_of(Gitlab::EtagCaching::Store).to receive(:touch)
- expect_any_instance_of(Gitlab::EtagCaching::Store).to receive(:touch).with(merge_request_pipelines_path)
- expect_any_instance_of(Gitlab::EtagCaching::Store).to receive(:touch).with(merge_request_widget_path)
+ expect_touched_etag_caching_paths(
+ merge_request_pipelines_path,
+ merge_request_widget_path
+ )
subject.execute(merge_request.all_pipelines.last)
end
@@ -78,10 +79,7 @@ RSpec.describe Ci::ExpirePipelineCacheService do
it 'updates the cache of dependent pipeline' do
dependent_pipeline_path = "/#{source.source_project.full_path}/-/pipelines/#{source.source_pipeline.id}.json"
- expect_next_instance_of(Gitlab::EtagCaching::Store) do |store|
- allow(store).to receive(:touch)
- expect(store).to receive(:touch).with(dependent_pipeline_path)
- end
+ expect_touched_etag_caching_paths(dependent_pipeline_path)
subject.execute(pipeline)
end
@@ -94,13 +92,31 @@ RSpec.describe Ci::ExpirePipelineCacheService do
it 'updates the cache of dependent pipeline' do
dependent_pipeline_path = "/#{source.project.full_path}/-/pipelines/#{source.pipeline.id}.json"
- expect_next_instance_of(Gitlab::EtagCaching::Store) do |store|
- allow(store).to receive(:touch)
- expect(store).to receive(:touch).with(dependent_pipeline_path)
- end
+ expect_touched_etag_caching_paths(dependent_pipeline_path)
subject.execute(pipeline)
end
end
+
+ it 'does not do N+1 queries' do
+ subject.execute(pipeline)
+
+ control = ActiveRecord::QueryRecorder.new { subject.execute(pipeline) }
+
+ create(:ci_sources_pipeline, pipeline: pipeline)
+ create(:ci_sources_pipeline, source_job: create(:ci_build, pipeline: pipeline))
+
+ expect { subject.execute(pipeline) }.not_to exceed_query_limit(control.count)
+ end
+ end
+
+ def expect_touched_etag_caching_paths(*paths)
+ expect_next_instance_of(Gitlab::EtagCaching::Store) do |store|
+ expect(store).to receive(:touch).and_wrap_original do |m, *args|
+ expect(args).to include(*paths)
+
+ m.call(*args)
+ end
+ end
end
end
diff --git a/spec/services/concerns/audit_event_save_type_spec.rb b/spec/services/concerns/audit_event_save_type_spec.rb
new file mode 100644
index 00000000000..fbaebd9f85c
--- /dev/null
+++ b/spec/services/concerns/audit_event_save_type_spec.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe AuditEventSaveType do
+ subject(:target) { Object.new.extend(described_class) }
+
+ describe '#should_save_database? and #should_save_stream?' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:query_method, :query_param, :result) do
+ :should_save_stream? | :stream | true
+ :should_save_stream? | :database_and_stream | true
+ :should_save_database? | :database | true
+ :should_save_database? | :database_and_stream | true
+ :should_save_stream? | :database | false
+ :should_save_stream? | nil | false
+ :should_save_database? | :stream | false
+ :should_save_database? | nil | false
+ end
+
+ with_them do
+ it 'returns corresponding results according to the query_method and query_param' do
+ expect(target.send(query_method, query_param)).to eq result
+ end
+ end
+ end
+end