Welcome to mirror list, hosted at ThFree Co, Russian Federation.

gitlab.com/gitlab-org/gitlab-foss.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorGitLab Bot <gitlab-bot@gitlab.com>2024-01-24 00:09:27 +0300
committerGitLab Bot <gitlab-bot@gitlab.com>2024-01-24 00:09:27 +0300
commit17bb9dd270c78fad45851c6cc6ec6e6fdb3d23bf (patch)
treeaa7235893811d97055b3fc750d139a039ae95b0a
parentabd2c6b32aabff4654b6be9cb98b59dcd3193fc4 (diff)
Add latest changes from gitlab-org/gitlab@master
-rw-r--r--.rubocop_todo/layout/empty_line_after_magic_comment.yml1
-rw-r--r--.rubocop_todo/rspec/feature_category.yml1
-rw-r--r--.rubocop_todo/style/inline_disable_annotation.yml2
-rw-r--r--app/assets/javascripts/ci/runner/admin_new_runner/admin_new_runner_app.vue3
-rw-r--r--app/assets/javascripts/ci/runner/components/runner_cloud_connection_form.vue15
-rw-r--r--app/assets/javascripts/ci/runner/components/runner_platforms_radio_group.vue26
-rw-r--r--app/assets/javascripts/ci/runner/constants.js1
-rw-r--r--app/assets/javascripts/ci/runner/group_new_runner/group_new_runner_app.vue24
-rw-r--r--app/assets/javascripts/ci/runner/project_new_runner/project_new_runner_app.vue24
-rw-r--r--app/assets/javascripts/constants.js2
-rw-r--r--app/assets/javascripts/gl_form.js16
-rw-r--r--app/assets/javascripts/observability/client.js20
-rw-r--r--app/assets/javascripts/pages/projects/blob/show/index.js2
-rw-r--r--app/assets/javascripts/repository/components/blob_content_viewer.vue12
-rw-r--r--app/assets/javascripts/repository/components/blob_viewers/index.js8
-rw-r--r--app/assets/javascripts/repository/index.js2
-rw-r--r--app/assets/javascripts/repository/mixins/highlight_mixin.js2
-rw-r--r--app/assets/javascripts/vue_shared/components/source_viewer/source_viewer.vue356
-rw-r--r--app/assets/javascripts/vue_shared/components/source_viewer/source_viewer_new.vue175
-rw-r--r--app/assets/stylesheets/page_bundles/login.scss6
-rw-r--r--app/controllers/groups/runners_controller.rb4
-rw-r--r--app/controllers/projects/blob_controller.rb1
-rw-r--r--app/controllers/projects/deployments_controller.rb6
-rw-r--r--app/controllers/projects/runners_controller.rb4
-rw-r--r--app/controllers/projects/tree_controller.rb1
-rw-r--r--app/controllers/projects_controller.rb1
-rw-r--r--app/models/packages/protection/rule.rb13
-rw-r--r--app/services/packages/npm/create_package_service.rb2
-rw-r--r--app/views/layouts/devise.html.haml5
-rw-r--r--app/views/projects/deployments/show.html.haml4
-rw-r--r--config/feature_flags/development/highlight_js_worker.yml8
-rw-r--r--config/feature_flags/gitlab_com_derisk/ci_text_interpolation.yml9
-rw-r--r--config/feature_flags/gitlab_com_derisk/external_pipeline_validation_migration.yml9
-rw-r--r--config/feature_flags/ops/log_large_in_list_queries.yml9
-rw-r--r--config/feature_flags/wip/deployment_details_page.yml8
-rw-r--r--config/feature_flags/wip/gcp_runner.yml9
-rw-r--r--config/initializers/database_query_analyzers.rb1
-rw-r--r--config/routes/project.rb2
-rw-r--r--db/post_migrate/20240123155252_remove_project_import_level_from_namespace_settings.rb10
-rw-r--r--db/schema_migrations/202401231552521
-rw-r--r--db/structure.sql1
-rw-r--r--doc/administration/geo/replication/multiple_servers.md28
-rw-r--r--doc/administration/geo/replication/troubleshooting.md2
-rw-r--r--doc/administration/geo/setup/database.md2
-rw-r--r--doc/development/database/index.md10
-rw-r--r--doc/development/database/partitioning/int_range.md2
-rw-r--r--doc/update/index.md4
-rw-r--r--doc/user/project/repository/git_blame.md6
-rw-r--r--lib/backup/tasks/task.rb32
-rw-r--r--lib/gitlab/background_migration/backfill_project_import_level.rb38
-rw-r--r--lib/gitlab/ci/config/interpolation/text_interpolator.rb16
-rw-r--r--lib/gitlab/ci/config/yaml/documents.rb9
-rw-r--r--lib/gitlab/ci/config/yaml/loader.rb47
-rw-r--r--lib/gitlab/ci/pipeline/chain/validate/external.rb11
-rw-r--r--lib/gitlab/database/query_analyzer.rb21
-rw-r--r--lib/gitlab/database/query_analyzers/log_large_in_lists.rb74
-rw-r--r--locale/gitlab.pot11
-rw-r--r--spec/controllers/projects/deployments_controller_spec.rb43
-rw-r--r--spec/dot_gitlab_ci/rules_spec.rb14
-rw-r--r--spec/fixtures/gitlab/database/query_analyzers/large_query_with_in_list.txt1
-rw-r--r--spec/fixtures/gitlab/database/query_analyzers/small_query_with_in_list.txt1
-rw-r--r--spec/fixtures/gitlab/database/query_analyzers/small_query_without_in_list.txt1
-rw-r--r--spec/frontend/__helpers__/mock_observability_client.js1
-rw-r--r--spec/frontend/ci/runner/components/runner_cloud_form_spec.js16
-rw-r--r--spec/frontend/ci/runner/components/runner_platforms_radio_group_spec.js123
-rw-r--r--spec/frontend/ci/runner/group_new_runner_app/group_new_runner_app_spec.js122
-rw-r--r--spec/frontend/ci/runner/project_new_runner_app/project_new_runner_app_spec.js122
-rw-r--r--spec/frontend/observability/client_spec.js192
-rw-r--r--spec/frontend/repository/components/blob_content_viewer_spec.js22
-rw-r--r--spec/frontend/repository/mixins/highlight_mixin_spec.js1
-rw-r--r--spec/frontend/vue_shared/components/source_viewer/source_viewer_new_spec.js191
-rw-r--r--spec/frontend/vue_shared/components/source_viewer/source_viewer_spec.js330
-rw-r--r--spec/lib/gitlab/background_migration/backfill_project_import_level_spec.rb122
-rw-r--r--spec/lib/gitlab/ci/config/external/file/base_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/config/interpolation/text_interpolator_spec.rb11
-rw-r--r--spec/lib/gitlab/ci/config/yaml/documents_spec.rb18
-rw-r--r--spec/lib/gitlab/ci/config/yaml/loader_spec.rb237
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb37
-rw-r--r--spec/lib/gitlab/database/query_analyzer_spec.rb28
-rw-r--r--spec/lib/gitlab/database/query_analyzers/ci/partitioning_id_analyzer_spec.rb2
-rw-r--r--spec/lib/gitlab/database/query_analyzers/ci/partitioning_routing_analyzer_spec.rb2
-rw-r--r--spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb2
-rw-r--r--spec/lib/gitlab/database/query_analyzers/gitlab_schemas_validate_connection_spec.rb2
-rw-r--r--spec/lib/gitlab/database/query_analyzers/log_large_in_lists_spec.rb148
-rw-r--r--spec/lib/gitlab/database/query_analyzers/prevent_set_operator_mismatch_spec.rb2
-rw-r--r--spec/lib/gitlab/database/query_analyzers/restrict_allowed_schemas_spec.rb2
-rw-r--r--spec/lib/gitlab/metrics/exporter/base_exporter_spec.rb4
-rw-r--r--spec/models/packages/protection/rule_spec.rb11
-rw-r--r--spec/services/ci/create_pipeline_service_spec.rb4
-rw-r--r--spec/support/rspec_order_todo.yml1
90 files changed, 1636 insertions, 1298 deletions
diff --git a/.rubocop_todo/layout/empty_line_after_magic_comment.yml b/.rubocop_todo/layout/empty_line_after_magic_comment.yml
index 1467f835e3f..98968588650 100644
--- a/.rubocop_todo/layout/empty_line_after_magic_comment.yml
+++ b/.rubocop_todo/layout/empty_line_after_magic_comment.yml
@@ -368,7 +368,6 @@ Layout/EmptyLineAfterMagicComment:
- 'lib/gitlab/auth/otp/fortinet.rb'
- 'lib/gitlab/background_migration/backfill_imported_issue_search_data.rb'
- 'lib/gitlab/background_migration/backfill_issue_search_data.rb'
- - 'lib/gitlab/background_migration/backfill_project_import_level.rb'
- 'lib/gitlab/background_migration/backfill_project_namespace_details.rb'
- 'lib/gitlab/background_migration/mailers/unconfirm_mailer.rb'
- 'lib/gitlab/ci/secure_files/mobile_provision.rb'
diff --git a/.rubocop_todo/rspec/feature_category.yml b/.rubocop_todo/rspec/feature_category.yml
index a9cbafd04a7..ff159a848f2 100644
--- a/.rubocop_todo/rspec/feature_category.yml
+++ b/.rubocop_todo/rspec/feature_category.yml
@@ -2749,7 +2749,6 @@ RSpec/FeatureCategory:
- 'spec/lib/gitlab/background_migration/backfill_namespace_id_of_vulnerability_reads_spec.rb'
- 'spec/lib/gitlab/background_migration/backfill_note_discussion_id_spec.rb'
- 'spec/lib/gitlab/background_migration/backfill_project_feature_package_registry_access_level_spec.rb'
- - 'spec/lib/gitlab/background_migration/backfill_project_import_level_spec.rb'
- 'spec/lib/gitlab/background_migration/backfill_project_namespace_details_spec.rb'
- 'spec/lib/gitlab/background_migration/backfill_project_repositories_spec.rb'
- 'spec/lib/gitlab/background_migration/backfill_topics_title_spec.rb'
diff --git a/.rubocop_todo/style/inline_disable_annotation.yml b/.rubocop_todo/style/inline_disable_annotation.yml
index b8132af6b62..470ede21304 100644
--- a/.rubocop_todo/style/inline_disable_annotation.yml
+++ b/.rubocop_todo/style/inline_disable_annotation.yml
@@ -2261,7 +2261,6 @@ Style/InlineDisableAnnotation:
- 'lib/gitlab/background_migration/backfill_missing_vulnerability_dismissal_details.rb'
- 'lib/gitlab/background_migration/backfill_nuget_normalized_version.rb'
- 'lib/gitlab/background_migration/backfill_partitioned_table.rb'
- - 'lib/gitlab/background_migration/backfill_project_import_level.rb'
- 'lib/gitlab/background_migration/backfill_project_member_namespace_id.rb'
- 'lib/gitlab/background_migration/backfill_project_namespace_on_issues.rb'
- 'lib/gitlab/background_migration/backfill_project_repositories.rb'
@@ -2852,7 +2851,6 @@ Style/InlineDisableAnnotation:
- 'spec/lib/gitlab/background_migration/backfill_finding_id_in_vulnerabilities_spec.rb'
- 'spec/lib/gitlab/background_migration/backfill_has_merge_request_of_vulnerability_reads_spec.rb'
- 'spec/lib/gitlab/background_migration/backfill_missing_ci_cd_settings_spec.rb'
- - 'spec/lib/gitlab/background_migration/backfill_project_import_level_spec.rb'
- 'spec/lib/gitlab/background_migration/backfill_project_repositories_spec.rb'
- 'spec/lib/gitlab/background_migration/backfill_resource_link_events_spec.rb'
- 'spec/lib/gitlab/background_migration/backfill_root_storage_statistics_fork_storage_sizes_spec.rb'
diff --git a/app/assets/javascripts/ci/runner/admin_new_runner/admin_new_runner_app.vue b/app/assets/javascripts/ci/runner/admin_new_runner/admin_new_runner_app.vue
index 97163c1f55c..1e4ef535e1b 100644
--- a/app/assets/javascripts/ci/runner/admin_new_runner/admin_new_runner_app.vue
+++ b/app/assets/javascripts/ci/runner/admin_new_runner/admin_new_runner_app.vue
@@ -59,7 +59,8 @@ export default {
<h2 class="gl-font-size-h2 gl-my-5">
{{ s__('Runners|Platform') }}
</h2>
- <runner-platforms-radio-group v-model="platform" />
+
+ <runner-platforms-radio-group v-model="platform" admin />
<hr aria-hidden="true" />
diff --git a/app/assets/javascripts/ci/runner/components/runner_cloud_connection_form.vue b/app/assets/javascripts/ci/runner/components/runner_cloud_connection_form.vue
new file mode 100644
index 00000000000..c213607670e
--- /dev/null
+++ b/app/assets/javascripts/ci/runner/components/runner_cloud_connection_form.vue
@@ -0,0 +1,15 @@
+<script>
+import { s__ } from '~/locale';
+
+export default {
+ name: 'RunnerCloudForm',
+ i18n: {
+ title: s__('Runners|Google Cloud connection'),
+ },
+};
+</script>
+<template>
+ <div>
+ <h2 class="gl-font-size-h2">{{ $options.i18n.title }}</h2>
+ </div>
+</template>
diff --git a/app/assets/javascripts/ci/runner/components/runner_platforms_radio_group.vue b/app/assets/javascripts/ci/runner/components/runner_platforms_radio_group.vue
index a841f66b566..ba50932be4e 100644
--- a/app/assets/javascripts/ci/runner/components/runner_platforms_radio_group.vue
+++ b/app/assets/javascripts/ci/runner/components/runner_platforms_radio_group.vue
@@ -3,11 +3,13 @@ import DOCKER_LOGO_URL from '@gitlab/svgs/dist/illustrations/third-party-logos/c
import LINUX_LOGO_URL from '@gitlab/svgs/dist/illustrations/third-party-logos/linux.svg?url';
import KUBERNETES_LOGO_URL from '@gitlab/svgs/dist/illustrations/logos/kubernetes.svg?url';
import { GlFormRadioGroup, GlIcon, GlLink } from '@gitlab/ui';
+import glFeatureFlagsMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
import {
LINUX_PLATFORM,
MACOS_PLATFORM,
WINDOWS_PLATFORM,
+ GOOGLE_CLOUD_PLATFORM,
DOCKER_HELP_URL,
KUBERNETES_HELP_URL,
} from '../constants';
@@ -21,18 +23,29 @@ export default {
GlIcon,
RunnerPlatformsRadio,
},
+ mixins: [glFeatureFlagsMixin()],
props: {
value: {
type: String,
required: false,
default: null,
},
+ admin: {
+ type: Boolean,
+ required: false,
+ default: false,
+ },
},
data() {
return {
model: this.value,
};
},
+ computed: {
+ gcpEnabled() {
+ return this.glFeatures.gcpRunner && !this.admin;
+ },
+ },
watch: {
model() {
this.$emit('input', this.model);
@@ -42,7 +55,7 @@ export default {
LINUX_LOGO_URL,
MACOS_PLATFORM,
WINDOWS_PLATFORM,
-
+ GOOGLE_CLOUD_PLATFORM,
DOCKER_HELP_URL,
DOCKER_LOGO_URL,
KUBERNETES_HELP_URL,
@@ -73,6 +86,17 @@ export default {
</div>
</div>
+ <div v-if="gcpEnabled" class="gl-mt-3 gl-mb-6">
+ <label>{{ s__('Runners|Cloud') }}</label>
+
+ <div class="gl-display-flex gl-flex-wrap gl-gap-3">
+ <!-- eslint-disable @gitlab/vue-require-i18n-strings -->
+ <runner-platforms-radio v-model="model" :value="$options.GOOGLE_CLOUD_PLATFORM">
+ Google Cloud
+ </runner-platforms-radio>
+ </div>
+ </div>
+
<div class="gl-mt-3 gl-mb-6">
<label>{{ s__('Runners|Containers') }}</label>
diff --git a/app/assets/javascripts/ci/runner/constants.js b/app/assets/javascripts/ci/runner/constants.js
index d04d75b6e75..b275a8f5749 100644
--- a/app/assets/javascripts/ci/runner/constants.js
+++ b/app/assets/javascripts/ci/runner/constants.js
@@ -220,6 +220,7 @@ export const GROUP_FILTERED_SEARCH_NAMESPACE = 'group_runners';
export const LINUX_PLATFORM = 'linux';
export const MACOS_PLATFORM = 'osx';
export const WINDOWS_PLATFORM = 'windows';
+export const GOOGLE_CLOUD_PLATFORM = 'google';
// About Gitlab Runner Package host
export const RUNNER_PACKAGE_HOST = 'gitlab-runner-downloads.s3.amazonaws.com';
diff --git a/app/assets/javascripts/ci/runner/group_new_runner/group_new_runner_app.vue b/app/assets/javascripts/ci/runner/group_new_runner/group_new_runner_app.vue
index c907f9c8982..21058c93d15 100644
--- a/app/assets/javascripts/ci/runner/group_new_runner/group_new_runner_app.vue
+++ b/app/assets/javascripts/ci/runner/group_new_runner/group_new_runner_app.vue
@@ -2,11 +2,17 @@
import { createAlert, VARIANT_SUCCESS } from '~/alert';
import { visitUrl, setUrlParams } from '~/lib/utils/url_utility';
import { s__ } from '~/locale';
-
+import glFeatureFlagsMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
import RegistrationCompatibilityAlert from '~/ci/runner/components/registration/registration_compatibility_alert.vue';
import RunnerPlatformsRadioGroup from '~/ci/runner/components/runner_platforms_radio_group.vue';
+import RunnerCloudConnectionForm from '~/ci/runner/components/runner_cloud_connection_form.vue';
import RunnerCreateForm from '~/ci/runner/components/runner_create_form.vue';
-import { DEFAULT_PLATFORM, GROUP_TYPE, PARAM_KEY_PLATFORM } from '../constants';
+import {
+ DEFAULT_PLATFORM,
+ GOOGLE_CLOUD_PLATFORM,
+ GROUP_TYPE,
+ PARAM_KEY_PLATFORM,
+} from '../constants';
import { saveAlertToLocalStorage } from '../local_storage_alert/save_alert_to_local_storage';
export default {
@@ -14,8 +20,10 @@ export default {
components: {
RegistrationCompatibilityAlert,
RunnerPlatformsRadioGroup,
+ RunnerCloudConnectionForm,
RunnerCreateForm,
},
+ mixins: [glFeatureFlagsMixin()],
props: {
groupId: {
type: String,
@@ -27,6 +35,14 @@ export default {
platform: DEFAULT_PLATFORM,
};
},
+ computed: {
+ gcpEnabled() {
+ return this.glFeatures.gcpRunner;
+ },
+ showCloudForm() {
+ return this.platform === GOOGLE_CLOUD_PLATFORM && this.gcpEnabled;
+ },
+ },
methods: {
onSaved(runner) {
const params = { [PARAM_KEY_PLATFORM]: this.platform };
@@ -65,11 +81,15 @@ export default {
<h2 class="gl-font-size-h2 gl-my-5">
{{ s__('Runners|Platform') }}
</h2>
+
<runner-platforms-radio-group v-model="platform" />
<hr aria-hidden="true" />
+ <runner-cloud-connection-form v-if="showCloudForm" />
+
<runner-create-form
+ v-else
:runner-type="$options.GROUP_TYPE"
:group-id="groupId"
@saved="onSaved"
diff --git a/app/assets/javascripts/ci/runner/project_new_runner/project_new_runner_app.vue b/app/assets/javascripts/ci/runner/project_new_runner/project_new_runner_app.vue
index 241479a8c98..8f3dfbf42ad 100644
--- a/app/assets/javascripts/ci/runner/project_new_runner/project_new_runner_app.vue
+++ b/app/assets/javascripts/ci/runner/project_new_runner/project_new_runner_app.vue
@@ -2,11 +2,17 @@
import { createAlert, VARIANT_SUCCESS } from '~/alert';
import { visitUrl, setUrlParams } from '~/lib/utils/url_utility';
import { s__ } from '~/locale';
-
+import glFeatureFlagsMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
import RegistrationCompatibilityAlert from '~/ci/runner/components/registration/registration_compatibility_alert.vue';
import RunnerPlatformsRadioGroup from '~/ci/runner/components/runner_platforms_radio_group.vue';
+import RunnerCloudConnectionForm from '~/ci/runner/components/runner_cloud_connection_form.vue';
import RunnerCreateForm from '~/ci/runner/components/runner_create_form.vue';
-import { DEFAULT_PLATFORM, PARAM_KEY_PLATFORM, PROJECT_TYPE } from '../constants';
+import {
+ DEFAULT_PLATFORM,
+ PARAM_KEY_PLATFORM,
+ GOOGLE_CLOUD_PLATFORM,
+ PROJECT_TYPE,
+} from '../constants';
import { saveAlertToLocalStorage } from '../local_storage_alert/save_alert_to_local_storage';
export default {
@@ -14,8 +20,10 @@ export default {
components: {
RegistrationCompatibilityAlert,
RunnerPlatformsRadioGroup,
+ RunnerCloudConnectionForm,
RunnerCreateForm,
},
+ mixins: [glFeatureFlagsMixin()],
props: {
projectId: {
type: String,
@@ -27,6 +35,14 @@ export default {
platform: DEFAULT_PLATFORM,
};
},
+ computed: {
+ gcpEnabled() {
+ return this.glFeatures.gcpRunner;
+ },
+ showCloudForm() {
+ return this.platform === GOOGLE_CLOUD_PLATFORM && this.gcpEnabled;
+ },
+ },
methods: {
onSaved(runner) {
const params = { [PARAM_KEY_PLATFORM]: this.platform };
@@ -65,11 +81,15 @@ export default {
<h2 class="gl-font-size-h2 gl-my-5">
{{ s__('Runners|Platform') }}
</h2>
+
<runner-platforms-radio-group v-model="platform" />
<hr aria-hidden="true" />
+ <runner-cloud-connection-form v-if="showCloudForm" />
+
<runner-create-form
+ v-else
:runner-type="$options.PROJECT_TYPE"
:project-id="projectId"
@saved="onSaved"
diff --git a/app/assets/javascripts/constants.js b/app/assets/javascripts/constants.js
index f43a2d5d8ff..631968ff531 100644
--- a/app/assets/javascripts/constants.js
+++ b/app/assets/javascripts/constants.js
@@ -3,3 +3,5 @@ export const getModifierKey = (removeSuffix = false) => {
const winKey = `Ctrl${removeSuffix ? '' : '+'}`;
return window.gl?.client?.isMac ? '⌘' : winKey;
};
+
+export const PRELOAD_THROTTLE_TIMEOUT_MS = 4000;
diff --git a/app/assets/javascripts/gl_form.js b/app/assets/javascripts/gl_form.js
index f4008fe3cc9..776f27a8583 100644
--- a/app/assets/javascripts/gl_form.js
+++ b/app/assets/javascripts/gl_form.js
@@ -5,6 +5,7 @@ import GfmAutoComplete, { defaultAutocompleteConfig } from 'ee_else_ce/gfm_auto_
import { disableButtonIfEmptyField } from '~/lib/utils/common_utils';
import dropzoneInput from './dropzone_input';
import { addMarkdownListeners, removeMarkdownListeners } from './lib/utils/text_markdown';
+import { PRELOAD_THROTTLE_TIMEOUT_MS } from './constants';
export default class GLForm {
/**
@@ -68,6 +69,21 @@ export default class GLForm {
);
this.autoComplete = new GfmAutoComplete(dataSources);
this.autoComplete.setup(this.form.find('.js-gfm-input'), this.enableGFM);
+
+ if (this.preloadMembers && dataSources?.members) {
+ // for now the preload is only implemented for the members
+ // timeout helping to trottle the preloads in the case content_editor
+ // is set as main comment editor and support for rspec tests
+ // https://gitlab.com/gitlab-org/gitlab/-/issues/427437
+
+ requestIdleCallback(() =>
+ setTimeout(
+ () => this.autoComplete?.fetchData($('.js-gfm-input'), '@'),
+ PRELOAD_THROTTLE_TIMEOUT_MS,
+ ),
+ );
+ }
+
this.formDropzone = dropzoneInput(this.form, { parallelUploads: 1 });
if (this.form.is(':not(.js-no-autosize)')) {
diff --git a/app/assets/javascripts/observability/client.js b/app/assets/javascripts/observability/client.js
index 4fc4ce06528..d3ed168b68e 100644
--- a/app/assets/javascripts/observability/client.js
+++ b/app/assets/javascripts/observability/client.js
@@ -235,6 +235,20 @@ async function fetchTraces(tracingUrl, { filters = {}, pageToken, pageSize, sort
}
}
+async function fetchTracesAnalytics(tracingAnalyticsUrl, { filters = {} } = {}) {
+ const params = filterObjToQueryParams(filters);
+
+ try {
+ const { data } = await axios.get(tracingAnalyticsUrl, {
+ withCredentials: true,
+ params,
+ });
+ return data.results ?? [];
+ } catch (e) {
+ return reportErrorAndThrow(e);
+ }
+}
+
async function fetchServices(servicesUrl) {
try {
const { data } = await axios.get(servicesUrl, {
@@ -339,6 +353,7 @@ export function buildClient(config) {
const {
provisioningUrl,
tracingUrl,
+ tracingAnalyticsUrl,
servicesUrl,
operationsUrl,
metricsUrl,
@@ -353,6 +368,10 @@ export function buildClient(config) {
throw new Error('tracingUrl param must be a string');
}
+ if (typeof tracingAnalyticsUrl !== 'string') {
+ throw new Error('tracingAnalyticsUrl param must be a string');
+ }
+
if (typeof servicesUrl !== 'string') {
throw new Error('servicesUrl param must be a string');
}
@@ -373,6 +392,7 @@ export function buildClient(config) {
enableObservability: () => enableObservability(provisioningUrl),
isObservabilityEnabled: () => isObservabilityEnabled(provisioningUrl),
fetchTraces: (options) => fetchTraces(tracingUrl, options),
+ fetchTracesAnalytics: (options) => fetchTracesAnalytics(tracingAnalyticsUrl, options),
fetchTrace: (traceId) => fetchTrace(tracingUrl, traceId),
fetchServices: () => fetchServices(servicesUrl),
fetchOperations: (serviceName) => fetchOperations(operationsUrl, serviceName),
diff --git a/app/assets/javascripts/pages/projects/blob/show/index.js b/app/assets/javascripts/pages/projects/blob/show/index.js
index d42fb10063e..399ea1cc257 100644
--- a/app/assets/javascripts/pages/projects/blob/show/index.js
+++ b/app/assets/javascripts/pages/projects/blob/show/index.js
@@ -85,7 +85,7 @@ if (viewBlobEl) {
router,
apolloProvider,
provide: {
- highlightWorker: gon.features.highlightJsWorker ? new HighlightWorker() : null,
+ highlightWorker: new HighlightWorker(),
targetBranch,
originalBranch,
resourceId,
diff --git a/app/assets/javascripts/repository/components/blob_content_viewer.vue b/app/assets/javascripts/repository/components/blob_content_viewer.vue
index 8cca70e07a2..8033b5f1225 100644
--- a/app/assets/javascripts/repository/components/blob_content_viewer.vue
+++ b/app/assets/javascripts/repository/components/blob_content_viewer.vue
@@ -9,7 +9,6 @@ import axios from '~/lib/utils/axios_utils';
import { isLoggedIn, handleLocationHash } from '~/lib/utils/common_utils';
import { __ } from '~/locale';
import { redirectTo, getLocationHash } from '~/lib/utils/url_utility'; // eslint-disable-line import/no-deprecated
-import glFeatureFlagMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
import CodeIntelligence from '~/code_navigation/components/app.vue';
import LineHighlighter from '~/blob/line_highlighter';
import blobInfoQuery from 'shared_queries/repository/blob_info.query.graphql';
@@ -33,7 +32,7 @@ export default {
CodeIntelligence,
AiGenie: () => import('ee_component/ai/components/ai_genie.vue'),
},
- mixins: [getRefMixin, glFeatureFlagMixin(), highlightMixin],
+ mixins: [getRefMixin, highlightMixin],
inject: {
originalBranch: {
default: '',
@@ -150,14 +149,7 @@ export default {
},
blobViewer() {
const { fileType } = this.viewer;
- return this.shouldLoadLegacyViewer
- ? null
- : loadViewer(
- fileType,
- this.isUsingLfs,
- this.glFeatures.highlightJsWorker,
- this.blobInfo.language,
- );
+ return this.shouldLoadLegacyViewer ? null : loadViewer(fileType, this.isUsingLfs);
},
shouldLoadLegacyViewer() {
return LEGACY_FILE_TYPES.includes(this.blobInfo.fileType) || this.useFallback;
diff --git a/app/assets/javascripts/repository/components/blob_viewers/index.js b/app/assets/javascripts/repository/components/blob_viewers/index.js
index 016f7f9fe43..96efbc26a33 100644
--- a/app/assets/javascripts/repository/components/blob_viewers/index.js
+++ b/app/assets/javascripts/repository/components/blob_viewers/index.js
@@ -1,5 +1,3 @@
-import { TEXT_FILE_TYPE } from '../../constants';
-
export const viewers = {
csv: () => import('./csv_viewer.vue'),
download: () => import('./download_viewer.vue'),
@@ -17,13 +15,9 @@ export const viewers = {
geo_json: () => import('./geo_json/geo_json_viewer.vue'),
};
-export const loadViewer = (type, isUsingLfs, hljsWorkerEnabled) => {
+export const loadViewer = (type, isUsingLfs) => {
let viewer = viewers[type];
- if (hljsWorkerEnabled && type === TEXT_FILE_TYPE) {
- viewer = () => import('~/vue_shared/components/source_viewer/source_viewer_new.vue');
- }
-
if (!viewer && isUsingLfs) {
viewer = viewers.lfs;
}
diff --git a/app/assets/javascripts/repository/index.js b/app/assets/javascripts/repository/index.js
index afe3f7b1983..ddec4039c73 100644
--- a/app/assets/javascripts/repository/index.js
+++ b/app/assets/javascripts/repository/index.js
@@ -293,7 +293,7 @@ export default function setupVueRepositoryList() {
resourceId,
userId,
explainCodeAvailable: parseBoolean(explainCodeAvailable),
- highlightWorker: gon.features.highlightJsWorker ? new HighlightWorker() : null,
+ highlightWorker: new HighlightWorker(),
},
render(h) {
return h(App);
diff --git a/app/assets/javascripts/repository/mixins/highlight_mixin.js b/app/assets/javascripts/repository/mixins/highlight_mixin.js
index 422a84dff40..1cf182e8f90 100644
--- a/app/assets/javascripts/repository/mixins/highlight_mixin.js
+++ b/app/assets/javascripts/repository/mixins/highlight_mixin.js
@@ -49,7 +49,7 @@ export default {
initHighlightWorker(blob, isUsingLfs) {
const { rawTextBlob, language, fileType, externalStorageUrl, rawPath, simpleViewer } = blob;
- if (!this.glFeatures.highlightJsWorker || simpleViewer?.fileType !== TEXT_FILE_TYPE) return;
+ if (simpleViewer?.fileType !== TEXT_FILE_TYPE) return;
if (this.isUnsupportedLanguage(language)) {
this.handleUnsupportedLanguage(language);
diff --git a/app/assets/javascripts/vue_shared/components/source_viewer/source_viewer.vue b/app/assets/javascripts/vue_shared/components/source_viewer/source_viewer.vue
index 4d5d877d43b..1dd001bd4f5 100644
--- a/app/assets/javascripts/vue_shared/components/source_viewer/source_viewer.vue
+++ b/app/assets/javascripts/vue_shared/components/source_viewer/source_viewer.vue
@@ -1,46 +1,42 @@
<script>
-import { GlLoadingIcon } from '@gitlab/ui';
-import LineHighlighter from '~/blob/line_highlighter';
-import eventHub from '~/notes/event_hub';
-import languageLoader from '~/content_editor/services/highlight_js_language_loader';
-import addBlobLinksTracking from '~/blob/blob_links_tracking';
+import { debounce } from 'lodash';
+import { DEFAULT_DEBOUNCE_AND_THROTTLE_MS } from '~/lib/utils/constants';
+import SafeHtml from '~/vue_shared/directives/safe_html';
import Tracking from '~/tracking';
-import axios from '~/lib/utils/axios_utils';
-import {
- EVENT_ACTION,
- EVENT_LABEL_VIEWER,
- EVENT_LABEL_FALLBACK,
- ROUGE_TO_HLJS_LANGUAGE_MAP,
- LINES_PER_CHUNK,
- LEGACY_FALLBACKS,
- CODEOWNERS_FILE_NAME,
- CODEOWNERS_LANGUAGE,
- SVELTE_LANGUAGE,
-} from './constants';
-import Chunk from './components/chunk.vue';
-import { registerPlugins } from './plugins/index';
+import addBlobLinksTracking from '~/blob/blob_links_tracking';
+import LineHighlighter from '~/blob/line_highlighter';
+import { EVENT_ACTION, EVENT_LABEL_VIEWER, CODEOWNERS_FILE_NAME } from './constants';
+import Chunk from './components/chunk_new.vue';
+import Blame from './components/blame_info.vue';
+import { calculateBlameOffset, shouldRender, toggleBlameClasses } from './utils';
+import blameDataQuery from './queries/blame_data.query.graphql';
-/*
- * This component is optimized to handle source code with many lines of code by splitting source code into chunks of 70 lines of code,
- * we highlight and display the 1st chunk (L1-70) to the user as quickly as possible.
- *
- * The rest of the lines (L71+) is rendered once the browser goes into an idle state (requestIdleCallback).
- * Each chunk is self-contained, this ensures when for example the width of a container on line 1000 changes,
- * it does not trigger a repaint on a parent element that wraps all 1000 lines.
- */
export default {
name: 'SourceViewer',
components: {
- GlLoadingIcon,
Chunk,
+ Blame,
CodeownersValidation: () => import('ee_component/blob/components/codeowners_validation.vue'),
},
+ directives: {
+ SafeHtml,
+ },
mixins: [Tracking.mixin()],
props: {
blob: {
type: Object,
required: true,
},
+ chunks: {
+ type: Array,
+ required: false,
+ default: () => [],
+ },
+ showBlame: {
+ type: Boolean,
+ required: false,
+ default: false,
+ },
projectPath: {
type: String,
required: true,
@@ -52,249 +48,123 @@ export default {
},
data() {
return {
- languageDefinition: null,
- content: this.blob.rawTextBlob,
- hljs: null,
- firstChunk: null,
- chunks: {},
- isLoading: true,
- lineHighlighter: null,
+ lineHighlighter: new LineHighlighter(),
+ blameData: [],
+ renderedChunks: [],
};
},
computed: {
- isLfsBlob() {
- const { storedExternally, externalStorage, simpleViewer } = this.blob;
-
- return storedExternally && externalStorage === 'lfs' && simpleViewer?.fileType === 'text';
- },
- splitContent() {
- return this.content.split(/\r?\n/);
- },
- language() {
- if (this.blob.name && this.blob.name.endsWith(`.${SVELTE_LANGUAGE}`)) {
- // override for svelte files until https://github.com/rouge-ruby/rouge/issues/1717 is resolved
- return SVELTE_LANGUAGE;
- }
- if (this.isCodeownersFile) {
- // override for codeowners files
- return this.$options.codeownersLanguage;
- }
-
- return ROUGE_TO_HLJS_LANGUAGE_MAP[this.blob.language?.toLowerCase()];
- },
- lineNumbers() {
- return this.splitContent.length;
- },
- unsupportedLanguage() {
- const supportedLanguages = Object.keys(languageLoader);
- const unsupportedLanguage =
- !supportedLanguages.includes(this.language) &&
- !supportedLanguages.includes(this.blob.language?.toLowerCase());
+ blameInfo() {
+ return this.blameData.reduce((result, blame, index) => {
+ if (shouldRender(this.blameData, index)) {
+ result.push({
+ ...blame,
+ blameOffset: calculateBlameOffset(blame.lineno, index),
+ });
+ }
- return LEGACY_FALLBACKS.includes(this.language) || unsupportedLanguage;
- },
- totalChunks() {
- return Object.keys(this.chunks).length;
+ return result;
+ }, []);
},
isCodeownersFile() {
return this.blob.name === CODEOWNERS_FILE_NAME;
},
},
- async created() {
- if (this.isLfsBlob) {
- await axios
- .get(this.blob.externalStorageUrl || this.blob.rawPath)
- .then((result) => {
- this.content = result.data;
- })
- .catch(() => this.$emit('error'));
- }
-
+ watch: {
+ showBlame: {
+ handler(shouldShow) {
+ toggleBlameClasses(this.blameData, shouldShow);
+ this.requestBlameInfo(this.renderedChunks[0]);
+ },
+ immediate: true,
+ },
+ blameData: {
+ handler(blameData) {
+ if (!this.showBlame) return;
+ toggleBlameClasses(blameData, true);
+ },
+ immediate: true,
+ },
+ },
+ created() {
+ this.handleAppear = debounce(this.handleChunkAppear, DEFAULT_DEBOUNCE_AND_THROTTLE_MS);
+ this.track(EVENT_ACTION, { label: EVENT_LABEL_VIEWER, property: this.blob.language });
addBlobLinksTracking();
- this.trackEvent(EVENT_LABEL_VIEWER);
-
- if (this.unsupportedLanguage) {
- this.handleUnsupportedLanguage();
- return;
- }
-
- this.generateFirstChunk();
- this.hljs = await this.loadHighlightJS();
-
- if (this.language) {
- this.languageDefinition = await this.loadLanguage();
- }
-
- // Highlight the first chunk as soon as highlight.js is available
- this.highlightChunk(null, true);
-
- window.requestIdleCallback(async () => {
- // Generate the remaining chunks once the browser idles to ensure the browser resources are spent on the most important things first
- this.generateRemainingChunks();
- this.isLoading = false;
- await this.$nextTick();
- this.selectLine();
- });
+ },
+ mounted() {
+ this.selectLine();
},
methods: {
- trackEvent(label) {
- this.track(EVENT_ACTION, { label, property: this.blob.language });
- },
- handleUnsupportedLanguage() {
- this.trackEvent(EVENT_LABEL_FALLBACK);
- this.$emit('error');
- },
- generateFirstChunk() {
- const lines = this.splitContent.splice(0, LINES_PER_CHUNK);
- this.firstChunk = this.createChunk(lines);
- },
- generateRemainingChunks() {
- const result = {};
- for (let i = 0; i < this.splitContent.length; i += LINES_PER_CHUNK) {
- const chunkIndex = Math.floor(i / LINES_PER_CHUNK);
- const lines = this.splitContent.slice(i, i + LINES_PER_CHUNK);
- result[chunkIndex] = this.createChunk(lines, i + LINES_PER_CHUNK);
- }
-
- this.chunks = result;
- },
- createChunk(lines, startingFrom = 0) {
- return {
- content: lines.join('\n'),
- startingFrom,
- totalLines: lines.length,
- language: this.language,
- isHighlighted: false,
- };
- },
- highlightChunk(index, isFirstChunk) {
- const chunk = isFirstChunk ? this.firstChunk : this.chunks[index];
-
- if (chunk.isHighlighted) {
- return;
- }
-
- const { highlightedContent, language } = this.highlight(chunk.content, this.language);
-
- Object.assign(chunk, { language, content: highlightedContent, isHighlighted: true });
-
- this.selectLine();
-
- this.$nextTick(() => eventHub.$emit('showBlobInteractionZones', this.blob.path));
- },
- highlight(content, language) {
- let detectedLanguage = language;
- let highlightedContent;
- if (this.hljs) {
- registerPlugins(this.hljs, this.blob.fileType, this.content);
- if (!detectedLanguage) {
- const hljsHighlightAuto = this.hljs.highlightAuto(content);
- highlightedContent = hljsHighlightAuto.value;
- detectedLanguage = hljsHighlightAuto.language;
- } else if (this.languageDefinition) {
- highlightedContent = this.hljs.highlight(content, { language: this.language }).value;
+ async handleChunkAppear(chunkIndex, handleOverlappingChunk = true) {
+ if (!this.renderedChunks.includes(chunkIndex)) {
+ this.renderedChunks.push(chunkIndex);
+ await this.requestBlameInfo(chunkIndex);
+
+ if (chunkIndex > 0 && handleOverlappingChunk) {
+ // request the blame information for the overlapping chunk in case it is visible in the DOM
+ this.handleChunkAppear(chunkIndex - 1, false);
}
}
-
- return { highlightedContent, language: detectedLanguage };
},
- loadHighlightJS() {
- // If no language can be mapped to highlight.js we load all common languages else we load only the core (smallest footprint)
- return !this.language ? import('highlight.js/lib/common') : import('highlight.js/lib/core');
- },
- async loadSubLanguages(languageDefinition) {
- if (!languageDefinition?.contains) return;
-
- // generate list of languages to load
- const languages = new Set(
- languageDefinition.contains
- .filter((component) => Boolean(component.subLanguage))
- .map((component) => component.subLanguage),
- );
-
- if (languageDefinition.subLanguage) {
- languages.add(languageDefinition.subLanguage);
- }
-
- // load all sub-languages at once
- await Promise.all(
- [...languages].map(async (subLanguage) => {
- const subLanguageDefinition = await languageLoader[subLanguage]();
- this.hljs.registerLanguage(subLanguage, subLanguageDefinition.default);
- }),
- );
- },
- async loadLanguage() {
- let languageDefinition;
-
- try {
- languageDefinition = await languageLoader[this.language]();
- this.hljs.registerLanguage(this.language, languageDefinition.default);
-
- await this.loadSubLanguages(this.hljs.getLanguage(this.language));
- } catch (message) {
- this.$emit('error', message);
- }
-
- return languageDefinition;
+ async requestBlameInfo(chunkIndex) {
+ const chunk = this.chunks[chunkIndex];
+ if (!this.showBlame || !chunk) return;
+
+ const { data } = await this.$apollo.query({
+ query: blameDataQuery,
+ variables: {
+ ref: this.currentRef,
+ fullPath: this.projectPath,
+ filePath: this.blob.path,
+ fromLine: chunk.startingFrom + 1,
+ toLine: chunk.startingFrom + chunk.totalLines,
+ },
+ });
+
+ const blob = data?.project?.repository?.blobs?.nodes[0];
+ const blameGroups = blob?.blame?.groups;
+ const isDuplicate = this.blameData.includes(blameGroups[0]);
+ if (blameGroups && !isDuplicate) this.blameData.push(...blameGroups);
},
async selectLine() {
- if (!this.lineHighlighter) {
- this.lineHighlighter = new LineHighlighter({ scrollBehavior: 'auto' });
- }
await this.$nextTick();
- const scrollEnabled = false;
- this.lineHighlighter.highlightHash(this.$route.hash, scrollEnabled);
+ this.lineHighlighter.highlightHash(this.$route.hash);
},
},
userColorScheme: window.gon.user_color_scheme,
- currentlySelectedLine: null,
- codeownersLanguage: CODEOWNERS_LANGUAGE,
};
</script>
-<template>
- <div
- class="file-content code js-syntax-highlight blob-content gl-display-flex gl-flex-direction-column gl-overflow-auto"
- :class="$options.userColorScheme"
- data-type="simple"
- :data-path="blob.path"
- data-testid="blob-viewer-file-content"
- >
- <codeowners-validation
- v-if="isCodeownersFile"
- class="gl-text-black-normal"
- :current-ref="currentRef"
- :project-path="projectPath"
- :file-path="blob.path"
- />
- <chunk
- v-if="firstChunk"
- :lines="firstChunk.lines"
- :total-lines="firstChunk.totalLines"
- :content="firstChunk.content"
- :starting-from="firstChunk.startingFrom"
- :is-highlighted="firstChunk.isHighlighted"
- is-first-chunk
- :language="firstChunk.language"
- :blame-path="blob.blamePath"
- />
- <gl-loading-icon v-if="isLoading" size="sm" class="gl-my-5" />
- <template v-else>
+<template>
+ <div class="gl-display-flex">
+ <blame v-if="showBlame && blameInfo.length" :blame-info="blameInfo" />
+
+ <div
+ class="file-content code js-syntax-highlight blob-content gl-display-flex gl-flex-direction-column gl-overflow-auto gl-w-full blob-viewer"
+ :class="$options.userColorScheme"
+ data-type="simple"
+ :data-path="blob.path"
+ data-testid="blob-viewer-file-content"
+ >
+ <codeowners-validation
+ v-if="isCodeownersFile"
+ class="gl-text-black-normal"
+ :current-ref="currentRef"
+ :project-path="projectPath"
+ :file-path="blob.path"
+ />
<chunk
- v-for="(chunk, key, index) in chunks"
- :key="key"
- :lines="chunk.lines"
- :content="chunk.content"
+ v-for="(chunk, index) in chunks"
+ :key="index"
+ :chunk-index="index"
+ :is-highlighted="Boolean(chunk.isHighlighted)"
+ :raw-content="chunk.rawContent"
+ :highlighted-content="chunk.highlightedContent"
:total-lines="chunk.totalLines"
:starting-from="chunk.startingFrom"
- :is-highlighted="chunk.isHighlighted"
- :chunk-index="index"
- :language="chunk.language"
:blame-path="blob.blamePath"
- :total-chunks="totalChunks"
- @appear="highlightChunk"
+ @appear="() => handleAppear(index)"
/>
- </template>
+ </div>
</div>
</template>
diff --git a/app/assets/javascripts/vue_shared/components/source_viewer/source_viewer_new.vue b/app/assets/javascripts/vue_shared/components/source_viewer/source_viewer_new.vue
deleted file mode 100644
index e62f38d9ca3..00000000000
--- a/app/assets/javascripts/vue_shared/components/source_viewer/source_viewer_new.vue
+++ /dev/null
@@ -1,175 +0,0 @@
-<script>
-import { debounce } from 'lodash';
-import { DEFAULT_DEBOUNCE_AND_THROTTLE_MS } from '~/lib/utils/constants';
-import SafeHtml from '~/vue_shared/directives/safe_html';
-import Tracking from '~/tracking';
-import addBlobLinksTracking from '~/blob/blob_links_tracking';
-import LineHighlighter from '~/blob/line_highlighter';
-import { EVENT_ACTION, EVENT_LABEL_VIEWER, CODEOWNERS_FILE_NAME } from './constants';
-import Chunk from './components/chunk_new.vue';
-import Blame from './components/blame_info.vue';
-import { calculateBlameOffset, shouldRender, toggleBlameClasses } from './utils';
-import blameDataQuery from './queries/blame_data.query.graphql';
-
-/*
- * Note, this is a new experimental version of the SourceViewer, it is not ready for production use.
- * See the following issue for more details: https://gitlab.com/gitlab-org/gitlab/-/issues/391586
- */
-
-export default {
- name: 'SourceViewerNew',
- components: {
- Chunk,
- Blame,
- CodeownersValidation: () => import('ee_component/blob/components/codeowners_validation.vue'),
- },
- directives: {
- SafeHtml,
- },
- mixins: [Tracking.mixin()],
- props: {
- blob: {
- type: Object,
- required: true,
- },
- chunks: {
- type: Array,
- required: false,
- default: () => [],
- },
- showBlame: {
- type: Boolean,
- required: false,
- default: false,
- },
- projectPath: {
- type: String,
- required: true,
- },
- currentRef: {
- type: String,
- required: true,
- },
- },
- data() {
- return {
- lineHighlighter: new LineHighlighter(),
- blameData: [],
- renderedChunks: [],
- };
- },
- computed: {
- blameInfo() {
- return this.blameData.reduce((result, blame, index) => {
- if (shouldRender(this.blameData, index)) {
- result.push({
- ...blame,
- blameOffset: calculateBlameOffset(blame.lineno, index),
- });
- }
-
- return result;
- }, []);
- },
- isCodeownersFile() {
- return this.blob.name === CODEOWNERS_FILE_NAME;
- },
- },
- watch: {
- showBlame: {
- handler(shouldShow) {
- toggleBlameClasses(this.blameData, shouldShow);
- this.requestBlameInfo(this.renderedChunks[0]);
- },
- immediate: true,
- },
- blameData: {
- handler(blameData) {
- if (!this.showBlame) return;
- toggleBlameClasses(blameData, true);
- },
- immediate: true,
- },
- },
- created() {
- this.handleAppear = debounce(this.handleChunkAppear, DEFAULT_DEBOUNCE_AND_THROTTLE_MS);
- this.track(EVENT_ACTION, { label: EVENT_LABEL_VIEWER, property: this.blob.language });
- addBlobLinksTracking();
- },
- mounted() {
- this.selectLine();
- },
- methods: {
- async handleChunkAppear(chunkIndex, handleOverlappingChunk = true) {
- if (!this.renderedChunks.includes(chunkIndex)) {
- this.renderedChunks.push(chunkIndex);
- await this.requestBlameInfo(chunkIndex);
-
- if (chunkIndex > 0 && handleOverlappingChunk) {
- // request the blame information for overlapping chunk incase it is visible in the DOM
- this.handleChunkAppear(chunkIndex - 1, false);
- }
- }
- },
- async requestBlameInfo(chunkIndex) {
- const chunk = this.chunks[chunkIndex];
- if (!this.showBlame || !chunk) return;
-
- const { data } = await this.$apollo.query({
- query: blameDataQuery,
- variables: {
- ref: this.currentRef,
- fullPath: this.projectPath,
- filePath: this.blob.path,
- fromLine: chunk.startingFrom + 1,
- toLine: chunk.startingFrom + chunk.totalLines,
- },
- });
-
- const blob = data?.project?.repository?.blobs?.nodes[0];
- const blameGroups = blob?.blame?.groups;
- const isDuplicate = this.blameData.includes(blameGroups[0]);
- if (blameGroups && !isDuplicate) this.blameData.push(...blameGroups);
- },
- async selectLine() {
- await this.$nextTick();
- this.lineHighlighter.highlightHash(this.$route.hash);
- },
- },
- userColorScheme: window.gon.user_color_scheme,
-};
-</script>
-
-<template>
- <div class="gl-display-flex">
- <blame v-if="showBlame && blameInfo.length" :blame-info="blameInfo" />
-
- <div
- class="file-content code js-syntax-highlight blob-content gl-display-flex gl-flex-direction-column gl-overflow-auto gl-w-full blob-viewer"
- :class="$options.userColorScheme"
- data-type="simple"
- :data-path="blob.path"
- data-testid="blob-viewer-file-content"
- >
- <codeowners-validation
- v-if="isCodeownersFile"
- class="gl-text-black-normal"
- :current-ref="currentRef"
- :project-path="projectPath"
- :file-path="blob.path"
- />
- <chunk
- v-for="(chunk, index) in chunks"
- :key="index"
- :chunk-index="index"
- :is-highlighted="Boolean(chunk.isHighlighted)"
- :raw-content="chunk.rawContent"
- :highlighted-content="chunk.highlightedContent"
- :total-lines="chunk.totalLines"
- :starting-from="chunk.startingFrom"
- :blame-path="blob.blamePath"
- @appear="() => handleAppear(index)"
- />
- </div>
- </div>
-</template>
diff --git a/app/assets/stylesheets/page_bundles/login.scss b/app/assets/stylesheets/page_bundles/login.scss
index f46d80e2525..6444df66849 100644
--- a/app/assets/stylesheets/page_bundles/login.scss
+++ b/app/assets/stylesheets/page_bundles/login.scss
@@ -6,12 +6,6 @@
max-width: 960px;
}
- .flash-container {
- margin-bottom: $gl-padding;
- position: relative;
- top: 8px;
- }
-
.borderless {
.login-box {
box-shadow: none;
diff --git a/app/controllers/groups/runners_controller.rb b/app/controllers/groups/runners_controller.rb
index 3600a0fbed5..cb6f837b8e3 100644
--- a/app/controllers/groups/runners_controller.rb
+++ b/app/controllers/groups/runners_controller.rb
@@ -6,6 +6,10 @@ class Groups::RunnersController < Groups::ApplicationController
before_action :authorize_update_runner!, only: [:edit, :update, :destroy, :pause, :resume]
before_action :runner, only: [:edit, :update, :destroy, :pause, :resume, :show, :register]
+ before_action do
+ push_frontend_feature_flag(:gcp_runner, @project, type: :wip)
+ end
+
feature_category :runner
urgency :low
diff --git a/app/controllers/projects/blob_controller.rb b/app/controllers/projects/blob_controller.rb
index 558aac7b1ef..b0eabe92f39 100644
--- a/app/controllers/projects/blob_controller.rb
+++ b/app/controllers/projects/blob_controller.rb
@@ -49,7 +49,6 @@ class Projects::BlobController < Projects::ApplicationController
urgency :low, [:create, :show, :edit, :update, :diff]
before_action do
- push_frontend_feature_flag(:highlight_js_worker, @project)
push_frontend_feature_flag(:explain_code_chat, current_user)
push_frontend_feature_flag(:encoding_logs_tree)
push_licensed_feature(:file_locks) if @project.licensed_feature_available?(:file_locks)
diff --git a/app/controllers/projects/deployments_controller.rb b/app/controllers/projects/deployments_controller.rb
index bebade1b21b..07aeb49279d 100644
--- a/app/controllers/projects/deployments_controller.rb
+++ b/app/controllers/projects/deployments_controller.rb
@@ -16,6 +16,12 @@ class Projects::DeploymentsController < Projects::ApplicationController
end
# rubocop: enable CodeReuse/ActiveRecord
+ def show
+ return render_404 unless Feature.enabled?(:deployment_details_page, project, type: :wip)
+
+ @deployment = environment.deployments.find_by_iid!(params[:id])
+ end
+
def metrics
return render_404 unless deployment_metrics.has_metrics?
diff --git a/app/controllers/projects/runners_controller.rb b/app/controllers/projects/runners_controller.rb
index db19ca23e9f..01a2d7f04dc 100644
--- a/app/controllers/projects/runners_controller.rb
+++ b/app/controllers/projects/runners_controller.rb
@@ -5,6 +5,10 @@ class Projects::RunnersController < Projects::ApplicationController
before_action :authorize_create_runner!, only: [:new, :register]
before_action :runner, only: [:edit, :update, :destroy, :pause, :resume, :show, :register]
+ before_action do
+ push_frontend_feature_flag(:gcp_runner, @project, type: :wip)
+ end
+
feature_category :runner
urgency :low
diff --git a/app/controllers/projects/tree_controller.rb b/app/controllers/projects/tree_controller.rb
index e98a5fc07d3..9b9dbc507e1 100644
--- a/app/controllers/projects/tree_controller.rb
+++ b/app/controllers/projects/tree_controller.rb
@@ -19,7 +19,6 @@ class Projects::TreeController < Projects::ApplicationController
before_action :authorize_edit_tree!, only: [:create_dir]
before_action do
- push_frontend_feature_flag(:highlight_js_worker, @project)
push_frontend_feature_flag(:explain_code_chat, current_user)
push_frontend_feature_flag(:encoding_logs_tree)
push_licensed_feature(:file_locks) if @project.licensed_feature_available?(:file_locks)
diff --git a/app/controllers/projects_controller.rb b/app/controllers/projects_controller.rb
index 23c5e2ad28f..679be9323d4 100644
--- a/app/controllers/projects_controller.rb
+++ b/app/controllers/projects_controller.rb
@@ -38,7 +38,6 @@ class ProjectsController < Projects::ApplicationController
before_action :check_export_rate_limit!, only: [:export, :download_export, :generate_new_export]
before_action do
- push_frontend_feature_flag(:highlight_js_worker, @project)
push_frontend_feature_flag(:remove_monitor_metrics, @project)
push_frontend_feature_flag(:explain_code_chat, current_user)
push_frontend_feature_flag(:issue_email_participants, @project)
diff --git a/app/models/packages/protection/rule.rb b/app/models/packages/protection/rule.rb
index f13bcc6e32e..ff45db40cad 100644
--- a/app/models/packages/protection/rule.rb
+++ b/app/models/packages/protection/rule.rb
@@ -23,14 +23,17 @@ module Packages
before_save :set_package_name_pattern_ilike_query, if: :package_name_pattern_changed?
- scope :for_package_name, ->(package_name) {
+ scope :for_package_name, ->(package_name) do
return none if package_name.blank?
- where(':package_name ILIKE package_name_pattern_ilike_query', package_name: package_name)
- }
+ where(
+ ":package_name ILIKE #{::Gitlab::SQL::Glob.to_like('package_name_pattern')}",
+ package_name: package_name
+ )
+ end
- def self.push_protected_from?(access_level:, package_name:, package_type:)
- return true if [access_level, package_name, package_type].any?(&:blank?)
+ def self.for_push_exists?(access_level:, package_name:, package_type:)
+ return false if [access_level, package_name, package_type].any?(&:blank?)
where(package_type: package_type, push_protected_up_to_access_level: access_level..)
.for_package_name(package_name)
diff --git a/app/services/packages/npm/create_package_service.rb b/app/services/packages/npm/create_package_service.rb
index a27f059036c..b1970053745 100644
--- a/app/services/packages/npm/create_package_service.rb
+++ b/app/services/packages/npm/create_package_service.rb
@@ -72,7 +72,7 @@ module Packages
return false if Feature.disabled?(:packages_protected_packages, project)
user_project_authorization_access_level = current_user.max_member_access_for_project(project.id)
- project.package_protection_rules.push_protected_from?(access_level: user_project_authorization_access_level, package_name: name, package_type: :npm)
+ project.package_protection_rules.for_push_exists?(access_level: user_project_authorization_access_level, package_name: name, package_type: :npm)
end
def name
diff --git a/app/views/layouts/devise.html.haml b/app/views/layouts/devise.html.haml
index 2905ba924ca..0ae2e5337f5 100644
--- a/app/views/layouts/devise.html.haml
+++ b/app/views/layouts/devise.html.haml
@@ -12,7 +12,7 @@
.content
= render "layouts/flash"
- if custom_text.present?
- .row
+ .row.gl-mt-5.gl-row-gap-5
.col-md.order-12.sm-bg-gray
.col-sm-12
%h1.mb-3.gl-font-size-h2
@@ -24,12 +24,11 @@
= brand_image
= yield
- else
- .mt-3
+ .gl-my-5
.col-sm-12.gl-text-center
= brand_image
%h1.mb-3.gl-font-size-h2
= brand_title
- .mb-3
.gl-w-full.gl-sm-w-half.gl-ml-auto.gl-mr-auto.bar
= yield
diff --git a/app/views/projects/deployments/show.html.haml b/app/views/projects/deployments/show.html.haml
new file mode 100644
index 00000000000..b0ea762b000
--- /dev/null
+++ b/app/views/projects/deployments/show.html.haml
@@ -0,0 +1,4 @@
+- add_to_breadcrumbs _("Environments"), project_environments_path(@project)
+- add_to_breadcrumbs @environment.name, project_environment_path(@project, @environment)
+- breadcrumb_title _("Deployment #%{iid}") % { iid: @deployment.iid }
+- page_title _("Deployment #%{iid}") % { iid: @deployment.iid }
diff --git a/config/feature_flags/development/highlight_js_worker.yml b/config/feature_flags/development/highlight_js_worker.yml
deleted file mode 100644
index 7086ace38e6..00000000000
--- a/config/feature_flags/development/highlight_js_worker.yml
+++ /dev/null
@@ -1,8 +0,0 @@
----
-name: highlight_js_worker
-introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/124276
-rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/415755
-milestone: '16.2'
-type: development
-group: group::source code
-default_enabled: true
diff --git a/config/feature_flags/gitlab_com_derisk/ci_text_interpolation.yml b/config/feature_flags/gitlab_com_derisk/ci_text_interpolation.yml
new file mode 100644
index 00000000000..32dd15e16b0
--- /dev/null
+++ b/config/feature_flags/gitlab_com_derisk/ci_text_interpolation.yml
@@ -0,0 +1,9 @@
+---
+name: ci_text_interpolation
+feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/433002
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/142009
+rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/435177
+milestone: '16.9'
+group: group::pipeline authoring
+type: gitlab_com_derisk
+default_enabled: false
diff --git a/config/feature_flags/gitlab_com_derisk/external_pipeline_validation_migration.yml b/config/feature_flags/gitlab_com_derisk/external_pipeline_validation_migration.yml
new file mode 100644
index 00000000000..0efc2e1c160
--- /dev/null
+++ b/config/feature_flags/gitlab_com_derisk/external_pipeline_validation_migration.yml
@@ -0,0 +1,9 @@
+---
+name: external_pipeline_validation_migration
+feature_issue_url: https://gitlab.com/gitlab-com/gl-infra/platform/runway/team/-/issues/135
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/142077
+rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/438627
+milestone: '16.9'
+group: group::scalability
+type: gitlab_com_derisk
+default_enabled: false
diff --git a/config/feature_flags/ops/log_large_in_list_queries.yml b/config/feature_flags/ops/log_large_in_list_queries.yml
new file mode 100644
index 00000000000..c4e500e7192
--- /dev/null
+++ b/config/feature_flags/ops/log_large_in_list_queries.yml
@@ -0,0 +1,9 @@
+---
+name: log_large_in_list_queries
+feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/434581
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/141150
+rollout_issue_url: https://gitlab.com/gitlab-com/gl-infra/production/-/issues/17359
+milestone: '16.9'
+group: group::database
+type: ops
+default_enabled: false
diff --git a/config/feature_flags/wip/deployment_details_page.yml b/config/feature_flags/wip/deployment_details_page.yml
new file mode 100644
index 00000000000..ac6fc050ff7
--- /dev/null
+++ b/config/feature_flags/wip/deployment_details_page.yml
@@ -0,0 +1,8 @@
+---
+name: deployment_details_page
+introduced_by_url: 'https://gitlab.com/gitlab-org/gitlab/-/merge_requests/141808'
+rollout_issue_url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/438327'
+milestone: '16.9'
+type: wip
+group: group::environments
+default_enabled: false
diff --git a/config/feature_flags/wip/gcp_runner.yml b/config/feature_flags/wip/gcp_runner.yml
new file mode 100644
index 00000000000..e98b2508b1e
--- /dev/null
+++ b/config/feature_flags/wip/gcp_runner.yml
@@ -0,0 +1,9 @@
+---
+name: gcp_runner
+feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/437901
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/141521
+rollout_issue_url:
+milestone: '16.9'
+group: group::runner
+type: wip
+default_enabled: false
diff --git a/config/initializers/database_query_analyzers.rb b/config/initializers/database_query_analyzers.rb
index 9facd822e5c..d5910d13e0a 100644
--- a/config/initializers/database_query_analyzers.rb
+++ b/config/initializers/database_query_analyzers.rb
@@ -8,6 +8,7 @@ Gitlab::Database::QueryAnalyzer.instance.tap do |query_analyzer|
analyzers.append(::Gitlab::Database::QueryAnalyzers::GitlabSchemasMetrics)
analyzers.append(::Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModification)
analyzers.append(::Gitlab::Database::QueryAnalyzers::Ci::PartitioningRoutingAnalyzer)
+ analyzers.append(::Gitlab::Database::QueryAnalyzers::LogLargeInLists)
if Gitlab.dev_or_test_env?
analyzers.append(::Gitlab::Database::QueryAnalyzers::GitlabSchemasValidateConnection)
diff --git a/config/routes/project.rb b/config/routes/project.rb
index 6c77bf64755..23733bbe821 100644
--- a/config/routes/project.rb
+++ b/config/routes/project.rb
@@ -334,7 +334,7 @@ constraints(::Constraints::ProjectUrlConstrainer.new) do
get :search
end
- resources :deployments, only: [:index] do
+ resources :deployments, only: [:index, :show] do
member do
get :metrics
get :additional_metrics
diff --git a/db/post_migrate/20240123155252_remove_project_import_level_from_namespace_settings.rb b/db/post_migrate/20240123155252_remove_project_import_level_from_namespace_settings.rb
new file mode 100644
index 00000000000..bc7c362a94c
--- /dev/null
+++ b/db/post_migrate/20240123155252_remove_project_import_level_from_namespace_settings.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+class RemoveProjectImportLevelFromNamespaceSettings < Gitlab::Database::Migration[2.2]
+ milestone '16.8'
+ enable_lock_retries!
+
+ def change
+ remove_column :namespace_settings, :project_import_level, :smallint, default: 50, null: false
+ end
+end
diff --git a/db/schema_migrations/20240123155252 b/db/schema_migrations/20240123155252
new file mode 100644
index 00000000000..869d077b22a
--- /dev/null
+++ b/db/schema_migrations/20240123155252
@@ -0,0 +1 @@
+1383d7d49981bdb5fa5eeb54dd83520f4bd2de1e8f0cc227c1397d0bc48a66f4 \ No newline at end of file
diff --git a/db/structure.sql b/db/structure.sql
index 7631fecfe7a..aad301f401c 100644
--- a/db/structure.sql
+++ b/db/structure.sql
@@ -19982,7 +19982,6 @@ CREATE TABLE namespace_settings (
enabled_git_access_protocol smallint DEFAULT 0 NOT NULL,
unique_project_download_limit smallint DEFAULT 0 NOT NULL,
unique_project_download_limit_interval_in_seconds integer DEFAULT 0 NOT NULL,
- project_import_level smallint DEFAULT 50 NOT NULL,
unique_project_download_limit_allowlist text[] DEFAULT '{}'::text[] NOT NULL,
auto_ban_user_on_excessive_projects_download boolean DEFAULT false NOT NULL,
only_allow_merge_if_pipeline_succeeds boolean DEFAULT false NOT NULL,
diff --git a/doc/administration/geo/replication/multiple_servers.md b/doc/administration/geo/replication/multiple_servers.md
index 850ab9f0707..9298281561f 100644
--- a/doc/administration/geo/replication/multiple_servers.md
+++ b/doc/administration/geo/replication/multiple_servers.md
@@ -61,6 +61,9 @@ The following steps enable a GitLab site to serve as the Geo **primary** site.
### Step 1: Configure the **primary** frontend nodes
+NOTE:
+Avoid using [`geo_primary_role`](https://docs.gitlab.com/omnibus/roles/#gitlab-geo-roles) because it is intended for a single-node site.
+
1. Edit `/etc/gitlab/gitlab.rb` and add the following:
```ruby
@@ -126,14 +129,7 @@ NOTE:
[NFS](../../nfs.md) can be used in place of Gitaly but is not
recommended.
-### Step 2: Configure PostgreSQL streaming replication
-
-Follow the [Geo database replication instructions](../setup/database.md).
-
-If using an external PostgreSQL instance, refer also to
-[Geo with external PostgreSQL instances](../setup/external_database.md).
-
-### Step 3: Configure the Geo tracking database on the Geo **secondary** site
+### Step 2: Configure the Geo tracking database on the Geo **secondary** site
If you want to run the Geo tracking database in a multi-node PostgreSQL cluster,
then follow [Configuring Patroni cluster for the tracking PostgreSQL database](../setup/database.md#configuring-patroni-cluster-for-the-tracking-postgresql-database).
@@ -183,8 +179,20 @@ After making these changes, [reconfigure GitLab](../../restart_gitlab.md#reconfi
If using an external PostgreSQL instance, refer also to
[Geo with external PostgreSQL instances](../setup/external_database.md).
+### Step 3: Configure PostgreSQL streaming replication
+
+Follow the [Geo database replication instructions](../setup/database.md).
+
+If using an external PostgreSQL instance, refer also to
+[Geo with external PostgreSQL instances](../setup/external_database.md).
+
+After streaming replication is enabled in the secondary Geo site's read-replica database, commands such as `gitlab-rake db:migrate:status:geo` fail until [configuration of the secondary site is complete](#step-7-copy-secrets-and-add-the-secondary-site-in-the-application), specifically [Geo configuration - Step 3. Add the secondary site](configuration.md#step-3-add-the-secondary-site).
+
### Step 4: Configure the frontend application nodes on the Geo **secondary** site
+NOTE:
+Avoid using [`geo_secondary_role`](https://docs.gitlab.com/omnibus/roles/#gitlab-geo-roles) because it is intended for a single-node site.
+
In the minimal [architecture diagram](#architecture-overview) above, there are two
machines running the GitLab application services. These services are enabled
selectively in the configuration.
@@ -364,3 +372,7 @@ application nodes above, with some changes to run only the `sidekiq` service:
`sidekiq['enable'] = false`.
These nodes do not need to be attached to the load balancer.
+
+### Step 7: Copy secrets and add the secondary site in the application
+
+1. [Configure GitLab](configuration.md) to set the **primary** and **secondary** sites.
diff --git a/doc/administration/geo/replication/troubleshooting.md b/doc/administration/geo/replication/troubleshooting.md
index 6e60ae34a22..39fa04103db 100644
--- a/doc/administration/geo/replication/troubleshooting.md
+++ b/doc/administration/geo/replication/troubleshooting.md
@@ -337,7 +337,7 @@ sudo gitlab-rake gitlab:geo:check
```
- If you are running the secondary site on a single node for all services, then follow [Geo database replication - Configure the secondary server](../setup/database.md#step-2-configure-the-secondary-server).
- - If you are running the secondary site's tracking database on its own node, then follow [Geo for multiple servers - Configure the Geo tracking database on the Geo secondary site](multiple_servers.md#step-3-configure-the-geo-tracking-database-on-the-geo-secondary-site)
+ - If you are running the secondary site's tracking database on its own node, then follow [Geo for multiple servers - Configure the Geo tracking database on the Geo secondary site](multiple_servers.md#step-2-configure-the-geo-tracking-database-on-the-geo-secondary-site)
- If you are running the secondary site's tracking database in a Patroni cluster, then follow [Geo database replication - Configure the tracking database on the secondary sites](../setup/database.md#step-3-configure-the-tracking-database-on-the-secondary-sites)
- If you are running the secondary site's tracking database in an external database, then follow [Geo with external PostgreSQL instances](../setup/external_database.md#configure-the-tracking-database)
- If the Geo check task was run on a node which is not running a service which runs the GitLab Rails app (Puma, Sidekiq, or Geo Log Cursor), then this error can be ignored. The node does not need Rails to be configured.
diff --git a/doc/administration/geo/setup/database.md b/doc/administration/geo/setup/database.md
index b9c2a69eaf7..a9238a4142a 100644
--- a/doc/administration/geo/setup/database.md
+++ b/doc/administration/geo/setup/database.md
@@ -950,7 +950,7 @@ The Linux package automatically configures a tracking database when `roles(['geo
If you want to run this database in a highly available configuration, don't use the `geo_secondary_role` above.
Instead, follow the instructions below.
-If you want to run the Geo tracking database on a single node, see [Configure the Geo tracking database on the Geo secondary site](../replication/multiple_servers.md#step-3-configure-the-geo-tracking-database-on-the-geo-secondary-site).
+If you want to run the Geo tracking database on a single node, see [Configure the Geo tracking database on the Geo secondary site](../replication/multiple_servers.md#step-2-configure-the-geo-tracking-database-on-the-geo-secondary-site).
A production-ready and secure setup for the tracking PostgreSQL DB requires at least three Consul nodes: two
Patroni nodes, and one PgBouncer node on the secondary site.
diff --git a/doc/development/database/index.md b/doc/development/database/index.md
index c5969176d72..d014164c3ba 100644
--- a/doc/development/database/index.md
+++ b/doc/development/database/index.md
@@ -34,7 +34,7 @@ info: Any user with at least the Maintainer role can merge updates to this conte
- [Different types of migrations](../migration_style_guide.md#choose-an-appropriate-migration-type)
- [Migrations for multiple databases](migrations_for_multiple_databases.md)
- [Migrations style guide](../migration_style_guide.md) for creating safe SQL migrations
-- [Partitioning tables](table_partitioning.md)
+- [Partitioning tables](partitioning/index.md)
- [Post-deployment migrations guidelines](post_deployment_migrations.md) and [how to create one](post_deployment_migrations.md#creating-migrations)
- [Running database migrations](database_debugging.md#migration-wrangling)
- [SQL guidelines](../sql.md) for working with SQL queries
@@ -42,6 +42,14 @@ info: Any user with at least the Maintainer role can merge updates to this conte
- [Testing Rails migrations](../testing_guide/testing_migrations_guide.md) guide
- [When and how to write Rails migrations tests](../testing_guide/testing_migrations_guide.md)
+## Partitioning tables
+
+- [Overview](partitioning/index.md)
+- [Date range](partitioning/date_range.md)
+- [Hash](partitioning/hash.md)
+- [Int range](partitioning/int_range.md)
+- [List](partitioning/list.md)
+
## Debugging
- [Accessing the database](database_debugging.md#manually-access-the-database)
diff --git a/doc/development/database/partitioning/int_range.md b/doc/development/database/partitioning/int_range.md
index 7fbdd4da865..7556d40b46e 100644
--- a/doc/development/database/partitioning/int_range.md
+++ b/doc/development/database/partitioning/int_range.md
@@ -10,7 +10,7 @@ info: Any user with at least the Maintainer role can merge updates to this conte
## Description
-Int range partition is a technique for dividing a large table into smaller,
+Int range partitioning is a technique for dividing a large table into smaller,
more manageable chunks based on an integer column.
This can be particularly useful for tables with large numbers of rows,
as it can significantly improve query performance, reduce storage requirements, and simplify maintenance tasks.
diff --git a/doc/update/index.md b/doc/update/index.md
index 9192d409d55..77e5a2e5130 100644
--- a/doc/update/index.md
+++ b/doc/update/index.md
@@ -218,9 +218,7 @@ crucial database schema and migration patches may be included in the latest patc
Required upgrade stops are versions of GitLab that you must upgrade to before upgrading to later versions. Required
upgrade stops allow required background migrations to finish.
-During GitLab 16.x, we are scheduling required upgrade stops beforehand so users can better plan out appropriate upgrade stops and downtime when necessary.
-
-The first scheduled required upgrade stop has been announced for 16.3.x. When planning upgrades, take this into account.
+During GitLab 16.x, we are scheduling required upgrade stops beforehand so you can better plan out appropriate upgrade stops and downtime when necessary. When planning upgrades, take this into account.
### Earlier GitLab versions
diff --git a/doc/user/project/repository/git_blame.md b/doc/user/project/repository/git_blame.md
index 7f333ec7b7f..ddf0d65bd0b 100644
--- a/doc/user/project/repository/git_blame.md
+++ b/doc/user/project/repository/git_blame.md
@@ -36,14 +36,16 @@ changes to light gray.
### View blame directly in the file view
<!--
-When feature flags `graphql_git_blame` and `highlight_js_worker` are removed,
+When the `graphql_git_blame` feature flag is removed,
delete this section and update the steps in "View blame for a file".
-->
-> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/430950) in GitLab 16.7 [with flags](../../../administration/feature_flags.md) named `graphql_git_blame` and `highlight_js_worker`. Enabled by default.
+> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/430950) in GitLab 16.7 [with flags](../../../administration/feature_flags.md) named `graphql_git_blame` and `highlight_js_worker`. Enabled by default.
+> - Feature flag `highlight_js_worker` [removed](https://gitlab.com/gitlab-org/gitlab/-/issues/432706) in GitLab 16.9.
FLAG:
On self-managed GitLab, by default this feature is available.
+To hide the feature, an administrator can [disable the feature flag](../../../administration/feature_flags.md) named `graphql_git_blame`.
On GitLab.com, this feature is available.
When this feature is enabled, you can additionally view blame for a file directly from the file page.
diff --git a/lib/backup/tasks/task.rb b/lib/backup/tasks/task.rb
index 4727e19b550..6aadcb42648 100644
--- a/lib/backup/tasks/task.rb
+++ b/lib/backup/tasks/task.rb
@@ -6,7 +6,9 @@ module Backup
attr_reader :progress, :options
# Identifier used as parameter in the CLI to skip from executing
- def self.id = raise NotImplementedError
+ def self.id
+ raise NotImplementedError
+ end
def initialize(progress:, options:)
@progress = progress
@@ -14,16 +16,24 @@ module Backup
end
# Key string that identifies the task
- def key = raise NotImplementedError
+ def key
+ raise NotImplementedError
+ end
# Name of the task used for logging.
- def human_name = raise NotImplementedError
+ def human_name
+ raise NotImplementedError
+ end
# Where the task should put its backup file/dir
- def destination_path = raise NotImplementedError
+ def destination_path
+ raise NotImplementedError
+ end
# The target factory method
- def target = raise NotImplementedError
+ def target
+ raise NotImplementedError
+ end
# Path to remove after a successful backup, uses #destination_path when not specified
def cleanup_path
@@ -31,12 +41,18 @@ module Backup
end
# `true` if the destination might not exist on a successful backup
- def destination_optional = false
+ def destination_optional
+ false
+ end
# `true` if the task can be used
- def enabled = true
+ def enabled
+ true
+ end
- def enabled? = enabled
+ def enabled?
+ enabled
+ end
end
end
end
diff --git a/lib/gitlab/background_migration/backfill_project_import_level.rb b/lib/gitlab/background_migration/backfill_project_import_level.rb
deleted file mode 100644
index 1a4b1e6731f..00000000000
--- a/lib/gitlab/background_migration/backfill_project_import_level.rb
+++ /dev/null
@@ -1,38 +0,0 @@
-# frozen_string_literal: true
-# rubocop:disable Style/Documentation
-module Gitlab
- module BackgroundMigration
- class BackfillProjectImportLevel < BatchedMigrationJob
- operation_name :update_import_level
- feature_category :database
-
- LEVEL = {
- Gitlab::Access::NO_ACCESS => [0],
- Gitlab::Access::DEVELOPER => [2],
- Gitlab::Access::MAINTAINER => [1],
- Gitlab::Access::OWNER => [nil]
- }.freeze
-
- def perform
- each_sub_batch do |sub_batch|
- update_import_level(sub_batch)
- end
- end
-
- private
-
- def update_import_level(relation)
- LEVEL.each do |import_level, creation_level|
- namespace_ids = relation
- .where(type: 'Group', project_creation_level: creation_level)
-
- NamespaceSetting.where(
- namespace_id: namespace_ids
- ).update_all(project_import_level: import_level)
- end
- end
- end
- end
-end
-
-# rubocop:enable Style/Documentation
diff --git a/lib/gitlab/ci/config/interpolation/text_interpolator.rb b/lib/gitlab/ci/config/interpolation/text_interpolator.rb
index f5c83023f92..773defbfa37 100644
--- a/lib/gitlab/ci/config/interpolation/text_interpolator.rb
+++ b/lib/gitlab/ci/config/interpolation/text_interpolator.rb
@@ -10,8 +10,8 @@ module Gitlab
class TextInterpolator
attr_reader :errors
- def initialize(config, input_args, variables)
- @config = config
+ def initialize(yaml_documents, input_args, variables)
+ @yaml_documents = yaml_documents
@input_args = input_args.to_h
@variables = variables
@errors = []
@@ -37,14 +37,12 @@ module Gitlab
end
def interpolate!
- return errors.concat(config.errors) unless config.valid?
-
if inputs_without_header?
return errors.push(
_('Given inputs not defined in the `spec` section of the included configuration file'))
end
- return @result ||= config.content unless config.header
+ return @result ||= yaml_documents.content unless yaml_documents.header
return errors.concat(header.errors) unless header.valid?
return errors.concat(inputs.errors) unless inputs.valid?
@@ -62,14 +60,14 @@ module Gitlab
private
- attr_reader :config, :input_args, :variables
+ attr_reader :yaml_documents, :input_args, :variables
def inputs_without_header?
- input_args.any? && !config.header
+ input_args.any? && !yaml_documents.header
end
def header
- @header ||= Header::Root.new(config.header).tap do |header|
+ @header ||= Header::Root.new(yaml_documents.header).tap do |header|
header.key = 'header'
header.compose!
@@ -77,7 +75,7 @@ module Gitlab
end
def content
- @content ||= config.content
+ @content ||= yaml_documents.content
end
def spec
diff --git a/lib/gitlab/ci/config/yaml/documents.rb b/lib/gitlab/ci/config/yaml/documents.rb
index 04a31da8a2e..37d6e37b792 100644
--- a/lib/gitlab/ci/config/yaml/documents.rb
+++ b/lib/gitlab/ci/config/yaml/documents.rb
@@ -7,19 +7,12 @@ module Gitlab
class Documents
include Gitlab::Utils::StrongMemoize
- attr_reader :errors
-
def initialize(documents)
@documents = documents
- @errors = []
parsed_first_document
end
- def valid?
- errors.none?
- end
-
def header
return unless has_header?
@@ -46,8 +39,6 @@ module Gitlab
return {} if documents.count == 0
documents.first.load!
- rescue ::Gitlab::Config::Loader::FormatError => e
- errors << e.message
end
strong_memoize_attr :parsed_first_document
end
diff --git a/lib/gitlab/ci/config/yaml/loader.rb b/lib/gitlab/ci/config/yaml/loader.rb
index 1e9ac2b3dd5..bf20cd9c027 100644
--- a/lib/gitlab/ci/config/yaml/loader.rb
+++ b/lib/gitlab/ci/config/yaml/loader.rb
@@ -17,20 +17,23 @@ module Gitlab
end
def load
- yaml_result = load_uninterpolated_yaml
-
- return yaml_result unless yaml_result.valid?
+ if Feature.disabled?(:ci_text_interpolation, Feature.current_request, type: :gitlab_com_derisk)
+ return legacy_load
+ end
- interpolator = Interpolation::Interpolator.new(yaml_result, inputs, variables)
+ interpolator = Interpolation::TextInterpolator.new(yaml_documents, inputs, variables)
interpolator.interpolate!
if interpolator.valid?
- # This Result contains only the interpolated config and does not have a header
- Yaml::Result.new(config: interpolator.to_hash, error: nil, interpolated: interpolator.interpolated?)
+ loaded_yaml = yaml(interpolator.to_result).load!
+
+ Yaml::Result.new(config: loaded_yaml, error: nil, interpolated: interpolator.interpolated?)
else
Yaml::Result.new(error: interpolator.error_message, interpolated: interpolator.interpolated?)
end
+ rescue ::Gitlab::Config::Loader::FormatError => e
+ Yaml::Result.new(error: e.message, error_class: e)
end
def load_uninterpolated_yaml
@@ -43,6 +46,38 @@ module Gitlab
attr_reader :content, :inputs, :variables
+ def yaml(content)
+ ensure_custom_tags
+
+ ::Gitlab::Config::Loader::Yaml.new(content, additional_permitted_classes: AVAILABLE_TAGS)
+ end
+
+ def yaml_documents
+ docs = content
+ .split(::Gitlab::Config::Loader::MultiDocYaml::MULTI_DOC_DIVIDER, MAX_DOCUMENTS + 1)
+ .map { |d| yaml(d) }
+
+ docs.reject!(&:blank?)
+
+ Yaml::Documents.new(docs)
+ end
+
+ def legacy_load
+ yaml_result = load_uninterpolated_yaml
+
+ return yaml_result unless yaml_result.valid?
+
+ interpolator = Interpolation::Interpolator.new(yaml_result, inputs, variables)
+
+ interpolator.interpolate!
+
+ if interpolator.valid?
+ Yaml::Result.new(config: interpolator.to_hash, error: nil, interpolated: interpolator.interpolated?)
+ else
+ Yaml::Result.new(error: interpolator.error_message, interpolated: interpolator.interpolated?)
+ end
+ end
+
def load_yaml!
ensure_custom_tags
diff --git a/lib/gitlab/ci/pipeline/chain/validate/external.rb b/lib/gitlab/ci/pipeline/chain/validate/external.rb
index 915e48828d2..fdc57f7f9dc 100644
--- a/lib/gitlab/ci/pipeline/chain/validate/external.rb
+++ b/lib/gitlab/ci/pipeline/chain/validate/external.rb
@@ -72,7 +72,12 @@ module Gitlab
end
def validation_service_url
- Gitlab::CurrentSettings.external_pipeline_validation_service_url || ENV['EXTERNAL_VALIDATION_SERVICE_URL']
+ if migration_enabled?
+            # After derisking, the feature flag will be removed in favor of the existing external_pipeline_validation_service_url setting
+ ENV['EXTERNAL_VALIDATION_SERVICE_RUNWAY_URL']
+ else
+ Gitlab::CurrentSettings.external_pipeline_validation_service_url || ENV['EXTERNAL_VALIDATION_SERVICE_URL']
+ end
end
def validation_service_token
@@ -141,6 +146,10 @@ module Gitlab
def stages_attributes
command.yaml_processor_result.stages_attributes
end
+
+ def migration_enabled?
+ ENV['EXTERNAL_VALIDATION_SERVICE_RUNWAY_URL'].present? && Feature.enabled?(:external_pipeline_validation_migration, project, type: :gitlab_com_derisk)
+ end
end
end
end
diff --git a/lib/gitlab/database/query_analyzer.rb b/lib/gitlab/database/query_analyzer.rb
index 6f64d04270f..b2a49a0f722 100644
--- a/lib/gitlab/database/query_analyzer.rb
+++ b/lib/gitlab/database/query_analyzer.rb
@@ -11,7 +11,7 @@ module Gitlab
include ::Singleton
Parsed = Struct.new(
- :sql, :connection, :pg
+ :sql, :connection, :pg, :event_name
)
attr_reader :all_analyzers
@@ -20,12 +20,15 @@ module Gitlab
@all_analyzers = []
end
+ # @info most common event names are:
+ # Model Load, Model Create, Model Update, Model Pluck, Model Destroy, Model Insert, Model Delete All
+ # Model Exists?, nil, TRANSACTION, SCHEMA
def hook!
@subscriber = ActiveSupport::Notifications.subscribe('sql.active_record') do |event|
# In some cases analyzer code might trigger another SQL call
# to avoid stack too deep this detects recursive call of subscriber
with_ignored_recursive_calls do
- process_sql(event.payload[:sql], event.payload[:connection])
+ process_sql(event.payload[:sql], event.payload[:connection], event.payload[:name].to_s)
end
end
end
@@ -76,11 +79,11 @@ module Gitlab
Thread.current[:query_analyzer_enabled_analyzers] ||= []
end
- def process_sql(sql, connection)
+ def process_sql(sql, connection, event_name)
analyzers = enabled_analyzers
return unless analyzers&.any?
- parsed = parse(sql, connection)
+ parsed = parse(sql, connection, event_name)
return unless parsed
analyzers.each do |analyzer|
@@ -93,12 +96,12 @@ module Gitlab
end
end
- def parse(sql, connection)
+ def parse(sql, connection, event_name)
parsed = PgQuery.parse(sql)
return unless parsed
normalized = PgQuery.normalize(sql)
- Parsed.new(normalized, connection, parsed)
+ Parsed.new(normalized, connection, parsed, normalize_event_name(event_name))
rescue PgQuery::ParseError => e
# Ignore PgQuery parse errors (due to depth limit or other reasons)
Gitlab::ErrorTracking.track_exception(e)
@@ -116,6 +119,12 @@ module Gitlab
Thread.current[:query_analyzer_recursive] = nil
end
end
+
+ def normalize_event_name(event_name)
+ split_event_name = event_name.to_s.downcase.split(' ')
+
+ split_event_name.size > 1 ? split_event_name.from(1).join('_') : split_event_name.join('_')
+ end
end
end
end
diff --git a/lib/gitlab/database/query_analyzers/log_large_in_lists.rb b/lib/gitlab/database/query_analyzers/log_large_in_lists.rb
new file mode 100644
index 00000000000..6cf82e0e3cd
--- /dev/null
+++ b/lib/gitlab/database/query_analyzers/log_large_in_lists.rb
@@ -0,0 +1,74 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Database
+ module QueryAnalyzers
+      # The purpose of this analyzer is to log query activity that contains `IN` clauses having more than 2500 items
+ # as this type of query can cause performance degradation in the database.
+ #
+      # The feature flag should prevent sampling from going above 1% or 0.01% of queries hitting the database
+      # to avoid performance issues
+ class LogLargeInLists < Base
+ REGEX = /\bIN\s*\(([$?\d\s*,]*)\)+/i
+ MIN_QUERY_SIZE = 10_000
+ IN_SIZE_LIMIT = 2_500
+ EVENT_NAMES = %w[load pluck].freeze
+
+ EXCLUDE_FROM_TRACE = %w[
+ lib/gitlab/database/query_analyzer.rb
+ lib/gitlab/database/query_analyzers/log_large_in_lists.rb
+ ].freeze
+
+ class << self
+ def enabled?
+ ::Feature::FlipperFeature.table_exists? &&
+ Feature.enabled?(:log_large_in_list_queries, type: :ops)
+ end
+
+          # Skips queries containing fewer than 10000 chars or any events other than +load+ and +pluck+
+ def requires_tracking?(parsed)
+ return false if parsed.sql.size < MIN_QUERY_SIZE
+
+ EVENT_NAMES.include?(parsed.event_name)
+ end
+
+ def analyze(parsed)
+ result = check_argument_size(parsed.sql)
+
+ log(result, parsed.event_name) if result.any?
+ end
+
+ private
+
+ def check_argument_size(sql)
+ matches = sql.scan(REGEX).flatten
+
+ return [] if matches.empty?
+
+ matches.filter_map do |match|
+ match_size = match.split(',').size
+
+ match_size if match_size > IN_SIZE_LIMIT
+ end
+ end
+
+ def log(result, event_name)
+ Gitlab::AppLogger.warn(
+ message: 'large_in_list_found',
+ matches: result.size,
+ event_name: event_name,
+ in_list_size: result.join(', '),
+ stacktrace: backtrace.first(5)
+ )
+ end
+
+ def backtrace
+ Gitlab::BacktraceCleaner.clean_backtrace(caller).reject do |line|
+ EXCLUDE_FROM_TRACE.any? { |exclusion| line.include?(exclusion) }
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/locale/gitlab.pot b/locale/gitlab.pot
index dad32def094..1fd9195a114 100644
--- a/locale/gitlab.pot
+++ b/locale/gitlab.pot
@@ -12120,7 +12120,7 @@ msgstr ""
msgid "CodeSuggestions|A user can be assigned a Duo Pro seat only once each billable month."
msgstr ""
-msgid "CodeSuggestions|Boost productivity by using %{linkStart}Code Suggestions%{linkEnd} to write and understand code. Code Suggestions is part of the GitLab Duo Pro add-on, available to Premium and Ultimate users now."
+msgid "CodeSuggestions|Boost productivity by using %{linkStart}Code Suggestions%{linkEnd} to write and understand code. Starting in February 2024, Code Suggestions will be part of %{duoLinkStart}GitLab Duo Pro%{duoLinkEnd}, available to Premium and Ultimate users for purchase now."
msgstr ""
msgid "CodeSuggestions|Code Suggestions"
@@ -17154,6 +17154,9 @@ msgstr ""
msgid "Deployment"
msgstr ""
+msgid "Deployment #%{iid}"
+msgstr ""
+
msgid "Deployment Frequency"
msgstr ""
@@ -42099,6 +42102,9 @@ msgstr ""
msgid "Runners|Clear selection"
msgstr ""
+msgid "Runners|Cloud"
+msgstr ""
+
msgid "Runners|Command to register runner"
msgstr ""
@@ -42236,6 +42242,9 @@ msgstr ""
msgid "Runners|Go to runners page"
msgstr ""
+msgid "Runners|Google Cloud connection"
+msgstr ""
+
msgid "Runners|Group"
msgstr ""
diff --git a/spec/controllers/projects/deployments_controller_spec.rb b/spec/controllers/projects/deployments_controller_spec.rb
index a696eb933e9..abf12f0c3bf 100644
--- a/spec/controllers/projects/deployments_controller_spec.rb
+++ b/spec/controllers/projects/deployments_controller_spec.rb
@@ -84,6 +84,49 @@ RSpec.describe Projects::DeploymentsController do
end
end
+ describe 'GET #show' do
+ let(:deployment) { create(:deployment, :success, environment: environment) }
+
+ subject do
+ get :show, params: deployment_params(id: deployment.iid)
+ end
+
+ context 'without feature flag' do
+ before do
+ stub_feature_flags(deployment_details_page: false)
+ end
+
+ it 'renders a 404' do
+ is_expected.to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'with feature flag' do
+ before do
+ stub_feature_flags(deployment_details_page: true)
+ end
+
+ context 'as maintainer' do
+ it 'renders show with 200 status code' do
+ is_expected.to have_gitlab_http_status(:ok)
+ is_expected.to render_template(:show)
+ end
+ end
+
+ context 'as anonymous user' do
+ let(:anonymous_user) { create(:user) }
+
+ before do
+ sign_in(anonymous_user)
+ end
+
+ it 'renders a 404' do
+ is_expected.to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+ end
+
describe 'GET #metrics' do
let(:deployment) { create(:deployment, :success, project: project, environment: environment) }
diff --git a/spec/dot_gitlab_ci/rules_spec.rb b/spec/dot_gitlab_ci/rules_spec.rb
index 88e98fe95ba..8e13be25f4c 100644
--- a/spec/dot_gitlab_ci/rules_spec.rb
+++ b/spec/dot_gitlab_ci/rules_spec.rb
@@ -3,10 +3,16 @@
require 'fast_spec_helper'
RSpec.describe '.gitlab/ci/rules.gitlab-ci.yml', feature_category: :tooling do
- config = YAML.load_file(
- File.expand_path('../../.gitlab/ci/rules.gitlab-ci.yml', __dir__),
- aliases: true
- ).freeze
+ begin
+ config = YAML.load_file(
+ File.expand_path('../../.gitlab/ci/rules.gitlab-ci.yml', __dir__),
+ aliases: true
+ ).freeze
+ rescue ArgumentError # Ruby 3.0 does not take `aliases: true`
+ config = YAML.load_file(
+ File.expand_path('../../.gitlab/ci/rules.gitlab-ci.yml', __dir__)
+ ).freeze
+ end
context 'with changes' do
config.each do |name, definition|
diff --git a/spec/fixtures/gitlab/database/query_analyzers/large_query_with_in_list.txt b/spec/fixtures/gitlab/database/query_analyzers/large_query_with_in_list.txt
new file mode 100644
index 00000000000..45d359ad457
--- /dev/null
+++ b/spec/fixtures/gitlab/database/query_analyzers/large_query_with_in_list.txt
@@ -0,0 +1 @@
+SELECT projects.id, projects.name FROM projects WHERE projects.namespace_id IN (SELECT DISTINCT namespaces.id FROM ((WITH base_ancestors_cte AS MATERIALIZED (SELECT namespaces.traversal_ids FROM ((WITH direct_groups AS MATERIALIZED (SELECT namespaces.id, namespaces.name, namespaces.path, namespaces.owner_id, namespaces.created_at, namespaces.updated_at, namespaces.type, namespaces.description, namespaces.avatar, namespaces.membership_lock, namespaces.share_with_group_lock, namespaces.visibility_level, namespaces.request_access_enabled, namespaces.ldap_sync_status, namespaces.ldap_sync_error, namespaces.ldap_sync_last_update_at, namespaces.ldap_sync_last_successful_update_at, namespaces.ldap_sync_last_sync_at, namespaces.lfs_enabled, namespaces.description_html, namespaces.parent_id, namespaces.shared_runners_minutes_limit, namespaces.repository_size_limit, namespaces.require_two_factor_authentication, namespaces.two_factor_grace_period, namespaces.cached_markdown_version, namespaces.project_creation_level, namespaces.runners_token, namespaces.file_template_project_id, namespaces.saml_discovery_token, namespaces.runners_token_encrypted, namespaces.custom_project_templates_group_id, namespaces.auto_devops_enabled, namespaces.extra_shared_runners_minutes_limit, namespaces.last_ci_minutes_notification_at, namespaces.last_ci_minutes_usage_notification_level, namespaces.subgroup_creation_level, namespaces.emails_disabled, namespaces.max_pages_size, namespaces.max_artifacts_size, namespaces.mentions_disabled, namespaces.default_branch_protection, namespaces.max_personal_access_token_lifetime, namespaces.push_rule_id, namespaces.shared_runners_enabled, namespaces.allow_descendants_override_disabled_shared_runners, namespaces.traversal_ids, namespaces.organization_id FROM ((SELECT namespaces.id, namespaces.name, namespaces.path, namespaces.owner_id, namespaces.created_at, namespaces.updated_at, namespaces.type, namespaces.description, namespaces.avatar, 
namespaces.membership_lock, namespaces.share_with_group_lock, namespaces.visibility_level, namespaces.request_access_enabled, namespaces.ldap_sync_status, namespaces.ldap_sync_error, namespaces.ldap_sync_last_update_at, namespaces.ldap_sync_last_successful_update_at, namespaces.ldap_sync_last_sync_at, namespaces.lfs_enabled, namespaces.description_html, namespaces.parent_id, namespaces.shared_runners_minutes_limit, namespaces.repository_size_limit, namespaces.require_two_factor_authentication, namespaces.two_factor_grace_period, namespaces.cached_markdown_version, namespaces.project_creation_level, namespaces.runners_token, namespaces.file_template_project_id, namespaces.saml_discovery_token, namespaces.runners_token_encrypted, namespaces.custom_project_templates_group_id, namespaces.auto_devops_enabled, namespaces.extra_shared_runners_minutes_limit, namespaces.last_ci_minutes_notification_at, namespaces.last_ci_minutes_usage_notification_level, namespaces.subgroup_creation_level, namespaces.emails_disabled, namespaces.max_pages_size, namespaces.max_artifacts_size, namespaces.mentions_disabled, namespaces.default_branch_protection, namespaces.max_personal_access_token_lifetime, namespaces.push_rule_id, namespaces.shared_runners_enabled, namespaces.allow_descendants_override_disabled_shared_runners, namespaces.traversal_ids, namespaces.organization_id FROM namespaces INNER JOIN members ON namespaces.id = members.source_id WHERE members.type = 'GroupMember' AND members.source_type = 'Namespace' AND namespaces.type = 'Group' AND members.user_id = 2167502 AND members.requested_at IS NULL AND (access_level >= 10)) UNION (SELECT namespaces.id, namespaces.name, namespaces.path, namespaces.owner_id, namespaces.created_at, namespaces.updated_at, namespaces.type, namespaces.description, namespaces.avatar, namespaces.membership_lock, namespaces.share_with_group_lock, namespaces.visibility_level, namespaces.request_access_enabled, namespaces.ldap_sync_status, 
namespaces.ldap_sync_error, namespaces.ldap_sync_last_update_at, namespaces.ldap_sync_last_successful_update_at, namespaces.ldap_sync_last_sync_at, namespaces.lfs_enabled, namespaces.description_html, namespaces.parent_id, namespaces.shared_runners_minutes_limit, namespaces.repository_size_limit, namespaces.require_two_factor_authentication, namespaces.two_factor_grace_period, namespaces.cached_markdown_version, namespaces.project_creation_level, namespaces.runners_token, namespaces.file_template_project_id, namespaces.saml_discovery_token, namespaces.runners_token_encrypted, namespaces.custom_project_templates_group_id, namespaces.auto_devops_enabled, namespaces.extra_shared_runners_minutes_limit, namespaces.last_ci_minutes_notification_at, namespaces.last_ci_minutes_usage_notification_level, namespaces.subgroup_creation_level, namespaces.emails_disabled, namespaces.max_pages_size, namespaces.max_artifacts_size, namespaces.mentions_disabled, namespaces.default_branch_protection, namespaces.max_personal_access_token_lifetime, namespaces.push_rule_id, namespaces.shared_runners_enabled, namespaces.allow_descendants_override_disabled_shared_runners, namespaces.traversal_ids, namespaces.organization_id FROM namespaces WHERE namespaces.type = 'Group' AND namespaces.id IN (SELECT projects.namespace_id FROM projects INNER JOIN project_authorizations ON projects.id = project_authorizations.project_id WHERE project_authorizations.user_id = 2167502))) namespaces WHERE namespaces.type = 'Group') SELECT namespaces.id, namespaces.name, namespaces.path, namespaces.owner_id, namespaces.created_at, namespaces.updated_at, namespaces.type, namespaces.description, namespaces.avatar, namespaces.membership_lock, namespaces.share_with_group_lock, namespaces.visibility_level, namespaces.request_access_enabled, namespaces.ldap_sync_status, namespaces.ldap_sync_error, namespaces.ldap_sync_last_update_at, namespaces.ldap_sync_last_successful_update_at, namespaces.ldap_sync_last_sync_at, 
namespaces.lfs_enabled, namespaces.description_html, namespaces.parent_id, namespaces.shared_runners_minutes_limit, namespaces.repository_size_limit, namespaces.require_two_factor_authentication, namespaces.two_factor_grace_period, namespaces.cached_markdown_version, namespaces.project_creation_level, namespaces.runners_token, namespaces.file_template_project_id, namespaces.saml_discovery_token, namespaces.runners_token_encrypted, namespaces.custom_project_templates_group_id, namespaces.auto_devops_enabled, namespaces.extra_shared_runners_minutes_limit, namespaces.last_ci_minutes_notification_at, namespaces.last_ci_minutes_usage_notification_level, namespaces.subgroup_creation_level, namespaces.emails_disabled, namespaces.max_pages_size, namespaces.max_artifacts_size, namespaces.mentions_disabled, namespaces.default_branch_protection, namespaces.max_personal_access_token_lifetime, namespaces.push_rule_id, namespaces.shared_runners_enabled, namespaces.allow_descendants_override_disabled_shared_runners, namespaces.traversal_ids, namespaces.organization_id FROM ((SELECT namespaces.id, namespaces.name, namespaces.path, namespaces.owner_id, namespaces.created_at, namespaces.updated_at, namespaces.type, namespaces.description, namespaces.avatar, namespaces.membership_lock, namespaces.share_with_group_lock, namespaces.visibility_level, namespaces.request_access_enabled, namespaces.ldap_sync_status, namespaces.ldap_sync_error, namespaces.ldap_sync_last_update_at, namespaces.ldap_sync_last_successful_update_at, namespaces.ldap_sync_last_sync_at, namespaces.lfs_enabled, namespaces.description_html, namespaces.parent_id, namespaces.shared_runners_minutes_limit, namespaces.repository_size_limit, namespaces.require_two_factor_authentication, namespaces.two_factor_grace_period, namespaces.cached_markdown_version, namespaces.project_creation_level, namespaces.runners_token, namespaces.file_template_project_id, namespaces.saml_discovery_token, namespaces.runners_token_encrypted, 
namespaces.custom_project_templates_group_id, namespaces.auto_devops_enabled, namespaces.extra_shared_runners_minutes_limit, namespaces.last_ci_minutes_notification_at, namespaces.last_ci_minutes_usage_notification_level, namespaces.subgroup_creation_level, namespaces.emails_disabled, namespaces.max_pages_size, namespaces.max_artifacts_size, namespaces.mentions_disabled, namespaces.default_branch_protection, namespaces.max_personal_access_token_lifetime, namespaces.push_rule_id, namespaces.shared_runners_enabled, namespaces.allow_descendants_override_disabled_shared_runners, namespaces.traversal_ids, namespaces.organization_id FROM direct_groups namespaces WHERE namespaces.type = 'Group') UNION (SELECT namespaces.id, namespaces.name, namespaces.path, namespaces.owner_id, namespaces.created_at, namespaces.updated_at, namespaces.type, namespaces.description, namespaces.avatar, namespaces.membership_lock, namespaces.share_with_group_lock, namespaces.visibility_level, namespaces.request_access_enabled, namespaces.ldap_sync_status, namespaces.ldap_sync_error, namespaces.ldap_sync_last_update_at, namespaces.ldap_sync_last_successful_update_at, namespaces.ldap_sync_last_sync_at, namespaces.lfs_enabled, namespaces.description_html, namespaces.parent_id, namespaces.shared_runners_minutes_limit, namespaces.repository_size_limit, namespaces.require_two_factor_authentication, namespaces.two_factor_grace_period, namespaces.cached_markdown_version, namespaces.project_creation_level, namespaces.runners_token, namespaces.file_template_project_id, namespaces.saml_discovery_token, namespaces.runners_token_encrypted, namespaces.custom_project_templates_group_id, namespaces.auto_devops_enabled, namespaces.extra_shared_runners_minutes_limit, namespaces.last_ci_minutes_notification_at, namespaces.last_ci_minutes_usage_notification_level, namespaces.subgroup_creation_level, namespaces.emails_disabled, namespaces.max_pages_size, namespaces.max_artifacts_size, namespaces.mentions_disabled, 
namespaces.default_branch_protection, namespaces.max_personal_access_token_lifetime, namespaces.push_rule_id, namespaces.shared_runners_enabled, namespaces.allow_descendants_override_disabled_shared_runners, namespaces.traversal_ids, namespaces.organization_id FROM namespaces INNER JOIN group_group_links ON group_group_links.shared_group_id = namespaces.id WHERE namespaces.type = 'Group' AND group_group_links.shared_with_group_id IN (SELECT namespaces.id FROM direct_groups namespaces WHERE namespaces.type = 'Group'))) namespaces WHERE namespaces.type = 'Group') UNION (SELECT namespaces.id, namespaces.name, namespaces.path, namespaces.owner_id, namespaces.created_at, namespaces.updated_at, namespaces.type, namespaces.description, namespaces.avatar, namespaces.membership_lock, namespaces.share_with_group_lock, namespaces.visibility_level, namespaces.request_access_enabled, namespaces.ldap_sync_status, namespaces.ldap_sync_error, namespaces.ldap_sync_last_update_at, namespaces.ldap_sync_last_successful_update_at, namespaces.ldap_sync_last_sync_at, namespaces.lfs_enabled, namespaces.description_html, namespaces.parent_id, namespaces.shared_runners_minutes_limit, namespaces.repository_size_limit, namespaces.require_two_factor_authentication, namespaces.two_factor_grace_period, namespaces.cached_markdown_version, namespaces.project_creation_level, namespaces.runners_token, namespaces.file_template_project_id, namespaces.saml_discovery_token, namespaces.runners_token_encrypted, namespaces.custom_project_templates_group_id, namespaces.auto_devops_enabled, namespaces.extra_shared_runners_minutes_limit, namespaces.last_ci_minutes_notification_at, namespaces.last_ci_minutes_usage_notification_level, namespaces.subgroup_creation_level, namespaces.emails_disabled, namespaces.max_pages_size, namespaces.max_artifacts_size, namespaces.mentions_disabled, namespaces.default_branch_protection, namespaces.max_personal_access_token_lifetime, namespaces.push_rule_id, 
namespaces.shared_runners_enabled, namespaces.allow_descendants_override_disabled_shared_runners, namespaces.traversal_ids, namespaces.organization_id FROM namespaces INNER JOIN members ON namespaces.id = members.source_id WHERE members.type = 'GroupMember' AND members.source_type = 'Namespace' AND namespaces.type = 'Group' AND members.user_id = 2167502 AND members.access_level = 5 AND (EXISTS (SELECT 1 FROM plans INNER JOIN gitlab_subscriptions ON gitlab_subscriptions.hosted_plan_id = plans.id WHERE plans.name IN ('silver', 'premium', 'premium_trial') AND (gitlab_subscriptions.namespace_id = namespaces.id))))) namespaces WHERE namespaces.type = 'Group') SELECT namespaces.id, namespaces.name, namespaces.path, namespaces.owner_id, namespaces.created_at, namespaces.updated_at, namespaces.type, namespaces.description, namespaces.avatar, namespaces.membership_lock, namespaces.share_with_group_lock, namespaces.visibility_level, namespaces.request_access_enabled, namespaces.ldap_sync_status, namespaces.ldap_sync_error, namespaces.ldap_sync_last_update_at, namespaces.ldap_sync_last_successful_update_at, namespaces.ldap_sync_last_sync_at, namespaces.lfs_enabled, namespaces.description_html, namespaces.parent_id, namespaces.shared_runners_minutes_limit, namespaces.repository_size_limit, namespaces.require_two_factor_authentication, namespaces.two_factor_grace_period, namespaces.cached_markdown_version, namespaces.project_creation_level, namespaces.runners_token, namespaces.file_template_project_id, namespaces.saml_discovery_token, namespaces.runners_token_encrypted, namespaces.custom_project_templates_group_id, namespaces.auto_devops_enabled, namespaces.extra_shared_runners_minutes_limit, namespaces.last_ci_minutes_notification_at, namespaces.last_ci_minutes_usage_notification_level, namespaces.subgroup_creation_level, namespaces.emails_disabled, namespaces.max_pages_size, namespaces.max_artifacts_size, namespaces.mentions_disabled, namespaces.default_branch_protection, 
namespaces.max_personal_access_token_lifetime, namespaces.push_rule_id, namespaces.shared_runners_enabled, namespaces.allow_descendants_override_disabled_shared_runners, namespaces.traversal_ids, namespaces.organization_id FROM namespaces INNER JOIN (SELECT DISTINCT unnest(base_ancestors_cte.traversal_ids) FROM base_ancestors_cte) AS ancestors(ancestor_id) ON namespaces.id = ancestors.ancestor_id WHERE namespaces.type = 'Group') UNION (WITH descendants_base_cte AS MATERIALIZED (SELECT namespaces.id, namespaces.traversal_ids FROM namespaces INNER JOIN members ON namespaces.id = members.source_id WHERE members.type = 'GroupMember' AND members.source_type = 'Namespace' AND namespaces.type = 'Group' AND members.user_id = 2167502 AND members.requested_at IS NULL AND (access_level >= 10)), superset AS (SELECT d1.traversal_ids FROM descendants_base_cte d1 WHERE NOT EXISTS ( SELECT 1 FROM descendants_base_cte d2 WHERE d2.id = ANY(d1.traversal_ids) AND d2.id <> d1.id ) ) SELECT DISTINCT namespaces.id, namespaces.name, namespaces.path, namespaces.owner_id, namespaces.created_at, namespaces.updated_at, namespaces.type, namespaces.description, namespaces.avatar, namespaces.membership_lock, namespaces.share_with_group_lock, namespaces.visibility_level, namespaces.request_access_enabled, namespaces.ldap_sync_status, namespaces.ldap_sync_error, namespaces.ldap_sync_last_update_at, namespaces.ldap_sync_last_successful_update_at, namespaces.ldap_sync_last_sync_at, namespaces.lfs_enabled, namespaces.description_html, namespaces.parent_id, namespaces.shared_runners_minutes_limit, namespaces.repository_size_limit, namespaces.require_two_factor_authentication, namespaces.two_factor_grace_period, namespaces.cached_markdown_version, namespaces.project_creation_level, namespaces.runners_token, namespaces.file_template_project_id, namespaces.saml_discovery_token, namespaces.runners_token_encrypted, namespaces.custom_project_templates_group_id, namespaces.auto_devops_enabled, 
namespaces.extra_shared_runners_minutes_limit, namespaces.last_ci_minutes_notification_at, namespaces.last_ci_minutes_usage_notification_level, namespaces.subgroup_creation_level, namespaces.emails_disabled, namespaces.max_pages_size, namespaces.max_artifacts_size, namespaces.mentions_disabled, namespaces.default_branch_protection, namespaces.max_personal_access_token_lifetime, namespaces.push_rule_id, namespaces.shared_runners_enabled, namespaces.allow_descendants_override_disabled_shared_runners, namespaces.traversal_ids, namespaces.organization_id FROM superset, namespaces WHERE namespaces.type = 'Group' AND next_traversal_ids_sibling(superset.traversal_ids) > namespaces.traversal_ids AND superset.traversal_ids <= namespaces.traversal_ids) UNION (SELECT namespaces.id, namespaces.name, namespaces.path, namespaces.owner_id, namespaces.created_at, namespaces.updated_at, namespaces.type, namespaces.description, namespaces.avatar, namespaces.membership_lock, namespaces.share_with_group_lock, namespaces.visibility_level, namespaces.request_access_enabled, namespaces.ldap_sync_status, namespaces.ldap_sync_error, namespaces.ldap_sync_last_update_at, namespaces.ldap_sync_last_successful_update_at, namespaces.ldap_sync_last_sync_at, namespaces.lfs_enabled, namespaces.description_html, namespaces.parent_id, namespaces.shared_runners_minutes_limit, namespaces.repository_size_limit, namespaces.require_two_factor_authentication, namespaces.two_factor_grace_period, namespaces.cached_markdown_version, namespaces.project_creation_level, namespaces.runners_token, namespaces.file_template_project_id, namespaces.saml_discovery_token, namespaces.runners_token_encrypted, namespaces.custom_project_templates_group_id, namespaces.auto_devops_enabled, namespaces.extra_shared_runners_minutes_limit, namespaces.last_ci_minutes_notification_at, namespaces.last_ci_minutes_usage_notification_level, namespaces.subgroup_creation_level, namespaces.emails_disabled, namespaces.max_pages_size, 
namespaces.max_artifacts_size, namespaces.mentions_disabled, namespaces.default_branch_protection, namespaces.max_personal_access_token_lifetime, namespaces.push_rule_id, namespaces.shared_runners_enabled, namespaces.allow_descendants_override_disabled_shared_runners, namespaces.traversal_ids, namespaces.organization_id FROM namespaces WHERE namespaces.type = 'Group' AND namespaces.visibility_level IN (10, 20))) namespaces INNER JOIN projects ON projects.namespace_id = namespaces.id WHERE namespaces.type = 'Group' AND namespaces.id IN (SELECT namespaces.custom_project_templates_group_id FROM namespaces WHERE namespaces.type = 'Group' AND (traversal_ids[1] IN (SELECT gitlab_subscriptions.namespace_id FROM gitlab_subscriptions WHERE gitlab_subscriptions.hosted_plan_id IN (%IN_LIST%))) AND namespaces.custom_project_templates_group_id IS NOT NULL)) AND projects.marked_for_deletion_at IS NULL AND projects.pending_delete = FALSE AND projects.archived = FALSE;
diff --git a/spec/fixtures/gitlab/database/query_analyzers/small_query_with_in_list.txt b/spec/fixtures/gitlab/database/query_analyzers/small_query_with_in_list.txt
new file mode 100644
index 00000000000..df920489294
--- /dev/null
+++ b/spec/fixtures/gitlab/database/query_analyzers/small_query_with_in_list.txt
@@ -0,0 +1 @@
+SELECT namespaces.id FROM namespaces WHERE namespaces.id IN (%IN_LIST%)
diff --git a/spec/fixtures/gitlab/database/query_analyzers/small_query_without_in_list.txt b/spec/fixtures/gitlab/database/query_analyzers/small_query_without_in_list.txt
new file mode 100644
index 00000000000..982159eb40c
--- /dev/null
+++ b/spec/fixtures/gitlab/database/query_analyzers/small_query_without_in_list.txt
@@ -0,0 +1 @@
+SELECT 1 FROM namespaces;
diff --git a/spec/frontend/__helpers__/mock_observability_client.js b/spec/frontend/__helpers__/mock_observability_client.js
index a65b5233b73..571ee68f9bf 100644
--- a/spec/frontend/__helpers__/mock_observability_client.js
+++ b/spec/frontend/__helpers__/mock_observability_client.js
@@ -4,6 +4,7 @@ export function createMockClient() {
const mockClient = buildClient({
provisioningUrl: 'provisioning-url',
tracingUrl: 'tracing-url',
+ tracingAnalyticsUrl: 'tracing-analytics-url',
servicesUrl: 'services-url',
operationsUrl: 'operations-url',
metricsUrl: 'metrics-url',
diff --git a/spec/frontend/ci/runner/components/runner_cloud_form_spec.js b/spec/frontend/ci/runner/components/runner_cloud_form_spec.js
new file mode 100644
index 00000000000..ae856631f60
--- /dev/null
+++ b/spec/frontend/ci/runner/components/runner_cloud_form_spec.js
@@ -0,0 +1,16 @@
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import RunnerCloudConnectionForm from '~/ci/runner/components/runner_cloud_connection_form.vue';
+
+describe('Runner Cloud Form', () => {
+ let wrapper;
+
+ const createComponent = () => {
+ wrapper = shallowMountExtended(RunnerCloudConnectionForm);
+ };
+
+ it('default', () => {
+ createComponent();
+
+ expect(wrapper.exists()).toBe(true);
+ });
+});
diff --git a/spec/frontend/ci/runner/components/runner_platforms_radio_group_spec.js b/spec/frontend/ci/runner/components/runner_platforms_radio_group_spec.js
index eddc1438fff..18aa722b94a 100644
--- a/spec/frontend/ci/runner/components/runner_platforms_radio_group_spec.js
+++ b/spec/frontend/ci/runner/components/runner_platforms_radio_group_spec.js
@@ -22,65 +22,108 @@ describe('RunnerPlatformsRadioGroup', () => {
.filter((w) => w.text() === text)
.at(0);
- const createComponent = ({ props = {}, mountFn = shallowMountExtended, ...options } = {}) => {
+ const createComponent = ({
+ props = {},
+ mountFn = shallowMountExtended,
+ gcpRunner = false,
+ ...options
+ } = {}) => {
wrapper = mountFn(RunnerPlatformsRadioGroup, {
propsData: {
value: null,
...props,
},
+ provide: {
+ glFeatures: {
+ gcpRunner,
+ },
+ },
...options,
});
};
- beforeEach(() => {
- createComponent();
- });
+ describe('defaults', () => {
+ beforeEach(() => {
+ createComponent();
+ });
- it('contains expected options with images', () => {
- const labels = findFormRadios().map((w) => [w.text(), w.props('image')]);
+ it('contains expected options with images', () => {
+ const labels = findFormRadios().map((w) => [w.text(), w.props('image')]);
- expect(labels).toEqual([
- ['Linux', expect.any(String)],
- ['macOS', null],
- ['Windows', null],
- ['Docker', expect.any(String)],
- ['Kubernetes', expect.any(String)],
- ]);
- });
+ expect(labels).toStrictEqual([
+ ['Linux', expect.any(String)],
+ ['macOS', null],
+ ['Windows', null],
+ ['Docker', expect.any(String)],
+ ['Kubernetes', expect.any(String)],
+ ]);
+ });
- it('allows users to use radio group', async () => {
- findFormRadioGroup().vm.$emit('input', MACOS_PLATFORM);
- await nextTick();
+ it('allows users to use radio group', async () => {
+ findFormRadioGroup().vm.$emit('input', MACOS_PLATFORM);
+ await nextTick();
- expect(wrapper.emitted('input')[0]).toEqual([MACOS_PLATFORM]);
- });
+ expect(wrapper.emitted('input')[0]).toEqual([MACOS_PLATFORM]);
+ });
+
+ it.each`
+ text | value
+ ${'Linux'} | ${LINUX_PLATFORM}
+ ${'macOS'} | ${MACOS_PLATFORM}
+ ${'Windows'} | ${WINDOWS_PLATFORM}
+ `('user can select "$text"', async ({ text, value }) => {
+ const radio = findFormRadioByText(text);
+ expect(radio.props('value')).toBe(value);
- it.each`
- text | value
- ${'Linux'} | ${LINUX_PLATFORM}
- ${'macOS'} | ${MACOS_PLATFORM}
- ${'Windows'} | ${WINDOWS_PLATFORM}
- `('user can select "$text"', async ({ text, value }) => {
- const radio = findFormRadioByText(text);
- expect(radio.props('value')).toBe(value);
+ radio.vm.$emit('input', value);
+ await nextTick();
- radio.vm.$emit('input', value);
- await nextTick();
+ expect(wrapper.emitted('input')[0]).toEqual([value]);
+ });
+
+ it.each`
+ text | href
+ ${'Docker'} | ${DOCKER_HELP_URL}
+ ${'Kubernetes'} | ${KUBERNETES_HELP_URL}
+ `('provides link to "$text" docs', ({ text, href }) => {
+ const radio = findFormRadioByText(text);
- expect(wrapper.emitted('input')[0]).toEqual([value]);
+ expect(radio.findComponent(GlLink).attributes()).toEqual({
+ href,
+ target: '_blank',
+ });
+ expect(radio.findComponent(GlIcon).props('name')).toBe('external-link');
+ });
});
- it.each`
- text | href
- ${'Docker'} | ${DOCKER_HELP_URL}
- ${'Kubernetes'} | ${KUBERNETES_HELP_URL}
- `('provides link to "$text" docs', ({ text, href }) => {
- const radio = findFormRadioByText(text);
+ describe('with gcpRunner flag enabled', () => {
+ it('contains expected options with images', () => {
+ createComponent({ props: {}, mountFn: shallowMountExtended, gcpRunner: true });
+
+ const labels = findFormRadios().map((w) => [w.text(), w.props('image')]);
+
+ expect(labels).toStrictEqual([
+ ['Linux', expect.any(String)],
+ ['macOS', null],
+ ['Windows', null],
+ ['Google Cloud', null],
+ ['Docker', expect.any(String)],
+ ['Kubernetes', expect.any(String)],
+ ]);
+ });
+
+ it('does not contain cloud option when admin prop is passed', () => {
+ createComponent({ props: { admin: true }, mountFn: shallowMountExtended, gcpRunner: true });
+
+ const labels = findFormRadios().map((w) => [w.text(), w.props('image')]);
- expect(radio.findComponent(GlLink).attributes()).toEqual({
- href,
- target: '_blank',
+ expect(labels).toStrictEqual([
+ ['Linux', expect.any(String)],
+ ['macOS', null],
+ ['Windows', null],
+ ['Docker', expect.any(String)],
+ ['Kubernetes', expect.any(String)],
+ ]);
});
- expect(radio.findComponent(GlIcon).props('name')).toBe('external-link');
});
});
diff --git a/spec/frontend/ci/runner/group_new_runner_app/group_new_runner_app_spec.js b/spec/frontend/ci/runner/group_new_runner_app/group_new_runner_app_spec.js
index 623a8f1c5a1..4e1e8c0adde 100644
--- a/spec/frontend/ci/runner/group_new_runner_app/group_new_runner_app_spec.js
+++ b/spec/frontend/ci/runner/group_new_runner_app/group_new_runner_app_spec.js
@@ -1,4 +1,5 @@
import { GlSprintf } from '@gitlab/ui';
+import { nextTick } from 'vue';
import { s__ } from '~/locale';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
@@ -13,7 +14,9 @@ import {
GROUP_TYPE,
DEFAULT_PLATFORM,
WINDOWS_PLATFORM,
+ GOOGLE_CLOUD_PLATFORM,
} from '~/ci/runner/constants';
+import RunnerCloudConnectionForm from '~/ci/runner/components/runner_cloud_connection_form.vue';
import RunnerCreateForm from '~/ci/runner/components/runner_create_form.vue';
import { visitUrl } from '~/lib/utils/url_utility';
import { runnerCreateResult } from '../mock_data';
@@ -36,8 +39,9 @@ describe('GroupRunnerRunnerApp', () => {
const findRegistrationCompatibilityAlert = () =>
wrapper.findComponent(RegistrationCompatibilityAlert);
const findRunnerCreateForm = () => wrapper.findComponent(RunnerCreateForm);
+ const findRunnerCloudForm = () => wrapper.findComponent(RunnerCloudConnectionForm);
- const createComponent = () => {
+ const createComponent = (gcpRunner = false) => {
wrapper = shallowMountExtended(GroupRunnerRunnerApp, {
propsData: {
groupId: mockGroupId,
@@ -45,74 +49,100 @@ describe('GroupRunnerRunnerApp', () => {
stubs: {
GlSprintf,
},
+ provide: {
+ glFeatures: {
+ gcpRunner,
+ },
+ },
});
};
- beforeEach(() => {
- createComponent();
- });
-
- it('shows a registration compatibility alert', () => {
- expect(findRegistrationCompatibilityAlert().props('alertKey')).toBe(mockGroupId);
- });
+ describe('defaults', () => {
+ beforeEach(() => {
+ createComponent();
+ });
- describe('Platform', () => {
- it('shows the platforms radio group', () => {
- expect(findRunnerPlatformsRadioGroup().props('value')).toBe(DEFAULT_PLATFORM);
+ it('shows a registration compatibility alert', () => {
+ expect(findRegistrationCompatibilityAlert().props('alertKey')).toBe(mockGroupId);
});
- });
- describe('Runner form', () => {
- it('shows the runner create form for an instance runner', () => {
- expect(findRunnerCreateForm().props()).toEqual({
- runnerType: GROUP_TYPE,
- groupId: mockGroupId,
- projectId: null,
+ describe('Platform', () => {
+ it('shows the platforms radio group', () => {
+ expect(findRunnerPlatformsRadioGroup().props('value')).toBe(DEFAULT_PLATFORM);
});
});
- describe('When a runner is saved', () => {
- beforeEach(() => {
- findRunnerCreateForm().vm.$emit('saved', mockCreatedRunner);
+ describe('Runner form', () => {
+ it('shows the runner create form for an instance runner', () => {
+ expect(findRunnerCreateForm().props()).toEqual({
+ runnerType: GROUP_TYPE,
+ groupId: mockGroupId,
+ projectId: null,
+ });
});
- it('pushes an alert to be shown after redirection', () => {
- expect(saveAlertToLocalStorage).toHaveBeenCalledWith({
- message: s__('Runners|Runner created.'),
- variant: VARIANT_SUCCESS,
+ describe('When a runner is saved', () => {
+ beforeEach(() => {
+ findRunnerCreateForm().vm.$emit('saved', mockCreatedRunner);
});
- });
- it('redirects to the registration page', () => {
- const url = `${mockCreatedRunner.ephemeralRegisterUrl}?${PARAM_KEY_PLATFORM}=${DEFAULT_PLATFORM}`;
+ it('pushes an alert to be shown after redirection', () => {
+ expect(saveAlertToLocalStorage).toHaveBeenCalledWith({
+ message: s__('Runners|Runner created.'),
+ variant: VARIANT_SUCCESS,
+ });
+ });
- expect(visitUrl).toHaveBeenCalledWith(url);
- });
- });
+ it('redirects to the registration page', () => {
+ const url = `${mockCreatedRunner.ephemeralRegisterUrl}?${PARAM_KEY_PLATFORM}=${DEFAULT_PLATFORM}`;
- describe('When another platform is selected and a runner is saved', () => {
- beforeEach(() => {
- findRunnerPlatformsRadioGroup().vm.$emit('input', WINDOWS_PLATFORM);
- findRunnerCreateForm().vm.$emit('saved', mockCreatedRunner);
+ expect(visitUrl).toHaveBeenCalledWith(url);
+ });
});
- it('redirects to the registration page with the platform', () => {
- const url = `${mockCreatedRunner.ephemeralRegisterUrl}?${PARAM_KEY_PLATFORM}=${WINDOWS_PLATFORM}`;
+ describe('When another platform is selected and a runner is saved', () => {
+ beforeEach(() => {
+ findRunnerPlatformsRadioGroup().vm.$emit('input', WINDOWS_PLATFORM);
+ findRunnerCreateForm().vm.$emit('saved', mockCreatedRunner);
+ });
+
+ it('redirects to the registration page with the platform', () => {
+ const url = `${mockCreatedRunner.ephemeralRegisterUrl}?${PARAM_KEY_PLATFORM}=${WINDOWS_PLATFORM}`;
- expect(visitUrl).toHaveBeenCalledWith(url);
+ expect(visitUrl).toHaveBeenCalledWith(url);
+ });
});
- });
- describe('When runner fails to save', () => {
- const ERROR_MSG = 'Cannot save!';
+ describe('When runner fails to save', () => {
+ const ERROR_MSG = 'Cannot save!';
- beforeEach(() => {
- findRunnerCreateForm().vm.$emit('error', new Error(ERROR_MSG));
- });
+ beforeEach(() => {
+ findRunnerCreateForm().vm.$emit('error', new Error(ERROR_MSG));
+ });
- it('shows an error message', () => {
- expect(createAlert).toHaveBeenCalledWith({ message: ERROR_MSG });
+ it('shows an error message', () => {
+ expect(createAlert).toHaveBeenCalledWith({ message: ERROR_MSG });
+ });
});
});
});
+
+ describe('Runner cloud form', () => {
+ it.each`
+ flagState | visible
+ ${true} | ${true}
+ ${false} | ${false}
+ `(
+ 'shows runner cloud form: $visible when flag is set to $flagState and platform is google',
+ async ({ flagState, visible }) => {
+ createComponent(flagState);
+
+ findRunnerPlatformsRadioGroup().vm.$emit('input', GOOGLE_CLOUD_PLATFORM);
+
+ await nextTick();
+
+ expect(findRunnerCloudForm().exists()).toBe(visible);
+ },
+ );
+ });
});
diff --git a/spec/frontend/ci/runner/project_new_runner_app/project_new_runner_app_spec.js b/spec/frontend/ci/runner/project_new_runner_app/project_new_runner_app_spec.js
index 3e12f3911a0..e2cbe731032 100644
--- a/spec/frontend/ci/runner/project_new_runner_app/project_new_runner_app_spec.js
+++ b/spec/frontend/ci/runner/project_new_runner_app/project_new_runner_app_spec.js
@@ -1,4 +1,5 @@
import { GlSprintf } from '@gitlab/ui';
+import { nextTick } from 'vue';
import { s__ } from '~/locale';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
@@ -13,7 +14,9 @@ import {
PROJECT_TYPE,
DEFAULT_PLATFORM,
WINDOWS_PLATFORM,
+ GOOGLE_CLOUD_PLATFORM,
} from '~/ci/runner/constants';
+import RunnerCloudConnectionForm from '~/ci/runner/components/runner_cloud_connection_form.vue';
import RunnerCreateForm from '~/ci/runner/components/runner_create_form.vue';
import { visitUrl } from '~/lib/utils/url_utility';
import { runnerCreateResult, mockRegistrationToken } from '../mock_data';
@@ -36,8 +39,9 @@ describe('ProjectRunnerRunnerApp', () => {
const findRegistrationCompatibilityAlert = () =>
wrapper.findComponent(RegistrationCompatibilityAlert);
const findRunnerCreateForm = () => wrapper.findComponent(RunnerCreateForm);
+ const findRunnerCloudForm = () => wrapper.findComponent(RunnerCloudConnectionForm);
- const createComponent = () => {
+ const createComponent = (gcpRunner = false) => {
wrapper = shallowMountExtended(ProjectRunnerRunnerApp, {
propsData: {
projectId: mockProjectId,
@@ -46,74 +50,100 @@ describe('ProjectRunnerRunnerApp', () => {
stubs: {
GlSprintf,
},
+ provide: {
+ glFeatures: {
+ gcpRunner,
+ },
+ },
});
};
- beforeEach(() => {
- createComponent();
- });
-
- it('shows a registration compatibility alert', () => {
- expect(findRegistrationCompatibilityAlert().props('alertKey')).toBe(mockProjectId);
- });
+ describe('defaults', () => {
+ beforeEach(() => {
+ createComponent();
+ });
- describe('Platform', () => {
- it('shows the platforms radio group', () => {
- expect(findRunnerPlatformsRadioGroup().props('value')).toBe(DEFAULT_PLATFORM);
+ it('shows a registration compatibility alert', () => {
+ expect(findRegistrationCompatibilityAlert().props('alertKey')).toBe(mockProjectId);
});
- });
- describe('Runner form', () => {
- it('shows the runner create form for an instance runner', () => {
- expect(findRunnerCreateForm().props()).toEqual({
- runnerType: PROJECT_TYPE,
- projectId: mockProjectId,
- groupId: null,
+ describe('Platform', () => {
+ it('shows the platforms radio group', () => {
+ expect(findRunnerPlatformsRadioGroup().props('value')).toBe(DEFAULT_PLATFORM);
});
});
- describe('When a runner is saved', () => {
- beforeEach(() => {
- findRunnerCreateForm().vm.$emit('saved', mockCreatedRunner);
+ describe('Runner form', () => {
+ it('shows the runner create form for an instance runner', () => {
+ expect(findRunnerCreateForm().props()).toEqual({
+ runnerType: PROJECT_TYPE,
+ projectId: mockProjectId,
+ groupId: null,
+ });
});
- it('pushes an alert to be shown after redirection', () => {
- expect(saveAlertToLocalStorage).toHaveBeenCalledWith({
- message: s__('Runners|Runner created.'),
- variant: VARIANT_SUCCESS,
+ describe('When a runner is saved', () => {
+ beforeEach(() => {
+ findRunnerCreateForm().vm.$emit('saved', mockCreatedRunner);
});
- });
- it('redirects to the registration page', () => {
- const url = `${mockCreatedRunner.ephemeralRegisterUrl}?${PARAM_KEY_PLATFORM}=${DEFAULT_PLATFORM}`;
+ it('pushes an alert to be shown after redirection', () => {
+ expect(saveAlertToLocalStorage).toHaveBeenCalledWith({
+ message: s__('Runners|Runner created.'),
+ variant: VARIANT_SUCCESS,
+ });
+ });
- expect(visitUrl).toHaveBeenCalledWith(url);
- });
- });
+ it('redirects to the registration page', () => {
+ const url = `${mockCreatedRunner.ephemeralRegisterUrl}?${PARAM_KEY_PLATFORM}=${DEFAULT_PLATFORM}`;
- describe('When another platform is selected and a runner is saved', () => {
- beforeEach(() => {
- findRunnerPlatformsRadioGroup().vm.$emit('input', WINDOWS_PLATFORM);
- findRunnerCreateForm().vm.$emit('saved', mockCreatedRunner);
+ expect(visitUrl).toHaveBeenCalledWith(url);
+ });
});
- it('redirects to the registration page with the platform', () => {
- const url = `${mockCreatedRunner.ephemeralRegisterUrl}?${PARAM_KEY_PLATFORM}=${WINDOWS_PLATFORM}`;
+ describe('When another platform is selected and a runner is saved', () => {
+ beforeEach(() => {
+ findRunnerPlatformsRadioGroup().vm.$emit('input', WINDOWS_PLATFORM);
+ findRunnerCreateForm().vm.$emit('saved', mockCreatedRunner);
+ });
+
+ it('redirects to the registration page with the platform', () => {
+ const url = `${mockCreatedRunner.ephemeralRegisterUrl}?${PARAM_KEY_PLATFORM}=${WINDOWS_PLATFORM}`;
- expect(visitUrl).toHaveBeenCalledWith(url);
+ expect(visitUrl).toHaveBeenCalledWith(url);
+ });
});
- });
- describe('When runner fails to save', () => {
- const ERROR_MSG = 'Cannot save!';
+ describe('When runner fails to save', () => {
+ const ERROR_MSG = 'Cannot save!';
- beforeEach(() => {
- findRunnerCreateForm().vm.$emit('error', new Error(ERROR_MSG));
- });
+ beforeEach(() => {
+ findRunnerCreateForm().vm.$emit('error', new Error(ERROR_MSG));
+ });
- it('shows an error message', () => {
- expect(createAlert).toHaveBeenCalledWith({ message: ERROR_MSG });
+ it('shows an error message', () => {
+ expect(createAlert).toHaveBeenCalledWith({ message: ERROR_MSG });
+ });
});
});
});
+
+ describe('Runner cloud form', () => {
+ it.each`
+ flagState | visible
+ ${true} | ${true}
+ ${false} | ${false}
+ `(
+ 'shows runner cloud form: $visible when flag is set to $flagState and platform is google',
+ async ({ flagState, visible }) => {
+ createComponent(flagState);
+
+ findRunnerPlatformsRadioGroup().vm.$emit('input', GOOGLE_CLOUD_PLATFORM);
+
+ await nextTick();
+
+ expect(findRunnerCloudForm().exists()).toBe(visible);
+ },
+ );
+ });
});
diff --git a/spec/frontend/observability/client_spec.js b/spec/frontend/observability/client_spec.js
index 1bd0112746b..e3196861659 100644
--- a/spec/frontend/observability/client_spec.js
+++ b/spec/frontend/observability/client_spec.js
@@ -14,6 +14,7 @@ describe('buildClient', () => {
let axiosMock;
const tracingUrl = 'https://example.com/tracing';
+ const tracingAnalyticsUrl = 'https://example.com/tracing/analytics';
const provisioningUrl = 'https://example.com/provisioning';
const servicesUrl = 'https://example.com/services';
const operationsUrl = 'https://example.com/services/$SERVICE_NAME$/operations';
@@ -23,6 +24,7 @@ describe('buildClient', () => {
const apiConfig = {
tracingUrl,
+ tracingAnalyticsUrl,
provisioningUrl,
servicesUrl,
operationsUrl,
@@ -389,6 +391,196 @@ describe('buildClient', () => {
});
});
+ describe('fetchTracesAnalytics', () => {
+ it('fetches analytics from the tracesAnalytics URL', async () => {
+ const mockResponse = {
+ results: [
+ {
+ Interval: 1705039800,
+ count: 5,
+ p90_duration_nano: 50613502867,
+ p95_duration_nano: 50613502867,
+ p75_duration_nano: 49756727928,
+ p50_duration_nano: 41610120929,
+ error_count: 324,
+ trace_rate: 2.576111111111111,
+ error_rate: 0.09,
+ },
+ ],
+ };
+
+ axiosMock.onGet(tracingAnalyticsUrl).reply(200, mockResponse);
+
+ const result = await client.fetchTracesAnalytics();
+
+ expect(axios.get).toHaveBeenCalledTimes(1);
+ expect(axios.get).toHaveBeenCalledWith(tracingAnalyticsUrl, {
+ withCredentials: true,
+ params: expect.any(URLSearchParams),
+ });
+ expect(result).toEqual(mockResponse.results);
+ });
+
+ it('returns empty array if analytics are missing', async () => {
+ axiosMock.onGet(tracingAnalyticsUrl).reply(200, {});
+
+ expect(await client.fetchTracesAnalytics()).toEqual([]);
+ });
+
+ describe('query filter', () => {
+ beforeEach(() => {
+ axiosMock.onGet(tracingAnalyticsUrl).reply(200, {
+ results: [],
+ });
+ });
+
+ it('does not set any query param without filters', async () => {
+ await client.fetchTracesAnalytics();
+
+ expect(getQueryParam()).toBe(``);
+ });
+
+ it('converts filter to proper query params', async () => {
+ await client.fetchTracesAnalytics({
+ filters: {
+ durationMs: [
+ { operator: '>', value: '100' },
+ { operator: '<', value: '1000' },
+ ],
+ operation: [
+ { operator: '=', value: 'op' },
+ { operator: '!=', value: 'not-op' },
+ ],
+ service: [
+ { operator: '=', value: 'service' },
+ { operator: '!=', value: 'not-service' },
+ ],
+ period: [{ operator: '=', value: '5m' }],
+ status: [
+ { operator: '=', value: 'ok' },
+ { operator: '!=', value: 'error' },
+ ],
+ traceId: [
+ { operator: '=', value: 'trace-id' },
+ { operator: '!=', value: 'not-trace-id' },
+ ],
+ attribute: [{ operator: '=', value: 'name1=value1' }],
+ },
+ });
+ expect(getQueryParam()).toContain(
+ 'gt[duration_nano]=100000000&lt[duration_nano]=1000000000' +
+ '&operation=op&not[operation]=not-op' +
+ '&service_name=service&not[service_name]=not-service' +
+ '&period=5m' +
+ '&trace_id=trace-id&not[trace_id]=not-trace-id' +
+ '&attr_name=name1&attr_value=value1' +
+ '&status=ok&not[status]=error',
+ );
+ });
+ describe('date range time filter', () => {
+ it('handles custom date range period filter', async () => {
+ await client.fetchTracesAnalytics({
+ filters: {
+ period: [{ operator: '=', value: '2023-01-01 - 2023-02-01' }],
+ },
+ });
+ expect(getQueryParam()).not.toContain('period=');
+ expect(getQueryParam()).toContain(
+ 'start_time=2023-01-01T00:00:00.000Z&end_time=2023-02-01T00:00:00.000Z',
+ );
+ });
+
+ it.each([
+ 'invalid - 2023-02-01',
+ '2023-02-01 - invalid',
+ 'invalid - invalid',
+ '2023-01-01 / 2023-02-01',
+ '2023-01-01 2023-02-01',
+ '2023-01-01 - 2023-02-01 - 2023-02-01',
+ ])('ignore invalid values', async (val) => {
+ await client.fetchTracesAnalytics({
+ filters: {
+ period: [{ operator: '=', value: val }],
+ },
+ });
+
+ expect(getQueryParam()).not.toContain('start_time=');
+ expect(getQueryParam()).not.toContain('end_time=');
+ expect(getQueryParam()).not.toContain('period=');
+ });
+ });
+
+ it('handles repeated params', async () => {
+ await client.fetchTracesAnalytics({
+ filters: {
+ operation: [
+ { operator: '=', value: 'op' },
+ { operator: '=', value: 'op2' },
+ ],
+ },
+ });
+ expect(getQueryParam()).toContain('operation=op&operation=op2');
+ });
+
+ it('ignores unsupported filters', async () => {
+ await client.fetchTracesAnalytics({
+ filters: {
+ unsupportedFilter: [{ operator: '=', value: 'foo' }],
+ },
+ });
+
+ expect(getQueryParam()).toBe(``);
+ });
+
+ it('ignores empty filters', async () => {
+ await client.fetchTracesAnalytics({
+ filters: {
+ durationMs: null,
+ },
+ });
+
+ expect(getQueryParam()).toBe(``);
+ });
+
+ it('ignores non-array filters', async () => {
+ await client.fetchTracesAnalytics({
+ filters: {
+ traceId: { operator: '=', value: 'foo' },
+ },
+ });
+
+ expect(getQueryParam()).toBe(``);
+ });
+
+ it('ignores unsupported operators', async () => {
+ await client.fetchTracesAnalytics({
+ filters: {
+ durationMs: [
+ { operator: '*', value: 'foo' },
+ { operator: '=', value: 'foo' },
+ { operator: '!=', value: 'foo' },
+ ],
+ operation: [
+ { operator: '>', value: 'foo' },
+ { operator: '<', value: 'foo' },
+ ],
+ service: [
+ { operator: '>', value: 'foo' },
+ { operator: '<', value: 'foo' },
+ ],
+ period: [{ operator: '!=', value: 'foo' }],
+ traceId: [
+ { operator: '>', value: 'foo' },
+ { operator: '<', value: 'foo' },
+ ],
+ },
+ });
+
+ expect(getQueryParam()).toBe(``);
+ });
+ });
+ });
+
describe('fetchServices', () => {
it('fetches services from the services URL', async () => {
const mockResponse = {
diff --git a/spec/frontend/repository/components/blob_content_viewer_spec.js b/spec/frontend/repository/components/blob_content_viewer_spec.js
index 8414dfcf151..31337364dca 100644
--- a/spec/frontend/repository/components/blob_content_viewer_spec.js
+++ b/spec/frontend/repository/components/blob_content_viewer_spec.js
@@ -18,7 +18,6 @@ import { loadViewer } from '~/repository/components/blob_viewers';
import DownloadViewer from '~/repository/components/blob_viewers/download_viewer.vue';
import EmptyViewer from '~/repository/components/blob_viewers/empty_viewer.vue';
import SourceViewer from '~/vue_shared/components/source_viewer/source_viewer.vue';
-import SourceViewerNew from '~/vue_shared/components/source_viewer/source_viewer_new.vue';
import blobInfoQuery from 'shared_queries/repository/blob_info.query.graphql';
import projectInfoQuery from '~/repository/queries/project_info.query.graphql';
import CodeIntelligence from '~/code_navigation/components/app.vue';
@@ -60,6 +59,7 @@ const mockRouterPush = jest.fn();
const mockRouter = {
push: mockRouterPush,
};
+const highlightWorker = { postMessage: jest.fn() };
const legacyViewerUrl = '/some_file.js?format=json&viewer=simple';
@@ -74,7 +74,7 @@ const createComponent = async (mockData = {}, mountFn = shallowMount, mockRoute
downloadCode = userPermissionsMock.downloadCode,
createMergeRequestIn = userPermissionsMock.createMergeRequestIn,
isBinary,
- inject = {},
+ inject = { highlightWorker },
} = mockData;
const blobInfo = {
@@ -136,9 +136,6 @@ const createComponent = async (mockData = {}, mountFn = shallowMount, mockRoute
targetBranch: 'test',
originalBranch: 'default-ref',
...inject,
- glFeatures: {
- highlightJsWorker: false,
- },
},
}),
);
@@ -158,7 +155,6 @@ describe('Blob content viewer component', () => {
const findForkSuggestion = () => wrapper.findComponent(ForkSuggestion);
const findCodeIntelligence = () => wrapper.findComponent(CodeIntelligence);
const findSourceViewer = () => wrapper.findComponent(SourceViewer);
- const findSourceViewerNew = () => wrapper.findComponent(SourceViewerNew);
beforeEach(() => {
jest.spyOn(window, 'requestIdleCallback').mockImplementation(execImmediately);
@@ -203,28 +199,28 @@ describe('Blob content viewer component', () => {
});
it('adds blame param to the URL and passes `showBlame` to the SourceViewer', async () => {
- loadViewer.mockReturnValueOnce(SourceViewerNew);
+ loadViewer.mockReturnValueOnce(SourceViewer);
await createComponent({ blob: simpleViewerMock });
await triggerBlame();
expect(mockRouterPush).toHaveBeenCalledWith({ query: { blame: '1' } });
- expect(findSourceViewerNew().props('showBlame')).toBe(true);
+ expect(findSourceViewer().props('showBlame')).toBe(true);
await triggerBlame();
expect(mockRouterPush).toHaveBeenCalledWith({ query: { blame: '0' } });
- expect(findSourceViewerNew().props('showBlame')).toBe(false);
+ expect(findSourceViewer().props('showBlame')).toBe(false);
});
describe('when viewing rich content', () => {
it('always shows the blame when clicking on the blame button', async () => {
- loadViewer.mockReturnValueOnce(SourceViewerNew);
+ loadViewer.mockReturnValueOnce(SourceViewer);
const query = { plain: '0', blame: '1' };
await createComponent({ blob: simpleViewerMock }, shallowMount, { query });
await triggerBlame();
- expect(findSourceViewerNew().props('showBlame')).toBe(true);
+ expect(findSourceViewer().props('showBlame')).toBe(true);
});
});
});
@@ -435,7 +431,7 @@ describe('Blob content viewer component', () => {
await waitForPromises();
- expect(loadViewer).toHaveBeenCalledWith(viewer, false, false, 'javascript');
+ expect(loadViewer).toHaveBeenCalledWith(viewer, false);
expect(wrapper.findComponent(loadViewerReturnValue).exists()).toBe(true);
});
});
@@ -514,7 +510,7 @@ describe('Blob content viewer component', () => {
});
it('is called with originalBranch value if the prop has a value', async () => {
- await createComponent({ inject: { originalBranch: 'some-branch' } });
+ await createComponent({ inject: { originalBranch: 'some-branch', highlightWorker } });
expect(blobInfoMockResolver).toHaveBeenCalledWith(
expect.objectContaining({
diff --git a/spec/frontend/repository/mixins/highlight_mixin_spec.js b/spec/frontend/repository/mixins/highlight_mixin_spec.js
index c635c09d1aa..ccde41f62e5 100644
--- a/spec/frontend/repository/mixins/highlight_mixin_spec.js
+++ b/spec/frontend/repository/mixins/highlight_mixin_spec.js
@@ -41,7 +41,6 @@ describe('HighlightMixin', () => {
mixins: [highlightMixin],
inject: {
highlightWorker: { default: workerMock },
- glFeatures: { default: { highlightJsWorker: true } },
},
template: '<div>{{chunks[0]?.highlightedContent}}</div>',
created() {
diff --git a/spec/frontend/vue_shared/components/source_viewer/source_viewer_new_spec.js b/spec/frontend/vue_shared/components/source_viewer/source_viewer_new_spec.js
deleted file mode 100644
index 745886161ce..00000000000
--- a/spec/frontend/vue_shared/components/source_viewer/source_viewer_new_spec.js
+++ /dev/null
@@ -1,191 +0,0 @@
-import Vue, { nextTick } from 'vue';
-import VueApollo from 'vue-apollo';
-import createMockApollo from 'helpers/mock_apollo_helper';
-import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import { setHTMLFixture } from 'helpers/fixtures';
-import SourceViewer from '~/vue_shared/components/source_viewer/source_viewer_new.vue';
-import Chunk from '~/vue_shared/components/source_viewer/components/chunk_new.vue';
-import {
- EVENT_ACTION,
- EVENT_LABEL_VIEWER,
- CODEOWNERS_FILE_NAME,
-} from '~/vue_shared/components/source_viewer/constants';
-import Tracking from '~/tracking';
-import LineHighlighter from '~/blob/line_highlighter';
-import addBlobLinksTracking from '~/blob/blob_links_tracking';
-import waitForPromises from 'helpers/wait_for_promises';
-import blameDataQuery from '~/vue_shared/components/source_viewer/queries/blame_data.query.graphql';
-import Blame from '~/vue_shared/components/source_viewer/components/blame_info.vue';
-import * as utils from '~/vue_shared/components/source_viewer/utils';
-import CodeownersValidation from 'ee_component/blob/components/codeowners_validation.vue';
-
-import {
- BLOB_DATA_MOCK,
- CHUNK_1,
- CHUNK_2,
- LANGUAGE_MOCK,
- BLAME_DATA_QUERY_RESPONSE_MOCK,
- SOURCE_CODE_CONTENT_MOCK,
-} from './mock_data';
-
-Vue.use(VueApollo);
-
-const lineHighlighter = new LineHighlighter();
-jest.mock('~/blob/line_highlighter', () =>
- jest.fn().mockReturnValue({
- highlightHash: jest.fn(),
- }),
-);
-jest.mock('~/blob/blob_links_tracking');
-
-describe('Source Viewer component', () => {
- let wrapper;
- let fakeApollo;
- const CHUNKS_MOCK = [CHUNK_1, CHUNK_2];
- const projectPath = 'test';
- const currentRef = 'main';
- const hash = '#L142';
-
- const blameDataQueryHandlerSuccess = jest.fn().mockResolvedValue(BLAME_DATA_QUERY_RESPONSE_MOCK);
- const blameInfo =
- BLAME_DATA_QUERY_RESPONSE_MOCK.data.project.repository.blobs.nodes[0].blame.groups;
-
- const createComponent = ({ showBlame = true, blob = {} } = {}) => {
- fakeApollo = createMockApollo([[blameDataQuery, blameDataQueryHandlerSuccess]]);
-
- wrapper = shallowMountExtended(SourceViewer, {
- apolloProvider: fakeApollo,
- mocks: { $route: { hash } },
- propsData: {
- blob: { ...blob, ...BLOB_DATA_MOCK },
- chunks: CHUNKS_MOCK,
- projectPath,
- currentRef,
- showBlame,
- },
- });
- };
-
- const findChunks = () => wrapper.findAllComponents(Chunk);
- const findBlameComponents = () => wrapper.findAllComponents(Blame);
- const triggerChunkAppear = async (chunkIndex = 0) => {
- findChunks().at(chunkIndex).vm.$emit('appear');
- await waitForPromises();
- };
-
- beforeEach(() => {
- jest.spyOn(Tracking, 'event');
- return createComponent();
- });
-
- it('instantiates the lineHighlighter class', () => {
- expect(LineHighlighter).toHaveBeenCalled();
- });
-
- describe('event tracking', () => {
- it('fires a tracking event when the component is created', () => {
- const eventData = { label: EVENT_LABEL_VIEWER, property: LANGUAGE_MOCK };
- expect(Tracking.event).toHaveBeenCalledWith(undefined, EVENT_ACTION, eventData);
- });
-
- it('adds blob links tracking', () => {
- expect(addBlobLinksTracking).toHaveBeenCalled();
- });
- });
-
- describe('rendering', () => {
- it('does not render a Blame component if the respective chunk for the blame has not appeared', async () => {
- await waitForPromises();
- expect(findBlameComponents()).toHaveLength(0);
- });
-
- describe('DOM updates', () => {
- it('adds the necessary classes to the DOM', async () => {
- setHTMLFixture(SOURCE_CODE_CONTENT_MOCK);
- jest.spyOn(utils, 'toggleBlameClasses');
- createComponent();
- await triggerChunkAppear();
-
- expect(utils.toggleBlameClasses).toHaveBeenCalledWith(blameInfo, true);
- });
- });
-
- describe('Blame information', () => {
- it('renders a Blame component when a chunk appears', async () => {
- await triggerChunkAppear();
-
- expect(findBlameComponents().at(0).exists()).toBe(true);
- expect(findBlameComponents().at(0).props()).toMatchObject({ blameInfo });
- });
-
- it('calls the blame data query', async () => {
- await triggerChunkAppear();
-
- expect(blameDataQueryHandlerSuccess).toHaveBeenCalledWith(
- expect.objectContaining({
- filePath: BLOB_DATA_MOCK.path,
- fullPath: projectPath,
- ref: currentRef,
- }),
- );
- });
-
- it('calls the query only once per chunk', async () => {
- // We trigger the `appear` event multiple times here in order to simulate the user scrolling past the chunk more than once.
- // In this scenario we only want to query the backend once.
- await triggerChunkAppear();
- await triggerChunkAppear();
-
- expect(blameDataQueryHandlerSuccess).toHaveBeenCalledTimes(1);
- });
-
- it('requests blame information for overlapping chunk', async () => {
- await triggerChunkAppear(1);
-
- expect(blameDataQueryHandlerSuccess).toHaveBeenCalledTimes(2);
- expect(blameDataQueryHandlerSuccess).toHaveBeenCalledWith(
- expect.objectContaining({ fromLine: 71, toLine: 110 }),
- );
- expect(blameDataQueryHandlerSuccess).toHaveBeenCalledWith(
- expect.objectContaining({ fromLine: 1, toLine: 70 }),
- );
-
- expect(findChunks().at(0).props('isHighlighted')).toBe(true);
- });
-
- it('does not render a Blame component when `showBlame: false`', async () => {
- createComponent({ showBlame: false });
- await triggerChunkAppear();
-
- expect(findBlameComponents()).toHaveLength(0);
- });
- });
-
- it('renders a Chunk component for each chunk', () => {
- expect(findChunks().at(0).props()).toMatchObject(CHUNK_1);
- expect(findChunks().at(1).props()).toMatchObject(CHUNK_2);
- });
- });
-
- describe('hash highlighting', () => {
- it('calls highlightHash with expected parameter', () => {
- expect(lineHighlighter.highlightHash).toHaveBeenCalledWith(hash);
- });
- });
-
- describe('Codeowners validation', () => {
- const findCodeownersValidation = () => wrapper.findComponent(CodeownersValidation);
-
- it('does not render codeowners validation when file is not CODEOWNERS', async () => {
- await createComponent();
- await nextTick();
- expect(findCodeownersValidation().exists()).toBe(false);
- });
-
- it('renders codeowners validation when file is CODEOWNERS', async () => {
- await createComponent({ blob: { name: CODEOWNERS_FILE_NAME } });
- await nextTick();
- expect(findCodeownersValidation().exists()).toBe(true);
- });
- });
-});
diff --git a/spec/frontend/vue_shared/components/source_viewer/source_viewer_spec.js b/spec/frontend/vue_shared/components/source_viewer/source_viewer_spec.js
index 2043f36443d..1fa15b28cf1 100644
--- a/spec/frontend/vue_shared/components/source_viewer/source_viewer_spec.js
+++ b/spec/frontend/vue_shared/components/source_viewer/source_viewer_spec.js
@@ -1,279 +1,175 @@
-import hljs from 'highlight.js/lib/core';
-import axios from 'axios';
-import MockAdapter from 'axios-mock-adapter';
+import Vue, { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { setHTMLFixture } from 'helpers/fixtures';
import SourceViewer from '~/vue_shared/components/source_viewer/source_viewer.vue';
-import CodeownersValidation from 'ee_component/blob/components/codeowners_validation.vue';
-import { registerPlugins } from '~/vue_shared/components/source_viewer/plugins/index';
-import Chunk from '~/vue_shared/components/source_viewer/components/chunk.vue';
+import Chunk from '~/vue_shared/components/source_viewer/components/chunk_new.vue';
import {
EVENT_ACTION,
EVENT_LABEL_VIEWER,
- EVENT_LABEL_FALLBACK,
- ROUGE_TO_HLJS_LANGUAGE_MAP,
- LINES_PER_CHUNK,
- LEGACY_FALLBACKS,
CODEOWNERS_FILE_NAME,
- CODEOWNERS_LANGUAGE,
- SVELTE_LANGUAGE,
} from '~/vue_shared/components/source_viewer/constants';
-import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
-import waitForPromises from 'helpers/wait_for_promises';
-import LineHighlighter from '~/blob/line_highlighter';
-import eventHub from '~/notes/event_hub';
import Tracking from '~/tracking';
+import LineHighlighter from '~/blob/line_highlighter';
+import addBlobLinksTracking from '~/blob/blob_links_tracking';
+import waitForPromises from 'helpers/wait_for_promises';
+import blameDataQuery from '~/vue_shared/components/source_viewer/queries/blame_data.query.graphql';
+import Blame from '~/vue_shared/components/source_viewer/components/blame_info.vue';
+import * as utils from '~/vue_shared/components/source_viewer/utils';
+import CodeownersValidation from 'ee_component/blob/components/codeowners_validation.vue';
-const lineHighlighter = new LineHighlighter();
-jest.mock('~/blob/line_highlighter', () => jest.fn().mockReturnValue({ highlightHash: jest.fn() }));
-jest.mock('highlight.js/lib/core');
-jest.mock('~/vue_shared/components/source_viewer/plugins/index');
-const mockAxios = new MockAdapter(axios);
+import {
+ BLOB_DATA_MOCK,
+ CHUNK_1,
+ CHUNK_2,
+ LANGUAGE_MOCK,
+ BLAME_DATA_QUERY_RESPONSE_MOCK,
+ SOURCE_CODE_CONTENT_MOCK,
+} from './mock_data';
-const generateContent = (content, totalLines = 1, delimiter = '\n') => {
- let generatedContent = '';
- for (let i = 0; i < totalLines; i += 1) {
- generatedContent += `Line: ${i + 1} = ${content}${delimiter}`;
- }
- return generatedContent;
-};
+Vue.use(VueApollo);
-const execImmediately = (callback) => callback();
+const lineHighlighter = new LineHighlighter();
+jest.mock('~/blob/line_highlighter', () =>
+ jest.fn().mockReturnValue({
+ highlightHash: jest.fn(),
+ }),
+);
+jest.mock('~/blob/blob_links_tracking');
describe('Source Viewer component', () => {
let wrapper;
- const language = 'docker';
- const selectedRangeHash = '#L1-2';
- const mappedLanguage = ROUGE_TO_HLJS_LANGUAGE_MAP[language];
- const chunk1 = generateContent('// Some source code 1', 70);
- const chunk2 = generateContent('// Some source code 2', 70);
- const chunk3 = generateContent('// Some source code 3', 70, '\r\n');
- const chunk3Result = generateContent('// Some source code 3', 70, '\n');
- const content = chunk1 + chunk2 + chunk3;
- const path = 'some/path.js';
- const blamePath = 'some/blame/path.js';
- const fileType = 'javascript';
- const DEFAULT_BLOB_DATA = { language, rawTextBlob: content, path, blamePath, fileType };
- const highlightedContent = `<span data-testid='test-highlighted' id='LC1'>${content}</span><span id='LC2'></span>`;
+ let fakeApollo;
+ const CHUNKS_MOCK = [CHUNK_1, CHUNK_2];
+ const projectPath = 'test';
const currentRef = 'main';
- const projectPath = 'test/project';
+ const hash = '#L142';
+
+ const blameDataQueryHandlerSuccess = jest.fn().mockResolvedValue(BLAME_DATA_QUERY_RESPONSE_MOCK);
+ const blameInfo =
+ BLAME_DATA_QUERY_RESPONSE_MOCK.data.project.repository.blobs.nodes[0].blame.groups;
+
+ const createComponent = ({ showBlame = true, blob = {} } = {}) => {
+ fakeApollo = createMockApollo([[blameDataQuery, blameDataQueryHandlerSuccess]]);
- const createComponent = async (blob = {}) => {
wrapper = shallowMountExtended(SourceViewer, {
- propsData: { blob: { ...DEFAULT_BLOB_DATA, ...blob }, currentRef, projectPath },
- mocks: { $route: { hash: selectedRangeHash } },
+ apolloProvider: fakeApollo,
+ mocks: { $route: { hash } },
+ propsData: {
+ blob: { ...blob, ...BLOB_DATA_MOCK },
+ chunks: CHUNKS_MOCK,
+ projectPath,
+ currentRef,
+ showBlame,
+ },
});
- await waitForPromises();
};
const findChunks = () => wrapper.findAllComponents(Chunk);
+ const findBlameComponents = () => wrapper.findAllComponents(Blame);
+ const triggerChunkAppear = async (chunkIndex = 0) => {
+ findChunks().at(chunkIndex).vm.$emit('appear');
+ await waitForPromises();
+ };
beforeEach(() => {
- hljs.highlight.mockImplementation(() => ({ value: highlightedContent }));
- hljs.highlightAuto.mockImplementation(() => ({ value: highlightedContent }));
- jest.spyOn(window, 'requestIdleCallback').mockImplementation(execImmediately);
- jest.spyOn(eventHub, '$emit');
jest.spyOn(Tracking, 'event');
-
return createComponent();
});
- describe('Displaying LFS blob', () => {
- const rawPath = '/org/project/-/raw/file.xml';
- const externalStorageUrl = 'http://127.0.0.1:9000/lfs-objects/91/12/1341234';
- const rawTextBlob = 'This is the external content';
- const blob = {
- storedExternally: true,
- externalStorage: 'lfs',
- simpleViewer: { fileType: 'text' },
- rawPath,
- };
-
- afterEach(() => {
- mockAxios.reset();
- });
-
- it('Uses externalStorageUrl to fetch content if present', async () => {
- mockAxios.onGet(externalStorageUrl).replyOnce(HTTP_STATUS_OK, rawTextBlob);
-
- await createComponent({ ...blob, externalStorageUrl });
-
- expect(mockAxios.history.get).toHaveLength(1);
- expect(mockAxios.history.get[0].url).toBe(externalStorageUrl);
- expect(wrapper.vm.$data.content).toBe(rawTextBlob);
- });
-
- it('Falls back to rawPath to fetch content', async () => {
- mockAxios.onGet(rawPath).replyOnce(HTTP_STATUS_OK, rawTextBlob);
-
- await createComponent(blob);
-
- expect(mockAxios.history.get).toHaveLength(1);
- expect(mockAxios.history.get[0].url).toBe(rawPath);
- expect(wrapper.vm.$data.content).toBe(rawTextBlob);
- });
+ it('instantiates the lineHighlighter class', () => {
+ expect(LineHighlighter).toHaveBeenCalled();
});
describe('event tracking', () => {
it('fires a tracking event when the component is created', () => {
- const eventData = { label: EVENT_LABEL_VIEWER, property: language };
+ const eventData = { label: EVENT_LABEL_VIEWER, property: LANGUAGE_MOCK };
expect(Tracking.event).toHaveBeenCalledWith(undefined, EVENT_ACTION, eventData);
});
- it('does not emit an error event when the language is supported', () => {
- expect(wrapper.emitted('error')).toBeUndefined();
+ it('adds blob links tracking', () => {
+ expect(addBlobLinksTracking).toHaveBeenCalled();
});
-
- it('fires a tracking event and emits an error when the language is not supported', () => {
- const unsupportedLanguage = 'apex';
- const eventData = { label: EVENT_LABEL_FALLBACK, property: unsupportedLanguage };
- createComponent({ language: unsupportedLanguage });
-
- expect(Tracking.event).toHaveBeenCalledWith(undefined, EVENT_ACTION, eventData);
- expect(wrapper.emitted('error')).toHaveLength(1);
- });
- });
-
- describe('legacy fallbacks', () => {
- it.each(LEGACY_FALLBACKS)(
- 'tracks a fallback event and emits an error when viewing %s files',
- (fallbackLanguage) => {
- const eventData = { label: EVENT_LABEL_FALLBACK, property: fallbackLanguage };
- createComponent({ language: fallbackLanguage });
-
- expect(Tracking.event).toHaveBeenCalledWith(undefined, EVENT_ACTION, eventData);
- expect(wrapper.emitted('error')).toHaveLength(1);
- },
- );
});
- describe('highlight.js', () => {
- beforeEach(() => createComponent({ language: mappedLanguage }));
-
- it('registers our plugins for Highlight.js', () => {
- expect(registerPlugins).toHaveBeenCalledWith(hljs, fileType, content);
- });
-
- it('registers the language definition', async () => {
- const languageDefinition = await import(`highlight.js/lib/languages/${mappedLanguage}`);
-
- expect(hljs.registerLanguage).toHaveBeenCalledWith(
- mappedLanguage,
- languageDefinition.default,
- );
+ describe('rendering', () => {
+ it('does not render a Blame component if the respective chunk for the blame has not appeared', async () => {
+ await waitForPromises();
+ expect(findBlameComponents()).toHaveLength(0);
});
- describe('sub-languages', () => {
- const languageDefinition = {
- subLanguage: 'xml',
- contains: [{ subLanguage: 'javascript' }, { subLanguage: 'typescript' }],
- };
-
- beforeEach(async () => {
- jest.spyOn(hljs, 'getLanguage').mockReturnValue(languageDefinition);
+ describe('DOM updates', () => {
+ it('adds the necessary classes to the DOM', async () => {
+ setHTMLFixture(SOURCE_CODE_CONTENT_MOCK);
+ jest.spyOn(utils, 'toggleBlameClasses');
createComponent();
- await waitForPromises();
- });
+ await triggerChunkAppear();
- it('registers the primary sub-language', () => {
- expect(hljs.registerLanguage).toHaveBeenCalledWith(
- languageDefinition.subLanguage,
- expect.any(Function),
- );
+ expect(utils.toggleBlameClasses).toHaveBeenCalledWith(blameInfo, true);
});
-
- it.each(languageDefinition.contains)(
- 'registers the rest of the sub-languages',
- ({ subLanguage }) => {
- expect(hljs.registerLanguage).toHaveBeenCalledWith(subLanguage, expect.any(Function));
- },
- );
});
- it('registers json language definition if fileType is package_json', async () => {
- await createComponent({ language: 'json', fileType: 'package_json' });
- const languageDefinition = await import(`highlight.js/lib/languages/json`);
+ describe('Blame information', () => {
+ it('renders a Blame component when a chunk appears', async () => {
+ await triggerChunkAppear();
- expect(hljs.registerLanguage).toHaveBeenCalledWith('json', languageDefinition.default);
- });
-
- it('correctly maps languages starting with uppercase', async () => {
- await createComponent({ language: 'Ruby' });
- const languageDefinition = await import(`highlight.js/lib/languages/ruby`);
-
- expect(hljs.registerLanguage).toHaveBeenCalledWith('ruby', languageDefinition.default);
- });
+ expect(findBlameComponents().at(0).exists()).toBe(true);
+ expect(findBlameComponents().at(0).props()).toMatchObject({ blameInfo });
+ });
- it('registers codeowners language definition if file name is CODEOWNERS', async () => {
- await createComponent({ name: CODEOWNERS_FILE_NAME });
- const languageDefinition = await import(
- '~/vue_shared/components/source_viewer/languages/codeowners'
- );
+ it('calls the blame data query', async () => {
+ await triggerChunkAppear();
- expect(hljs.registerLanguage).toHaveBeenCalledWith(
- CODEOWNERS_LANGUAGE,
- languageDefinition.default,
- );
- });
+ expect(blameDataQueryHandlerSuccess).toHaveBeenCalledWith(
+ expect.objectContaining({
+ filePath: BLOB_DATA_MOCK.path,
+ fullPath: projectPath,
+ ref: currentRef,
+ }),
+ );
+ });
- it('registers svelte language definition if file name ends with .svelte', async () => {
- await createComponent({ name: `component.${SVELTE_LANGUAGE}` });
- const languageDefinition = await import(
- '~/vue_shared/components/source_viewer/languages/svelte'
- );
+ it('calls the query only once per chunk', async () => {
+ // We trigger the `appear` event multiple times here in order to simulate the user scrolling past the chunk more than once.
+ // In this scenario we only want to query the backend once.
+ await triggerChunkAppear();
+ await triggerChunkAppear();
- expect(hljs.registerLanguage).toHaveBeenCalledWith(
- SVELTE_LANGUAGE,
- languageDefinition.default,
- );
- });
+ expect(blameDataQueryHandlerSuccess).toHaveBeenCalledTimes(1);
+ });
- it('highlights the first chunk', () => {
- expect(hljs.highlight).toHaveBeenCalledWith(chunk1.trim(), { language: mappedLanguage });
- expect(findChunks().at(0).props('isFirstChunk')).toBe(true);
- });
+ it('requests blame information for overlapping chunk', async () => {
+ await triggerChunkAppear(1);
- describe('auto-detects if a language cannot be loaded', () => {
- beforeEach(() => createComponent({ language: 'some_unknown_language' }));
+ expect(blameDataQueryHandlerSuccess).toHaveBeenCalledTimes(2);
+ expect(blameDataQueryHandlerSuccess).toHaveBeenCalledWith(
+ expect.objectContaining({ fromLine: 71, toLine: 110 }),
+ );
+ expect(blameDataQueryHandlerSuccess).toHaveBeenCalledWith(
+ expect.objectContaining({ fromLine: 1, toLine: 70 }),
+ );
- it('highlights the content with auto-detection', () => {
- expect(hljs.highlightAuto).toHaveBeenCalledWith(chunk1.trim());
+ expect(findChunks().at(0).props('isHighlighted')).toBe(true);
});
- });
- });
- describe('rendering', () => {
- it.each`
- chunkIndex | chunkContent | totalChunks
- ${0} | ${chunk1} | ${0}
- ${1} | ${chunk2} | ${3}
- ${2} | ${chunk3Result} | ${3}
- `('renders chunk $chunkIndex', ({ chunkIndex, chunkContent, totalChunks }) => {
- const chunk = findChunks().at(chunkIndex);
+ it('does not render a Blame component when `showBlame: false`', async () => {
+ createComponent({ showBlame: false });
+ await triggerChunkAppear();
- expect(chunk.props('content')).toContain(chunkContent.trim());
-
- expect(chunk.props()).toMatchObject({
- totalLines: LINES_PER_CHUNK,
- startingFrom: LINES_PER_CHUNK * chunkIndex,
- totalChunks,
+ expect(findBlameComponents()).toHaveLength(0);
});
});
- it('emits showBlobInteractionZones on the eventHub when chunk appears', () => {
- findChunks().at(0).vm.$emit('appear');
- expect(eventHub.$emit).toHaveBeenCalledWith('showBlobInteractionZones', path);
+ it('renders a Chunk component for each chunk', () => {
+ expect(findChunks().at(0).props()).toMatchObject(CHUNK_1);
+ expect(findChunks().at(1).props()).toMatchObject(CHUNK_2);
});
});
- describe('LineHighlighter', () => {
- it('instantiates the lineHighlighter class', () => {
- expect(LineHighlighter).toHaveBeenCalledWith({ scrollBehavior: 'auto' });
- });
-
- it('highlights the range when chunk appears', () => {
- findChunks().at(0).vm.$emit('appear');
- const scrollEnabled = false;
- expect(lineHighlighter.highlightHash).toHaveBeenCalledWith(selectedRangeHash, scrollEnabled);
+ describe('hash highlighting', () => {
+ it('calls highlightHash with expected parameter', () => {
+ expect(lineHighlighter.highlightHash).toHaveBeenCalledWith(hash);
});
});
@@ -282,11 +178,13 @@ describe('Source Viewer component', () => {
it('does not render codeowners validation when file is not CODEOWNERS', async () => {
await createComponent();
+ await nextTick();
expect(findCodeownersValidation().exists()).toBe(false);
});
it('renders codeowners validation when file is CODEOWNERS', async () => {
- await createComponent({ name: CODEOWNERS_FILE_NAME });
+ await createComponent({ blob: { name: CODEOWNERS_FILE_NAME } });
+ await nextTick();
expect(findCodeownersValidation().exists()).toBe(true);
});
});
diff --git a/spec/lib/gitlab/background_migration/backfill_project_import_level_spec.rb b/spec/lib/gitlab/background_migration/backfill_project_import_level_spec.rb
deleted file mode 100644
index 73661a3da1f..00000000000
--- a/spec/lib/gitlab/background_migration/backfill_project_import_level_spec.rb
+++ /dev/null
@@ -1,122 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-# rubocop:disable Layout/HashAlignment
-RSpec.describe Gitlab::BackgroundMigration::BackfillProjectImportLevel do
- let(:migration) do
- described_class.new(
- start_id: table(:namespaces).minimum(:id),
- end_id: table(:namespaces).maximum(:id),
- batch_table: :namespaces,
- batch_column: :id,
- sub_batch_size: 2,
- pause_ms: 0,
- connection: ApplicationRecord.connection
- )
- end
- # rubocop:enable Layout/HashAlignment
-
- let(:namespaces_table) { table(:namespaces) }
- let(:namespace_settings_table) { table(:namespace_settings) }
-
- let!(:user_namespace) do
- namespaces_table.create!(
- name: 'user_namespace',
- path: 'user_namespace',
- type: 'User',
- project_creation_level: 100
- )
- end
-
- let!(:group_namespace_nil) do
- namespaces_table.create!(
- name: 'group_namespace_nil',
- path: 'group_namespace_nil',
- type: 'Group',
- project_creation_level: nil
- )
- end
-
- let!(:group_namespace_0) do
- namespaces_table.create!(
- name: 'group_namespace_0',
- path: 'group_namespace_0',
- type: 'Group',
- project_creation_level: 0
- )
- end
-
- let!(:group_namespace_1) do
- namespaces_table.create!(
- name: 'group_namespace_1',
- path: 'group_namespace_1',
- type: 'Group',
- project_creation_level: 1
- )
- end
-
- let!(:group_namespace_2) do
- namespaces_table.create!(
- name: 'group_namespace_2',
- path: 'group_namespace_2',
- type: 'Group',
- project_creation_level: 2
- )
- end
-
- let!(:group_namespace_9999) do
- namespaces_table.create!(
- name: 'group_namespace_9999',
- path: 'group_namespace_9999',
- type: 'Group',
- project_creation_level: 9999
- )
- end
-
- subject(:perform_migration) { migration.perform }
-
- before do
- namespace_settings_table.create!(namespace_id: user_namespace.id)
- namespace_settings_table.create!(namespace_id: group_namespace_nil.id)
- namespace_settings_table.create!(namespace_id: group_namespace_0.id)
- namespace_settings_table.create!(namespace_id: group_namespace_1.id)
- namespace_settings_table.create!(namespace_id: group_namespace_2.id)
- namespace_settings_table.create!(namespace_id: group_namespace_9999.id)
- end
-
- describe 'Groups' do
- using RSpec::Parameterized::TableSyntax
-
- where(:namespace_id, :prev_level, :new_level) do
- lazy { group_namespace_0.id } | ::Gitlab::Access::OWNER | ::Gitlab::Access::NO_ACCESS
- lazy { group_namespace_1.id } | ::Gitlab::Access::OWNER | ::Gitlab::Access::MAINTAINER
- lazy { group_namespace_2.id } | ::Gitlab::Access::OWNER | ::Gitlab::Access::DEVELOPER
- end
-
- with_them do
- it 'backfills the correct project_import_level of Group namespaces' do
- expect { perform_migration }
- .to change { namespace_settings_table.find_by(namespace_id: namespace_id).project_import_level }
- .from(prev_level).to(new_level)
- end
- end
-
- it 'does not update `User` namespaces or values outside range' do
- expect { perform_migration }
- .not_to change { namespace_settings_table.find_by(namespace_id: user_namespace.id).project_import_level }
-
- expect { perform_migration }
- .not_to change { namespace_settings_table.find_by(namespace_id: group_namespace_9999.id).project_import_level }
- end
-
- it 'maintains default import_level if creation_level is nil' do
- project_import_level = namespace_settings_table.find_by(namespace_id: group_namespace_nil.id).project_import_level
-
- expect { perform_migration }
- .not_to change { project_import_level }
-
- expect(project_import_level).to eq(::Gitlab::Access::OWNER)
- end
- end
-end
diff --git a/spec/lib/gitlab/ci/config/external/file/base_spec.rb b/spec/lib/gitlab/ci/config/external/file/base_spec.rb
index bcfab620bd9..6100974bbe8 100644
--- a/spec/lib/gitlab/ci/config/external/file/base_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/file/base_spec.rb
@@ -145,7 +145,7 @@ RSpec.describe Gitlab::Ci::Config::External::File::Base, feature_category: :pipe
it 'surfaces interpolation errors' do
expect(valid?).to be_falsy
expect(file.errors)
- .to include('`some-location.yml`: interpolation interrupted by errors, unknown interpolation key: `abcd`')
+ .to include('`some-location.yml`: unknown interpolation key: `abcd`')
end
end
diff --git a/spec/lib/gitlab/ci/config/interpolation/text_interpolator_spec.rb b/spec/lib/gitlab/ci/config/interpolation/text_interpolator_spec.rb
index 8655d3fb0b7..2e59b6982dd 100644
--- a/spec/lib/gitlab/ci/config/interpolation/text_interpolator_spec.rb
+++ b/spec/lib/gitlab/ci/config/interpolation/text_interpolator_spec.rb
@@ -34,17 +34,6 @@ RSpec.describe Gitlab::Ci::Config::Interpolation::TextInterpolator, feature_cate
end
end
- context 'when the header has an error while being parsed' do
- let(:header) { ::Gitlab::Config::Loader::Yaml.new('_!@malformedyaml:&') }
-
- it 'surfaces the error' do
- interpolator.interpolate!
-
- expect(interpolator).not_to be_valid
- expect(interpolator.error_message).to eq('Invalid configuration format')
- end
- end
-
context 'when spec header is missing but inputs are specified' do
let(:documents) { ::Gitlab::Ci::Config::Yaml::Documents.new([content]) }
diff --git a/spec/lib/gitlab/ci/config/yaml/documents_spec.rb b/spec/lib/gitlab/ci/config/yaml/documents_spec.rb
index babdea6623b..424fa4858a4 100644
--- a/spec/lib/gitlab/ci/config/yaml/documents_spec.rb
+++ b/spec/lib/gitlab/ci/config/yaml/documents_spec.rb
@@ -5,24 +5,6 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Config::Yaml::Documents, feature_category: :pipeline_composition do
let(:documents) { described_class.new(yaml_documents) }
- describe '#valid?' do
- context 'when there are no errors' do
- let(:yaml_documents) { [::Gitlab::Config::Loader::Yaml.new('job:')] }
-
- it 'returns true' do
- expect(documents).to be_valid
- end
- end
-
- context 'when there are errors' do
- let(:yaml_documents) { [::Gitlab::Config::Loader::Yaml.new('_!@malformedyaml:&')] }
-
- it 'returns false' do
- expect(documents).not_to be_valid
- end
- end
- end
-
describe '#header' do
context 'when there are at least 2 documents and the first document has a `spec` keyword' do
let(:yaml_documents) { [::Gitlab::Config::Loader::Yaml.new('spec:'), ::Gitlab::Config::Loader::Yaml.new('job:')] }
diff --git a/spec/lib/gitlab/ci/config/yaml/loader_spec.rb b/spec/lib/gitlab/ci/config/yaml/loader_spec.rb
index 684da1df43b..045cdf37037 100644
--- a/spec/lib/gitlab/ci/config/yaml/loader_spec.rb
+++ b/spec/lib/gitlab/ci/config/yaml/loader_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe ::Gitlab::Ci::Config::Yaml::Loader, feature_category: :pipeline_c
let_it_be(:project) { create(:project) }
let(:inputs) { { test_input: 'hello test' } }
+ let(:variables) { [] }
let(:yaml) do
<<~YAML
@@ -21,7 +22,7 @@ RSpec.describe ::Gitlab::Ci::Config::Yaml::Loader, feature_category: :pipeline_c
YAML
end
- subject(:result) { described_class.new(yaml, inputs: inputs).load }
+ subject(:result) { described_class.new(yaml, inputs: inputs, variables: variables).load }
it 'loads and interpolates CI config YAML' do
expected_config = { test_job: { script: ['echo "hello test"'] } }
@@ -57,6 +58,240 @@ RSpec.describe ::Gitlab::Ci::Config::Yaml::Loader, feature_category: :pipeline_c
expect(result.error).to eq('`test_input` input: required value has not been provided')
end
end
+
+ context 'when interpolating into a YAML key' do
+ let(:yaml) do
+ <<~YAML
+ ---
+ spec:
+ inputs:
+ test_input:
+ ---
+ "$[[ inputs.test_input ]]_job":
+ script:
+ - echo "test"
+ YAML
+ end
+
+ it 'loads and interpolates CI config YAML' do
+ expected_config = { 'hello test_job': { script: ['echo "test"'] } }
+
+ expect(result).to be_valid
+ expect(result).to be_interpolated
+ expect(result.content).to eq(expected_config)
+ end
+ end
+
+ context 'when interpolating values of different types' do
+ let(:inputs) do
+ {
+ test_boolean: true,
+ test_number: 8,
+ test_string: 'test'
+ }
+ end
+
+ let(:yaml) do
+ <<~YAML
+ ---
+ spec:
+ inputs:
+ test_string:
+ type: string
+ test_boolean:
+ type: boolean
+ test_number:
+ type: number
+ ---
+ "$[[ inputs.test_string ]]_job":
+ allow_failure: $[[ inputs.test_boolean ]]
+ parallel: $[[ inputs.test_number ]]
+ YAML
+ end
+
+ it 'loads and interpolates CI config YAML' do
+ expected_config = { test_job: { allow_failure: true, parallel: 8 } }
+
+ expect(result).to be_valid
+ expect(result).to be_interpolated
+ expect(result.content).to eq(expected_config)
+ end
+ end
+
+ context 'when interpolating and expanding variables' do
+ let(:inputs) { { test_input: '$TEST_VAR' } }
+
+ let(:variables) do
+ Gitlab::Ci::Variables::Collection.new([
+ { key: 'TEST_VAR', value: 'test variable', masked: false }
+ ])
+ end
+
+ let(:yaml) do
+ <<~YAML
+ ---
+ spec:
+ inputs:
+ test_input:
+ ---
+ "test_job":
+ script:
+ - echo "$[[ inputs.test_input | expand_vars ]]"
+ YAML
+ end
+
+ it 'loads and interpolates CI config YAML' do
+ expected_config = { test_job: { script: ['echo "test variable"'] } }
+
+ expect(result).to be_valid
+ expect(result).to be_interpolated
+ expect(result.content).to eq(expected_config)
+ end
+ end
+
+ context 'when using !reference' do
+ let(:yaml) do
+ <<~YAML
+ ---
+ spec:
+ inputs:
+ test_input:
+ job_name:
+ default: .example_ref
+ ---
+ .example_ref:
+ script:
+ - echo "$[[ inputs.test_input ]]"
+ rules:
+ - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
+
+ build_job:
+ script: echo "build"
+ rules:
+ - !reference ["$[[ inputs.job_name ]]", "rules"]
+
+ test_job:
+ script:
+ - !reference [.example_ref, script]
+ YAML
+ end
+
+ it 'loads and interpolates CI config YAML' do
+ expect(result).to be_valid
+ expect(result).to be_interpolated
+ expect(result.content).to include('.example_ref': {
+ rules: [{ if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH' }],
+ script: ['echo "hello test"']
+ })
+ expect(result.content.dig(:build_job, :rules).first.data[:seq]).to eq(['.example_ref', 'rules'])
+ expect(result.content).to include(
+ test_job: { script: [an_instance_of(::Gitlab::Ci::Config::Yaml::Tags::Reference)] }
+ )
+ end
+ end
+
+ context 'when there are too many interpolation blocks' do
+ let(:inputs) { { first_input: 'first', second_input: 'second' } }
+
+ let(:yaml) do
+ <<~YAML
+ ---
+ spec:
+ inputs:
+ first_input:
+ second_input:
+ ---
+ test_job:
+ script:
+ - echo "$[[ inputs.first_input ]]"
+ - echo "$[[ inputs.second_input ]]"
+ YAML
+ end
+
+ it 'returns an error result' do
+ stub_const('::Gitlab::Ci::Config::Interpolation::TextTemplate::MAX_BLOCKS', 1)
+
+ expect(result).not_to be_valid
+ expect(result.error).to eq('too many interpolation blocks')
+ end
+ end
+
+ context 'when a block is invalid' do
+ let(:yaml) do
+ <<~YAML
+ ---
+ spec:
+ inputs:
+ test_input:
+ ---
+ test_job:
+ script:
+ - echo "$[[ inputs.test_input | expand_vars | truncate(0,1) ]]"
+ YAML
+ end
+
+ it 'returns an error result' do
+ stub_const('::Gitlab::Ci::Config::Interpolation::Block::MAX_FUNCTIONS', 1)
+
+ expect(result).not_to be_valid
+ expect(result.error).to eq('too many functions in interpolation block')
+ end
+ end
+
+ context 'when the YAML file is too large' do
+ it 'returns an error result' do
+ stub_application_setting(ci_max_total_yaml_size_bytes: 1)
+
+ expect(result).not_to be_valid
+ expect(result.error).to eq('config too large')
+ end
+ end
+
+ context 'when given an empty YAML file' do
+ let(:inputs) { {} }
+ let(:yaml) { '' }
+
+ it 'returns an error result' do
+ expect(result).not_to be_valid
+ expect(result.error).to eq('Invalid configuration format')
+ end
+ end
+
+ context 'when ci_text_interpolation is disabled' do
+ before do
+ stub_feature_flags(ci_text_interpolation: false)
+ end
+
+ it 'loads and interpolates CI config YAML' do
+ expected_config = { test_job: { script: ['echo "hello test"'] } }
+
+ expect(result).to be_valid
+ expect(result).to be_interpolated
+ expect(result.content).to eq(expected_config)
+ end
+
+ context 'when hash interpolation fails' do
+ let(:yaml) do
+ <<~YAML
+ ---
+ spec:
+ inputs:
+ test_input:
+ ---
+ test_job:
+ script:
+ - echo "$[[ inputs.test_input | expand_vars | truncate(0,1) ]]"
+ YAML
+ end
+
+ it 'returns an error result' do
+ stub_const('::Gitlab::Ci::Config::Interpolation::Block::MAX_FUNCTIONS', 1)
+
+ expect(result).not_to be_valid
+ expect(result.error).to eq('interpolation interrupted by errors, too many functions in interpolation block')
+ end
+ end
+ end
end
describe '#load_uninterpolated_yaml' do
diff --git a/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb
index 4017076d29f..967cd1693a9 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb
@@ -56,6 +56,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Validate::External, feature_category
let(:validation_service_url) { 'https://validation-service.external/' }
before do
+ stub_feature_flags(external_pipeline_validation_migration: false)
stub_env('EXTERNAL_VALIDATION_SERVICE_URL', validation_service_url)
allow(Labkit::Correlation::CorrelationId).to receive(:current_id).and_return('correlation-id')
end
@@ -84,6 +85,42 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Validate::External, feature_category
end
end
+ context 'when :external_pipeline_validation_migration feature flag is enabled' do
+ let(:migrated_validation_service_url) { 'https://runway.validation-service.external/' }
+
+ before do
+ stub_feature_flags(external_pipeline_validation_migration: project)
+ end
+
+ context 'when EXTERNAL_VALIDATION_SERVICE_RUNWAY_URL is NOT present' do
+ before do
+ stub_env('EXTERNAL_VALIDATION_SERVICE_RUNWAY_URL', nil)
+ end
+
+ it 'fallbacks to existing validation service URL' do
+ expect(::Gitlab::HTTP).to receive(:post) do |url, _params|
+ expect(url).to eq(validation_service_url)
+ end
+
+ perform!
+ end
+ end
+
+ context 'when EXTERNAL_VALIDATION_SERVICE_RUNWAY_URL is present' do
+ before do
+ stub_env('EXTERNAL_VALIDATION_SERVICE_RUNWAY_URL', migrated_validation_service_url)
+ end
+
+ it 'uses migrated validation service URL' do
+ expect(::Gitlab::HTTP).to receive(:post) do |url, _params|
+ expect(url).to eq(migrated_validation_service_url)
+ end
+
+ perform!
+ end
+ end
+ end
+
it 'respects the defined payload schema' do
expect(::Gitlab::HTTP).to receive(:post) do |_url, params|
expect(params[:body]).to match_schema('/external_validation')
diff --git a/spec/lib/gitlab/database/query_analyzer_spec.rb b/spec/lib/gitlab/database/query_analyzer_spec.rb
index 0b849063562..20599bb89b6 100644
--- a/spec/lib/gitlab/database/query_analyzer_spec.rb
+++ b/spec/lib/gitlab/database/query_analyzer_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::QueryAnalyzer, query_analyzers: false do
+ using RSpec::Parameterized::TableSyntax
+
let(:analyzer) { double(:query_analyzer) }
let(:user_analyzer) { double(:user_query_analyzer) }
let(:disabled_analyzer) { double(:disabled_query_analyzer) }
@@ -181,12 +183,34 @@ RSpec.describe Gitlab::Database::QueryAnalyzer, query_analyzers: false do
expect { process_sql("SELECT 1 FROM projects") }.not_to raise_error
end
- def process_sql(sql)
+ def process_sql(sql, event_name = 'load')
described_class.instance.within do
ApplicationRecord.load_balancer.read_write do |connection|
- described_class.instance.send(:process_sql, sql, connection)
+ described_class.instance.send(:process_sql, sql, connection, event_name)
end
end
end
end
+
+ describe '#normalize_event_name' do
+ where(:event, :parsed_event) do
+ 'Project Load' | 'load'
+ 'Namespaces::UserNamespace Create' | 'create'
+ 'Project Update' | 'update'
+ 'Project Destroy' | 'destroy'
+ 'Project Pluck' | 'pluck'
+ 'Project Insert' | 'insert'
+ 'Project Delete All' | 'delete_all'
+ 'Project Exists?' | 'exists?'
+ nil | ''
+ 'TRANSACTION' | 'transaction'
+ 'SCHEMA' | 'schema'
+ end
+
+ with_them do
+ it 'parses event name correctly' do
+ expect(described_class.instance.send(:normalize_event_name, event)).to eq(parsed_event)
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/query_analyzers/ci/partitioning_id_analyzer_spec.rb b/spec/lib/gitlab/database/query_analyzers/ci/partitioning_id_analyzer_spec.rb
index 0fe19041b6d..0c1c694a3e3 100644
--- a/spec/lib/gitlab/database/query_analyzers/ci/partitioning_id_analyzer_spec.rb
+++ b/spec/lib/gitlab/database/query_analyzers/ci/partitioning_id_analyzer_spec.rb
@@ -115,7 +115,7 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::Ci::PartitioningIdAnalyzer, que
def process_sql(model, sql)
Gitlab::Database::QueryAnalyzer.instance.within do
# Skip load balancer and retrieve connection assigned to model
- Gitlab::Database::QueryAnalyzer.instance.send(:process_sql, sql, model.retrieve_connection)
+ Gitlab::Database::QueryAnalyzer.instance.send(:process_sql, sql, model.retrieve_connection, 'load')
end
end
end
diff --git a/spec/lib/gitlab/database/query_analyzers/ci/partitioning_routing_analyzer_spec.rb b/spec/lib/gitlab/database/query_analyzers/ci/partitioning_routing_analyzer_spec.rb
index 1f86c2ccbb0..8b053fa0291 100644
--- a/spec/lib/gitlab/database/query_analyzers/ci/partitioning_routing_analyzer_spec.rb
+++ b/spec/lib/gitlab/database/query_analyzers/ci/partitioning_routing_analyzer_spec.rb
@@ -64,7 +64,7 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::Ci::PartitioningRoutingAnalyzer
def process_sql(model, sql)
Gitlab::Database::QueryAnalyzer.instance.within do
# Skip load balancer and retrieve connection assigned to model
- Gitlab::Database::QueryAnalyzer.instance.send(:process_sql, sql, model.retrieve_connection)
+ Gitlab::Database::QueryAnalyzer.instance.send(:process_sql, sql, model.retrieve_connection, 'load')
end
end
end
diff --git a/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb b/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb
index 1909e134e66..fb00fbe27ba 100644
--- a/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb
+++ b/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb
@@ -99,7 +99,7 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::GitlabSchemasMetrics, query_ana
def process_sql(model, sql)
Gitlab::Database::QueryAnalyzer.instance.within do
# Skip load balancer and retrieve connection assigned to model
- Gitlab::Database::QueryAnalyzer.instance.send(:process_sql, sql, model.retrieve_connection)
+ Gitlab::Database::QueryAnalyzer.instance.send(:process_sql, sql, model.retrieve_connection, 'load')
end
end
end
diff --git a/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_validate_connection_spec.rb b/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_validate_connection_spec.rb
index 0664508fa8d..6a36db1870a 100644
--- a/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_validate_connection_spec.rb
+++ b/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_validate_connection_spec.rb
@@ -97,7 +97,7 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::GitlabSchemasValidateConnection
def process_sql(model, sql)
Gitlab::Database::QueryAnalyzer.instance.within([analyzer]) do
# Skip load balancer and retrieve connection assigned to model
- Gitlab::Database::QueryAnalyzer.instance.send(:process_sql, sql, model.retrieve_connection)
+ Gitlab::Database::QueryAnalyzer.instance.send(:process_sql, sql, model.retrieve_connection, 'load')
end
end
end
diff --git a/spec/lib/gitlab/database/query_analyzers/log_large_in_lists_spec.rb b/spec/lib/gitlab/database/query_analyzers/log_large_in_lists_spec.rb
new file mode 100644
index 00000000000..5646c3ff3b6
--- /dev/null
+++ b/spec/lib/gitlab/database/query_analyzers/log_large_in_lists_spec.rb
@@ -0,0 +1,148 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::QueryAnalyzers::LogLargeInLists, query_analyzers: false, feature_category: :database do
+ let(:analyzer) { described_class }
+ let(:fixture) { fixture_file("gitlab/database/query_analyzers/#{file}") }
+ let(:sql) { fixture.gsub('%IN_LIST%', arguments) }
+
+ # Reduce the in list size to 5 to help with testing
+ # Reduce the min query size to 50 to help with testing
+ before do
+ stub_const("#{described_class}::IN_SIZE_LIMIT", 5)
+ stub_const("#{described_class}::MIN_QUERY_SIZE", 50)
+ allow(analyzer).to receive(:backtrace).and_return([])
+ allow(analyzer).to receive(:suppressed?).and_return(true) # bypass suppressed? method to avoid false positives
+ end
+
+ after do
+ # Clears analyzers list after each test to reload the state of `enabled?` method
+ Thread.current[:query_analyzer_enabled_analyzers] = []
+ end
+
+ context 'when feature flag is enabled' do
+ before do
+ stub_feature_flags(log_large_in_list_queries: true)
+ Gitlab::Database::QueryAnalyzer.instance.begin!([analyzer])
+ end
+
+ context 'when conditions are satisfied for logging' do
+ where(:file, :arguments, :result, :event_name) do
+ [
+ [
+ 'small_query_with_in_list.txt',
+ '1, 2, 3, 4, 5, 6',
+ { message: 'large_in_list_found', matches: 1, in_list_size: "6", stacktrace: [], event_name: 'load' },
+ 'load'
+ ],
+ [
+ 'small_query_with_in_list.txt',
+ '1,2,3,4,5,6',
+ { message: 'large_in_list_found', matches: 1, in_list_size: "6", stacktrace: [], event_name: 'pluck' },
+ 'pluck'
+ ],
+ [
+ 'small_query_with_in_list.txt',
+ 'SELECT id FROM projects where id IN (1, 2, 3, 4, 5, 6, 7, 8, 9, 10)',
+ { message: 'large_in_list_found', matches: 1, in_list_size: "10", stacktrace: [], event_name: 'load' },
+ 'load'
+ ],
+ [
+ 'large_query_with_in_list.txt',
+ '1,2,3,4,5,6',
+ { message: 'large_in_list_found', matches: 1, in_list_size: "6", stacktrace: [], event_name: 'load' },
+ 'load'
+ ],
+ [
+ 'large_query_with_in_list.txt',
+ '1, 2, 3, 4, 5, 6',
+ { message: 'large_in_list_found', matches: 1, in_list_size: "6", stacktrace: [], event_name: 'pluck' },
+ 'pluck'
+ ],
+ [
+ 'large_query_with_in_list.txt',
+ 'SELECT id FROM projects where id IN (1, 2, 3, 4, 5, 6, 7, 8, 9, 10)',
+ { message: 'large_in_list_found', matches: 1, in_list_size: "10", stacktrace: [], event_name: 'load' },
+ 'load'
+ ]
+ ]
+ end
+
+ with_them do
+ it 'logs all the occurrences' do
+ expect(Gitlab::AppLogger).to receive(:warn).with(result)
+
+ process_sql(sql, event_name)
+ end
+ end
+ end
+
+ context 'when conditions are not satisfied for logging' do
+ where(:file, :arguments, :event_name) do
+ [
+ ['small_query_with_in_list.txt', '1, 2, 3, 4, 5', 'load'],
+ ['small_query_with_in_list.txt', '$1, $2, $3, $4, $5', 'load'],
+ ['small_query_with_in_list.txt', 'SELECT id FROM projects WHERE id IN (1, 2, 3, 4, 5)', 'load'],
+ ['small_query_with_in_list.txt', 'SELECT id FROM projects WHERE id IN (SELECT id FROM namespaces)', 'load'],
+ ['small_query_with_in_list.txt', '1, 2, 3, 4, 5', 'schema'],
+ ['large_query_with_in_list.txt', '1, 2, 3, 4, 5', 'load'],
+ ['large_query_with_in_list.txt', 'SELECT id FROM projects WHERE id IN (1, 2, 3, 4, 5)', 'load'],
+ ['large_query_with_in_list.txt', 'SELECT id FROM projects WHERE id IN (SELECT id FROM namespaces)', 'load'],
+ ['large_query_with_in_list.txt', '1, 2, 3, 4, 5', 'schema'],
+ ['small_query_without_in_list.txt', '', 'load'],
+ ['small_query_without_in_list.txt', '', 'schema']
+ ]
+ end
+
+ with_them do
+ it 'skips logging the occurrences' do
+ expect(Gitlab::AppLogger).not_to receive(:warn)
+
+ process_sql(sql, event_name)
+ end
+ end
+ end
+ end
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(log_large_in_list_queries: false)
+ Gitlab::Database::QueryAnalyzer.instance.begin!([analyzer])
+ end
+
+ where(:file, :arguments, :event_name) do
+ [
+ ['small_query_with_in_list.txt', '1, 2, 3, 4, 5, 6', 'load'],
+ ['small_query_with_in_list.txt', '$1, $2, $3, $4, $5, $6', 'load'],
+ ['small_query_with_in_list.txt', 'SELECT id FROM projects WHERE id IN (1, 2, 3, 4, 5, 6)', 'load'],
+ ['small_query_with_in_list.txt', 'SELECT id FROM projects WHERE id IN (1, 2, 3, 4, 5, 6)', 'load'],
+ ['small_query_with_in_list.txt', 'SELECT id FROM projects WHERE id IN (SELECT id FROM namespaces)', 'load'],
+ ['small_query_with_in_list.txt', '1, 2, 3, 4, 5, 6', 'schema'],
+ ['large_query_with_in_list.txt', '1, 2, 3, 4, 5, 6', 'load'],
+ ['large_query_with_in_list.txt', 'SELECT id FROM projects WHERE id IN (1, 2, 3, 4, 5, 6, 7, 8)', 'load'],
+ ['large_query_with_in_list.txt', 'SELECT id FROM projects WHERE id IN ($1, $2, $3, $4, $5, $6, $7)', 'load'],
+ ['large_query_with_in_list.txt', 'SELECT id FROM projects WHERE id IN (SELECT id FROM namespaces)', 'load'],
+ ['large_query_with_in_list.txt', '1, 2, 3, 4, 5, 6', 'schema'],
+ ['small_query_without_in_list.txt', '', 'load'],
+ ['small_query_without_in_list.txt', '', 'schema']
+ ]
+ end
+
+ with_them do
+ it 'skips logging the occurrences' do
+ expect(Gitlab::AppLogger).not_to receive(:warn)
+
+ process_sql(sql, event_name)
+ end
+ end
+ end
+
+ private
+
+ def process_sql(sql, event_name)
+ Gitlab::Database::QueryAnalyzer.instance.within do
+ Gitlab::Database::QueryAnalyzer.instance.send(:process_sql, sql, ActiveRecord::Base.connection, event_name)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/query_analyzers/prevent_set_operator_mismatch_spec.rb b/spec/lib/gitlab/database/query_analyzers/prevent_set_operator_mismatch_spec.rb
index 7fcdc59b691..00b16faab01 100644
--- a/spec/lib/gitlab/database/query_analyzers/prevent_set_operator_mismatch_spec.rb
+++ b/spec/lib/gitlab/database/query_analyzers/prevent_set_operator_mismatch_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::PreventSetOperatorMismatch, que
def process_sql(sql, model = ApplicationRecord)
Gitlab::Database::QueryAnalyzer.instance.within([analyzer]) do
# Skip load balancer and retrieve connection assigned to model
- Gitlab::Database::QueryAnalyzer.instance.send(:process_sql, sql, model.retrieve_connection)
+ Gitlab::Database::QueryAnalyzer.instance.send(:process_sql, sql, model.retrieve_connection, 'load')
end
end
diff --git a/spec/lib/gitlab/database/query_analyzers/restrict_allowed_schemas_spec.rb b/spec/lib/gitlab/database/query_analyzers/restrict_allowed_schemas_spec.rb
index b90f60e0301..8054743c9a9 100644
--- a/spec/lib/gitlab/database/query_analyzers/restrict_allowed_schemas_spec.rb
+++ b/spec/lib/gitlab/database/query_analyzers/restrict_allowed_schemas_spec.rb
@@ -180,7 +180,7 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas,
yield if block_given?
# Skip load balancer and retrieve connection assigned to model
- Gitlab::Database::QueryAnalyzer.instance.send(:process_sql, sql, model.retrieve_connection)
+ Gitlab::Database::QueryAnalyzer.instance.send(:process_sql, sql, model.retrieve_connection, 'load')
end
end
end
diff --git a/spec/lib/gitlab/metrics/exporter/base_exporter_spec.rb b/spec/lib/gitlab/metrics/exporter/base_exporter_spec.rb
index 4184c674823..844c3b54587 100644
--- a/spec/lib/gitlab/metrics/exporter/base_exporter_spec.rb
+++ b/spec/lib/gitlab/metrics/exporter/base_exporter_spec.rb
@@ -167,9 +167,9 @@ RSpec.describe Gitlab::Metrics::Exporter::BaseExporter, feature_category: :cloud
describe '#start' do
it "doesn't start running server" do
- expect_any_instance_of(::WEBrick::HTTPServer).not_to receive(:start)
+ expect(::WEBrick::HTTPServer).not_to receive(:new)
- expect { exporter.start }.not_to change { exporter.thread? }
+ exporter.start
end
end
diff --git a/spec/models/packages/protection/rule_spec.rb b/spec/models/packages/protection/rule_spec.rb
index 03d0440f0d9..995f0035879 100644
--- a/spec/models/packages/protection/rule_spec.rb
+++ b/spec/models/packages/protection/rule_spec.rb
@@ -187,7 +187,7 @@ RSpec.describe Packages::Protection::Rule, type: :model, feature_category: :pack
end
end
- describe '.push_protected_from?' do
+ describe '.for_push_exists?' do
let_it_be(:project_with_ppr) { create(:project) }
let_it_be(:project_without_ppr) { create(:project) }
@@ -230,7 +230,7 @@ RSpec.describe Packages::Protection::Rule, type: :model, feature_category: :pack
subject do
project
.package_protection_rules
- .push_protected_from?(
+ .for_push_exists?(
access_level: access_level,
package_name: package_name,
package_type: package_type
@@ -270,8 +270,11 @@ RSpec.describe Packages::Protection::Rule, type: :model, feature_category: :pack
ref(:project_with_ppr) | Gitlab::Access::NO_ACCESS | '@my-scope/my-package-prod' | :npm | true
# Edge cases
- ref(:project_with_ppr) | 0 | '' | nil | true
- ref(:project_with_ppr) | nil | nil | nil | true
+ ref(:project_with_ppr) | nil | '@my-scope/my-package-stage-sha-1234' | :npm | false
+ ref(:project_with_ppr) | :developer | nil | :npm | false
+ ref(:project_with_ppr) | :developer | '' | :npm | false
+ ref(:project_with_ppr) | :developer | '@my-scope/my-package-stage-sha-1234' | nil | false
+ ref(:project_with_ppr) | nil | nil | nil | false
# For projects that have no package protection rules
ref(:project_without_ppr) | :developer | '@my-scope/my-package-prod' | :npm | false
diff --git a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb
index 7dea50ba270..8e4e90ae962 100644
--- a/spec/services/ci/create_pipeline_service_spec.rb
+++ b/spec/services/ci/create_pipeline_service_spec.rb
@@ -1838,7 +1838,7 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
expect(pipeline).to be_persisted
expect(pipeline.yaml_errors)
- .to include 'interpolation interrupted by errors, unknown interpolation key: `suite`'
+ .to include 'unknown interpolation key: `suite`'
end
end
@@ -2001,7 +2001,7 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
expect(pipeline).to be_persisted
expect(pipeline.yaml_errors)
- .to include 'interpolation interrupted by errors, unknown interpolation key: `suite`'
+ .to include 'unknown interpolation key: `suite`'
end
end
diff --git a/spec/support/rspec_order_todo.yml b/spec/support/rspec_order_todo.yml
index 872523e8d16..4be24d43363 100644
--- a/spec/support/rspec_order_todo.yml
+++ b/spec/support/rspec_order_todo.yml
@@ -5314,7 +5314,6 @@
- './spec/lib/gitlab/background_migration/backfill_namespace_id_of_vulnerability_reads_spec.rb'
- './spec/lib/gitlab/background_migration/backfill_note_discussion_id_spec.rb'
- './spec/lib/gitlab/background_migration/backfill_project_feature_package_registry_access_level_spec.rb'
-- './spec/lib/gitlab/background_migration/backfill_project_import_level_spec.rb'
- './spec/lib/gitlab/background_migration/backfill_project_repositories_spec.rb'
- './spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb'
- './spec/lib/gitlab/background_migration/backfill_topics_title_spec.rb'