Welcome to mirror list, hosted at ThFree Co, Russian Federation.

gitlab.com/gitlab-org/gitlab-foss.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorGitLab Bot <gitlab-bot@gitlab.com>2023-12-02 03:17:40 +0300
committerGitLab Bot <gitlab-bot@gitlab.com>2023-12-02 03:17:40 +0300
commit6922dc498da47a6612bec722e1fb658b45b427b0 (patch)
treed67d30206c6ea48533427ecfa10801f84c7e89c2
parent3b2d5044ab17d58a55a6609f73a6834cb8ab9d9a (diff)
Add latest changes from gitlab-org/gitlab@master
-rw-r--r--.rubocop_todo/layout/line_length.yml1
-rw-r--r--.rubocop_todo/rspec/context_wording.yml1
-rw-r--r--.rubocop_todo/rspec/named_subject.yml1
-rw-r--r--app/assets/javascripts/ci/job_details/components/log/collapsible_section.vue71
-rw-r--r--app/assets/javascripts/ci/job_details/components/log/line_header.vue7
-rw-r--r--app/assets/javascripts/ci/job_details/components/log/log.vue57
-rw-r--r--app/assets/javascripts/ci/job_details/store/mutations.js23
-rw-r--r--app/assets/javascripts/ci/job_details/store/state.js1
-rw-r--r--app/assets/javascripts/ci/job_details/store/utils.js258
-rw-r--r--app/assets/javascripts/editor/schema/ci.json2
-rw-r--r--app/assets/javascripts/kubernetes_dashboard/graphql/helpers/resolver_helpers.js25
-rw-r--r--app/assets/javascripts/kubernetes_dashboard/graphql/resolvers/kubernetes.js3
-rw-r--r--app/assets/javascripts/vue_shared/components/filtered_search_bar/constants.js2
-rw-r--r--app/assets/stylesheets/_page_specific_files.scss1
-rw-r--r--app/assets/stylesheets/pages/notes.scss (renamed from app/assets/stylesheets/page_bundles/notes.scss)6
-rw-r--r--app/models/ci/build.rb2
-rw-r--r--app/models/users/callout.rb3
-rw-r--r--app/presenters/commit_status_presenter.rb2
-rw-r--r--app/views/groups/work_items/index.html.haml1
-rw-r--r--app/views/layouts/_head.html.haml4
-rw-r--r--app/views/projects/_stat_anchor_list.html.haml2
-rw-r--r--app/views/projects/commit/show.html.haml1
-rw-r--r--app/views/projects/commits/show.html.haml1
-rw-r--r--app/views/projects/issues/show.html.haml1
-rw-r--r--app/views/projects/merge_requests/creations/new.html.haml1
-rw-r--r--app/views/projects/merge_requests/diffs.html.haml1
-rw-r--r--app/views/projects/merge_requests/index.html.haml1
-rw-r--r--app/views/projects/merge_requests/show.html.haml1
-rw-r--r--app/views/projects/work_items/show.html.haml1
-rw-r--r--app/views/snippets/edit.html.haml1
-rw-r--r--app/views/snippets/new.html.haml1
-rw-r--r--app/views/snippets/show.html.haml1
-rw-r--r--config/application.rb1
-rw-r--r--config/metrics/counts_all/20210216175512_ci_internal_pipelines.yml3
-rw-r--r--config/metrics/counts_all/20210216180228_projects_jira_server_active.yml3
-rw-r--r--doc/api/container_registry.md9
-rw-r--r--doc/api/graphql/reference/index.md1
-rw-r--r--doc/architecture/blueprints/new_diffs/index.md169
-rw-r--r--doc/ci/yaml/index.md2
-rw-r--r--doc/subscriptions/gitlab_dedicated/index.md3
-rw-r--r--doc/topics/git/lfs/index.md158
-rw-r--r--doc/topics/git/lfs/troubleshooting.md162
-rw-r--r--lib/api/entities/container_registry.rb1
-rw-r--r--lib/feature.rb47
-rw-r--r--lib/gitlab/ci/variables/downstream/generator.rb2
-rw-r--r--locale/gitlab.pot15
-rw-r--r--spec/frontend/ci/job_details/components/log/collapsible_section_spec.js103
-rw-r--r--spec/frontend/ci/job_details/components/log/line_header_spec.js8
-rw-r--r--spec/frontend/ci/job_details/components/log/log_spec.js72
-rw-r--r--spec/frontend/ci/job_details/components/log/mock_data.js243
-rw-r--r--spec/frontend/ci/job_details/store/mutations_spec.js18
-rw-r--r--spec/frontend/ci/job_details/store/utils_spec.js683
-rw-r--r--spec/frontend/kubernetes_dashboard/graphql/resolvers/kubernetes_spec.js71
-rw-r--r--spec/lib/feature_spec.rb1536
-rw-r--r--spec/lib/gitlab/checks/tag_check_spec.rb103
-rw-r--r--spec/support/shared_contexts/single_change_access_checks_shared_context.rb2
56 files changed, 1828 insertions, 2071 deletions
diff --git a/.rubocop_todo/layout/line_length.yml b/.rubocop_todo/layout/line_length.yml
index 6196f2e087c..19712d9d551 100644
--- a/.rubocop_todo/layout/line_length.yml
+++ b/.rubocop_todo/layout/line_length.yml
@@ -3611,7 +3611,6 @@ Layout/LineLength:
- 'spec/lib/gitlab/checks/diff_check_spec.rb'
- 'spec/lib/gitlab/checks/push_check_spec.rb'
- 'spec/lib/gitlab/checks/snippet_check_spec.rb'
- - 'spec/lib/gitlab/checks/tag_check_spec.rb'
- 'spec/lib/gitlab/ci/ansi2html_spec.rb'
- 'spec/lib/gitlab/ci/ansi2json/style_spec.rb'
- 'spec/lib/gitlab/ci/ansi2json_spec.rb'
diff --git a/.rubocop_todo/rspec/context_wording.yml b/.rubocop_todo/rspec/context_wording.yml
index 82ccbe7dc46..ba1bdb3f194 100644
--- a/.rubocop_todo/rspec/context_wording.yml
+++ b/.rubocop_todo/rspec/context_wording.yml
@@ -1588,7 +1588,6 @@ RSpec/ContextWording:
- 'spec/lib/gitlab/checks/lfs_integrity_spec.rb'
- 'spec/lib/gitlab/checks/push_file_count_check_spec.rb'
- 'spec/lib/gitlab/checks/snippet_check_spec.rb'
- - 'spec/lib/gitlab/checks/tag_check_spec.rb'
- 'spec/lib/gitlab/ci/ansi2html_spec.rb'
- 'spec/lib/gitlab/ci/ansi2json/style_spec.rb'
- 'spec/lib/gitlab/ci/ansi2json_spec.rb'
diff --git a/.rubocop_todo/rspec/named_subject.yml b/.rubocop_todo/rspec/named_subject.yml
index 20e43b25c1e..93404a43df2 100644
--- a/.rubocop_todo/rspec/named_subject.yml
+++ b/.rubocop_todo/rspec/named_subject.yml
@@ -1881,7 +1881,6 @@ RSpec/NamedSubject:
- 'spec/lib/gitlab/checks/push_file_count_check_spec.rb'
- 'spec/lib/gitlab/checks/single_change_access_spec.rb'
- 'spec/lib/gitlab/checks/snippet_check_spec.rb'
- - 'spec/lib/gitlab/checks/tag_check_spec.rb'
- 'spec/lib/gitlab/ci/ansi2html_spec.rb'
- 'spec/lib/gitlab/ci/ansi2json/line_spec.rb'
- 'spec/lib/gitlab/ci/ansi2json/parser_spec.rb'
diff --git a/app/assets/javascripts/ci/job_details/components/log/collapsible_section.vue b/app/assets/javascripts/ci/job_details/components/log/collapsible_section.vue
deleted file mode 100644
index 39c612bc600..00000000000
--- a/app/assets/javascripts/ci/job_details/components/log/collapsible_section.vue
+++ /dev/null
@@ -1,71 +0,0 @@
-<script>
-import LogLine from './line.vue';
-import LogLineHeader from './line_header.vue';
-
-export default {
- name: 'CollapsibleLogSection',
- components: {
- LogLine,
- LogLineHeader,
- },
- props: {
- section: {
- type: Object,
- required: true,
- },
- jobLogEndpoint: {
- type: String,
- required: true,
- },
- searchResults: {
- type: Array,
- required: false,
- default: () => [],
- },
- },
- computed: {
- badgeDuration() {
- return this.section.line && this.section.line.section_duration;
- },
- highlightedLines() {
- return this.searchResults.map((result) => result.lineNumber);
- },
- headerIsHighlighted() {
- const {
- line: { lineNumber },
- } = this.section;
-
- return this.highlightedLines.includes(lineNumber);
- },
- },
- methods: {
- handleOnClickCollapsibleLine(section) {
- this.$emit('onClickCollapsibleLine', section);
- },
- lineIsHighlighted({ lineNumber }) {
- return this.highlightedLines.includes(lineNumber);
- },
- },
-};
-</script>
-<template>
- <div>
- <log-line-header
- :line="section.line"
- :duration="badgeDuration"
- :path="jobLogEndpoint"
- :is-closed="section.isClosed"
- :is-highlighted="headerIsHighlighted"
- @toggleLine="handleOnClickCollapsibleLine(section)"
- />
- <template v-if="!section.isClosed">
- <log-line
- v-for="line in section.lines"
- :key="line.offset"
- :line="line"
- :path="jobLogEndpoint"
- :is-highlighted="lineIsHighlighted(line)"
- />
- </template>
- </div>
-</template>
diff --git a/app/assets/javascripts/ci/job_details/components/log/line_header.vue b/app/assets/javascripts/ci/job_details/components/log/line_header.vue
index d36701323da..4e009afe8c9 100644
--- a/app/assets/javascripts/ci/job_details/components/log/line_header.vue
+++ b/app/assets/javascripts/ci/job_details/components/log/line_header.vue
@@ -24,6 +24,11 @@ export default {
type: String,
required: true,
},
+ hideDuration: {
+ type: Boolean,
+ required: false,
+ default: false,
+ },
duration: {
type: String,
required: false,
@@ -77,6 +82,6 @@ export default {
:class="content.style"
>{{ content.text }}</span
>
- <duration-badge v-if="duration" :duration="duration" />
+ <duration-badge v-if="duration && !hideDuration" :duration="duration" />
</div>
</template>
diff --git a/app/assets/javascripts/ci/job_details/components/log/log.vue b/app/assets/javascripts/ci/job_details/components/log/log.vue
index 433d1b3579b..2f5830c2967 100644
--- a/app/assets/javascripts/ci/job_details/components/log/log.vue
+++ b/app/assets/javascripts/ci/job_details/components/log/log.vue
@@ -4,12 +4,12 @@
import { mapState, mapActions } from 'vuex';
import { scrollToElement } from '~/lib/utils/common_utils';
import { getLocationHash } from '~/lib/utils/url_utility';
-import CollapsibleLogSection from './collapsible_section.vue';
import LogLine from './line.vue';
+import LogLineHeader from './line_header.vue';
export default {
components: {
- CollapsibleLogSection,
+ LogLineHeader,
LogLine,
},
props: {
@@ -20,7 +20,7 @@ export default {
},
},
computed: {
- ...mapState(['jobLogEndpoint', 'jobLog', 'isJobLogComplete']),
+ ...mapState(['jobLogEndpoint', 'jobLog', 'jobLogSections', 'isJobLogComplete']),
highlightedLines() {
return this.searchResults.map((result) => result.lineNumber);
},
@@ -45,6 +45,20 @@ export default {
handleOnClickCollapsibleLine(section) {
this.toggleCollapsibleLine(section);
},
+ isLineVisible(line) {
+ const { lineNumber, section } = line;
+
+ if (!section) {
+ // lines outside of sections can't be collapsed
+ return true;
+ }
+
+ return !Object.values(this.jobLogSections).find(
+ ({ isClosed, startLineNumber, endLineNumber }) => {
+ return isClosed && lineNumber > startLineNumber && lineNumber <= endLineNumber;
+ },
+ );
+ },
isHighlighted({ lineNumber }) {
return this.highlightedLines.includes(lineNumber);
},
@@ -53,22 +67,27 @@ export default {
</script>
<template>
<code class="job-log d-block" data-testid="job-log-content">
- <template v-for="(section, index) in jobLog">
- <collapsible-log-section
- v-if="section.isHeader"
- :key="`collapsible-${index}`"
- :section="section"
- :job-log-endpoint="jobLogEndpoint"
- :search-results="searchResults"
- @onClickCollapsibleLine="handleOnClickCollapsibleLine"
- />
- <log-line
- v-else
- :key="section.offset"
- :line="section"
- :path="jobLogEndpoint"
- :is-highlighted="isHighlighted(section)"
- />
+ <template v-for="line in jobLog">
+ <template v-if="isLineVisible(line)">
+ <log-line-header
+ v-if="line.isHeader"
+ :key="line.offset"
+ :line="line"
+ :path="jobLogEndpoint"
+ :is-closed="jobLogSections[line.section].isClosed"
+ :duration="jobLogSections[line.section].duration"
+ :hide-duration="jobLogSections[line.section].hideDuration"
+ :is-highlighted="isHighlighted(line)"
+ @toggleLine="handleOnClickCollapsibleLine(line.section)"
+ />
+ <log-line
+ v-else
+ :key="line.offset"
+ :line="line"
+ :path="jobLogEndpoint"
+ :is-highlighted="isHighlighted(line)"
+ />
+ </template>
</template>
<div v-if="!isJobLogComplete" class="js-log-animation loader-animation pt-3 pl-3">
diff --git a/app/assets/javascripts/ci/job_details/store/mutations.js b/app/assets/javascripts/ci/job_details/store/mutations.js
index 72f9a86c4a9..bc679699012 100644
--- a/app/assets/javascripts/ci/job_details/store/mutations.js
+++ b/app/assets/javascripts/ci/job_details/store/mutations.js
@@ -1,6 +1,5 @@
-import Vue from 'vue';
import * as types from './mutation_types';
-import { logLinesParser, updateIncrementalJobLog } from './utils';
+import { logLinesParser } from './utils';
export default {
[types.SET_JOB_LOG_OPTIONS](state, options = {}) {
@@ -22,15 +21,27 @@ export default {
}
if (log.append) {
- state.jobLog = log.lines ? updateIncrementalJobLog(log.lines, state.jobLog) : state.jobLog;
+ if (log.lines) {
+ const { sections, lines } = logLinesParser(log.lines, {
+ currentLines: state.jobLog,
+ currentSections: state.jobLogSections,
+ });
+ state.jobLog = lines;
+ state.jobLogSections = sections;
+ }
state.jobLogSize += log.size;
} else {
// When the job still does not have a log
// the job log response will not have a defined
// html or size. We keep the old value otherwise these
// will be set to `null`
- state.jobLog = log.lines ? logLinesParser(log.lines, [], window.location.hash) : state.jobLog;
+ if (log.lines) {
+ const { sections, lines } = logLinesParser(log.lines, {}, window.location.hash);
+
+ state.jobLog = lines;
+ state.jobLogSections = sections;
+ }
state.jobLogSize = log.size || state.jobLogSize;
}
@@ -64,7 +75,9 @@ export default {
* @param {Object} section
*/
[types.TOGGLE_COLLAPSIBLE_LINE](state, section) {
- Vue.set(section, 'isClosed', !section.isClosed);
+ if (state.jobLogSections[section]) {
+ state.jobLogSections[section].isClosed = !state.jobLogSections[section].isClosed;
+ }
},
[types.REQUEST_JOB](state) {
diff --git a/app/assets/javascripts/ci/job_details/store/state.js b/app/assets/javascripts/ci/job_details/store/state.js
index 512b8b115b7..3efbcc51d65 100644
--- a/app/assets/javascripts/ci/job_details/store/state.js
+++ b/app/assets/javascripts/ci/job_details/store/state.js
@@ -17,6 +17,7 @@ export default () => ({
isScrollTopDisabled: true,
jobLog: [],
+ jobLogSections: {},
isJobLogComplete: false,
jobLogSize: 0,
isJobLogSizeVisible: false,
diff --git a/app/assets/javascripts/ci/job_details/store/utils.js b/app/assets/javascripts/ci/job_details/store/utils.js
index c8b33638821..1536c1140d0 100644
--- a/app/assets/javascripts/ci/job_details/store/utils.js
+++ b/app/assets/javascripts/ci/job_details/store/utils.js
@@ -1,193 +1,105 @@
import { parseBoolean } from '~/lib/utils/common_utils';
/**
- * Adds the line number property
- * @param Object line
- * @param Number lineNumber
- */
-export const parseLine = (line = {}, lineNumber) => ({
- ...line,
- lineNumber,
-});
-
-/**
- * When a line has `section_header` set to true, we create a new
- * structure to allow to nest the lines that belong to the
- * collapsible section
+ * Filters out lines that have an offset lower than the offset provided.
*
- * @param Object line
- * @param Number lineNumber
- */
-export const parseHeaderLine = (line = {}, lineNumber, hash) => {
- let isClosed = parseBoolean(line.section_options?.collapsed);
-
- // if a hash is present in the URL then we ensure
- // all sections are visible so we can scroll to the hash
- // in the DOM
- if (hash) {
- isClosed = false;
- }
-
- return {
- isClosed,
- isHeader: true,
- line: parseLine(line, lineNumber),
- lines: [],
- };
-};
-
-/**
- * Finds the matching header section
- * for the section_duration object and adds it to it
+ * If no offset is provided, all the lines are returned back.
*
- * {
- * isHeader: true,
- * line: {
- * content: [],
- * lineNumber: 0,
- * section_duration: "",
- * },
- * lines: []
- * }
- *
- * @param Array data
- * @param Object durationLine
+ * @param {Array} newLines
+ * @param {Number} offset
+ * @returns Lines to be added to the log that have not been added.
*/
-export function addDurationToHeader(data, durationLine) {
- data.forEach((el) => {
- if (el.line && el.line.section === durationLine.section) {
- el.line.section_duration = durationLine.section_duration;
- }
- });
-}
-
-/**
- * Check is the current section belongs to a collapsible section
- *
- * @param Array acc
- * @param Object last
- * @param Object section
- *
- * @returns Boolean
- */
-export const isCollapsibleSection = (acc = [], last = {}, section = {}) =>
- acc.length > 0 &&
- last.isHeader === true &&
- !section.section_duration &&
- section.section === last.line.section;
-
-/**
- * Returns the next line number in the parsed log
- *
- * @param Array acc
- * @returns Number
- */
-export const getNextLineNumber = (acc) => {
- if (!acc?.length) {
- return 1;
- }
-
- const lastElement = acc[acc.length - 1];
- const nestedLines = lastElement.lines;
-
- if (lastElement.isHeader && !nestedLines.length && lastElement.line) {
- return lastElement.line.lineNumber + 1;
+const linesAfterOffset = (newLines = [], offset = -1) => {
+ if (offset === -1) {
+ return newLines;
}
-
- if (lastElement.isHeader && nestedLines.length) {
- return nestedLines[nestedLines.length - 1].lineNumber + 1;
- }
-
- return lastElement.lineNumber + 1;
+ return newLines.filter((newLine) => newLine.offset > offset);
};
/**
- * Parses the job log content into a structure usable by the template
+ * Parses a series of trace lines from a job and returns lines and
+ * sections of the log. Each line is annotated with a lineNumber.
*
- * For collaspible lines (section_header = true):
- * - creates a new array to hold the lines that are collapsible,
- * - adds a isClosed property to handle toggle
- * - adds a isHeader property to handle template logic
- * - adds the section_duration
- * For each line:
- * - adds the index as lineNumber
+ * Sections have a range: starting line and ending line, plus a
+ * "duration" string.
*
- * @param Array lines
- * @param Array accumulator
- * @returns Array parsed log lines
+ * @param {Array} newLines - Lines to add to the log
+ * @param {Object} currentState - Current log: lines and sections
+ * @returns Consolidated lines and sections to be displayed
*/
-export const logLinesParser = (lines = [], prevLogLines = [], hash = '') =>
- lines.reduce(
- (acc, line) => {
- const lineNumber = getNextLineNumber(acc);
+export const logLinesParser = (
+ newLines = [],
+ { currentLines = [], currentSections = {} } = {},
+ hash = '',
+) => {
+ const lastCurrentLine = currentLines[currentLines.length - 1];
+ const newLinesToAppend = linesAfterOffset(newLines, lastCurrentLine?.offset);
+
+ if (!newLinesToAppend.length) {
+ return { lines: currentLines, sections: currentSections };
+ }
- const last = acc[acc.length - 1];
+ let lineNumber = lastCurrentLine?.lineNumber || 0;
+ const lines = [...currentLines];
+ const sections = { ...currentSections };
+
+ newLinesToAppend.forEach((line) => {
+ const {
+ offset,
+ content,
+ section,
+ section_header: isHeader,
+ section_footer: isFooter,
+ section_duration: duration,
+ section_options: options,
+ } = line;
+
+ if (content.length) {
+ lineNumber += 1;
+ lines.push({
+ offset,
+ lineNumber,
+ content,
+ ...(section ? { section } : {}),
+ ...(isHeader ? { isHeader: true } : {}),
+ });
+ }
- // If the object is an header, we parse it into another structure
- if (line.section_header) {
- acc.push(parseHeaderLine(line, lineNumber, hash));
- } else if (isCollapsibleSection(acc, last, line)) {
- // if the object belongs to a nested section, we append it to the new `lines` array of the
- // previously formatted header
- last.lines.push(parseLine(line, lineNumber));
- } else if (line.section_duration) {
- // if the line has section_duration, we look for the correct header to add it
- addDurationToHeader(acc, line);
- } else {
- // otherwise it's a regular line
- acc.push(parseLine(line, lineNumber));
+ // root level lines have no section, skip creating one
+ if (section) {
+ sections[section] = sections[section] || {
+ startLineNumber: 0,
+ endLineNumber: Infinity, // by default, sections are unbounded / have no end
+ duration: null,
+ isClosed: false,
+ };
+
+ if (isHeader) {
+ sections[section].startLineNumber = lineNumber;
}
-
- return acc;
- },
- [...prevLogLines],
- );
-
-/**
- * Finds the repeated offset, removes the old one
- *
- * Returns a new array with the updated log without
- * the repeated offset
- *
- * @param Array newLog
- * @param Array oldParsed
- * @returns Array
- *
- */
-export const findOffsetAndRemove = (newLog = [], oldParsed = []) => {
- const cloneOldLog = [...oldParsed];
- const lastIndex = cloneOldLog.length - 1;
- const last = cloneOldLog[lastIndex];
-
- const firstNew = newLog[0];
-
- if (last && firstNew) {
- if (last.offset === firstNew.offset || (last.line && last.line.offset === firstNew.offset)) {
- cloneOldLog.splice(lastIndex);
- } else if (last.lines && last.lines.length) {
- const lastNestedIndex = last.lines.length - 1;
- const lastNested = last.lines[lastNestedIndex];
- if (lastNested.offset === firstNew.offset) {
- last.lines.splice(lastNestedIndex);
+ if (options) {
+ let isClosed = parseBoolean(options?.collapsed);
+ // if a hash is present in the URL then we ensure
+ // all sections are visible so we can scroll to the hash
+ // in the DOM
+ if (hash) {
+ isClosed = false;
+ }
+ sections[section].isClosed = isClosed;
+
+ const hideDuration = parseBoolean(options?.hide_duration);
+ if (hideDuration) {
+ sections[section].hideDuration = hideDuration;
+ }
+ }
+ if (duration) {
+ sections[section].duration = duration;
+ }
+ if (isFooter) {
+ sections[section].endLineNumber = lineNumber;
}
}
- }
-
- return cloneOldLog;
-};
-
-/**
- * When the job log is not complete, backend may send the last received line
- * in the new response.
- *
- * We need to check if that is the case by looking for the offset property
- * before parsing the incremental part
- *
- * @param array oldLog
- * @param array newLog
- */
-export const updateIncrementalJobLog = (newLog = [], oldParsed = []) => {
- const parsedLog = findOffsetAndRemove(newLog, oldParsed);
+ });
- return logLinesParser(newLog, parsedLog);
+ return { lines, sections };
};
diff --git a/app/assets/javascripts/editor/schema/ci.json b/app/assets/javascripts/editor/schema/ci.json
index b8d3606d23e..5fcf547a4ed 100644
--- a/app/assets/javascripts/editor/schema/ci.json
+++ b/app/assets/javascripts/editor/schema/ci.json
@@ -1497,7 +1497,7 @@
},
{
"const": "data_integrity_failure",
- "description": "Retry if there is a structural integrity problem detected."
+ "description": "Retry if there is an unknown job problem."
}
]
},
diff --git a/app/assets/javascripts/kubernetes_dashboard/graphql/helpers/resolver_helpers.js b/app/assets/javascripts/kubernetes_dashboard/graphql/helpers/resolver_helpers.js
index 49286441dec..a6af2fcb96f 100644
--- a/app/assets/javascripts/kubernetes_dashboard/graphql/helpers/resolver_helpers.js
+++ b/app/assets/javascripts/kubernetes_dashboard/graphql/helpers/resolver_helpers.js
@@ -13,6 +13,18 @@ export const buildWatchPath = ({ resource, api = 'api/v1', namespace = '' }) =>
return namespace ? `/${api}/namespaces/${namespace}/${resource}` : `/${api}/${resource}`;
};
+const mapWorkloadItem = (item) => {
+ if (item.metadata) {
+ const metadata = {
+ ...item.metadata,
+ annotations: item.metadata?.annotations || {},
+ labels: item.metadata?.labels || {},
+ };
+ return { status: item.status, metadata };
+ }
+ return { status: item.status };
+};
+
export const watchPods = ({ client, query, configuration, namespace }) => {
const path = buildWatchPath({ resource: 'pods', namespace });
const config = new Configuration(configuration);
@@ -24,9 +36,7 @@ export const watchPods = ({ client, query, configuration, namespace }) => {
let result = [];
watcher.on(EVENT_DATA, (data) => {
- result = data.map((item) => {
- return { status: { phase: item.status.phase } };
- });
+ result = data.map(mapWorkloadItem);
client.writeQuery({
query,
@@ -61,14 +71,7 @@ export const getK8sPods = ({
}
const data = res?.items || [];
- return data?.map((item) => {
- if (item.metadata) {
- const metadata = { ...item.metadata, annotations: item.metadata?.annotations || {} };
- return { status: item.status, metadata };
- }
-
- return { status: item.status };
- });
+ return data.map(mapWorkloadItem);
})
.catch(async (err) => {
try {
diff --git a/app/assets/javascripts/kubernetes_dashboard/graphql/resolvers/kubernetes.js b/app/assets/javascripts/kubernetes_dashboard/graphql/resolvers/kubernetes.js
index db289541105..5f046464730 100644
--- a/app/assets/javascripts/kubernetes_dashboard/graphql/resolvers/kubernetes.js
+++ b/app/assets/javascripts/kubernetes_dashboard/graphql/resolvers/kubernetes.js
@@ -4,6 +4,7 @@ import k8sDashboardPodsQuery from '../queries/k8s_dashboard_pods.query.graphql';
export default {
k8sPods(_, { configuration }, { client }) {
const query = k8sDashboardPodsQuery;
- return getK8sPods({ client, query, configuration });
+ const enableWatch = true;
+ return getK8sPods({ client, query, configuration, enableWatch });
},
};
diff --git a/app/assets/javascripts/vue_shared/components/filtered_search_bar/constants.js b/app/assets/javascripts/vue_shared/components/filtered_search_bar/constants.js
index b09e8323ba2..952af4eeb5d 100644
--- a/app/assets/javascripts/vue_shared/components/filtered_search_bar/constants.js
+++ b/app/assets/javascripts/vue_shared/components/filtered_search_bar/constants.js
@@ -68,6 +68,7 @@ export const TOKEN_TITLE_CONFIDENTIAL = __('Confidential');
export const TOKEN_TITLE_CONTACT = s__('Crm|Contact');
export const TOKEN_TITLE_GROUP = __('Group');
export const TOKEN_TITLE_LABEL = __('Label');
+export const TOKEN_TITLE_PROJECT = __('Project');
export const TOKEN_TITLE_MILESTONE = __('Milestone');
export const TOKEN_TITLE_MY_REACTION = __('My-Reaction');
export const TOKEN_TITLE_ORGANIZATION = s__('Crm|Organization');
@@ -93,6 +94,7 @@ export const TOKEN_TYPE_EPIC = 'epic';
// this is in the shared constants. Until we have not decoupled the EE filtered search bar
// from the CE component, we need to keep this in the CE code.
// https://gitlab.com/gitlab-org/gitlab/-/issues/377838
+export const TOKEN_TYPE_PROJECT = 'project';
export const TOKEN_TYPE_HEALTH = 'health';
export const TOKEN_TYPE_ITERATION = 'iteration';
export const TOKEN_TYPE_LABEL = 'label';
diff --git a/app/assets/stylesheets/_page_specific_files.scss b/app/assets/stylesheets/_page_specific_files.scss
index 129a9f207f8..be9a06d7bb5 100644
--- a/app/assets/stylesheets/_page_specific_files.scss
+++ b/app/assets/stylesheets/_page_specific_files.scss
@@ -5,6 +5,7 @@
@import './pages/hierarchy';
@import './pages/issues';
@import './pages/note_form';
+@import './pages/notes';
@import './pages/pipelines';
@import './pages/profile';
@import './pages/registry';
diff --git a/app/assets/stylesheets/page_bundles/notes.scss b/app/assets/stylesheets/pages/notes.scss
index 1c03cb6a20e..eb2061081ae 100644
--- a/app/assets/stylesheets/page_bundles/notes.scss
+++ b/app/assets/stylesheets/pages/notes.scss
@@ -1,7 +1,3 @@
-@import 'mixins_and_variables_and_functions';
-@import 'framework/notes';
-@import 'framework/buttons';
-
$avatar-icon-size: 2rem;
$avatar-m-top: 0.5rem;
$avatar-m-ratio: 2;
@@ -989,7 +985,7 @@ $system-note-icon-m-left: $avatar-m-left + $icon-size-diff / $avatar-m-ratio;
.disabled-comment {
background-color: $gray-light;
- border-radius: $gl-border-radius-base;
+ border-radius: $border-radius-base;
border: 1px solid $border-gray-normal;
color: $note-disabled-comment-color;
padding: $gl-padding-8 0;
diff --git a/app/models/ci/build.rb b/app/models/ci/build.rb
index 5b4cb8ef21c..dbc56079a06 100644
--- a/app/models/ci/build.rb
+++ b/app/models/ci/build.rb
@@ -960,7 +960,7 @@ module Ci
job_artifacts.all_reports
end
- # Consider this object to have a structural integrity problems
+ # Consider this object to have an unknown job problem
def doom!
transaction do
now = Time.current
diff --git a/app/models/users/callout.rb b/app/models/users/callout.rb
index 924e130aab5..a9880e56e8c 100644
--- a/app/models/users/callout.rb
+++ b/app/models/users/callout.rb
@@ -77,8 +77,7 @@ module Users
vsd_feedback_banner: 75, # EE-only
security_policy_protected_branch_modification: 76, # EE-only
vulnerability_report_grouping: 77, # EE-only
- new_nav_for_everyone_callout: 78,
- duo_chat_callout: 79 # EE-only
+ new_nav_for_everyone_callout: 78
}
validates :feature_name,
diff --git a/app/presenters/commit_status_presenter.rb b/app/presenters/commit_status_presenter.rb
index 0858fad1e1a..38469be572a 100644
--- a/app/presenters/commit_status_presenter.rb
+++ b/app/presenters/commit_status_presenter.rb
@@ -14,7 +14,7 @@ class CommitStatusPresenter < Gitlab::View::Presenter::Delegated
archived_failure: 'The job is archived and cannot be run',
unmet_prerequisites: 'The job failed to complete prerequisite tasks',
scheduler_failure: 'The scheduler failed to assign job to the runner, please try again or contact system administrator',
- data_integrity_failure: 'There has been a structural integrity problem detected, please contact system administrator',
+ data_integrity_failure: 'There has been an unknown job problem, please contact your system administrator with the job ID to review the logs',
forward_deployment_failure: 'The deployment job is older than the previously succeeded deployment job, and therefore cannot be run',
pipeline_loop_detected: 'This job could not be executed because it would create infinitely looping pipelines',
insufficient_upstream_permissions: 'This job could not be executed because of insufficient permissions to track the upstream project.',
diff --git a/app/views/groups/work_items/index.html.haml b/app/views/groups/work_items/index.html.haml
index a586826967c..299a90b362d 100644
--- a/app/views/groups/work_items/index.html.haml
+++ b/app/views/groups/work_items/index.html.haml
@@ -1,5 +1,4 @@
- page_title s_('WorkItem|Work items')
- add_page_specific_style 'page_bundles/issuable_list'
-- add_page_specific_style 'page_bundles/notes'
.js-work-items-list-root{ data: work_items_list_data(@group) }
diff --git a/app/views/layouts/_head.html.haml b/app/views/layouts/_head.html.haml
index b7e485ac0d7..2c97df90110 100644
--- a/app/views/layouts/_head.html.haml
+++ b/app/views/layouts/_head.html.haml
@@ -2,10 +2,6 @@
- site_name = _('GitLab')
- omit_og = sign_in_with_redirect?
--# This is a temporary place for the page specific style migrations to be included on all pages like page_specific_files
-- if Feature.disabled?(:page_specific_styles, current_user)
- - add_page_specific_style('page_bundles/notes')
-
%head{ omit_og ? { } : { prefix: "og: http://ogp.me/ns#" } }
%meta{ charset: "utf-8" }
%meta{ 'http-equiv' => 'X-UA-Compatible', content: 'IE=edge' }
diff --git a/app/views/projects/_stat_anchor_list.html.haml b/app/views/projects/_stat_anchor_list.html.haml
index 8cad1974ffa..7f05a20261a 100644
--- a/app/views/projects/_stat_anchor_list.html.haml
+++ b/app/views/projects/_stat_anchor_list.html.haml
@@ -3,7 +3,7 @@
- return unless anchors.any?
-%ul.nav.gl-gap-2
+%ul.nav.gl-row-gap-2.gl-column-gap-5
- anchors.each do |anchor|
%li.nav-item
= link_to_if(anchor.link, anchor.label, anchor.link, stat_anchor_attrs(anchor)) do
diff --git a/app/views/projects/commit/show.html.haml b/app/views/projects/commit/show.html.haml
index 5708eacc801..0868475c49f 100644
--- a/app/views/projects/commit/show.html.haml
+++ b/app/views/projects/commit/show.html.haml
@@ -6,7 +6,6 @@
- page_title "#{@commit.title} (#{@commit.short_id})", _('Commits')
- page_description @commit.description
- add_page_specific_style 'page_bundles/pipelines'
-- add_page_specific_style 'page_bundles/notes'
.container-fluid{ class: [container_class] }
= render "commit_box"
diff --git a/app/views/projects/commits/show.html.haml b/app/views/projects/commits/show.html.haml
index 09efc2b9c82..be2bf43cbb9 100644
--- a/app/views/projects/commits/show.html.haml
+++ b/app/views/projects/commits/show.html.haml
@@ -2,7 +2,6 @@
- add_page_specific_style 'page_bundles/tree'
- add_page_specific_style 'page_bundles/merge_request'
- add_page_specific_style 'page_bundles/projects'
-- add_page_specific_style 'page_bundles/notes'
- page_title _("Commits"), @ref
= content_for :meta_tags do
diff --git a/app/views/projects/issues/show.html.haml b/app/views/projects/issues/show.html.haml
index e5ede7f1546..457eaf5e194 100644
--- a/app/views/projects/issues/show.html.haml
+++ b/app/views/projects/issues/show.html.haml
@@ -12,7 +12,6 @@
- add_page_specific_style 'page_bundles/issuable'
- add_page_specific_style 'page_bundles/issues_show'
- add_page_specific_style 'page_bundles/work_items'
-- add_page_specific_style 'page_bundles/notes'
- @content_class = "limit-container-width" unless fluid_layout
diff --git a/app/views/projects/merge_requests/creations/new.html.haml b/app/views/projects/merge_requests/creations/new.html.haml
index 3c51e826092..f2c2700b012 100644
--- a/app/views/projects/merge_requests/creations/new.html.haml
+++ b/app/views/projects/merge_requests/creations/new.html.haml
@@ -4,7 +4,6 @@
- add_page_specific_style 'page_bundles/pipelines'
- add_page_specific_style 'page_bundles/ci_status'
- add_page_specific_style 'page_bundles/merge_request'
-- add_page_specific_style 'page_bundles/notes'
- conflicting_mr = @merge_request.existing_mrs_targeting_same_branch.first
diff --git a/app/views/projects/merge_requests/diffs.html.haml b/app/views/projects/merge_requests/diffs.html.haml
index 5391d2d28ca..03306e98407 100644
--- a/app/views/projects/merge_requests/diffs.html.haml
+++ b/app/views/projects/merge_requests/diffs.html.haml
@@ -1,4 +1,3 @@
- add_page_specific_style 'page_bundles/merge_request'
-- add_page_specific_style 'page_bundles/notes'
= render 'page'
diff --git a/app/views/projects/merge_requests/index.html.haml b/app/views/projects/merge_requests/index.html.haml
index d3c9709d59b..e2d3e082289 100644
--- a/app/views/projects/merge_requests/index.html.haml
+++ b/app/views/projects/merge_requests/index.html.haml
@@ -7,7 +7,6 @@
- new_merge_request_email = @project.new_issuable_address(current_user, 'merge_request')
- add_page_specific_style 'page_bundles/issuable_list'
- add_page_specific_style 'page_bundles/merge_request'
-- add_page_specific_style 'page_bundles/notes'
= content_for :meta_tags do
= auto_discovery_link_tag(:atom, safe_params.merge(rss_url_options).to_h, title: "#{@project.name} merge requests")
diff --git a/app/views/projects/merge_requests/show.html.haml b/app/views/projects/merge_requests/show.html.haml
index 5391d2d28ca..03306e98407 100644
--- a/app/views/projects/merge_requests/show.html.haml
+++ b/app/views/projects/merge_requests/show.html.haml
@@ -1,4 +1,3 @@
- add_page_specific_style 'page_bundles/merge_request'
-- add_page_specific_style 'page_bundles/notes'
= render 'page'
diff --git a/app/views/projects/work_items/show.html.haml b/app/views/projects/work_items/show.html.haml
index 3fee9a73f17..7e0bddf1b5d 100644
--- a/app/views/projects/work_items/show.html.haml
+++ b/app/views/projects/work_items/show.html.haml
@@ -1,7 +1,6 @@
- page_title "##{request.params['iid']}"
- add_to_breadcrumbs _("Issues"), project_issues_path(@project)
- add_page_specific_style 'page_bundles/work_items'
-- add_page_specific_style 'page_bundles/notes'
- @gfm_form = true
- @noteable_type = 'WorkItem'
diff --git a/app/views/snippets/edit.html.haml b/app/views/snippets/edit.html.haml
index 1effa0de4d9..2b2035e362b 100644
--- a/app/views/snippets/edit.html.haml
+++ b/app/views/snippets/edit.html.haml
@@ -1,6 +1,5 @@
- breadcrumb_title _("Edit Snippet")
- page_title _("Edit"), "#{@snippet.title} (#{@snippet.to_reference})", _("Snippets")
-- add_page_specific_style 'page_bundles/notes'
- content_for :prefetch_asset_tags do
- webpack_preload_asset_tag('monaco')
diff --git a/app/views/snippets/new.html.haml b/app/views/snippets/new.html.haml
index 224dd519f6a..da2245432ce 100644
--- a/app/views/snippets/new.html.haml
+++ b/app/views/snippets/new.html.haml
@@ -1,5 +1,4 @@
- page_title _("New Snippet")
-- add_page_specific_style 'page_bundles/notes'
.page-title-holder.d-flex.align-items-center
%h1.page-title.gl-font-size-h-display= _('New Snippet')
diff --git a/app/views/snippets/show.html.haml b/app/views/snippets/show.html.haml
index 225789cb3e9..fe05a3de13a 100644
--- a/app/views/snippets/show.html.haml
+++ b/app/views/snippets/show.html.haml
@@ -9,7 +9,6 @@
- add_to_breadcrumbs _("Snippets"), explore_snippets_path
- breadcrumb_title @snippet.to_reference
- page_title "#{@snippet.title} (#{@snippet.to_reference})", _("Snippets")
-- add_page_specific_style 'page_bundles/notes'
- content_for :prefetch_asset_tags do
- webpack_preload_asset_tag('monaco', prefetch: true)
diff --git a/config/application.rb b/config/application.rb
index ee7c0f9c178..7fd209b1191 100644
--- a/config/application.rb
+++ b/config/application.rb
@@ -362,7 +362,6 @@ module Gitlab
config.assets.precompile << "page_bundles/work_items.css"
config.assets.precompile << "page_bundles/xterm.css"
config.assets.precompile << "page_bundles/labels.css"
- config.assets.precompile << "page_bundles/notes.css"
config.assets.precompile << "lazy_bundles/cropper.css"
config.assets.precompile << "lazy_bundles/gridstack.css"
config.assets.precompile << "performance_bar.css"
diff --git a/config/metrics/counts_all/20210216175512_ci_internal_pipelines.yml b/config/metrics/counts_all/20210216175512_ci_internal_pipelines.yml
index 773fe5b7844..afdbb64aebc 100644
--- a/config/metrics/counts_all/20210216175512_ci_internal_pipelines.yml
+++ b/config/metrics/counts_all/20210216175512_ci_internal_pipelines.yml
@@ -17,5 +17,6 @@ tier:
- free
- premium
- ultimate
-performance_indicator_type: []
+performance_indicator_type:
+- customer_health_score
milestone: "<13.9"
diff --git a/config/metrics/counts_all/20210216180228_projects_jira_server_active.yml b/config/metrics/counts_all/20210216180228_projects_jira_server_active.yml
index c196d017874..feda686d183 100644
--- a/config/metrics/counts_all/20210216180228_projects_jira_server_active.yml
+++ b/config/metrics/counts_all/20210216180228_projects_jira_server_active.yml
@@ -16,7 +16,8 @@ tier:
- free
- premium
- ultimate
-performance_indicator_type: []
+performance_indicator_type:
+- customer_health_score
milestone: "<13.9"
removed_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/112571
milestone_removed: "15.10"
diff --git a/doc/api/container_registry.md b/doc/api/container_registry.md
index 3a03085c6da..e58bdc6ec16 100644
--- a/doc/api/container_registry.md
+++ b/doc/api/container_registry.md
@@ -116,7 +116,8 @@ Example response:
"project_id": 9,
"location": "gitlab.example.com:5000/group/project",
"created_at": "2019-01-10T13:38:57.391Z",
- "cleanup_policy_started_at": "2020-01-10T15:40:57.391Z"
+ "cleanup_policy_started_at": "2020-01-10T15:40:57.391Z",
+ "status": null
},
{
"id": 2,
@@ -125,7 +126,8 @@ Example response:
"project_id": 9,
"location": "gitlab.example.com:5000/group/project/releases",
"created_at": "2019-01-10T13:39:08.229Z",
- "cleanup_policy_started_at": "2020-08-17T03:12:35.489Z"
+ "cleanup_policy_started_at": "2020-08-17T03:12:35.489Z",
+ "status": "delete_ongoing"
}
]
```
@@ -215,7 +217,8 @@ Example response:
"location": "gitlab.example.com:5000/group/project:0.0.1"
}
],
- "size": 2818413
+ "size": 2818413,
+ "status": "delete_scheduled"
}
```
diff --git a/doc/api/graphql/reference/index.md b/doc/api/graphql/reference/index.md
index 3703d8e1560..a125f036793 100644
--- a/doc/api/graphql/reference/index.md
+++ b/doc/api/graphql/reference/index.md
@@ -31082,7 +31082,6 @@ Name of the feature that the callout is for.
| <a id="usercalloutfeaturenameenumci_deprecation_warning_for_types_keyword"></a>`CI_DEPRECATION_WARNING_FOR_TYPES_KEYWORD` | Callout feature name for ci_deprecation_warning_for_types_keyword. |
| <a id="usercalloutfeaturenameenumcloud_licensing_subscription_activation_banner"></a>`CLOUD_LICENSING_SUBSCRIPTION_ACTIVATION_BANNER` | Callout feature name for cloud_licensing_subscription_activation_banner. |
| <a id="usercalloutfeaturenameenumcluster_security_warning"></a>`CLUSTER_SECURITY_WARNING` | Callout feature name for cluster_security_warning. |
-| <a id="usercalloutfeaturenameenumduo_chat_callout"></a>`DUO_CHAT_CALLOUT` | Callout feature name for duo_chat_callout. |
| <a id="usercalloutfeaturenameenumeoa_bronze_plan_banner"></a>`EOA_BRONZE_PLAN_BANNER` | Callout feature name for eoa_bronze_plan_banner. |
| <a id="usercalloutfeaturenameenumfeature_flags_new_version"></a>`FEATURE_FLAGS_NEW_VERSION` | Callout feature name for feature_flags_new_version. |
| <a id="usercalloutfeaturenameenumgcp_signup_offer"></a>`GCP_SIGNUP_OFFER` | Callout feature name for gcp_signup_offer. |
diff --git a/doc/architecture/blueprints/new_diffs/index.md b/doc/architecture/blueprints/new_diffs/index.md
index 836bb8e89e6..d9dab14e683 100644
--- a/doc/architecture/blueprints/new_diffs/index.md
+++ b/doc/architecture/blueprints/new_diffs/index.md
@@ -124,6 +124,94 @@ To measure our success, we need to set meaningful metrics. These metrics should
### Front end
+#### High-level implementation
+
+<!--
+This section should contain enough information that the specifics of your
+change are understandable. This may include API specs (though not always
+required) or even code snippets. If there's any ambiguity about HOW your
+proposal will be implemented, this is the place to discuss them.
+
+If you are not sure how many implementation details you should include in the
+blueprint, the rule of thumb here is to provide enough context for people to
+understand the proposal. As you move forward with the implementation, you may
+need to add more implementation details to the blueprint, as those may become
+an important context for important technical decisions made along the way. A
+blueprint is also a register of such technical decisions. If a technical
+decision requires additional context before it can be made, you probably should
+document this context in a blueprint. If it is a small technical decision that
+can be made in a merge request by an author and a maintainer, you probably do
+not need to document it here. The impact a technical decision will have is
+another helpful information - if a technical decision is very impactful,
+documenting it, along with associated implementation details, is advisable.
+
+If it's helpful to include workflow diagrams or any other related images.
+Diagrams authored in GitLab flavored markdown are preferred. In cases where
+that is not feasible, images should be placed under `images/` in the same
+directory as the `index.md` for the proposal.
+-->
+
+#### HTML structure
+
+The HTML structure of a diff should have support for assistive technology.
+For this reason, a table could be a preferred solution as it allows to indicate
+logical relationship between the presented data and is easier to navigate for
+screen reader users with keyboard. Labeled columns will make sure that information
+such as line numbers can be associated with the edited piece of code.
+
+Possible structure could include:
+
+```html
+<table>
+ <caption class="gl-sr-only">Changes for file index.js. 10 lines changed: 5 deleted, 5 added.</caption>
+ <tr hidden>
+ <th>Original line number: </th>
+ <th>Diff line number: </th>
+ <th>Line change:</th>
+ </tr>
+ <tr>
+ <td>1234</td>
+ <td></td>
+ <td>.tree-time-ago ,</td>
+ </tr>
+ […]
+</table>
+```
+
+See [WAI tutorial on tables](https://www.w3.org/WAI/tutorials/tables) for
+more implementation guidelines.
+
+Each file table should include a short summary of changes that will read out:
+
+- total number of lines changed,
+- number of added lines,
+- number of removed lines.
+
+The summary of the table content can be placed either within `<caption>` element, or before the table within an element referred as `aria-describedby`.
+See <abbr>WAI</abbr> (Web Accessibility Initiative) for more information on both approaches:
+
+- [Nesting summary inside the `<caption>` element](https://www.w3.org/WAI/tutorials/tables/caption-summary/#nesting-summary-inside-the-caption-element)
+- [Using `aria-describedby` to provide a table summary](https://www.w3.org/WAI/tutorials/tables/caption-summary/#using-aria-describedby-to-provide-a-table-summary)
+
+However, if such a structure will compromise other functional aspects of displaying a diff,
+more generic elements together with ARIA support can be used.
+
+#### Visual indicators
+
+It is important that each visual indicator should have a screen reader text
+denoting the meaning of that indicator. When needed, use `gl-sr-only` or `gl-sr-only-focusable`
+class to make the element accessible by screen readers, but not by sighted users.
+
+Some of the visual indicators that require alternatives for assistive technology are:
+
+- `+` or green highlighting to be read as `added`
+- `-` or red highlighting to be read as `removed`
+
+### Front end (Design A)
+
+NOTE:
+This draft proposal suggests one potential front end architecture which may not be chosen.
+
Ideally, we would meet our definition of done and our accountability metrics on our first try.
We also need to continue to stay within those boundaries as we move forward. To ensure this,
we need to design an application architecture that:
@@ -225,87 +313,6 @@ end
Network --> Socket --> API --> unk
```
-<!--
-This section should contain enough information that the specifics of your
-change are understandable. This may include API specs (though not always
-required) or even code snippets. If there's any ambiguity about HOW your
-proposal will be implemented, this is the place to discuss them.
-
-If you are not sure how many implementation details you should include in the
-blueprint, the rule of thumb here is to provide enough context for people to
-understand the proposal. As you move forward with the implementation, you may
-need to add more implementation details to the blueprint, as those may become
-an important context for important technical decisions made along the way. A
-blueprint is also a register of such technical decisions. If a technical
-decision requires additional context before it can be made, you probably should
-document this context in a blueprint. If it is a small technical decision that
-can be made in a merge request by an author and a maintainer, you probably do
-not need to document it here. The impact a technical decision will have is
-another helpful information - if a technical decision is very impactful,
-documenting it, along with associated implementation details, is advisable.
-
-If it's helpful to include workflow diagrams or any other related images.
-Diagrams authored in GitLab flavored markdown are preferred. In cases where
-that is not feasible, images should be placed under `images/` in the same
-directory as the `index.md` for the proposal.
--->
-
-#### HTML structure
-
-The HTML structure of a diff should have support for assistive technology.
-For this reason, a table could be a preferred solution as it allows to indicate
-logical relationship between the presented data and is easier to navigate for
-screen reader users with keyboard. Labeled columns will make sure that information
-such as line numbers can be associated with the edited piece of code.
-
-Possible structure could include:
-
-```html
-<table>
- <caption class="gl-sr-only">Changes for file index.js. 10 lines changed: 5 deleted, 5 added.</caption>
- <tr hidden>
- <th>Original line number: </th>
- <th>Diff line number: </th>
- <th>Line change:</th>
- </tr>
- <tr>
- <td>1234</td>
- <td></td>
- <td>.tree-time-ago ,</td>
- </tr>
- […]
-</table>
-```
-
-See [WAI tutorial on tables](https://www.w3.org/WAI/tutorials/tables) for
-more implementation guidelines.
-
-Each file table should include a short summary of changes that will read out:
-
-- total number of lines changed,
-- number of added lines,
-- number of removed lines.
-
-The summary of the table content can be placed either within `<caption>` element, or before the table within an element referred as `aria-describedby`.
-See <abbr>WAI</abbr> (Web Accessibility Initiative) for more information on both approaches:
-
-- [Nesting summary inside the `<caption>` element](https://www.w3.org/WAI/tutorials/tables/caption-summary/#nesting-summary-inside-the-caption-element)
-- [Using aria-describedby to provide a table summary](https://www.w3.org/WAI/tutorials/tables/caption-summary/#using-aria-describedby-to-provide-a-table-summary)
-
-However, if such a structure will compromise other functional aspects of displaying a diff,
-more generic elements together with ARIA support can be used.
-
-#### Visual indicators
-
-It is important that each visual indicator should have a screen reader text
-denoting the meaning of that indicator. When needed, use `gl-sr-only` or `gl-sr-only-focusable`
-class to make the element accessible by screen readers, but not by sighted users.
-
-Some of the visual indicators that require alternatives for assistive technology are:
-
-- `+` or red highlighting to be read as `added`
-- `-` or green highlighting to be read as `removed`
-
## Alternative Solutions
<!--
diff --git a/doc/ci/yaml/index.md b/doc/ci/yaml/index.md
index 048a0017a0e..10b603d4d92 100644
--- a/doc/ci/yaml/index.md
+++ b/doc/ci/yaml/index.md
@@ -3633,7 +3633,7 @@ Use `retry:when` with `retry:max` to retry jobs for only specific failure cases.
- `archived_failure`: Retry if the job is archived and can't be run.
- `unmet_prerequisites`: Retry if the job failed to complete prerequisite tasks.
- `scheduler_failure`: Retry if the scheduler failed to assign the job to a runner.
-- `data_integrity_failure`: Retry if there is a structural integrity problem detected.
+- `data_integrity_failure`: Retry if there is an unknown job problem.
**Example of `retry:when`** (single failure type):
diff --git a/doc/subscriptions/gitlab_dedicated/index.md b/doc/subscriptions/gitlab_dedicated/index.md
index 4544124176e..b6f20ef7eb1 100644
--- a/doc/subscriptions/gitlab_dedicated/index.md
+++ b/doc/subscriptions/gitlab_dedicated/index.md
@@ -127,7 +127,8 @@ The following GitLab application features are not available:
- GitLab-managed runners (hosted runners)
- GitLab AI capabilities (Refer to our [direction page](https://about.gitlab.com/direction/saas-platforms/dedicated/#supporting-ai-features-on-gitlab-dedicated) for more information)
- Features other than [available features](#available-features) that must be configured outside of the GitLab user interface
-- [Feature Flags](../../administration/feature_flags.md) and functionality behind them
+- Interacting with GitLab [Feature Flags](../../administration/feature_flags.md)
+- Any functionality or feature behind a Feature Flag that is toggled `off` by default
The following features will not be supported:
diff --git a/doc/topics/git/lfs/index.md b/doc/topics/git/lfs/index.md
index 23d972e9aa7..95c1645f4cc 100644
--- a/doc/topics/git/lfs/index.md
+++ b/doc/topics/git/lfs/index.md
@@ -51,7 +51,7 @@ your operating system. GitLab requires version 1.0.1 or later of the Git LFS cli
- Any Git LFS request asks for HTTPS credentials, so we recommend a good Git
credentials store.
- Git LFS always assumes HTTPS so if you have GitLab server on HTTP you must
- [add the URL to Git configuration manually](#troubleshooting).
+ [add the URL to Git configuration manually](troubleshooting.md#getsockopt-connection-refused).
- [Group wikis](../../../user/project/wiki/group.md) do not support Git LFS.
## How LFS objects affect repository size
@@ -164,158 +164,4 @@ Technical details about how this works can be found in the [development document
- Blog post: [Getting started with Git LFS](https://about.gitlab.com/blog/2017/01/30/getting-started-with-git-lfs-tutorial/)
- [Git LFS developer information](../../../development/lfs.md)
- [GitLab Git Large File Storage (LFS) Administration](../../../administration/lfs/index.md) for self-managed instances
-
-## Troubleshooting
-
-### Encountered `n` files that should have been pointers, but weren't
-
-This error indicates the files are expected to be tracked by LFS, but
-the repository is not tracking them as LFS. This issue can be one
-potential reason for this error:
-[Files not tracked with LFS when uploaded through the web interface](https://gitlab.com/gitlab-org/gitlab/-/issues/326342#note_586820485)
-
-To resolve the problem, migrate the affected file (or files) and push back to the repository:
-
-1. Migrate the file to LFS:
-
- ```shell
- git lfs migrate import --yes --no-rewrite "<your-file>"
- ```
-
-1. Push back to your repository:
-
- ```shell
- git push
- ```
-
-1. Optional. Clean up your `.git` folder:
-
- ```shell
- git reflog expire --expire-unreachable=now --all
- git gc --prune=now
- ```
-
-### error: Repository or object not found
-
-This error can occur for a few reasons, including:
-
-- You don't have permissions to access certain LFS object
-
-Check if you have permissions to push to the project or fetch from the project.
-
-- Project is not allowed to access the LFS object
-
-LFS object you are trying to push to the project or fetch from the project is not
-available to the project anymore. Probably the object was removed from the server.
-
-- Local Git repository is using deprecated LFS API
-
-### Invalid status for `<url>` : 501
-
-Git LFS logs the failures into a log file.
-To view this log file, while in project directory:
-
-```shell
-git lfs logs last
-```
-
-If the status `error 501` is shown, it is because:
-
-- Git LFS is not enabled in project settings. Check your project settings and
- enable Git LFS.
-
-- Git LFS support is not enabled on the GitLab server. Check with your GitLab
- administrator why Git LFS is not enabled on the server. See
- [LFS administration documentation](../../../administration/lfs/index.md) for instructions
- on how to enable LFS support.
-
-- Git LFS client version is not supported by GitLab server. Check your Git LFS
- version with `git lfs version`. Check the Git configuration of the project for traces
- of deprecated API with `git lfs -l`. If `batch = false` is set in the configuration,
- remove the line and try to update your Git LFS client. Only version 1.0.1 and
- newer are supported.
-
-### `getsockopt: connection refused`
-
-If you push an LFS object to a project and receive an error like this,
-the LFS client is trying to reach GitLab through HTTPS. However, your GitLab
-instance is being served on HTTP:
-
-```plaintext
-Post <URL>/info/lfs/objects/batch: dial tcp IP: getsockopt: connection refused
-```
-
-This behavior is caused by Git LFS using HTTPS connections by default when a
-`lfsurl` is not set in the Git configuration.
-
-To prevent this from happening, set the LFS URL in project Git configuration:
-
-```shell
-git config --add lfs.url "http://gitlab.example.com/group/my-sample-project.git/info/lfs"
-```
-
-### Credentials are always required when pushing an object
-
-NOTE:
-With 8.12 GitLab added LFS support to SSH. The Git LFS communication
-still goes over HTTP, but now the SSH client passes the correct credentials
-to the Git LFS client. No action is required by the user.
-
-Git LFS authenticates the user with HTTP Basic Authentication on every push for
-every object, so user HTTPS credentials are required.
-
-By default, Git has support for remembering the credentials for each repository
-you use. For more information, see the [official Git documentation](https://git-scm.com/docs/gitcredentials).
-
-For example, you can tell Git to remember the password for a period of time in
-which you expect to push the objects:
-
-```shell
-git config --global credential.helper 'cache --timeout=3600'
-```
-
-This remembers the credentials for an hour, after which Git operations
-require re-authentication.
-
-If you are using OS X you can use `osxkeychain` to store and encrypt your credentials.
-For Windows, you can use `wincred` or Microsoft's [Git Credential Manager for Windows](https://github.com/Microsoft/Git-Credential-Manager-for-Windows/releases).
-
-More details about various methods of storing the user credentials can be found
-on [Git Credential Storage documentation](https://git-scm.com/book/en/v2/Git-Tools-Credential-Storage).
-
-### LFS objects are missing on push
-
-GitLab checks files to detect LFS pointers on push. If LFS pointers are detected, GitLab tries to verify that those files already exist in LFS on GitLab.
-
-Verify that LFS is installed locally and consider a manual push with `git lfs push --all`.
-
-If you are storing LFS files outside of GitLab you can disable LFS on the project by setting `lfs_enabled: false` with the [projects API](../../../api/projects.md#edit-project).
-
-### Hosting LFS objects externally
-
-It is possible to host LFS objects externally by setting a custom LFS URL with `git config -f .lfsconfig lfs.url https://example.com/<project>.git/info/lfs`.
-
-You might choose to do this if you are using an appliance like a Nexus Repository to store LFS data. If you choose to use an external LFS store,
-GitLab can't verify LFS objects. Pushes then fail if you have GitLab LFS support enabled.
-
-To stop push failure, LFS support can be disabled in the [Project settings](../../../user/project/settings/index.md), which also disables GitLab LFS value-adds (Verifying LFS objects, UI integration for LFS).
-
-### I/O timeout when pushing LFS objects
-
-You might get an error that states:
-
-```shell
-LFS: Put "http://your-instance.com/root/project.git/gitlab-lfs/objects/cc29e205d04a4062d0fb131700e8bfc8e54c44d0176a8dca22f40b24ef26d325/15": read tcp your-instance-ip:54544->your-instance-ip:443: i/o timeout
-error: failed to push some refs to 'ssh://your-instance.com:2222/root/project.git'
-```
-
-When network conditions are unstable, the Git LFS client might time out when trying to upload files
-if network conditions are unstable.
-
-The workaround is to set the client activity timeout a higher value.
-
-For example, to set the timeout to 60 seconds:
-
-```shell
-git config lfs.activitytimeout 60
-```
+- [Troubleshooting Git LFS](troubleshooting.md)
diff --git a/doc/topics/git/lfs/troubleshooting.md b/doc/topics/git/lfs/troubleshooting.md
new file mode 100644
index 00000000000..6aff564b3b5
--- /dev/null
+++ b/doc/topics/git/lfs/troubleshooting.md
@@ -0,0 +1,162 @@
+---
+stage: Create
+group: Source Code
+info: "To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/product/ux/technical-writing/#assignments"
+---
+
+# Troubleshooting Git LFS
+
+When working with Git LFS, you might encounter the following issues.
+
+## Encountered `n` files that should have been pointers, but weren't
+
+This error indicates the files are expected to be tracked by LFS, but
+the repository is not tracking them as LFS. This issue can be one
+potential reason for this error:
+[Files not tracked with LFS when uploaded through the web interface](https://gitlab.com/gitlab-org/gitlab/-/issues/326342#note_586820485)
+
+To resolve the problem, migrate the affected file (or files) and push back to the repository:
+
+1. Migrate the file to LFS:
+
+ ```shell
+ git lfs migrate import --yes --no-rewrite "<your-file>"
+ ```
+
+1. Push back to your repository:
+
+ ```shell
+ git push
+ ```
+
+1. Optional. Clean up your `.git` folder:
+
+ ```shell
+ git reflog expire --expire-unreachable=now --all
+ git gc --prune=now
+ ```
+
+## error: Repository or object not found
+
+This error can occur for a few reasons, including:
+
+- You don't have permissions to access certain LFS objects
+
+Check if you have permissions to push to the project or fetch from the project.
+
+- Project is not allowed to access the LFS object
+
+LFS object you are trying to push to the project or fetch from the project is not
+available to the project anymore. Probably the object was removed from the server.
+
+- Local Git repository is using deprecated LFS API
+
+## Invalid status for `<url>` : 501
+
+Git LFS logs the failures into a log file.
+To view this log file, while in project directory:
+
+```shell
+git lfs logs last
+```
+
+If the status `error 501` is shown, it is because:
+
+- Git LFS is not enabled in project settings. Check your project settings and
+ enable Git LFS.
+
+- Git LFS support is not enabled on the GitLab server. Check with your GitLab
+ administrator why Git LFS is not enabled on the server. See
+ [LFS administration documentation](../../../administration/lfs/index.md) for instructions
+ on how to enable LFS support.
+
+- Git LFS client version is not supported by GitLab server. Check your Git LFS
+ version with `git lfs version`. Check the Git configuration of the project for traces
+ of deprecated API with `git lfs -l`. If `batch = false` is set in the configuration,
+ remove the line and try to update your Git LFS client. Only version 1.0.1 and
+ newer are supported.
+
+## `getsockopt: connection refused`
+
+If you push an LFS object to a project and receive an error like this,
+the LFS client is trying to reach GitLab through HTTPS. However, your GitLab
+instance is being served on HTTP:
+
+```plaintext
+Post <URL>/info/lfs/objects/batch: dial tcp IP: getsockopt: connection refused
+```
+
+This behavior is caused by Git LFS using HTTPS connections by default when a
+`lfsurl` is not set in the Git configuration.
+
+To prevent this from happening, set the LFS URL in project Git configuration:
+
+```shell
+git config --add lfs.url "http://gitlab.example.com/group/my-sample-project.git/info/lfs"
+```
+
+## Credentials are always required when pushing an object
+
+NOTE:
+With 8.12 GitLab added LFS support to SSH. The Git LFS communication
+still goes over HTTP, but now the SSH client passes the correct credentials
+to the Git LFS client. No action is required by the user.
+
+Git LFS authenticates the user with HTTP Basic Authentication on every push for
+every object, so user HTTPS credentials are required.
+
+By default, Git has support for remembering the credentials for each repository
+you use. For more information, see the [official Git documentation](https://git-scm.com/docs/gitcredentials).
+
+For example, you can tell Git to remember the password for a period of time in
+which you expect to push the objects:
+
+```shell
+git config --global credential.helper 'cache --timeout=3600'
+```
+
+This remembers the credentials for an hour, after which Git operations
+require re-authentication.
+
+If you are using OS X you can use `osxkeychain` to store and encrypt your credentials.
+For Windows, you can use `wincred` or Microsoft's [Git Credential Manager for Windows](https://github.com/Microsoft/Git-Credential-Manager-for-Windows/releases).
+
+More details about various methods of storing the user credentials can be found
+on [Git Credential Storage documentation](https://git-scm.com/book/en/v2/Git-Tools-Credential-Storage).
+
+## LFS objects are missing on push
+
+GitLab checks files to detect LFS pointers on push. If LFS pointers are detected, GitLab tries to verify that those files already exist in LFS on GitLab.
+
+Verify that LFS is installed locally and consider a manual push with `git lfs push --all`.
+
+If you are storing LFS files outside of GitLab you can disable LFS on the project by setting `lfs_enabled: false` with the [projects API](../../../api/projects.md#edit-project).
+
+## Hosting LFS objects externally
+
+It is possible to host LFS objects externally by setting a custom LFS URL with `git config -f .lfsconfig lfs.url https://example.com/<project>.git/info/lfs`.
+
+You might choose to do this if you are using an appliance like a Nexus Repository to store LFS data. If you choose to use an external LFS store,
+GitLab can't verify LFS objects. Pushes then fail if you have GitLab LFS support enabled.
+
+To stop push failure, LFS support can be disabled in the [Project settings](../../../user/project/settings/index.md), which also disables GitLab LFS value-adds (Verifying LFS objects, UI integration for LFS).
+
+## I/O timeout when pushing LFS objects
+
+You might get an error that states:
+
+```shell
+LFS: Put "http://your-instance.com/root/project.git/gitlab-lfs/objects/cc29e205d04a4062d0fb131700e8bfc8e54c44d0176a8dca22f40b24ef26d325/15": read tcp your-instance-ip:54544->your-instance-ip:443: i/o timeout
+error: failed to push some refs to 'ssh://your-instance.com:2222/root/project.git'
+```
+
+When network conditions are unstable, the Git LFS client might time out
+when trying to upload files.
+
+The workaround is to set the client activity timeout to a higher value.
+
+For example, to set the timeout to 60 seconds:
+
+```shell
+git config lfs.activitytimeout 60
+```
diff --git a/lib/api/entities/container_registry.rb b/lib/api/entities/container_registry.rb
index cadd45cb0eb..359dbaf8697 100644
--- a/lib/api/entities/container_registry.rb
+++ b/lib/api/entities/container_registry.rb
@@ -24,6 +24,7 @@ module API
expose :delete_api_path, if: ->(object, options) { Ability.allowed?(options[:user], :admin_container_image, object) },
documentation: { type: 'string', example: 'delete/api/path' }
expose :size, if: -> (_, options) { options[:size] }, documentation: { type: 'integer', example: 12345 }
+ expose :status, documentation: { type: 'string', example: 'delete_scheduled' }
private
diff --git a/lib/feature.rb b/lib/feature.rb
index d6f34cf195f..4b3ebab6dce 100644
--- a/lib/feature.rb
+++ b/lib/feature.rb
@@ -4,6 +4,33 @@ require 'flipper/adapters/active_record'
require 'flipper/adapters/active_support_cache_store'
module Feature
+ module BypassLoadBalancer
+ FLAG = 'FEATURE_FLAGS_BYPASS_LOAD_BALANCER'
+ class FlipperRecord < ActiveRecord::Base # rubocop:disable Rails/ApplicationRecord -- This class perfectly replaces
+ # Flipper::Adapters::ActiveRecord::Model, which inherits ActiveRecord::Base
+ include DatabaseReflection
+ self.abstract_class = true
+
+ # Bypass the load balancer by restoring the default behavior of `connection`
+ # before the load balancer patches ActiveRecord::Base
+ def self.connection
+ retrieve_connection
+ end
+ end
+
+ class FlipperFeature < FlipperRecord
+ self.table_name = 'features'
+ end
+
+ class FlipperGate < FlipperRecord
+ self.table_name = 'feature_gates'
+ end
+
+ def self.enabled?
+ Gitlab::Utils.to_boolean(ENV[FLAG], default: false)
+ end
+ end
+
# Classes to override flipper table names
class FlipperFeature < Flipper::Adapters::ActiveRecord::Feature
include DatabaseReflection
@@ -70,7 +97,8 @@ module Feature
end
def persisted_names
- return [] unless ApplicationRecord.database.exists?
+ model = BypassLoadBalancer.enabled? ? BypassLoadBalancer::FlipperRecord : ApplicationRecord
+ return [] unless model.database.exists?
# This loads names of all stored feature flags
# and returns a stable Set in the following order:
@@ -306,7 +334,9 @@ module Feature
def unsafe_get(key)
# During setup the database does not exist yet. So we haven't stored a value
# for the feature yet and return the default.
- return unless ApplicationRecord.database.exists?
+
+ model = BypassLoadBalancer.enabled? ? BypassLoadBalancer::FlipperRecord : ApplicationRecord
+ return unless model.database.exists?
flag_stack = ::Thread.current[:feature_flag_recursion_check] || []
Thread.current[:feature_flag_recursion_check] = flag_stack
@@ -340,10 +370,15 @@ module Feature
end
def build_flipper_instance(memoize: false)
- active_record_adapter = Flipper::Adapters::ActiveRecord.new(
- feature_class: FlipperFeature,
- gate_class: FlipperGate)
-
+ active_record_adapter = if BypassLoadBalancer.enabled?
+ Flipper::Adapters::ActiveRecord.new(
+ feature_class: BypassLoadBalancer::FlipperFeature,
+ gate_class: BypassLoadBalancer::FlipperGate)
+ else
+ Flipper::Adapters::ActiveRecord.new(
+ feature_class: FlipperFeature,
+ gate_class: FlipperGate)
+ end
# Redis L2 cache
redis_cache_adapter =
ActiveSupportCacheStoreAdapter.new(
diff --git a/lib/gitlab/ci/variables/downstream/generator.rb b/lib/gitlab/ci/variables/downstream/generator.rb
index b0e45ac0413..e1fd8200dd6 100644
--- a/lib/gitlab/ci/variables/downstream/generator.rb
+++ b/lib/gitlab/ci/variables/downstream/generator.rb
@@ -5,8 +5,6 @@ module Gitlab
module Variables
module Downstream
class Generator
- include Gitlab::Utils::StrongMemoize
-
Context = Struct.new(:all_bridge_variables, :expand_file_refs, keyword_init: true)
def initialize(bridge)
diff --git a/locale/gitlab.pot b/locale/gitlab.pot
index 54b97902c1e..fa4221fc2a9 100644
--- a/locale/gitlab.pot
+++ b/locale/gitlab.pot
@@ -6690,9 +6690,6 @@ msgstr ""
msgid "AsanaService|User Personal Access Token. User must have access to the task. All comments are attributed to this user."
msgstr ""
-msgid "Ask GitLab Duo"
-msgstr ""
-
msgid "Ask a maintainer to check the import status for more details."
msgstr ""
@@ -47784,23 +47781,14 @@ msgstr ""
msgid "TanukiBot|Give feedback"
msgstr ""
-msgid "TanukiBot|How to use GitLab"
-msgstr ""
-
msgid "TanukiBot|Source"
msgid_plural "TanukiBot|Sources"
msgstr[0] ""
msgstr[1] ""
-msgid "TanukiBot|The issue, epic, or code you're viewing"
-msgstr ""
-
msgid "TanukiBot|There was an error communicating with GitLab Duo Chat. Please try again later."
msgstr ""
-msgid "TanukiBot|Use AI to answer questions about things like:"
-msgstr ""
-
msgid "TanukiBot|What is a fork?"
msgstr ""
@@ -49018,6 +49006,9 @@ msgstr ""
msgid "There was a problem fetching project users."
msgstr ""
+msgid "There was a problem fetching projects."
+msgstr ""
+
msgid "There was a problem fetching recent groups."
msgstr ""
diff --git a/spec/frontend/ci/job_details/components/log/collapsible_section_spec.js b/spec/frontend/ci/job_details/components/log/collapsible_section_spec.js
deleted file mode 100644
index 5abf2a5ce53..00000000000
--- a/spec/frontend/ci/job_details/components/log/collapsible_section_spec.js
+++ /dev/null
@@ -1,103 +0,0 @@
-import { mount } from '@vue/test-utils';
-import { nextTick } from 'vue';
-import CollapsibleSection from '~/ci/job_details/components/log/collapsible_section.vue';
-import LogLine from '~/ci/job_details/components/log/line.vue';
-import LogLineHeader from '~/ci/job_details/components/log/line_header.vue';
-import { collapsibleSectionClosed, collapsibleSectionOpened } from './mock_data';
-
-describe('Job Log Collapsible Section', () => {
- let wrapper;
-
- const jobLogEndpoint = 'jobs/335';
-
- const findLogLineHeader = () => wrapper.findComponent(LogLineHeader);
- const findLogLineHeaderSvg = () => findLogLineHeader().find('svg');
- const findLogLines = () => wrapper.findAllComponents(LogLine);
-
- const createComponent = (props = {}) => {
- wrapper = mount(CollapsibleSection, {
- propsData: {
- ...props,
- },
- });
- };
-
- describe('with closed section', () => {
- beforeEach(() => {
- createComponent({
- section: collapsibleSectionClosed,
- jobLogEndpoint,
- });
- });
-
- it('renders clickable header line', () => {
- expect(findLogLineHeader().text()).toBe('1 foo');
- expect(findLogLineHeader().attributes('role')).toBe('button');
- });
-
- it('renders an icon with a closed state', () => {
- expect(findLogLineHeaderSvg().attributes('data-testid')).toBe('chevron-lg-right-icon');
- });
-
- it('does not render collapsed lines', () => {
- expect(findLogLines()).toHaveLength(0);
- });
- });
-
- describe('with opened section', () => {
- beforeEach(() => {
- createComponent({
- section: collapsibleSectionOpened,
- jobLogEndpoint,
- });
- });
-
- it('renders clickable header line', () => {
- expect(findLogLineHeader().text()).toContain('foo');
- expect(findLogLineHeader().attributes('role')).toBe('button');
- });
-
- it('renders an icon with the open state', () => {
- expect(findLogLineHeaderSvg().attributes('data-testid')).toBe('chevron-lg-down-icon');
- });
-
- it('renders collapsible lines', () => {
- expect(findLogLines().at(0).text()).toContain('this is a collapsible nested section');
- expect(findLogLines()).toHaveLength(collapsibleSectionOpened.lines.length);
- });
- });
-
- it('emits onClickCollapsibleLine on click', async () => {
- createComponent({
- section: collapsibleSectionOpened,
- jobLogEndpoint,
- });
-
- findLogLineHeader().trigger('click');
-
- await nextTick();
- expect(wrapper.emitted('onClickCollapsibleLine').length).toBe(1);
- });
-
- describe('with search results', () => {
- it('passes isHighlighted prop correctly', () => {
- const mockSearchResults = [
- {
- content: [{ text: 'foo' }],
- lineNumber: 1,
- offset: 5,
- section: 'prepare-script',
- section_header: true,
- },
- ];
-
- createComponent({
- section: collapsibleSectionOpened,
- jobLogEndpoint,
- searchResults: mockSearchResults,
- });
-
- expect(findLogLineHeader().props('isHighlighted')).toBe(true);
- });
- });
-});
diff --git a/spec/frontend/ci/job_details/components/log/line_header_spec.js b/spec/frontend/ci/job_details/components/log/line_header_spec.js
index c75f5fa30d5..0ac33f5aa5a 100644
--- a/spec/frontend/ci/job_details/components/log/line_header_spec.js
+++ b/spec/frontend/ci/job_details/components/log/line_header_spec.js
@@ -95,12 +95,14 @@ describe('Job Log Header Line', () => {
});
describe('with duration', () => {
- beforeEach(() => {
+ it('renders the duration badge', () => {
createComponent({ ...defaultProps, duration: '00:10' });
+ expect(wrapper.findComponent(DurationBadge).exists()).toBe(true);
});
- it('renders the duration badge', () => {
- expect(wrapper.findComponent(DurationBadge).exists()).toBe(true);
+ it('does not render the duration badge with hidden duration', () => {
+ createComponent({ ...defaultProps, hideDuration: true, duration: '00:10' });
+ expect(wrapper.findComponent(DurationBadge).exists()).toBe(false);
});
});
diff --git a/spec/frontend/ci/job_details/components/log/log_spec.js b/spec/frontend/ci/job_details/components/log/log_spec.js
index 1931d5046dc..a791732ec9b 100644
--- a/spec/frontend/ci/job_details/components/log/log_spec.js
+++ b/spec/frontend/ci/job_details/components/log/log_spec.js
@@ -6,6 +6,7 @@ import waitForPromises from 'helpers/wait_for_promises';
import { scrollToElement } from '~/lib/utils/common_utils';
import Log from '~/ci/job_details/components/log/log.vue';
import LogLineHeader from '~/ci/job_details/components/log/line_header.vue';
+import LineNumber from '~/ci/job_details/components/log/line_number.vue';
import { logLinesParser } from '~/ci/job_details/store/utils';
import { mockJobLog, mockJobLogLineCount } from './mock_data';
@@ -24,6 +25,8 @@ describe('Job Log', () => {
Vue.use(Vuex);
const createComponent = (props) => {
+ store = new Vuex.Store({ actions, state });
+
wrapper = mount(Log, {
propsData: {
...props,
@@ -38,17 +41,16 @@ describe('Job Log', () => {
toggleCollapsibleLine: toggleCollapsibleLineMock,
};
+ const { lines, sections } = logLinesParser(mockJobLog);
+
state = {
- jobLog: logLinesParser(mockJobLog),
+ jobLog: lines,
+ jobLogSections: sections,
jobLogEndpoint: 'jobs/id',
};
-
- store = new Vuex.Store({
- actions,
- state,
- });
});
+ const findLineNumbers = () => wrapper.findAllComponents(LineNumber).wrappers.map((w) => w.text());
const findCollapsibleLine = () => wrapper.findComponent(LogLineHeader);
const findAllCollapsibleLines = () => wrapper.findAllComponents(LogLineHeader);
@@ -57,18 +59,13 @@ describe('Job Log', () => {
createComponent();
});
- it.each([...Array(mockJobLogLineCount).keys()])(
- 'renders a line number for each line %d',
- (index) => {
- const lineNumber = wrapper
- .findAll('.js-log-line')
- .at(index)
- .find(`#L${index + 1}`);
+ it('renders a line number for each line %d', () => {
+ const expectLineNumbers = Array(mockJobLogLineCount)
+ .fill()
+ .map((_, i) => `${i + 1}`);
- expect(lineNumber.text()).toBe(`${index + 1}`);
- expect(lineNumber.attributes('href')).toBe(`${state.jobLogEndpoint}#L${index + 1}`);
- },
- );
+ expect(findLineNumbers()).toEqual(expectLineNumbers);
+ });
});
describe('collapsible sections', () => {
@@ -93,6 +90,40 @@ describe('Job Log', () => {
expect(toggleCollapsibleLineMock).toHaveBeenCalled();
});
});
+
+ describe('duration', () => {
+ it('shows duration', () => {
+ expect(findCollapsibleLine().props('duration')).toBe('00:00');
+ expect(findCollapsibleLine().props('hideDuration')).toBe(false);
+ });
+
+ it('hides duration', () => {
+ state.jobLogSections['resolve-secrets'].hideDuration = true;
+ createComponent();
+
+ expect(findCollapsibleLine().props('duration')).toBe('00:00');
+ expect(findCollapsibleLine().props('hideDuration')).toBe(true);
+ });
+ });
+
+ describe('when a section is collapsed', () => {
+ beforeEach(() => {
+ state.jobLogSections['prepare-executor'].isClosed = true;
+
+ createComponent();
+ });
+
+ it('hides lines in section', () => {
+ expect(findLineNumbers()).toEqual([
+ '1',
+ '2',
+ '3',
+ '4',
+ // closed section not shown
+ '7',
+ ]);
+ });
+ });
});
describe('anchor scrolling', () => {
@@ -119,19 +150,19 @@ describe('Job Log', () => {
it('scrolls to line number', async () => {
createComponent();
- state.jobLog = logLinesParser(mockJobLog, [], '#L6');
+ state.jobLog = logLinesParser(mockJobLog, [], '#L6').lines;
await waitForPromises();
expect(scrollToElement).toHaveBeenCalledTimes(1);
- state.jobLog = logLinesParser(mockJobLog, [], '#L7');
+ state.jobLog = logLinesParser(mockJobLog, [], '#L6').lines;
await waitForPromises();
expect(scrollToElement).toHaveBeenCalledTimes(1);
});
it('line number within collapsed section is visible', () => {
- state.jobLog = logLinesParser(mockJobLog, [], '#L6');
+ state.jobLog = logLinesParser(mockJobLog, [], '#L6').lines;
createComponent();
@@ -150,7 +181,6 @@ describe('Job Log', () => {
},
],
section: 'prepare-executor',
- section_header: true,
lineNumber: 3,
},
];
diff --git a/spec/frontend/ci/job_details/components/log/mock_data.js b/spec/frontend/ci/job_details/components/log/mock_data.js
index d9b1354f475..066f783586b 100644
--- a/spec/frontend/ci/job_details/components/log/mock_data.js
+++ b/spec/frontend/ci/job_details/components/log/mock_data.js
@@ -65,141 +65,182 @@ export const mockContentSection = [
},
];
-export const mockJobLog = [...mockJobLines, ...mockEmptySection, ...mockContentSection];
-
-export const mockJobLogLineCount = 6; // `text` entries in mockJobLog
-
-export const originalTrace = [
+export const mockJobLogEnd = [
{
- offset: 1,
- content: [
- {
- text: 'Downloading',
- },
- ],
+ offset: 1008,
+ content: [{ text: 'Job succeeded' }],
},
];
-export const regularIncremental = [
- {
- offset: 2,
- content: [
- {
- text: 'log line',
- },
- ],
- },
+export const mockJobLog = [
+ ...mockJobLines,
+ ...mockEmptySection,
+ ...mockContentSection,
+ ...mockJobLogEnd,
];
-export const regularIncrementalRepeated = [
+export const mockJobLogLineCount = 7; // `text` entries in mockJobLog
+
+export const mockContentSectionClosed = [
{
- offset: 1,
+ offset: 0,
content: [
{
- text: 'log line',
+ text: 'Using Docker executor with image dev.gitlab.org3',
},
],
+ section: 'mock-closed-section',
+ section_header: true,
+ section_options: { collapsed: true },
+ },
+ {
+ offset: 1003,
+ content: [{ text: 'Docker executor with image registry.gitlab.com ...' }],
+ section: 'mock-closed-section',
+ },
+ {
+ offset: 1004,
+ content: [{ text: 'Starting service ...', style: 'term-fg-l-green' }],
+ section: 'mock-closed-section',
+ },
+ {
+ offset: 1005,
+ content: [],
+ section: 'mock-closed-section',
+ section_footer: true,
+ section_duration: '00:09',
},
];
-export const headerTrace = [
+export const mockContentSectionHiddenDuration = [
{
- offset: 1,
+ offset: 0,
+ content: [{ text: 'Line 1' }],
+ section: 'mock-hidden-duration-section',
section_header: true,
- content: [
- {
- text: 'log line',
- },
- ],
- section: 'section',
+ section_options: { hide_duration: 'true' },
+ },
+ {
+ offset: 1001,
+ content: [{ text: 'Line 2' }],
+ section: 'mock-hidden-duration-section',
+ },
+ {
+ offset: 1002,
+ content: [],
+ section: 'mock-hidden-duration-section',
+ section_footer: true,
+ section_duration: '00:09',
},
];
-export const headerTraceIncremental = [
+export const mockContentSubsection = [
{
- offset: 1,
+ offset: 0,
+ content: [{ text: 'Line 1' }],
+ section: 'mock-section',
section_header: true,
- content: [
- {
- text: 'updated log line',
- },
- ],
- section: 'section',
},
-];
-
-export const collapsibleTrace = [
{
- offset: 1,
+ offset: 1002,
+ content: [{ text: 'Line 2 - section content' }],
+ section: 'mock-section',
+ },
+ {
+ offset: 1003,
+ content: [{ text: 'Line 3 - sub section header' }],
+ section: 'sub-section',
section_header: true,
- content: [
- {
- text: 'log line',
- },
- ],
- section: 'section',
},
{
- offset: 2,
- content: [
- {
- text: 'log line',
- },
- ],
- section: 'section',
+ offset: 1004,
+ content: [{ text: 'Line 4 - sub section content' }],
+ section: 'sub-section',
+ },
+ {
+ offset: 1005,
+ content: [{ text: 'Line 5 - sub sub section header with no content' }],
+ section: 'sub-sub-section',
+ section_header: true,
+ },
+ {
+ offset: 1006,
+ content: [],
+ section: 'sub-sub-section',
+ section_footer: true,
+ section_duration: '00:00',
+ },
+
+ {
+ offset: 1007,
+ content: [{ text: 'Line 6 - sub section content 2' }],
+ section: 'sub-section',
+ },
+ {
+ offset: 1008,
+ content: [],
+ section: 'sub-section',
+ section_footer: true,
+ section_duration: '00:29',
+ },
+ {
+ offset: 1009,
+ content: [{ text: 'Line 7 - section content' }],
+ section: 'mock-section',
+ },
+ {
+ offset: 1010,
+ content: [],
+ section: 'mock-section',
+ section_footer: true,
+ section_duration: '00:59',
+ },
+ {
+ offset: 1011,
+ content: [{ text: 'Job succeeded' }],
},
];
-export const collapsibleTraceIncremental = [
+export const mockTruncatedBottomSection = [
+ // only the top of a section is obtained, such as when a job gets cancelled
{
- offset: 2,
+ offset: 1004,
content: [
{
- text: 'updated log line',
+ text: 'Starting job',
},
],
- section: 'section',
+ section: 'mock-section',
+ section_header: true,
+ },
+ {
+ offset: 1005,
+ content: [{ text: 'Job interrupted' }],
+ section: 'mock-section',
},
];
-export const collapsibleSectionClosed = {
- offset: 5,
- section_header: true,
- isHeader: true,
- isClosed: true,
- line: {
- content: [{ text: 'foo' }],
- section: 'prepare-script',
- lineNumber: 1,
- },
- section_duration: '00:03',
- lines: [
- {
- offset: 80,
- content: [{ text: 'this is a collapsible nested section' }],
- section: 'prepare-script',
- lineNumber: 2,
- },
- ],
-};
-
-export const collapsibleSectionOpened = {
- offset: 5,
- section_header: true,
- isHeader: true,
- isClosed: false,
- line: {
- content: [{ text: 'foo' }],
- section: 'prepare-script',
- lineNumber: 1,
- },
- section_duration: '00:03',
- lines: [
- {
- offset: 80,
- content: [{ text: 'this is a collapsible nested section' }],
- section: 'prepare-script',
- lineNumber: 2,
- },
- ],
-};
+export const mockTruncatedTopSection = [
+ // only the bottom half of a section is obtained, such as when jobs are cut off due to large sizes
+ {
+ offset: 1008,
+ content: [{ text: 'Line N - incomplete section content' }],
+ section: 'mock-section',
+ },
+ {
+ offset: 1009,
+ content: [{ text: 'Line N+1 - incomplete section content' }],
+ section: 'mock-section',
+ },
+ {
+ offset: 1010,
+ content: [],
+ section: 'mock-section',
+ section_footer: true,
+ section_duration: '00:59',
+ },
+ {
+ offset: 1011,
+ content: [{ text: 'Job succeeded' }],
+ },
+];
diff --git a/spec/frontend/ci/job_details/store/mutations_spec.js b/spec/frontend/ci/job_details/store/mutations_spec.js
index 601dff47584..87387fe9272 100644
--- a/spec/frontend/ci/job_details/store/mutations_spec.js
+++ b/spec/frontend/ci/job_details/store/mutations_spec.js
@@ -113,7 +113,7 @@ describe('Jobs Store Mutations', () => {
it('sets the parsed log', () => {
mutations[types.RECEIVE_JOB_LOG_SUCCESS](stateCopy, mockLog);
- expect(utils.logLinesParser).toHaveBeenCalledWith(mockLog.lines, [], '');
+ expect(utils.logLinesParser).toHaveBeenCalledWith(mockLog.lines, {}, '');
expect(stateCopy.jobLog).toEqual([
{
@@ -133,7 +133,7 @@ describe('Jobs Store Mutations', () => {
it('sets the parsed log', () => {
mutations[types.RECEIVE_JOB_LOG_SUCCESS](stateCopy, mockLog);
- expect(utils.logLinesParser).toHaveBeenCalledWith(mockLog.lines, [], '#L1');
+ expect(utils.logLinesParser).toHaveBeenCalledWith(mockLog.lines, {}, '#L1');
expect(stateCopy.jobLog).toEqual([
{
@@ -214,9 +214,17 @@ describe('Jobs Store Mutations', () => {
describe('TOGGLE_COLLAPSIBLE_LINE', () => {
it('toggles the `isClosed` property of the provided object', () => {
- const section = { isClosed: true };
- mutations[types.TOGGLE_COLLAPSIBLE_LINE](stateCopy, section);
- expect(section.isClosed).toEqual(false);
+ stateCopy.jobLogSections = {
+ 'step-script': { isClosed: true },
+ };
+
+ mutations[types.TOGGLE_COLLAPSIBLE_LINE](stateCopy, 'step-script');
+
+ expect(stateCopy.jobLogSections['step-script'].isClosed).toEqual(false);
+
+ mutations[types.TOGGLE_COLLAPSIBLE_LINE](stateCopy, 'step-script');
+
+ expect(stateCopy.jobLogSections['step-script'].isClosed).toEqual(true);
});
});
diff --git a/spec/frontend/ci/job_details/store/utils_spec.js b/spec/frontend/ci/job_details/store/utils_spec.js
index 8fc4eeb0ca8..6105c53a306 100644
--- a/spec/frontend/ci/job_details/store/utils_spec.js
+++ b/spec/frontend/ci/job_details/store/utils_spec.js
@@ -1,524 +1,305 @@
+import { logLinesParser } from '~/ci/job_details/store/utils';
+
import {
- logLinesParser,
- updateIncrementalJobLog,
- parseHeaderLine,
- parseLine,
- addDurationToHeader,
- isCollapsibleSection,
- findOffsetAndRemove,
- getNextLineNumber,
-} from '~/ci/job_details/store/utils';
-import {
- mockJobLog,
- originalTrace,
- regularIncremental,
- regularIncrementalRepeated,
- headerTrace,
- headerTraceIncremental,
- collapsibleTrace,
- collapsibleTraceIncremental,
+ mockJobLines,
+ mockEmptySection,
+ mockContentSection,
+ mockContentSectionClosed,
+ mockContentSectionHiddenDuration,
+ mockContentSubsection,
+ mockTruncatedBottomSection,
+ mockTruncatedTopSection,
} from '../components/log/mock_data';
describe('Jobs Store Utils', () => {
- describe('parseHeaderLine', () => {
- it('returns a new object with the header keys and the provided line parsed', () => {
- const headerLine = { content: [{ text: 'foo' }] };
- const parsedHeaderLine = parseHeaderLine(headerLine, 2);
+ describe('logLinesParser', () => {
+ it('parses plain lines', () => {
+ const result = logLinesParser(mockJobLines);
- expect(parsedHeaderLine).toEqual({
- isClosed: false,
- isHeader: true,
- line: {
- ...headerLine,
- lineNumber: 2,
- },
- lines: [],
+ expect(result).toEqual({
+ lines: [
+ {
+ offset: 0,
+ content: [
+ {
+ text: 'Running with gitlab-runner 12.1.0 (de7731dd)',
+ style: 'term-fg-l-cyan term-bold',
+ },
+ ],
+ lineNumber: 1,
+ },
+ {
+ offset: 1001,
+ content: [{ text: ' on docker-auto-scale-com 8a6210b8' }],
+ lineNumber: 2,
+ },
+ ],
+ sections: {},
});
});
- it('pre-closes a section when specified in options', () => {
- const headerLine = { content: [{ text: 'foo' }], section_options: { collapsed: 'true' } };
-
- const parsedHeaderLine = parseHeaderLine(headerLine, 2);
-
- expect(parsedHeaderLine.isClosed).toBe(true);
- });
-
- it('expands all pre-closed sections if hash is present', () => {
- const headerLine = { content: [{ text: 'foo' }], section_options: { collapsed: 'true' } };
-
- const parsedHeaderLine = parseHeaderLine(headerLine, 2, '#L33');
-
- expect(parsedHeaderLine.isClosed).toBe(false);
- });
- });
-
- describe('parseLine', () => {
- it('returns a new object with the lineNumber key added to the provided line object', () => {
- const line = { content: [{ text: 'foo' }] };
- const parsed = parseLine(line, 1);
- expect(parsed.content).toEqual(line.content);
- expect(parsed.lineNumber).toEqual(1);
- });
- });
+ it('parses an empty section', () => {
+ const result = logLinesParser(mockEmptySection);
- describe('addDurationToHeader', () => {
- const duration = {
- offset: 106,
- content: [],
- section: 'prepare-script',
- section_duration: '00:03',
- };
-
- it('adds the section duration to the correct header', () => {
- const parsed = [
- {
- isClosed: false,
- isHeader: true,
- line: {
- section: 'prepare-script',
- content: [{ text: 'foo' }],
+ expect(result).toEqual({
+ lines: [
+ {
+ offset: 1002,
+ content: [
+ {
+ text: 'Resolving secrets',
+ style: 'term-fg-l-cyan term-bold',
+ },
+ ],
+ lineNumber: 1,
+ section: 'resolve-secrets',
+ isHeader: true,
},
- lines: [],
- },
- {
- isClosed: false,
- isHeader: true,
- line: {
- section: 'foo-bar',
- content: [{ text: 'foo' }],
+ ],
+ sections: {
+ 'resolve-secrets': {
+ startLineNumber: 1,
+ endLineNumber: 1,
+ duration: '00:00',
+ isClosed: false,
},
- lines: [],
},
- ];
-
- addDurationToHeader(parsed, duration);
-
- expect(parsed[0].line.section_duration).toEqual(duration.section_duration);
- expect(parsed[1].line.section_duration).toEqual(undefined);
+ });
});
- it('does not add the section duration when the headers do not match', () => {
- const parsed = [
- {
- isClosed: false,
- isHeader: true,
- line: {
- section: 'bar-foo',
- content: [{ text: 'foo' }],
+ it('parses a section with content', () => {
+ const result = logLinesParser(mockContentSection);
+
+ expect(result).toEqual({
+ lines: [
+ {
+ content: [{ text: 'Using Docker executor with image dev.gitlab.org3' }],
+ isHeader: true,
+ lineNumber: 1,
+ offset: 1004,
+ section: 'prepare-executor',
},
- lines: [],
- },
- {
- isClosed: false,
- isHeader: true,
- line: {
- section: 'foo-bar',
- content: [{ text: 'foo' }],
+ {
+ content: [{ text: 'Docker executor with image registry.gitlab.com ...' }],
+ lineNumber: 2,
+ offset: 1005,
+ section: 'prepare-executor',
+ },
+ {
+ content: [{ style: 'term-fg-l-green', text: 'Starting service ...' }],
+ lineNumber: 3,
+ offset: 1006,
+ section: 'prepare-executor',
+ },
+ ],
+ sections: {
+ 'prepare-executor': {
+ startLineNumber: 1,
+ endLineNumber: 3,
+ duration: '00:09',
+ isClosed: false,
},
- lines: [],
- },
- ];
- addDurationToHeader(parsed, duration);
-
- expect(parsed[0].line.section_duration).toEqual(undefined);
- expect(parsed[1].line.section_duration).toEqual(undefined);
- });
-
- it('does not add when content has no headers', () => {
- const parsed = [
- {
- section: 'bar-foo',
- content: [{ text: 'foo' }],
- lineNumber: 1,
- },
- {
- section: 'foo-bar',
- content: [{ text: 'foo' }],
- lineNumber: 2,
},
- ];
-
- addDurationToHeader(parsed, duration);
-
- expect(parsed[0].line).toEqual(undefined);
- expect(parsed[1].line).toEqual(undefined);
- });
- });
-
- describe('isCollapsibleSection', () => {
- const header = {
- isHeader: true,
- line: {
- section: 'foo',
- },
- };
- const line = {
- lineNumber: 1,
- section: 'foo',
- content: [],
- };
-
- it('returns true when line belongs to the last section', () => {
- expect(isCollapsibleSection([header], header, { section: 'foo', content: [] })).toEqual(true);
- });
-
- it('returns false when last line was not an header', () => {
- expect(isCollapsibleSection([line], line, { section: 'bar' })).toEqual(false);
- });
-
- it('returns false when accumulator is empty', () => {
- expect(isCollapsibleSection([], { isHeader: true }, { section: 'bar' })).toEqual(false);
- });
-
- it('returns false when section_duration is defined', () => {
- expect(isCollapsibleSection([header], header, { section_duration: '10:00' })).toEqual(false);
- });
-
- it('returns false when `section` is not a match', () => {
- expect(isCollapsibleSection([header], header, { section: 'bar' })).toEqual(false);
- });
-
- it('returns false when no parameters are provided', () => {
- expect(isCollapsibleSection()).toEqual(false);
- });
- });
- describe('logLinesParser', () => {
- let result;
-
- beforeEach(() => {
- result = logLinesParser(mockJobLog);
- });
-
- describe('regular line', () => {
- it('adds a lineNumber property with correct index', () => {
- expect(result[0].lineNumber).toEqual(1);
- expect(result[1].lineNumber).toEqual(2);
- expect(result[2].line.lineNumber).toEqual(3);
- expect(result[3].line.lineNumber).toEqual(4);
- expect(result[3].lines[0].lineNumber).toEqual(5);
- expect(result[3].lines[1].lineNumber).toEqual(6);
});
});
- describe('collapsible section', () => {
- it('adds a `isClosed` property', () => {
- expect(result[2].isClosed).toEqual(false);
- expect(result[3].isClosed).toEqual(false);
- });
-
- it('adds a `isHeader` property', () => {
- expect(result[2].isHeader).toEqual(true);
- expect(result[3].isHeader).toEqual(true);
- });
+ it('parses a closed section with content', () => {
+ const result = logLinesParser(mockContentSectionClosed);
- it('creates a lines array property with the content of the collapsible section', () => {
- expect(result[3].lines.length).toEqual(2);
- expect(result[3].lines[0].content).toEqual(mockJobLog[5].content);
- expect(result[3].lines[1].content).toEqual(mockJobLog[6].content);
+ expect(result.sections['mock-closed-section']).toMatchObject({
+ isClosed: true,
});
});
- describe('section duration', () => {
- it('adds the section information to the header section', () => {
- expect(result[2].line.section_duration).toEqual(mockJobLog[3].section_duration);
- expect(result[3].line.section_duration).toEqual(mockJobLog[7].section_duration);
- });
-
- it('does not add section duration as a line', () => {
- expect(result[2].lines.includes(mockJobLog[5])).toEqual(false);
- expect(result[3].lines.includes(mockJobLog[9])).toEqual(false);
- });
- });
- });
-
- describe('findOffsetAndRemove', () => {
- describe('when last item is header', () => {
- const existingLog = [
- {
- isHeader: true,
- isClosed: false,
- line: { content: [{ text: 'bar' }], offset: 10, lineNumber: 1 },
- },
- ];
-
- describe('and matches the offset', () => {
- it('returns an array with the item removed', () => {
- const newData = [{ offset: 10, content: [{ text: 'foobar' }] }];
- const result = findOffsetAndRemove(newData, existingLog);
-
- expect(result).toEqual([]);
- });
- });
+ it('parses a closed section as open when hash is present', () => {
+ const result = logLinesParser(mockContentSectionClosed, {}, '#L1');
- describe('and does not match the offset', () => {
- it('returns the provided existing log', () => {
- const newData = [{ offset: 110, content: [{ text: 'foobar' }] }];
- const result = findOffsetAndRemove(newData, existingLog);
-
- expect(result).toEqual(existingLog);
- });
- });
- });
-
- describe('when last item is a regular line', () => {
- const existingLog = [{ content: [{ text: 'bar' }], offset: 10, lineNumber: 1 }];
-
- describe('and matches the offset', () => {
- it('returns an array with the item removed', () => {
- const newData = [{ offset: 10, content: [{ text: 'foobar' }] }];
- const result = findOffsetAndRemove(newData, existingLog);
-
- expect(result).toEqual([]);
- });
- });
-
- describe('and does not match the fofset', () => {
- it('returns the provided old log', () => {
- const newData = [{ offset: 101, content: [{ text: 'foobar' }] }];
- const result = findOffsetAndRemove(newData, existingLog);
-
- expect(result).toEqual(existingLog);
- });
+ expect(result.sections['mock-closed-section']).toMatchObject({
+ isClosed: false,
});
});
- describe('when last item is nested', () => {
- const existingLog = [
- {
- isHeader: true,
- isClosed: false,
- lines: [{ offset: 101, content: [{ text: 'foobar' }], lineNumber: 2 }],
- line: {
- offset: 10,
- lineNumber: 1,
- section_duration: '10:00',
- },
- },
- ];
-
- describe('and matches the offset', () => {
- it('returns an array with the last nested line item removed', () => {
- const newData = [{ offset: 101, content: [{ text: 'foobar' }] }];
+ it('parses a section with a hidden duration', () => {
+ const result = logLinesParser(mockContentSectionHiddenDuration);
- const result = findOffsetAndRemove(newData, existingLog);
- expect(result[0].lines).toEqual([]);
- });
- });
-
- describe('and does not match the offset', () => {
- it('returns the provided old log', () => {
- const newData = [{ offset: 120, content: [{ text: 'foobar' }] }];
-
- const result = findOffsetAndRemove(newData, existingLog);
- expect(result).toEqual(existingLog);
- });
+ expect(result.sections['mock-hidden-duration-section']).toMatchObject({
+ hideDuration: true,
+ duration: '00:09',
});
});
- describe('when no data is provided', () => {
- it('returns an empty array', () => {
- const result = findOffsetAndRemove();
- expect(result).toEqual([]);
- });
- });
- });
-
- describe('getNextLineNumber', () => {
- describe('when there is no previous log', () => {
- it('returns 1', () => {
- expect(getNextLineNumber([])).toEqual(1);
- expect(getNextLineNumber(undefined)).toEqual(1);
- });
- });
+ it('parses a section with a sub section', () => {
+ const result = logLinesParser(mockContentSubsection);
- describe('when last line is 1', () => {
- it('returns 1', () => {
- const log = [
+ expect(result).toEqual({
+ lines: [
{
- content: [],
+ offset: 0,
+ content: [{ text: 'Line 1' }],
lineNumber: 1,
+ section: 'mock-section',
+ isHeader: true,
},
- ];
-
- expect(getNextLineNumber(log)).toEqual(2);
- });
- });
-
- describe('with unnested line', () => {
- it('returns the lineNumber of the last item in the array', () => {
- const log = [
{
- content: [],
- lineNumber: 10,
+ offset: 1002,
+ content: [{ text: 'Line 2 - section content' }],
+ lineNumber: 2,
+ section: 'mock-section',
},
{
- content: [],
- lineNumber: 101,
+ offset: 1003,
+ content: [{ text: 'Line 3 - sub section header' }],
+ lineNumber: 3,
+ section: 'sub-section',
+ isHeader: true,
},
- ];
-
- expect(getNextLineNumber(log)).toEqual(102);
- });
- });
-
- describe('when last line is the header section', () => {
- it('returns the lineNumber of the last item in the array', () => {
- const log = [
{
- content: [],
- lineNumber: 10,
+ offset: 1004,
+ content: [{ text: 'Line 4 - sub section content' }],
+ lineNumber: 4,
+ section: 'sub-section',
},
{
+ offset: 1005,
+ content: [{ text: 'Line 5 - sub sub section header with no content' }],
+ lineNumber: 5,
+ section: 'sub-sub-section',
isHeader: true,
- line: {
- lineNumber: 101,
- content: [],
- },
- lines: [],
},
- ];
-
- expect(getNextLineNumber(log)).toEqual(102);
- });
- });
-
- describe('when last line is a nested line', () => {
- it('returns the lineNumber of the last item in the nested array', () => {
- const log = [
{
- content: [],
- lineNumber: 10,
+ offset: 1007,
+ content: [{ text: 'Line 6 - sub section content 2' }],
+ lineNumber: 6,
+ section: 'sub-section',
},
{
- isHeader: true,
- line: {
- lineNumber: 101,
- content: [],
- },
- lines: [
- {
- lineNumber: 102,
- content: [],
- },
- { lineNumber: 103, content: [] },
- ],
+ offset: 1009,
+ content: [{ text: 'Line 7 - section content' }],
+ lineNumber: 7,
+ section: 'mock-section',
+ },
+ {
+ offset: 1011,
+ content: [{ text: 'Job succeeded' }],
+ lineNumber: 8,
+ },
+ ],
+ sections: {
+ 'mock-section': {
+ startLineNumber: 1,
+ endLineNumber: 7,
+ duration: '00:59',
+ isClosed: false,
},
- ];
+ 'sub-section': {
+ startLineNumber: 3,
+ endLineNumber: 6,
+ duration: '00:29',
+ isClosed: false,
+ },
+ 'sub-sub-section': {
+ startLineNumber: 5,
+ endLineNumber: 5,
+ duration: '00:00',
+ isClosed: false,
+ },
+ },
+ });
+ });
- expect(getNextLineNumber(log)).toEqual(104);
+ it('parsing repeated lines returns the same result', () => {
+ const result1 = logLinesParser(mockJobLines);
+ const result2 = logLinesParser(mockJobLines, {
+ currentLines: result1.lines,
+ currentSections: result1.sections,
});
+
+ // `toBe` is used to ensure objects do not change and trigger Vue reactivity
+ expect(result1.lines).toBe(result2.lines);
+ expect(result1.sections).toBe(result2.sections);
});
- });
- describe('updateIncrementalJobLog', () => {
- describe('without repeated section', () => {
- it('concats and parses both arrays', () => {
- const oldLog = logLinesParser(originalTrace);
- const result = updateIncrementalJobLog(regularIncremental, oldLog);
+ it('discards repeated lines and adds new ones', () => {
+ const result1 = logLinesParser(mockContentSection);
+ const result2 = logLinesParser(
+ [
+ ...mockContentSection,
+ {
+ content: [{ text: 'offset is too low, is ignored' }],
+ offset: 500,
+ },
+ {
+ content: [{ text: 'one new line' }],
+ offset: 1007,
+ },
+ ],
+ {
+ currentLines: result1.lines,
+ currentSections: result1.sections,
+ },
+ );
- expect(result).toEqual([
+ expect(result2).toEqual({
+ lines: [
{
- offset: 1,
- content: [
- {
- text: 'Downloading',
- },
- ],
+ content: [{ text: 'Using Docker executor with image dev.gitlab.org3' }],
+ isHeader: true,
lineNumber: 1,
+ offset: 1004,
+ section: 'prepare-executor',
},
{
- offset: 2,
- content: [
- {
- text: 'log line',
- },
- ],
+ content: [{ text: 'Docker executor with image registry.gitlab.com ...' }],
lineNumber: 2,
+ offset: 1005,
+ section: 'prepare-executor',
},
- ]);
- });
- });
-
- describe('with regular line repeated offset', () => {
- it('updates the last line and formats with the incremental part', () => {
- const oldLog = logLinesParser(originalTrace);
- const result = updateIncrementalJobLog(regularIncrementalRepeated, oldLog);
-
- expect(result).toEqual([
{
- offset: 1,
- content: [
- {
- text: 'log line',
- },
- ],
- lineNumber: 1,
+ content: [{ style: 'term-fg-l-green', text: 'Starting service ...' }],
+ lineNumber: 3,
+ offset: 1006,
+ section: 'prepare-executor',
+ },
+ {
+ content: [{ text: 'one new line' }],
+ lineNumber: 4,
+ offset: 1007,
},
- ]);
+ ],
+ sections: {
+ 'prepare-executor': {
+ startLineNumber: 1,
+ endLineNumber: 3,
+ duration: '00:09',
+ isClosed: false,
+ },
+ },
});
});
- describe('with header line repeated', () => {
- it('updates the header line and formats with the incremental part', () => {
- const oldLog = logLinesParser(headerTrace);
- const result = updateIncrementalJobLog(headerTraceIncremental, oldLog);
+ it('parses an interrupted job', () => {
+ const result = logLinesParser(mockTruncatedBottomSection);
- expect(result).toEqual([
- {
- isClosed: false,
- isHeader: true,
- line: {
- offset: 1,
- section_header: true,
- content: [
- {
- text: 'updated log line',
- },
- ],
- section: 'section',
- lineNumber: 1,
- },
- lines: [],
- },
- ]);
+ expect(result.sections).toEqual({
+ 'mock-section': {
+ startLineNumber: 1,
+ endLineNumber: Infinity,
+ duration: null,
+ isClosed: false,
+ },
});
});
- describe('with collapsible line repeated', () => {
- it('updates the collapsible line and formats with the incremental part', () => {
- const oldLog = logLinesParser(collapsibleTrace);
- const result = updateIncrementalJobLog(collapsibleTraceIncremental, oldLog);
+ it('parses the ending of an incomplete section', () => {
+ const result = logLinesParser(mockTruncatedTopSection);
- expect(result).toEqual([
- {
- isClosed: false,
- isHeader: true,
- line: {
- offset: 1,
- section_header: true,
- content: [
- {
- text: 'log line',
- },
- ],
- section: 'section',
- lineNumber: 1,
- },
- lines: [
- {
- offset: 2,
- content: [
- {
- text: 'updated log line',
- },
- ],
- section: 'section',
- lineNumber: 2,
- },
- ],
- },
- ]);
+ expect(result.sections).toEqual({
+ 'mock-section': {
+ startLineNumber: 0,
+ endLineNumber: 2,
+ duration: '00:59',
+ isClosed: false,
+ },
});
});
});
diff --git a/spec/frontend/kubernetes_dashboard/graphql/resolvers/kubernetes_spec.js b/spec/frontend/kubernetes_dashboard/graphql/resolvers/kubernetes_spec.js
index b354ecf97fc..d1eef5fbc8c 100644
--- a/spec/frontend/kubernetes_dashboard/graphql/resolvers/kubernetes_spec.js
+++ b/spec/frontend/kubernetes_dashboard/graphql/resolvers/kubernetes_spec.js
@@ -1,5 +1,6 @@
-import { CoreV1Api } from '@gitlab/cluster-client';
+import { CoreV1Api, WatchApi } from '@gitlab/cluster-client';
import { resolvers } from '~/kubernetes_dashboard/graphql/resolvers';
+import k8sDashboardPodsQuery from '~/kubernetes_dashboard/graphql/queries/k8s_dashboard_pods.query.graphql';
import { k8sPodsMock } from '../mock_data';
describe('~/frontend/environments/graphql/resolvers', () => {
@@ -18,6 +19,18 @@ describe('~/frontend/environments/graphql/resolvers', () => {
describe('k8sPods', () => {
const client = { writeQuery: jest.fn() };
+
+ const mockWatcher = WatchApi.prototype;
+ const mockPodsListWatcherFn = jest.fn().mockImplementation(() => {
+ return Promise.resolve(mockWatcher);
+ });
+
+ const mockOnDataFn = jest.fn().mockImplementation((eventName, callback) => {
+ if (eventName === 'data') {
+ callback([]);
+ }
+ });
+
const mockPodsListFn = jest.fn().mockImplementation(() => {
return Promise.resolve({
items: k8sPodsMock,
@@ -26,25 +39,55 @@ describe('~/frontend/environments/graphql/resolvers', () => {
const mockAllPodsListFn = jest.fn().mockImplementation(mockPodsListFn);
- beforeEach(() => {
- jest
- .spyOn(CoreV1Api.prototype, 'listCoreV1PodForAllNamespaces')
- .mockImplementation(mockAllPodsListFn);
+ describe('when the pods data is present', () => {
+ beforeEach(() => {
+ jest
+ .spyOn(CoreV1Api.prototype, 'listCoreV1PodForAllNamespaces')
+ .mockImplementation(mockAllPodsListFn);
+ jest.spyOn(mockWatcher, 'subscribeToStream').mockImplementation(mockPodsListWatcherFn);
+ jest.spyOn(mockWatcher, 'on').mockImplementation(mockOnDataFn);
+ });
+
+ it('should request all pods from the cluster_client library and watch the events', async () => {
+ const pods = await mockResolvers.Query.k8sPods(
+ null,
+ {
+ configuration,
+ },
+ { client },
+ );
+
+ expect(mockAllPodsListFn).toHaveBeenCalled();
+ expect(mockPodsListWatcherFn).toHaveBeenCalled();
+
+ expect(pods).toEqual(k8sPodsMock);
+ });
+
+ it('should update cache with the new data when received from the library', async () => {
+ await mockResolvers.Query.k8sPods(null, { configuration, namespace: '' }, { client });
+
+ expect(client.writeQuery).toHaveBeenCalledWith({
+ query: k8sDashboardPodsQuery,
+ variables: { configuration, namespace: '' },
+ data: { k8sPods: [] },
+ });
+ });
});
- it('should request all pods from the cluster_client library', async () => {
- const pods = await mockResolvers.Query.k8sPods(
- null,
- {
- configuration,
- },
- { client },
+ it('should not watch pods from the cluster_client library when the pods data is not present', async () => {
+ jest.spyOn(CoreV1Api.prototype, 'listCoreV1PodForAllNamespaces').mockImplementation(
+ jest.fn().mockImplementation(() => {
+ return Promise.resolve({
+ items: [],
+ });
+ }),
);
- expect(mockAllPodsListFn).toHaveBeenCalled();
+ await mockResolvers.Query.k8sPods(null, { configuration }, { client });
- expect(pods).toEqual(k8sPodsMock);
+ expect(mockPodsListWatcherFn).not.toHaveBeenCalled();
});
+
it('should throw an error if the API call fails', async () => {
jest
.spyOn(CoreV1Api.prototype, 'listCoreV1PodForAllNamespaces')
diff --git a/spec/lib/feature_spec.rb b/spec/lib/feature_spec.rb
index c7faf267348..64c249770b7 100644
--- a/spec/lib/feature_spec.rb
+++ b/spec/lib/feature_spec.rb
@@ -5,1101 +5,1123 @@ require 'spec_helper'
RSpec.describe Feature, :clean_gitlab_redis_feature_flag, stub_feature_flags: false, feature_category: :shared do
include StubVersion
- before do
- # reset Flipper AR-engine
- Feature.reset
- skip_feature_flags_yaml_validation
- end
+ # Pick a long-lasting real feature flag to test that we can check feature flags in the load balancer
+ let(:load_balancer_test_feature_flag) { :require_email_verification }
- describe '.current_request' do
- it 'returns a FlipperRequest with a flipper_id' do
- flipper_request = described_class.current_request
+ where(:bypass_load_balancer) { [true, false] }
- expect(flipper_request.flipper_id).to include("FlipperRequest:")
+ with_them do
+ def wrap_all_methods_with_flag_check(lb, flag)
+ lb.methods(false).each do |meth|
+ allow(lb).to receive(meth).and_wrap_original do |m, *args, **kwargs, &block|
+ Feature.enabled?(flag)
+ m.call(*args, **kwargs, &block)
+ end
+ end
end
-
- context 'when request store is inactive' do
- it 'does not cache flipper_id' do
- previous_id = described_class.current_request.flipper_id
-
- expect(described_class.current_request.flipper_id).not_to eq(previous_id)
+ before do
+ if bypass_load_balancer
+ stub_env(Feature::BypassLoadBalancer::FLAG, 'true')
+ wrap_all_methods_with_flag_check(ApplicationRecord.load_balancer, load_balancer_test_feature_flag)
end
+
+ # reset Flipper AR-engine
+ Feature.reset
+ skip_feature_flags_yaml_validation
end
- context 'when request store is active', :request_store do
- it 'caches flipper_id when request store is active' do
- previous_id = described_class.current_request.flipper_id
+ describe '.current_request' do
+ it 'returns a FlipperRequest with a flipper_id' do
+ flipper_request = described_class.current_request
- expect(described_class.current_request.flipper_id).to eq(previous_id)
+ expect(flipper_request.flipper_id).to include("FlipperRequest:")
end
- it 'returns a new flipper_id when request ends' do
- previous_id = described_class.current_request.flipper_id
+ context 'when request store is inactive' do
+ it 'does not cache flipper_id' do
+ previous_id = described_class.current_request.flipper_id
- RequestStore.end!
-
- expect(described_class.current_request.flipper_id).not_to eq(previous_id)
+ expect(described_class.current_request.flipper_id).not_to eq(previous_id)
+ end
end
- end
- end
- describe '.gitlab_instance' do
- it 'returns a FlipperGitlabInstance with a flipper_id' do
- flipper_request = described_class.gitlab_instance
+ context 'when request store is active', :request_store do
+ it 'caches flipper_id when request store is active' do
+ previous_id = described_class.current_request.flipper_id
- expect(flipper_request.flipper_id).to include("FlipperGitlabInstance:")
- end
+ expect(described_class.current_request.flipper_id).to eq(previous_id)
+ end
- it 'caches flipper_id' do
- previous_id = described_class.gitlab_instance.flipper_id
+ it 'returns a new flipper_id when request ends' do
+ previous_id = described_class.current_request.flipper_id
- expect(described_class.gitlab_instance.flipper_id).to eq(previous_id)
+ RequestStore.end!
+
+ expect(described_class.current_request.flipper_id).not_to eq(previous_id)
+ end
+ end
end
- end
- describe '.get' do
- let(:feature) { double(:feature) }
- let(:key) { 'my_feature' }
+ describe '.gitlab_instance' do
+ it 'returns a FlipperGitlabInstance with a flipper_id' do
+ flipper_request = described_class.gitlab_instance
+
+ expect(flipper_request.flipper_id).to include("FlipperGitlabInstance:")
+ end
- it 'returns the Flipper feature' do
- expect_any_instance_of(Flipper::DSL).to receive(:feature).with(key)
- .and_return(feature)
+ it 'caches flipper_id' do
+ previous_id = described_class.gitlab_instance.flipper_id
- expect(described_class.get(key)).to eq(feature)
+ expect(described_class.gitlab_instance.flipper_id).to eq(previous_id)
+ end
end
- end
- describe '.persisted_names' do
- it 'returns the names of the persisted features' do
- Feature.enable('foo')
+ describe '.get' do
+ let(:feature) { double(:feature) }
+ let(:key) { 'my_feature' }
- expect(described_class.persisted_names).to contain_exactly('foo')
- end
+ it 'returns the Flipper feature' do
+ expect_any_instance_of(Flipper::DSL).to receive(:feature).with(key)
+ .and_return(feature)
- it 'returns an empty Array when no features are presisted' do
- expect(described_class.persisted_names).to be_empty
+ expect(described_class.get(key)).to eq(feature)
+ end
end
- it 'caches the feature names when request store is active',
- :request_store, :use_clean_rails_memory_store_caching do
- Feature.enable('foo')
-
- expect(Gitlab::ProcessMemoryCache.cache_backend)
- .to receive(:fetch)
- .once
- .with('flipper/v1/features', { expires_in: 1.minute })
- .and_call_original
+ describe '.persisted_names' do
+ it 'returns the names of the persisted features' do
+ Feature.enable('foo')
- 2.times do
expect(described_class.persisted_names).to contain_exactly('foo')
end
- end
- it 'fetches all flags once in a single query', :request_store do
- Feature.enable('foo1')
- Feature.enable('foo2')
+ it 'returns an empty Array when no features are presisted' do
+ expect(described_class.persisted_names).to be_empty
+ end
- queries = ActiveRecord::QueryRecorder.new(skip_cached: false) do
- expect(described_class.persisted_names).to contain_exactly('foo1', 'foo2')
+ it 'caches the feature names when request store is active',
+ :request_store, :use_clean_rails_memory_store_caching do
+ Feature.enable('foo')
- RequestStore.clear!
+ expect(Gitlab::ProcessMemoryCache.cache_backend)
+ .to receive(:fetch)
+ .once
+ .with('flipper/v1/features', { expires_in: 1.minute })
+ .and_call_original
- expect(described_class.persisted_names).to contain_exactly('foo1', 'foo2')
+ 2.times do
+ expect(described_class.persisted_names).to contain_exactly('foo')
+ end
end
- expect(queries.count).to eq(1)
- end
- end
+ it 'fetches all flags once in a single query', :request_store do
+ Feature.enable('foo1')
+ Feature.enable('foo2')
- describe '.persisted_name?' do
- context 'when the feature is persisted' do
- it 'returns true when feature name is a string' do
- Feature.enable('foo')
+ queries = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ expect(described_class.persisted_names).to contain_exactly('foo1', 'foo2')
- expect(described_class.persisted_name?('foo')).to eq(true)
- end
+ RequestStore.clear!
- it 'returns true when feature name is a symbol' do
- Feature.enable('foo')
+ expect(described_class.persisted_names).to contain_exactly('foo1', 'foo2')
+ end
- expect(described_class.persisted_name?(:foo)).to eq(true)
+ expect(queries.count).to eq(1)
end
end
- context 'when the feature is not persisted' do
- it 'returns false when feature name is a string' do
- expect(described_class.persisted_name?('foo')).to eq(false)
+ describe '.persisted_name?' do
+ context 'when the feature is persisted' do
+ it 'returns true when feature name is a string' do
+ Feature.enable('foo')
+
+ expect(described_class.persisted_name?('foo')).to eq(true)
+ end
+
+ it 'returns true when feature name is a symbol' do
+ Feature.enable('foo')
+
+ expect(described_class.persisted_name?(:foo)).to eq(true)
+ end
end
- it 'returns false when feature name is a symbol' do
- expect(described_class.persisted_name?(:bar)).to eq(false)
+ context 'when the feature is not persisted' do
+ it 'returns false when feature name is a string' do
+ expect(described_class.persisted_name?('foo')).to eq(false)
+ end
+
+ it 'returns false when feature name is a symbol' do
+ expect(described_class.persisted_name?(:bar)).to eq(false)
+ end
end
end
- end
- describe '.all' do
- let(:features) { Set.new }
+ describe '.all' do
+ let(:features) { Set.new }
- it 'returns the Flipper features as an array' do
- expect_any_instance_of(Flipper::DSL).to receive(:features)
- .and_return(features)
+ it 'returns the Flipper features as an array' do
+ expect_any_instance_of(Flipper::DSL).to receive(:features)
+ .and_return(features)
- expect(described_class.all).to eq(features.to_a)
+ expect(described_class.all).to eq(features.to_a)
+ end
end
- end
- describe '.flipper' do
- context 'when request store is inactive' do
- it 'memoizes the Flipper instance but does not not enable Flipper memoization' do
- expect(Flipper).to receive(:new).once.and_call_original
+ describe '.flipper' do
+ context 'when request store is inactive' do
+ it 'memoizes the Flipper instance but does not not enable Flipper memoization' do
+ expect(Flipper).to receive(:new).once.and_call_original
- 2.times do
- described_class.flipper
- end
+ 2.times do
+ described_class.flipper
+ end
- expect(described_class.flipper.adapter.memoizing?).to eq(false)
+ expect(described_class.flipper.adapter.memoizing?).to eq(false)
+ end
end
- end
- context 'when request store is active', :request_store do
- it 'memoizes the Flipper instance' do
- expect(Flipper).to receive(:new).once.and_call_original
+ context 'when request store is active', :request_store do
+ it 'memoizes the Flipper instance' do
+ expect(Flipper).to receive(:new).once.and_call_original
- described_class.flipper
- described_class.instance_variable_set(:@flipper, nil)
- described_class.flipper
+ described_class.flipper
+ described_class.instance_variable_set(:@flipper, nil)
+ described_class.flipper
- expect(described_class.flipper.adapter.memoizing?).to eq(true)
+ expect(described_class.flipper.adapter.memoizing?).to eq(true)
+ end
end
end
- end
- describe '.enabled?' do
- before do
- allow(Feature).to receive(:log_feature_flag_states?).and_return(false)
+ describe '.enabled?' do
+ before do
+ allow(Feature).to receive(:log_feature_flag_states?).and_return(false)
- stub_feature_flag_definition(:disabled_feature_flag)
- stub_feature_flag_definition(:enabled_feature_flag, default_enabled: true)
- end
+ stub_feature_flag_definition(:disabled_feature_flag)
+ stub_feature_flag_definition(:enabled_feature_flag, default_enabled: true)
+ end
- context 'when using redis cache', :use_clean_rails_redis_caching do
- it 'does not make recursive feature-flag calls' do
- expect(described_class).to receive(:enabled?).once.and_call_original
- described_class.enabled?(:disabled_feature_flag)
+ context 'when using redis cache', :use_clean_rails_redis_caching do
+ it 'does not make recursive feature-flag calls' do
+ expect(described_class).to receive(:enabled?).once.and_call_original
+ described_class.enabled?(:disabled_feature_flag)
+ end
end
- end
- context 'when self-recursive' do
- before do
- allow(Feature).to receive(:with_feature).and_wrap_original do |original, name, &block|
- original.call(name) do |ff|
- Feature.enabled?(name)
- block.call(ff)
+ context 'when self-recursive' do
+ before do
+ allow(Feature).to receive(:with_feature).and_wrap_original do |original, name, &block|
+ original.call(name) do |ff|
+ Feature.enabled?(name)
+ block.call(ff)
+ end
end
end
- end
- it 'returns the default value' do
- expect(described_class.enabled?(:enabled_feature_flag)).to eq true
- end
+ it 'returns the default value' do
+ expect(described_class.enabled?(:enabled_feature_flag)).to eq true
+ end
- it 'detects self recursion' do
- expect(Gitlab::ErrorTracking)
- .to receive(:track_exception)
- .with(have_attributes(message: 'self recursion'), { stack: [:enabled_feature_flag] })
+ it 'detects self recursion' do
+ expect(Gitlab::ErrorTracking)
+ .to receive(:track_exception)
+ .with(have_attributes(message: 'self recursion'), { stack: [:enabled_feature_flag] })
- described_class.enabled?(:enabled_feature_flag)
+ described_class.enabled?(:enabled_feature_flag)
+ end
end
- end
- context 'when deeply recursive' do
- before do
- allow(Feature).to receive(:with_feature).and_wrap_original do |original, name, &block|
- original.call(name) do |ff|
- Feature.enabled?(:"deeper_#{name}", type: :undefined, default_enabled_if_undefined: true)
- block.call(ff)
+ context 'when deeply recursive' do
+ before do
+ allow(Feature).to receive(:with_feature).and_wrap_original do |original, name, &block|
+ original.call(name) do |ff|
+ Feature.enabled?(:"deeper_#{name}", type: :undefined, default_enabled_if_undefined: true)
+ block.call(ff)
+ end
end
end
- end
- it 'detects deep recursion' do
- expect(Gitlab::ErrorTracking)
- .to receive(:track_exception)
- .with(have_attributes(message: 'deep recursion'), stack: have_attributes(size: be > 10))
+ it 'detects deep recursion' do
+ expect(Gitlab::ErrorTracking)
+ .to receive(:track_exception)
+ .with(have_attributes(message: 'deep recursion'), stack: have_attributes(size: be > 10))
- described_class.enabled?(:enabled_feature_flag)
+ described_class.enabled?(:enabled_feature_flag)
+ end
end
- end
- it 'returns false (and tracks / raises exception for dev) for undefined feature' do
- expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
+ it 'returns false (and tracks / raises exception for dev) for undefined feature' do
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
- expect(described_class.enabled?(:some_random_feature_flag)).to be_falsey
- end
+ expect(described_class.enabled?(:some_random_feature_flag)).to be_falsey
+ end
- it 'returns false for undefined feature with default_enabled_if_undefined: false' do
- expect(described_class.enabled?(:some_random_feature_flag, default_enabled_if_undefined: false)).to be_falsey
- end
+ it 'returns false for undefined feature with default_enabled_if_undefined: false' do
+ expect(described_class.enabled?(:some_random_feature_flag, default_enabled_if_undefined: false)).to be_falsey
+ end
- it 'returns true for undefined feature with default_enabled_if_undefined: true' do
- expect(described_class.enabled?(:some_random_feature_flag, default_enabled_if_undefined: true)).to be_truthy
- end
+ it 'returns true for undefined feature with default_enabled_if_undefined: true' do
+ expect(described_class.enabled?(:some_random_feature_flag, default_enabled_if_undefined: true)).to be_truthy
+ end
- it 'returns false for existing disabled feature in the database' do
- described_class.disable(:disabled_feature_flag)
+ it 'returns false for existing disabled feature in the database' do
+ described_class.disable(:disabled_feature_flag)
- expect(described_class.enabled?(:disabled_feature_flag)).to be_falsey
- end
+ expect(described_class.enabled?(:disabled_feature_flag)).to be_falsey
+ end
- it 'returns true for existing enabled feature in the database' do
- described_class.enable(:enabled_feature_flag)
+ it 'returns true for existing enabled feature in the database' do
+ described_class.enable(:enabled_feature_flag)
- expect(described_class.enabled?(:enabled_feature_flag)).to be_truthy
- end
+ expect(described_class.enabled?(:enabled_feature_flag)).to be_truthy
+ end
- it { expect(described_class.send(:l1_cache_backend)).to eq(Gitlab::ProcessMemoryCache.cache_backend) }
- it { expect(described_class.send(:l2_cache_backend)).to eq(Gitlab::Redis::FeatureFlag.cache_store) }
+ it { expect(described_class.send(:l1_cache_backend)).to eq(Gitlab::ProcessMemoryCache.cache_backend) }
+ it { expect(described_class.send(:l2_cache_backend)).to eq(Gitlab::Redis::FeatureFlag.cache_store) }
- it 'caches the status in L1 and L2 caches',
- :request_store, :use_clean_rails_memory_store_caching do
- described_class.enable(:disabled_feature_flag)
- flipper_key = "flipper/v1/feature/disabled_feature_flag"
+ it 'caches the status in L1 and L2 caches',
+ :request_store, :use_clean_rails_memory_store_caching do
+ described_class.enable(:disabled_feature_flag)
+ flipper_key = "flipper/v1/feature/disabled_feature_flag"
- expect(described_class.send(:l2_cache_backend))
- .to receive(:fetch)
- .once
- .with(flipper_key, { expires_in: 1.hour })
- .and_call_original
+ expect(described_class.send(:l2_cache_backend))
+ .to receive(:fetch)
+ .once
+ .with(flipper_key, { expires_in: 1.hour })
+ .and_call_original
- expect(described_class.send(:l1_cache_backend))
- .to receive(:fetch)
- .once
- .with(flipper_key, { expires_in: 1.minute })
- .and_call_original
+ expect(described_class.send(:l1_cache_backend))
+ .to receive(:fetch)
+ .once
+ .with(flipper_key, { expires_in: 1.minute })
+ .and_call_original
- 2.times do
- expect(described_class.enabled?(:disabled_feature_flag)).to be_truthy
+ 2.times do
+ expect(described_class.enabled?(:disabled_feature_flag)).to be_truthy
+ end
end
- end
- it 'returns the default value when the database does not exist' do
- fake_default = double('fake default')
- expect(ActiveRecord::Base).to receive(:connection) { raise ActiveRecord::NoDatabaseError, "No database" }
+ it 'returns the default value when the database does not exist' do
+ fake_default = double('fake default')
- expect(described_class.enabled?(:a_feature, default_enabled_if_undefined: fake_default)).to eq(fake_default)
- end
+ base_class = Feature::BypassLoadBalancer.enabled? ? Feature::BypassLoadBalancer::FlipperRecord : ActiveRecord::Base
+ expect(base_class).to receive(:connection) { raise ActiveRecord::NoDatabaseError, "No database" }
- context 'logging is enabled', :request_store do
- before do
- allow(Feature).to receive(:log_feature_flag_states?).and_call_original
+ expect(described_class.enabled?(:a_feature, default_enabled_if_undefined: fake_default)).to eq(fake_default)
+ end
- stub_feature_flag_definition(:enabled_feature_flag, log_state_changes: true)
+ context 'logging is enabled', :request_store do
+ before do
+ allow(Feature).to receive(:log_feature_flag_states?).and_call_original
- described_class.enable(:feature_flag_state_logs)
- described_class.enable(:enabled_feature_flag)
- described_class.enabled?(:enabled_feature_flag)
- end
+ stub_feature_flag_definition(:enabled_feature_flag, log_state_changes: true)
- it 'does not log feature_flag_state_logs' do
- expect(described_class.logged_states).not_to have_key("feature_flag_state_logs")
- end
+ described_class.enable(:feature_flag_state_logs)
+ described_class.enable(:enabled_feature_flag)
+ described_class.enabled?(:enabled_feature_flag)
+ end
- it 'logs other feature flags' do
- expect(described_class.logged_states).to have_key(:enabled_feature_flag)
- expect(described_class.logged_states[:enabled_feature_flag]).to be_truthy
- end
- end
+ it 'does not log feature_flag_state_logs' do
+ expect(described_class.logged_states).not_to have_key("feature_flag_state_logs")
+ end
- context 'cached feature flag', :request_store do
- before do
- described_class.send(:flipper).memoize = false
- described_class.enabled?(:disabled_feature_flag)
+ it 'logs other feature flags' do
+ expect(described_class.logged_states).to have_key(:enabled_feature_flag)
+ expect(described_class.logged_states[:enabled_feature_flag]).to be_truthy
+ end
end
- it 'caches the status in L1 cache for the first minute' do
- expect do
- expect(described_class.send(:l1_cache_backend)).to receive(:fetch).once.and_call_original
- expect(described_class.send(:l2_cache_backend)).not_to receive(:fetch)
- expect(described_class.enabled?(:disabled_feature_flag)).to be_truthy
- end.not_to exceed_query_limit(0)
- end
+ context 'cached feature flag', :request_store do
+ before do
+ described_class.send(:flipper).memoize = false
+ described_class.enabled?(:disabled_feature_flag)
+ end
- it 'caches the status in L2 cache after 2 minutes' do
- travel_to 2.minutes.from_now do
+ it 'caches the status in L1 cache for the first minute' do
expect do
expect(described_class.send(:l1_cache_backend)).to receive(:fetch).once.and_call_original
- expect(described_class.send(:l2_cache_backend)).to receive(:fetch).once.and_call_original
+ expect(described_class.send(:l2_cache_backend)).not_to receive(:fetch)
expect(described_class.enabled?(:disabled_feature_flag)).to be_truthy
end.not_to exceed_query_limit(0)
end
- end
- it 'fetches the status after an hour' do
- travel_to 61.minutes.from_now do
- expect do
- expect(described_class.send(:l1_cache_backend)).to receive(:fetch).once.and_call_original
- expect(described_class.send(:l2_cache_backend)).to receive(:fetch).once.and_call_original
- expect(described_class.enabled?(:disabled_feature_flag)).to be_truthy
- end.not_to exceed_query_limit(1)
+ it 'caches the status in L2 cache after 2 minutes' do
+ travel_to 2.minutes.from_now do
+ expect do
+ expect(described_class.send(:l1_cache_backend)).to receive(:fetch).once.and_call_original
+ expect(described_class.send(:l2_cache_backend)).to receive(:fetch).once.and_call_original
+ expect(described_class.enabled?(:disabled_feature_flag)).to be_truthy
+ end.not_to exceed_query_limit(0)
+ end
end
- end
- end
-
- [:current_request, :request, described_class.current_request].each do |thing|
- context "with #{thing} actor" do
- context 'when request store is inactive' do
- it 'returns the approximate percentage set' do
- number_of_times = 1_000
- percentage = 50
- described_class.enable_percentage_of_actors(:enabled_feature_flag, percentage)
- gate_values = Array.new(number_of_times) do
- described_class.enabled?(:enabled_feature_flag, thing)
- end
-
- margin_of_error = 0.05 * number_of_times
- expected_size = number_of_times * percentage / 100
- expect(gate_values.count { |v| v }).to be_within(margin_of_error).of(expected_size)
+ it 'fetches the status after an hour' do
+ travel_to 61.minutes.from_now do
+ expect do
+ expect(described_class.send(:l1_cache_backend)).to receive(:fetch).once.and_call_original
+ expect(described_class.send(:l2_cache_backend)).to receive(:fetch).once.and_call_original
+ expect(described_class.enabled?(:disabled_feature_flag)).to be_truthy
+ end.not_to exceed_query_limit(1)
end
end
+ end
- context 'when request store is active', :request_store do
- it 'always returns the same gate value' do
- described_class.enable_percentage_of_actors(:enabled_feature_flag, 50)
+ [:current_request, :request, described_class.current_request].each do |thing|
+ context "with #{thing} actor" do
+ context 'when request store is inactive' do
+ it 'returns the approximate percentage set' do
+ number_of_times = 1_000
+ percentage = 50
+ described_class.enable_percentage_of_actors(:enabled_feature_flag, percentage)
- previous_gate_value = described_class.enabled?(:enabled_feature_flag, thing)
+ gate_values = Array.new(number_of_times) do
+ described_class.enabled?(:enabled_feature_flag, thing)
+ end
- 1_000.times do
- expect(described_class.enabled?(:enabled_feature_flag, thing)).to eq(previous_gate_value)
+ margin_of_error = 0.05 * number_of_times
+ expected_size = number_of_times * percentage / 100
+ expect(gate_values.count { |v| v }).to be_within(margin_of_error).of(expected_size)
end
end
- end
- end
- end
-
- context 'with gitlab_instance actor' do
- it 'always returns the same gate value' do
- described_class.enable(:enabled_feature_flag, described_class.gitlab_instance)
- expect(described_class.enabled?(:enabled_feature_flag, described_class.gitlab_instance)).to be_truthy
- end
- end
+ context 'when request store is active', :request_store do
+ it 'always returns the same gate value' do
+ described_class.enable_percentage_of_actors(:enabled_feature_flag, 50)
- context 'with :instance actor' do
- it 'always returns the same gate value' do
- described_class.enable(:enabled_feature_flag, :instance)
+ previous_gate_value = described_class.enabled?(:enabled_feature_flag, thing)
- expect(described_class.enabled?(:enabled_feature_flag, :instance)).to be_truthy
+ 1_000.times do
+ expect(described_class.enabled?(:enabled_feature_flag, thing)).to eq(previous_gate_value)
+ end
+ end
+ end
+ end
end
- end
- context 'with a group member' do
- let(:key) { :awesome_feature }
- let(:guinea_pigs) { create_list(:user, 3) }
+ context 'with gitlab_instance actor' do
+ it 'always returns the same gate value' do
+ described_class.enable(:enabled_feature_flag, described_class.gitlab_instance)
- before do
- described_class.reset
- stub_feature_flag_definition(key)
- Flipper.unregister_groups
- Flipper.register(:guinea_pigs) do |actor|
- guinea_pigs.include?(actor.thing)
+ expect(described_class.enabled?(:enabled_feature_flag, described_class.gitlab_instance)).to be_truthy
end
- described_class.enable(key, described_class.group(:guinea_pigs))
end
- it 'is true for all group members' do
- expect(described_class.enabled?(key, guinea_pigs[0])).to be_truthy
- expect(described_class.enabled?(key, guinea_pigs[1])).to be_truthy
- expect(described_class.enabled?(key, guinea_pigs[2])).to be_truthy
- end
+ context 'with :instance actor' do
+ it 'always returns the same gate value' do
+ described_class.enable(:enabled_feature_flag, :instance)
- it 'is false for any other actor' do
- expect(described_class.enabled?(key, create(:user))).to be_falsey
+ expect(described_class.enabled?(:enabled_feature_flag, :instance)).to be_truthy
+ end
end
- end
-
- context 'with an individual actor' do
- let(:actor) { stub_feature_flag_gate('CustomActor:5') }
- let(:another_actor) { stub_feature_flag_gate('CustomActor:10') }
- before do
- described_class.enable(:enabled_feature_flag, actor)
- end
+ context 'with a group member' do
+ let(:key) { :awesome_feature }
+ let(:guinea_pigs) { create_list(:user, 3) }
- it 'returns true when same actor is informed' do
- expect(described_class.enabled?(:enabled_feature_flag, actor)).to be_truthy
- end
+ before do
+ described_class.reset
+ stub_feature_flag_definition(key)
+ Flipper.unregister_groups
+ Flipper.register(:guinea_pigs) do |actor|
+ guinea_pigs.include?(actor.thing)
+ end
+ described_class.enable(key, described_class.group(:guinea_pigs))
+ end
- it 'returns false when different actor is informed' do
- expect(described_class.enabled?(:enabled_feature_flag, another_actor)).to be_falsey
- end
+ it 'is true for all group members' do
+ expect(described_class.enabled?(key, guinea_pigs[0])).to be_truthy
+ expect(described_class.enabled?(key, guinea_pigs[1])).to be_truthy
+ expect(described_class.enabled?(key, guinea_pigs[2])).to be_truthy
+ end
- it 'returns false when no actor is informed' do
- expect(described_class.enabled?(:enabled_feature_flag)).to be_falsey
+ it 'is false for any other actor' do
+ expect(described_class.enabled?(key, create(:user))).to be_falsey
+ end
end
- end
- context 'with invalid actor' do
- let(:actor) { double('invalid actor') }
+ context 'with an individual actor' do
+ let(:actor) { stub_feature_flag_gate('CustomActor:5') }
+ let(:another_actor) { stub_feature_flag_gate('CustomActor:10') }
- context 'when is dev_or_test_env' do
- it 'does raise exception' do
- expect { described_class.enabled?(:enabled_feature_flag, actor) }
- .to raise_error /needs to include `FeatureGate` or implement `flipper_id`/
+ before do
+ described_class.enable(:enabled_feature_flag, actor)
end
- end
- end
-
- context 'validates usage of feature flag with YAML definition' do
- let(:definition) do
- Feature::Definition.new('development/my_feature_flag.yml',
- name: 'my_feature_flag',
- type: 'development',
- default_enabled: default_enabled
- ).tap(&:validate!)
- end
- let(:default_enabled) { false }
+ it 'returns true when same actor is informed' do
+ expect(described_class.enabled?(:enabled_feature_flag, actor)).to be_truthy
+ end
- before do
- stub_env('LAZILY_CREATE_FEATURE_FLAG', '0')
+ it 'returns false when different actor is informed' do
+ expect(described_class.enabled?(:enabled_feature_flag, another_actor)).to be_falsey
+ end
- allow(Feature::Definition).to receive(:valid_usage!).and_call_original
- allow(Feature::Definition).to receive(:definitions) do
- { definition.key => definition }
+ it 'returns false when no actor is informed' do
+ expect(described_class.enabled?(:enabled_feature_flag)).to be_falsey
end
end
- it 'when usage is correct' do
- expect { described_class.enabled?(:my_feature_flag) }.not_to raise_error
- end
+ context 'with invalid actor' do
+ let(:actor) { double('invalid actor') }
- it 'when invalid type is used' do
- expect { described_class.enabled?(:my_feature_flag, type: :ops) }
- .to raise_error(/The `type:` of/)
+ context 'when is dev_or_test_env' do
+ it 'does raise exception' do
+ expect { described_class.enabled?(:enabled_feature_flag, actor) }
+ .to raise_error /needs to include `FeatureGate` or implement `flipper_id`/
+ end
+ end
end
- context 'when default_enabled: is false in the YAML definition' do
- it 'reads the default from the YAML definition' do
- expect(described_class.enabled?(:my_feature_flag)).to eq(default_enabled)
+ context 'validates usage of feature flag with YAML definition' do
+ let(:definition) do
+ Feature::Definition.new('development/my_feature_flag.yml',
+ name: 'my_feature_flag',
+ type: 'development',
+ default_enabled: default_enabled
+ ).tap(&:validate!)
end
- end
- context 'when default_enabled: is true in the YAML definition' do
- let(:default_enabled) { true }
+ let(:default_enabled) { false }
- it 'reads the default from the YAML definition' do
- expect(described_class.enabled?(:my_feature_flag)).to eq(true)
+ before do
+ stub_env('LAZILY_CREATE_FEATURE_FLAG', '0')
+ lb_ff_definition = Feature::Definition.get(load_balancer_test_feature_flag)
+ allow(Feature::Definition).to receive(:valid_usage!).and_call_original
+ allow(Feature::Definition).to receive(:definitions) do
+ { definition.key => definition, lb_ff_definition.key => lb_ff_definition }
+ end
end
- context 'and feature has been disabled' do
- before do
- described_class.disable(:my_feature_flag)
- end
+ it 'when usage is correct' do
+ expect { described_class.enabled?(:my_feature_flag) }.not_to raise_error
+ end
- it 'is not enabled' do
- expect(described_class.enabled?(:my_feature_flag)).to eq(false)
- end
+ it 'when invalid type is used' do
+ expect { described_class.enabled?(:my_feature_flag, type: :ops) }
+ .to raise_error(/The `type:` of/)
end
- context 'with a cached value and the YAML definition is changed thereafter' do
- before do
- described_class.enabled?(:my_feature_flag)
+ context 'when default_enabled: is false in the YAML definition' do
+ it 'reads the default from the YAML definition' do
+ expect(described_class.enabled?(:my_feature_flag)).to eq(default_enabled)
end
+ end
- it 'reads new default value' do
- allow(definition).to receive(:default_enabled).and_return(true)
+ context 'when default_enabled: is true in the YAML definition' do
+ let(:default_enabled) { true }
+ it 'reads the default from the YAML definition' do
expect(described_class.enabled?(:my_feature_flag)).to eq(true)
end
- end
- context 'when YAML definition does not exist for an optional type' do
- let(:optional_type) { described_class::Shared::TYPES.find { |name, attrs| attrs[:optional] }.first }
+ context 'and feature has been disabled' do
+ before do
+ described_class.disable(:my_feature_flag)
+ end
- context 'when in dev or test environment' do
- it 'raises an error for dev' do
- expect { described_class.enabled?(:non_existent_flag, type: optional_type) }
- .to raise_error(
- Feature::InvalidFeatureFlagError,
- "The feature flag YAML definition for 'non_existent_flag' does not exist")
+ it 'is not enabled' do
+ expect(described_class.enabled?(:my_feature_flag)).to eq(false)
end
end
- context 'when in production' do
+ context 'with a cached value and the YAML definition is changed thereafter' do
before do
- allow(Gitlab::ErrorTracking).to receive(:should_raise_for_dev?).and_return(false)
+ described_class.enabled?(:my_feature_flag)
end
- context 'when database exists' do
- before do
- allow(ApplicationRecord.database).to receive(:exists?).and_return(true)
- end
+ it 'reads new default value' do
+ allow(definition).to receive(:default_enabled).and_return(true)
+
+ expect(described_class.enabled?(:my_feature_flag)).to eq(true)
+ end
+ end
- it 'checks the persisted status and returns false' do
- expect(described_class).to receive(:with_feature).with(:non_existent_flag).and_call_original
+ context 'when YAML definition does not exist for an optional type' do
+ let(:optional_type) { described_class::Shared::TYPES.find { |name, attrs| attrs[:optional] }.first }
- expect(described_class.enabled?(:non_existent_flag, type: optional_type)).to eq(false)
+ context 'when in dev or test environment' do
+ it 'raises an error for dev' do
+ expect { described_class.enabled?(:non_existent_flag, type: optional_type) }
+ .to raise_error(
+ Feature::InvalidFeatureFlagError,
+ "The feature flag YAML definition for 'non_existent_flag' does not exist")
end
end
- context 'when database does not exist' do
+ context 'when in production' do
before do
- allow(ApplicationRecord.database).to receive(:exists?).and_return(false)
+ allow(Gitlab::ErrorTracking).to receive(:should_raise_for_dev?).and_return(false)
+ end
+
+ context 'when database exists' do
+ before do
+ allow(ApplicationRecord.database).to receive(:exists?).and_return(true)
+ end
+
+ it 'checks the persisted status and returns false' do
+ expect(described_class).to receive(:with_feature).with(:non_existent_flag).and_call_original
+
+ expect(described_class.enabled?(:non_existent_flag, type: optional_type)).to eq(false)
+ end
end
- it 'returns false without checking the status in the database' do
- expect(described_class).not_to receive(:get)
+ context 'when database does not exist' do
+ before do
+ allow(ApplicationRecord.database).to receive(:exists?).and_return(false)
+ end
- expect(described_class.enabled?(:non_existent_flag, type: optional_type)).to eq(false)
+ it 'returns false without checking the status in the database' do
+ expect(described_class).not_to receive(:get)
+
+ expect(described_class.enabled?(:non_existent_flag, type: optional_type)).to eq(false)
+ end
end
end
end
end
end
end
- end
-
- describe '.disable?' do
- it 'returns true (and tracks / raises exception for dev) for undefined feature' do
- expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
-
- expect(described_class.disabled?(:some_random_feature_flag)).to be_truthy
- end
- it 'returns true for undefined feature with default_enabled_if_undefined: false' do
- expect(described_class.disabled?(:some_random_feature_flag, default_enabled_if_undefined: false)).to be_truthy
- end
+ describe '.disable?' do
+ it 'returns true (and tracks / raises exception for dev) for undefined feature' do
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
- it 'returns false for undefined feature with default_enabled_if_undefined: true' do
- expect(described_class.disabled?(:some_random_feature_flag, default_enabled_if_undefined: true)).to be_falsey
- end
+ expect(described_class.disabled?(:some_random_feature_flag)).to be_truthy
+ end
- it 'returns true for existing disabled feature in the database' do
- stub_feature_flag_definition(:disabled_feature_flag)
- described_class.disable(:disabled_feature_flag)
+ it 'returns true for undefined feature with default_enabled_if_undefined: false' do
+ expect(described_class.disabled?(:some_random_feature_flag, default_enabled_if_undefined: false)).to be_truthy
+ end
- expect(described_class.disabled?(:disabled_feature_flag)).to be_truthy
- end
+ it 'returns false for undefined feature with default_enabled_if_undefined: true' do
+ expect(described_class.disabled?(:some_random_feature_flag, default_enabled_if_undefined: true)).to be_falsey
+ end
- it 'returns false for existing enabled feature in the database' do
- stub_feature_flag_definition(:enabled_feature_flag)
- described_class.enable(:enabled_feature_flag)
+ it 'returns true for existing disabled feature in the database' do
+ stub_feature_flag_definition(:disabled_feature_flag)
+ described_class.disable(:disabled_feature_flag)
- expect(described_class.disabled?(:enabled_feature_flag)).to be_falsey
- end
- end
-
- shared_examples_for 'logging' do
- let(:expected_action) {}
- let(:expected_extra) {}
+ expect(described_class.disabled?(:disabled_feature_flag)).to be_truthy
+ end
- it 'logs the event' do
- expect(Feature.logger).to receive(:info).at_least(:once).with(key: key, action: expected_action, **expected_extra)
+ it 'returns false for existing enabled feature in the database' do
+ stub_feature_flag_definition(:enabled_feature_flag)
+ described_class.enable(:enabled_feature_flag)
- subject
+ expect(described_class.disabled?(:enabled_feature_flag)).to be_falsey
+ end
end
- end
- describe '.enable' do
- subject { described_class.enable(key, thing) }
+ shared_examples_for 'logging' do
+ let(:expected_action) {}
+ let(:expected_extra) {}
- let(:key) { :awesome_feature }
- let(:thing) { true }
+ it 'logs the event' do
+ expect(Feature.logger).to receive(:info).at_least(:once).with(key: key, action: expected_action, **expected_extra)
- it_behaves_like 'logging' do
- let(:expected_action) { :enable }
- let(:expected_extra) { { "extra.thing" => "true" } }
+ subject
+ end
end
- # This is documented to return true, modify doc/administration/feature_flags.md if it changes
- it 'returns true' do
- expect(subject).to be true
- end
+ describe '.enable' do
+ subject { described_class.enable(key, thing) }
- context 'when thing is an actor' do
- let(:thing) { create(:user) }
+ let(:key) { :awesome_feature }
+ let(:thing) { true }
it_behaves_like 'logging' do
- let(:expected_action) { eq(:enable) | eq(:remove_opt_out) }
- let(:expected_extra) { { "extra.thing" => thing.flipper_id.to_s } }
+ let(:expected_action) { :enable }
+ let(:expected_extra) { { "extra.thing" => "true" } }
end
- end
- end
- describe '.disable' do
- subject { described_class.disable(key, thing) }
+ # This is documented to return true, modify doc/administration/feature_flags.md if it changes
+ it 'returns true' do
+ expect(subject).to be true
+ end
- let(:key) { :awesome_feature }
- let(:thing) { false }
+ context 'when thing is an actor' do
+ let(:thing) { create(:user) }
- it_behaves_like 'logging' do
- let(:expected_action) { :disable }
- let(:expected_extra) { { "extra.thing" => "false" } }
+ it_behaves_like 'logging' do
+ let(:expected_action) { eq(:enable) | eq(:remove_opt_out) }
+ let(:expected_extra) { { "extra.thing" => thing.flipper_id.to_s } }
+ end
+ end
end
- # This is documented to return true, modify doc/administration/feature_flags.md if it changes
- it 'returns true' do
- expect(subject).to be true
- end
+ describe '.disable' do
+ subject { described_class.disable(key, thing) }
- context 'when thing is an actor' do
- let(:thing) { create(:user) }
- let(:flag_opts) { {} }
+ let(:key) { :awesome_feature }
+ let(:thing) { false }
it_behaves_like 'logging' do
let(:expected_action) { :disable }
- let(:expected_extra) { { "extra.thing" => thing.flipper_id.to_s } }
+ let(:expected_extra) { { "extra.thing" => "false" } }
end
- before do
- stub_feature_flag_definition(key, flag_opts)
+ # This is documented to return true, modify doc/administration/feature_flags.md if it changes
+ it 'returns true' do
+ expect(subject).to be true
end
- context 'when the feature flag was enabled for this actor' do
- before do
- described_class.enable(key, thing)
- end
+ context 'when thing is an actor' do
+ let(:thing) { create(:user) }
+ let(:flag_opts) { {} }
- it 'marks this thing as disabled' do
- expect { subject }.to change { thing_enabled? }.from(true).to(false)
+ it_behaves_like 'logging' do
+ let(:expected_action) { :disable }
+ let(:expected_extra) { { "extra.thing" => thing.flipper_id.to_s } }
end
- it 'does not change the global value' do
- expect { subject }.not_to change { described_class.enabled?(key) }.from(false)
+ before do
+ stub_feature_flag_definition(key, flag_opts)
end
- it 'is possible to re-enable the feature' do
- subject
+ context 'when the feature flag was enabled for this actor' do
+ before do
+ described_class.enable(key, thing)
+ end
- expect { described_class.enable(key, thing) }
- .to change { thing_enabled? }.from(false).to(true)
- end
- end
+ it 'marks this thing as disabled' do
+ expect { subject }.to change { thing_enabled? }.from(true).to(false)
+ end
- context 'when the feature flag is enabled globally' do
- before do
- described_class.enable(key)
- end
+ it 'does not change the global value' do
+ expect { subject }.not_to change { described_class.enabled?(key) }.from(false)
+ end
- it 'does not mark this thing as disabled' do
- expect { subject }.not_to change { thing_enabled? }.from(true)
+ it 'is possible to re-enable the feature' do
+ subject
+
+ expect { described_class.enable(key, thing) }
+ .to change { thing_enabled? }.from(false).to(true)
+ end
end
- it 'does not change the global value' do
- expect { subject }.not_to change { described_class.enabled?(key) }.from(true)
+ context 'when the feature flag is enabled globally' do
+ before do
+ described_class.enable(key)
+ end
+
+ it 'does not mark this thing as disabled' do
+ expect { subject }.not_to change { thing_enabled? }.from(true)
+ end
+
+ it 'does not change the global value' do
+ expect { subject }.not_to change { described_class.enabled?(key) }.from(true)
+ end
end
end
end
- end
- describe 'opt_out' do
- subject { described_class.opt_out(key, thing) }
+ describe 'opt_out' do
+ subject { described_class.opt_out(key, thing) }
- let(:key) { :awesome_feature }
+ let(:key) { :awesome_feature }
- before do
- stub_feature_flag_definition(key)
- described_class.enable(key)
- end
+ before do
+ stub_feature_flag_definition(key)
+ described_class.enable(key)
+ end
- context 'when thing is an actor' do
- let_it_be(:thing) { create(:project) }
+ context 'when thing is an actor' do
+ let_it_be(:thing) { create(:project) }
- it 'marks this thing as disabled' do
- expect { subject }.to change { thing_enabled? }.from(true).to(false)
- end
+ it 'marks this thing as disabled' do
+ expect { subject }.to change { thing_enabled? }.from(true).to(false)
+ end
- it 'does not change the global value' do
- expect { subject }.not_to change { described_class.enabled?(key) }.from(true)
- end
+ it 'does not change the global value' do
+ expect { subject }.not_to change { described_class.enabled?(key) }.from(true)
+ end
- it_behaves_like 'logging' do
- let(:expected_action) { eq(:opt_out) }
- let(:expected_extra) { { "extra.thing" => thing.flipper_id.to_s } }
- end
+ it_behaves_like 'logging' do
+ let(:expected_action) { eq(:opt_out) }
+ let(:expected_extra) { { "extra.thing" => thing.flipper_id.to_s } }
+ end
- it 'stores the opt-out information as a gate' do
- subject
+ it 'stores the opt-out information as a gate' do
+ subject
- flag = described_class.get(key)
+ flag = described_class.get(key)
- expect(flag.actors_value).to include(include(thing.flipper_id))
- expect(flag.actors_value).not_to include(thing.flipper_id)
+ expect(flag.actors_value).to include(include(thing.flipper_id))
+ expect(flag.actors_value).not_to include(thing.flipper_id)
+ end
end
- end
- context 'when thing is a group' do
- let(:thing) { Feature.group(:guinea_pigs) }
- let(:guinea_pigs) { create_list(:user, 3) }
+ context 'when thing is a group' do
+ let(:thing) { Feature.group(:guinea_pigs) }
+ let(:guinea_pigs) { create_list(:user, 3) }
- before do
- Feature.reset
- Flipper.unregister_groups
- Flipper.register(:guinea_pigs) do |actor|
- guinea_pigs.include?(actor.thing)
+ before do
+ Feature.reset
+ Flipper.unregister_groups
+ Flipper.register(:guinea_pigs) do |actor|
+ guinea_pigs.include?(actor.thing)
+ end
end
- end
- it 'has no effect' do
- expect { subject }.not_to change { described_class.enabled?(key, guinea_pigs.first) }.from(true)
+ it 'has no effect' do
+ expect { subject }.not_to change { described_class.enabled?(key, guinea_pigs.first) }.from(true)
+ end
end
end
- end
- describe 'remove_opt_out' do
- subject { described_class.remove_opt_out(key, thing) }
+ describe 'remove_opt_out' do
+ subject { described_class.remove_opt_out(key, thing) }
- let(:key) { :awesome_feature }
+ let(:key) { :awesome_feature }
- before do
- stub_feature_flag_definition(key)
- described_class.enable(key)
- described_class.opt_out(key, thing)
- end
+ before do
+ stub_feature_flag_definition(key)
+ described_class.enable(key)
+ described_class.opt_out(key, thing)
+ end
- context 'when thing is an actor' do
- let_it_be(:thing) { create(:project) }
+ context 'when thing is an actor' do
+ let_it_be(:thing) { create(:project) }
- it 're-enables this thing' do
- expect { subject }.to change { thing_enabled? }.from(false).to(true)
- end
+ it 're-enables this thing' do
+ expect { subject }.to change { thing_enabled? }.from(false).to(true)
+ end
- it 'does not change the global value' do
- expect { subject }.not_to change { described_class.enabled?(key) }.from(true)
- end
+ it 'does not change the global value' do
+ expect { subject }.not_to change { described_class.enabled?(key) }.from(true)
+ end
- it_behaves_like 'logging' do
- let(:expected_action) { eq(:remove_opt_out) }
- let(:expected_extra) { { "extra.thing" => thing.flipper_id.to_s } }
- end
+ it_behaves_like 'logging' do
+ let(:expected_action) { eq(:remove_opt_out) }
+ let(:expected_extra) { { "extra.thing" => thing.flipper_id.to_s } }
+ end
- it 'removes the opt-out information' do
- subject
+ it 'removes the opt-out information' do
+ subject
- flag = described_class.get(key)
+ flag = described_class.get(key)
- expect(flag.actors_value).to be_empty
+ expect(flag.actors_value).to be_empty
+ end
end
- end
- context 'when thing is a group' do
- let(:thing) { Feature.group(:guinea_pigs) }
- let(:guinea_pigs) { create_list(:user, 3) }
+ context 'when thing is a group' do
+ let(:thing) { Feature.group(:guinea_pigs) }
+ let(:guinea_pigs) { create_list(:user, 3) }
- before do
- Feature.reset
- Flipper.unregister_groups
- Flipper.register(:guinea_pigs) do |actor|
- guinea_pigs.include?(actor.thing)
+ before do
+ Feature.reset
+ Flipper.unregister_groups
+ Flipper.register(:guinea_pigs) do |actor|
+ guinea_pigs.include?(actor.thing)
+ end
end
- end
- it 'has no effect' do
- expect { subject }.not_to change { described_class.enabled?(key, guinea_pigs.first) }.from(true)
+ it 'has no effect' do
+ expect { subject }.not_to change { described_class.enabled?(key, guinea_pigs.first) }.from(true)
+ end
end
end
- end
-
- describe '.enable_percentage_of_time' do
- subject { described_class.enable_percentage_of_time(key, percentage) }
- let(:key) { :awesome_feature }
- let(:percentage) { 50 }
+ describe '.enable_percentage_of_time' do
+ subject { described_class.enable_percentage_of_time(key, percentage) }
- it_behaves_like 'logging' do
- let(:expected_action) { :enable_percentage_of_time }
- let(:expected_extra) { { "extra.percentage" => percentage.to_s } }
- end
+ let(:key) { :awesome_feature }
+ let(:percentage) { 50 }
- context 'when the flag is on' do
- before do
- described_class.enable(key)
+ it_behaves_like 'logging' do
+ let(:expected_action) { :enable_percentage_of_time }
+ let(:expected_extra) { { "extra.percentage" => percentage.to_s } }
end
- it 'fails with InvalidOperation' do
- expect { subject }.to raise_error(described_class::InvalidOperation)
+ context 'when the flag is on' do
+ before do
+ described_class.enable(key)
+ end
+
+ it 'fails with InvalidOperation' do
+ expect { subject }.to raise_error(described_class::InvalidOperation)
+ end
end
end
- end
- describe '.disable_percentage_of_time' do
- subject { described_class.disable_percentage_of_time(key) }
+ describe '.disable_percentage_of_time' do
+ subject { described_class.disable_percentage_of_time(key) }
- let(:key) { :awesome_feature }
+ let(:key) { :awesome_feature }
- it_behaves_like 'logging' do
- let(:expected_action) { :disable_percentage_of_time }
- let(:expected_extra) { {} }
+ it_behaves_like 'logging' do
+ let(:expected_action) { :disable_percentage_of_time }
+ let(:expected_extra) { {} }
+ end
end
- end
-
- describe '.enable_percentage_of_actors' do
- subject { described_class.enable_percentage_of_actors(key, percentage) }
- let(:key) { :awesome_feature }
- let(:percentage) { 50 }
+ describe '.enable_percentage_of_actors' do
+ subject { described_class.enable_percentage_of_actors(key, percentage) }
- it_behaves_like 'logging' do
- let(:expected_action) { :enable_percentage_of_actors }
- let(:expected_extra) { { "extra.percentage" => percentage.to_s } }
- end
+ let(:key) { :awesome_feature }
+ let(:percentage) { 50 }
- context 'when the flag is on' do
- before do
- described_class.enable(key)
+ it_behaves_like 'logging' do
+ let(:expected_action) { :enable_percentage_of_actors }
+ let(:expected_extra) { { "extra.percentage" => percentage.to_s } }
end
- it 'fails with InvalidOperation' do
- expect { subject }.to raise_error(described_class::InvalidOperation)
+ context 'when the flag is on' do
+ before do
+ described_class.enable(key)
+ end
+
+ it 'fails with InvalidOperation' do
+ expect { subject }.to raise_error(described_class::InvalidOperation)
+ end
end
end
- end
- describe '.disable_percentage_of_actors' do
- subject { described_class.disable_percentage_of_actors(key) }
+ describe '.disable_percentage_of_actors' do
+ subject { described_class.disable_percentage_of_actors(key) }
- let(:key) { :awesome_feature }
+ let(:key) { :awesome_feature }
- it_behaves_like 'logging' do
- let(:expected_action) { :disable_percentage_of_actors }
- let(:expected_extra) { {} }
+ it_behaves_like 'logging' do
+ let(:expected_action) { :disable_percentage_of_actors }
+ let(:expected_extra) { {} }
+ end
end
- end
-
- describe '.remove' do
- subject { described_class.remove(key) }
- let(:key) { :awesome_feature }
- let(:actor) { create(:user) }
-
- before do
- described_class.enable(key)
- end
+ describe '.remove' do
+ subject { described_class.remove(key) }
- it_behaves_like 'logging' do
- let(:expected_action) { :remove }
- let(:expected_extra) { {} }
- end
+ let(:key) { :awesome_feature }
+ let(:actor) { create(:user) }
- context 'for a non-persisted feature' do
- it 'returns nil' do
- expect(described_class.remove(:non_persisted_feature_flag)).to be_nil
+ before do
+ described_class.enable(key)
end
- it 'returns true, and cleans up' do
- expect(subject).to be_truthy
- expect(described_class.persisted_names).not_to include(key)
+ it_behaves_like 'logging' do
+ let(:expected_action) { :remove }
+ let(:expected_extra) { {} }
end
- end
- end
-
- describe '.log_feature_flag_states?' do
- let(:log_state_changes) { false }
- let(:milestone) { "0.0" }
- let(:flag_name) { :some_flag }
- let(:flag_type) { 'development' }
-
- before do
- Feature.enable(:feature_flag_state_logs)
- Feature.enable(:some_flag)
- allow(Feature).to receive(:log_feature_flag_states?).and_return(false)
- allow(Feature).to receive(:log_feature_flag_states?).with(:feature_flag_state_logs).and_call_original
- allow(Feature).to receive(:log_feature_flag_states?).with(:some_flag).and_call_original
+ context 'for a non-persisted feature' do
+ it 'returns nil' do
+ expect(described_class.remove(:non_persisted_feature_flag)).to be_nil
+ end
- stub_feature_flag_definition(flag_name,
- type: flag_type,
- milestone: milestone,
- log_state_changes: log_state_changes)
+ it 'returns true, and cleans up' do
+ expect(subject).to be_truthy
+ expect(described_class.persisted_names).not_to include(key)
+ end
+ end
end
- subject { described_class.log_feature_flag_states?(flag_name) }
+ describe '.log_feature_flag_states?' do
+ let(:log_state_changes) { false }
+ let(:milestone) { "0.0" }
+ let(:flag_name) { :some_flag }
+ let(:flag_type) { 'development' }
- context 'when flag is feature_flag_state_logs' do
- let(:milestone) { "14.6" }
- let(:flag_name) { :feature_flag_state_logs }
- let(:flag_type) { 'ops' }
- let(:log_state_changes) { true }
+ before do
+ Feature.enable(:feature_flag_state_logs)
+ Feature.enable(:some_flag)
- it { is_expected.to be_falsey }
- end
+ allow(Feature).to receive(:log_feature_flag_states?).and_return(false)
+ allow(Feature).to receive(:log_feature_flag_states?).with(:feature_flag_state_logs).and_call_original
+ allow(Feature).to receive(:log_feature_flag_states?).with(:some_flag).and_call_original
- context 'when flag is old' do
- it { is_expected.to be_falsey }
- end
+ stub_feature_flag_definition(flag_name,
+ type: flag_type,
+ milestone: milestone,
+ log_state_changes: log_state_changes)
+ end
- context 'when flag is old while log_state_changes is not present ' do
- let(:log_state_changes) { nil }
+ subject { described_class.log_feature_flag_states?(flag_name) }
- it { is_expected.to be_falsey }
- end
+ context 'when flag is feature_flag_state_logs' do
+ let(:milestone) { "14.6" }
+ let(:flag_name) { :feature_flag_state_logs }
+ let(:flag_type) { 'ops' }
+ let(:log_state_changes) { true }
- context 'when flag is old but log_state_changes is true' do
- let(:log_state_changes) { true }
+ it { is_expected.to be_falsey }
+ end
- it { is_expected.to be_truthy }
- end
+ context 'when flag is old' do
+ it { is_expected.to be_falsey }
+ end
- context 'when flag is new and not feature_flag_state_logs' do
- let(:milestone) { "14.6" }
+ context 'when flag is old while log_state_changes is not present ' do
+ let(:log_state_changes) { nil }
- before do
- stub_version('14.5.123', 'deadbeef')
+ it { is_expected.to be_falsey }
end
- it { is_expected.to be_truthy }
- end
+ context 'when flag is old but log_state_changes is true' do
+ let(:log_state_changes) { true }
- context 'when milestone is nil' do
- let(:milestone) { nil }
+ it { is_expected.to be_truthy }
+ end
- it { is_expected.to be_falsey }
- end
- end
+ context 'when flag is new and not feature_flag_state_logs' do
+ let(:milestone) { "14.6" }
- context 'caching with stale reads from the database', :use_clean_rails_redis_caching, :request_store, :aggregate_failures do
- let(:actor) { stub_feature_flag_gate('CustomActor:5') }
- let(:another_actor) { stub_feature_flag_gate('CustomActor:10') }
+ before do
+ stub_version('14.5.123', 'deadbeef')
+ end
- # This is a bit unpleasant. For these tests we want to simulate stale reads
- # from the database (due to database load balancing). A simple way to do
- # that is to stub the response on the adapter Flipper uses for reading from
- # the database. However, there isn't a convenient API for this. We know that
- # the ActiveRecord adapter is always at the 'bottom' of the chain, so we can
- # find it that way.
- let(:active_record_adapter) do
- adapter = described_class.flipper
+ it { is_expected.to be_truthy }
+ end
- loop do
- break adapter unless adapter.instance_variable_get(:@adapter)
+ context 'when milestone is nil' do
+ let(:milestone) { nil }
- adapter = adapter.instance_variable_get(:@adapter)
+ it { is_expected.to be_falsey }
end
end
- before do
- stub_feature_flag_definition(:enabled_feature_flag)
- end
+ context 'caching with stale reads from the database', :use_clean_rails_redis_caching, :request_store, :aggregate_failures do
+ let(:actor) { stub_feature_flag_gate('CustomActor:5') }
+ let(:another_actor) { stub_feature_flag_gate('CustomActor:10') }
- it 'gives the correct value when enabling for an additional actor' do
- described_class.enable(:enabled_feature_flag, actor)
- initial_gate_values = active_record_adapter.get(described_class.get(:enabled_feature_flag))
+ # This is a bit unpleasant. For these tests we want to simulate stale reads
+ # from the database (due to database load balancing). A simple way to do
+ # that is to stub the response on the adapter Flipper uses for reading from
+ # the database. However, there isn't a convenient API for this. We know that
+ # the ActiveRecord adapter is always at the 'bottom' of the chain, so we can
+ # find it that way.
+ let(:active_record_adapter) do
+ adapter = described_class.flipper
- # This should only be enabled for `actor`
- expect(described_class.enabled?(:enabled_feature_flag, actor)).to be(true)
- expect(described_class.enabled?(:enabled_feature_flag, another_actor)).to be(false)
- expect(described_class.enabled?(:enabled_feature_flag)).to be(false)
+ loop do
+ break adapter unless adapter.instance_variable_get(:@adapter)
- # Enable for `another_actor` and simulate a stale read
- described_class.enable(:enabled_feature_flag, another_actor)
- allow(active_record_adapter).to receive(:get).once.and_return(initial_gate_values)
+ adapter = adapter.instance_variable_get(:@adapter)
+ end
+ end
- # Should read from the cache and be enabled for both of these actors
- expect(described_class.enabled?(:enabled_feature_flag, actor)).to be(true)
- expect(described_class.enabled?(:enabled_feature_flag, another_actor)).to be(true)
- expect(described_class.enabled?(:enabled_feature_flag)).to be(false)
- end
+ before do
+ stub_feature_flag_definition(:enabled_feature_flag)
+ end
- it 'gives the correct value when enabling for percentage of time' do
- described_class.enable_percentage_of_time(:enabled_feature_flag, 10)
- initial_gate_values = active_record_adapter.get(described_class.get(:enabled_feature_flag))
+ it 'gives the correct value when enabling for an additional actor' do
+ described_class.enable(:enabled_feature_flag, actor)
+ initial_gate_values = active_record_adapter.get(described_class.get(:enabled_feature_flag))
- # Test against `gate_values` directly as otherwise it would be non-determistic
- expect(described_class.get(:enabled_feature_flag).gate_values.percentage_of_time).to eq(10)
+ # This should only be enabled for `actor`
+ expect(described_class.enabled?(:enabled_feature_flag, actor)).to be(true)
+ expect(described_class.enabled?(:enabled_feature_flag, another_actor)).to be(false)
+ expect(described_class.enabled?(:enabled_feature_flag)).to be(false)
- # Enable 50% of time and simulate a stale read
- described_class.enable_percentage_of_time(:enabled_feature_flag, 50)
- allow(active_record_adapter).to receive(:get).once.and_return(initial_gate_values)
+ # Enable for `another_actor` and simulate a stale read
+ described_class.enable(:enabled_feature_flag, another_actor)
+ allow(active_record_adapter).to receive(:get).once.and_return(initial_gate_values)
- # Should read from the cache and be enabled 50% of the time
- expect(described_class.get(:enabled_feature_flag).gate_values.percentage_of_time).to eq(50)
- end
+ # Should read from the cache and be enabled for both of these actors
+ expect(described_class.enabled?(:enabled_feature_flag, actor)).to be(true)
+ expect(described_class.enabled?(:enabled_feature_flag, another_actor)).to be(true)
+ expect(described_class.enabled?(:enabled_feature_flag)).to be(false)
+ end
- it 'gives the correct value when disabling the flag' do
- described_class.enable(:enabled_feature_flag, actor)
- described_class.enable(:enabled_feature_flag, another_actor)
- initial_gate_values = active_record_adapter.get(described_class.get(:enabled_feature_flag))
+ it 'gives the correct value when enabling for percentage of time' do
+ described_class.enable_percentage_of_time(:enabled_feature_flag, 10)
+ initial_gate_values = active_record_adapter.get(described_class.get(:enabled_feature_flag))
- # This be enabled for `actor` and `another_actor`
- expect(described_class.enabled?(:enabled_feature_flag, actor)).to be(true)
- expect(described_class.enabled?(:enabled_feature_flag, another_actor)).to be(true)
- expect(described_class.enabled?(:enabled_feature_flag)).to be(false)
+ # Test against `gate_values` directly as otherwise it would be non-deterministic
+ expect(described_class.get(:enabled_feature_flag).gate_values.percentage_of_time).to eq(10)
- # Disable for `another_actor` and simulate a stale read
- described_class.disable(:enabled_feature_flag, another_actor)
- allow(active_record_adapter).to receive(:get).once.and_return(initial_gate_values)
+ # Enable 50% of time and simulate a stale read
+ described_class.enable_percentage_of_time(:enabled_feature_flag, 50)
+ allow(active_record_adapter).to receive(:get).once.and_return(initial_gate_values)
- # Should read from the cache and be enabled only for `actor`
- expect(described_class.enabled?(:enabled_feature_flag, actor)).to be(true)
- expect(described_class.enabled?(:enabled_feature_flag, another_actor)).to be(false)
- expect(described_class.enabled?(:enabled_feature_flag)).to be(false)
- end
+ # Should read from the cache and be enabled 50% of the time
+ expect(described_class.get(:enabled_feature_flag).gate_values.percentage_of_time).to eq(50)
+ end
- it 'gives the correct value when deleting the flag' do
- described_class.enable(:enabled_feature_flag, actor)
- initial_gate_values = active_record_adapter.get(described_class.get(:enabled_feature_flag))
+ it 'gives the correct value when disabling the flag' do
+ described_class.enable(:enabled_feature_flag, actor)
+ described_class.enable(:enabled_feature_flag, another_actor)
+ initial_gate_values = active_record_adapter.get(described_class.get(:enabled_feature_flag))
- # This should only be enabled for `actor`
- expect(described_class.enabled?(:enabled_feature_flag, actor)).to be(true)
- expect(described_class.enabled?(:enabled_feature_flag)).to be(false)
+ # This should be enabled for `actor` and `another_actor`
+ expect(described_class.enabled?(:enabled_feature_flag, actor)).to be(true)
+ expect(described_class.enabled?(:enabled_feature_flag, another_actor)).to be(true)
+ expect(described_class.enabled?(:enabled_feature_flag)).to be(false)
- # Remove and simulate a stale read
- described_class.remove(:enabled_feature_flag)
- allow(active_record_adapter).to receive(:get).once.and_return(initial_gate_values)
+ # Disable for `another_actor` and simulate a stale read
+ described_class.disable(:enabled_feature_flag, another_actor)
+ allow(active_record_adapter).to receive(:get).once.and_return(initial_gate_values)
- # Should read from the cache and be disabled everywhere
- expect(described_class.enabled?(:enabled_feature_flag, actor)).to be(false)
- expect(described_class.enabled?(:enabled_feature_flag)).to be(false)
- end
- end
+ # Should read from the cache and be enabled only for `actor`
+ expect(described_class.enabled?(:enabled_feature_flag, actor)).to be(true)
+ expect(described_class.enabled?(:enabled_feature_flag, another_actor)).to be(false)
+ expect(described_class.enabled?(:enabled_feature_flag)).to be(false)
+ end
+
+ it 'gives the correct value when deleting the flag' do
+ described_class.enable(:enabled_feature_flag, actor)
+ initial_gate_values = active_record_adapter.get(described_class.get(:enabled_feature_flag))
- describe Feature::Target do
- describe '#targets' do
- let(:project) { create(:project) }
- let(:group) { create(:group) }
- let(:user_name) { project.first_owner.username }
+ # This should only be enabled for `actor`
+ expect(described_class.enabled?(:enabled_feature_flag, actor)).to be(true)
+ expect(described_class.enabled?(:enabled_feature_flag)).to be(false)
- subject do
- described_class.new(
- user: user_name,
- project: project.full_path,
- group: group.full_path,
- repository: project.repository.full_path
- )
- end
+ # Remove and simulate a stale read
+ described_class.remove(:enabled_feature_flag)
+ allow(active_record_adapter).to receive(:get).once.and_return(initial_gate_values)
- it 'returns all found targets' do
- expect(subject.targets).to be_an(Array)
- expect(subject.targets).to eq([project.first_owner, project, group, project.repository])
+ # Should read from the cache and be disabled everywhere
+ expect(described_class.enabled?(:enabled_feature_flag, actor)).to be(false)
+ expect(described_class.enabled?(:enabled_feature_flag)).to be(false)
end
+ end
- context 'when repository target works with different types of repositories' do
- let_it_be(:group) { create(:group) }
- let_it_be(:project) { create(:project, :wiki_repo, group: group) }
- let_it_be(:project_in_user_namespace) { create(:project, namespace: create(:user).namespace) }
- let(:personal_snippet) { create(:personal_snippet) }
- let(:project_snippet) { create(:project_snippet, project: project) }
-
- let(:targets) do
- [
- project,
- project.wiki,
- project_in_user_namespace,
- personal_snippet,
- project_snippet
- ]
- end
+ describe Feature::Target do
+ describe '#targets' do
+ let(:project) { create(:project) }
+ let(:group) { create(:group) }
+ let(:user_name) { project.first_owner.username }
subject do
described_class.new(
- repository: targets.map { |t| t.repository.full_path }.join(",")
+ user: user_name,
+ project: project.full_path,
+ group: group.full_path,
+ repository: project.repository.full_path
)
end
it 'returns all found targets' do
expect(subject.targets).to be_an(Array)
- expect(subject.targets).to eq(targets.map(&:repository))
+ expect(subject.targets).to eq([project.first_owner, project, group, project.repository])
+ end
+
+ context 'when repository target works with different types of repositories' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, :wiki_repo, group: group) }
+ let_it_be(:project_in_user_namespace) { create(:project, namespace: create(:user).namespace) }
+ let(:personal_snippet) { create(:personal_snippet) }
+ let(:project_snippet) { create(:project_snippet, project: project) }
+
+ let(:targets) do
+ [
+ project,
+ project.wiki,
+ project_in_user_namespace,
+ personal_snippet,
+ project_snippet
+ ]
+ end
+
+ subject do
+ described_class.new(
+ repository: targets.map { |t| t.repository.full_path }.join(",")
+ )
+ end
+
+ it 'returns all found targets' do
+ expect(subject.targets).to be_an(Array)
+ expect(subject.targets).to eq(targets.map(&:repository))
+ end
end
end
end
diff --git a/spec/lib/gitlab/checks/tag_check_spec.rb b/spec/lib/gitlab/checks/tag_check_spec.rb
index b5aafde006f..158dfad42e2 100644
--- a/spec/lib/gitlab/checks/tag_check_spec.rb
+++ b/spec/lib/gitlab/checks/tag_check_spec.rb
@@ -11,47 +11,50 @@ RSpec.describe Gitlab::Checks::TagCheck, feature_category: :source_code_manageme
it 'raises an error when user does not have access' do
allow(user_access).to receive(:can_do_action?).with(:admin_tag).and_return(false)
- expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, 'You are not allowed to change existing tags on this project.')
+ expect { change_check.validate! }.to raise_error(
+ Gitlab::GitAccess::ForbiddenError,
+ 'You are not allowed to change existing tags on this project.'
+ )
end
- context "prohibited tags check" do
+ describe "prohibited tags check" do
it 'prohibits tags name that include refs/heads at the head' do
- allow(subject).to receive(:tag_name).and_return("refs/heads/foo")
+ allow(change_check).to receive(:tag_name).and_return("refs/heads/foo")
- expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, "You cannot create a tag with a prohibited pattern.")
+ expect { change_check.validate! }.to raise_error(
+ Gitlab::GitAccess::ForbiddenError,
+ "You cannot create a tag with a prohibited pattern."
+ )
end
it "prohibits tag names that include refs/tags/ at the head" do
- allow(subject).to receive(:tag_name).and_return("refs/tags/foo")
+ allow(change_check).to receive(:tag_name).and_return("refs/tags/foo")
- expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, "You cannot create a tag with a prohibited pattern.")
+ expect { change_check.validate! }.to raise_error(
+ Gitlab::GitAccess::ForbiddenError,
+ "You cannot create a tag with a prohibited pattern."
+ )
end
it "doesn't prohibit a nested refs/tags/ string in a tag name" do
- allow(subject).to receive(:tag_name).and_return("fix-for-refs/tags/foo")
+ allow(change_check).to receive(:tag_name).and_return("fix-for-refs/tags/foo")
- expect { subject.validate! }.not_to raise_error
- end
-
- context "deleting a refs/tags headed tag" do
- let(:newrev) { "0000000000000000000000000000000000000000" }
- let(:ref) { "refs/tags/refs/tags/267208abfe40e546f5e847444276f7d43a39503e" }
-
- it "doesn't prohibit the deletion of a refs/tags/ tag name" do
- expect { subject.validate! }.not_to raise_error
- end
+ expect { change_check.validate! }.not_to raise_error
end
it "prohibits tag names that include characters incompatible with UTF-8" do
- allow(subject).to receive(:tag_name).and_return("v6.0.0-\xCE.BETA")
+ allow(change_check).to receive(:tag_name).and_return("v6.0.0-\xCE.BETA")
- expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, "Tag names must be valid when converted to UTF-8 encoding")
+ expect { change_check.validate! }.to raise_error(
+ Gitlab::GitAccess::ForbiddenError,
+ "Tag names must be valid when converted to UTF-8 encoding"
+ )
end
it "doesn't prohibit UTF-8 compatible characters" do
- allow(subject).to receive(:tag_name).and_return("v6.0.0-Ü.BETA")
+ allow(change_check).to receive(:tag_name).and_return("v6.0.0-Ü.BETA")
- expect { subject.validate! }.not_to raise_error
+ expect { change_check.validate! }.not_to raise_error
end
context "when prohibited_tag_name_encoding_check feature flag is disabled" do
@@ -60,15 +63,24 @@ RSpec.describe Gitlab::Checks::TagCheck, feature_category: :source_code_manageme
end
it "doesn't prohibit tag names that include characters incompatible with UTF-8" do
- allow(subject).to receive(:tag_name).and_return("v6.0.0-\xCE.BETA")
+ allow(change_check).to receive(:tag_name).and_return("v6.0.0-\xCE.BETA")
- expect { subject.validate! }.not_to raise_error
+ expect { change_check.validate! }.not_to raise_error
end
it "doesn't prohibit UTF-8 compatible characters" do
- allow(subject).to receive(:tag_name).and_return("v6.0.0-Ü.BETA")
+ allow(change_check).to receive(:tag_name).and_return("v6.0.0-Ü.BETA")
- expect { subject.validate! }.not_to raise_error
+ expect { change_check.validate! }.not_to raise_error
+ end
+ end
+
+ describe "deleting a refs/tags headed tag" do
+ let(:newrev) { "0000000000000000000000000000000000000000" }
+ let(:ref) { "refs/tags/refs/tags/267208abfe40e546f5e847444276f7d43a39503e" }
+
+ it "doesn't prohibit the deletion of a refs/tags/ tag name" do
+ expect { change_check.validate! }.not_to raise_error
end
end
end
@@ -81,31 +93,36 @@ RSpec.describe Gitlab::Checks::TagCheck, feature_category: :source_code_manageme
project.add_maintainer(user)
end
- context 'deletion' do
+ describe 'deleting a tag' do
let(:oldrev) { 'be93687618e4b132087f430a4d8fc3a609c9b77c' }
let(:newrev) { '0000000000000000000000000000000000000000' }
- context 'via web interface' do
+ context 'when deleting via web interface' do
let(:protocol) { 'web' }
it 'is allowed' do
- expect { subject.validate! }.not_to raise_error
+ expect { change_check.validate! }.not_to raise_error
end
end
- context 'via SSH' do
+ context 'when deleting via SSH' do
it 'is prevented' do
- expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, /only delete.*web interface/)
+ expect { change_check.validate! }.to raise_error(
+ Gitlab::GitAccess::ForbiddenError,
+ 'You can only delete protected tags using the web interface.'
+ )
end
end
end
- context 'update' do
+ describe 'updating a tag' do
let(:oldrev) { 'be93687618e4b132087f430a4d8fc3a609c9b77c' }
let(:newrev) { '54fcc214b94e78d7a41a9a8fe6d87a5e59500e51' }
it 'is prevented' do
- expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, /cannot be updated/)
+ expect { change_check.validate! }.to raise_error(
+ Gitlab::GitAccess::ForbiddenError, 'Protected tags cannot be updated.'
+ )
end
end
end
@@ -115,37 +132,47 @@ RSpec.describe Gitlab::Checks::TagCheck, feature_category: :source_code_manageme
project.add_developer(user)
end
- context 'deletion' do
+ describe 'deleting a tag' do
let(:oldrev) { 'be93687618e4b132087f430a4d8fc3a609c9b77c' }
let(:newrev) { '0000000000000000000000000000000000000000' }
it 'is prevented' do
- expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, /not allowed to delete/)
+ expect { change_check.validate! }.to raise_error(
+ Gitlab::GitAccess::ForbiddenError,
+ 'You are not allowed to delete protected tags from this project. ' \
+ 'Only a project maintainer or owner can delete a protected tag.'
+ )
end
end
end
- context 'creation' do
+ describe 'creating a tag' do
let(:oldrev) { '0000000000000000000000000000000000000000' }
let(:newrev) { '54fcc214b94e78d7a41a9a8fe6d87a5e59500e51' }
let(:ref) { 'refs/tags/v9.1.0' }
it 'prevents creation below access level' do
- expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, /allowed to create this tag as it is protected/)
+ expect { change_check.validate! }.to raise_error(
+ Gitlab::GitAccess::ForbiddenError,
+ 'You are not allowed to create this tag as it is protected.'
+ )
end
context 'when user has access' do
let!(:protected_tag) { create(:protected_tag, :developers_can_create, project: project, name: 'v*') }
it 'allows tag creation' do
- expect { subject.validate! }.not_to raise_error
+ expect { change_check.validate! }.not_to raise_error
end
context 'when tag name is the same as default branch' do
let(:ref) { "refs/tags/#{project.default_branch}" }
it 'is prevented' do
- expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, /cannot use default branch name to create a tag/)
+ expect { change_check.validate! }.to raise_error(
+ Gitlab::GitAccess::ForbiddenError,
+ 'You cannot use default branch name to create a tag'
+ )
end
end
end
diff --git a/spec/support/shared_contexts/single_change_access_checks_shared_context.rb b/spec/support/shared_contexts/single_change_access_checks_shared_context.rb
index bf90c26047b..5945302824b 100644
--- a/spec/support/shared_contexts/single_change_access_checks_shared_context.rb
+++ b/spec/support/shared_contexts/single_change_access_checks_shared_context.rb
@@ -21,7 +21,7 @@ RSpec.shared_context 'change access checks context' do
)
end
- subject { described_class.new(change_access) }
+ subject(:change_check) { described_class.new(change_access) }
before do
project.add_developer(user)