gitlab.com/gitlab-org/gitlab-foss.git
-rw-r--r--  app/assets/javascripts/filtered_search/filtered_search_manager.js | 43
-rw-r--r--  app/assets/javascripts/jira_import/components/jira_import_form.vue | 8
-rw-r--r--  app/assets/javascripts/lib/utils/keycodes.js | 7
-rw-r--r--  app/assets/javascripts/releases/components/app_edit.vue | 12
-rw-r--r--  app/assets/javascripts/releases/components/asset_links_form.vue | 12
-rw-r--r--  app/assets/javascripts/snippets/components/snippet_header.vue | 14
-rw-r--r--  app/assets/javascripts/static_site_editor/components/invalid_content_message.vue | 29
-rw-r--r--  app/assets/javascripts/static_site_editor/components/publish_toolbar.vue | 16
-rw-r--r--  app/assets/javascripts/static_site_editor/components/saved_changes_message.vue | 18
-rw-r--r--  app/assets/javascripts/static_site_editor/components/static_site_editor.vue | 60
-rw-r--r--  app/assets/javascripts/static_site_editor/index.js | 11
-rw-r--r--  app/assets/javascripts/static_site_editor/store/state.js | 1
-rw-r--r--  app/assets/javascripts/vue_shared/components/clone_dropdown.vue | 12
-rw-r--r--  app/finders/autocomplete/routes_finder.rb | 47
-rw-r--r--  app/graphql/types/merge_request_type.rb | 2
-rw-r--r--  app/models/application_setting.rb | 1
-rw-r--r--  app/models/ci/job_artifact.rb | 15
-rw-r--r--  app/models/diff_discussion.rb | 1
-rw-r--r--  app/models/lfs_object.rb | 13
-rw-r--r--  app/models/merge_request.rb | 16
-rw-r--r--  app/models/note.rb | 2
-rw-r--r--  app/models/project_setting.rb | 2
-rw-r--r--  app/models/route.rb | 4
-rw-r--r--  app/serializers/discussion_entity.rb | 15
-rw-r--r--  app/serializers/merge_request_basic_entity.rb | 2
-rw-r--r--  app/serializers/merge_request_poll_cached_widget_entity.rb | 2
-rw-r--r--  app/serializers/route_entity.rb | 8
-rw-r--r--  app/serializers/route_serializer.rb | 5
-rw-r--r--  app/services/pod_logs/base_service.rb | 6
-rw-r--r--  app/services/pod_logs/elasticsearch_service.rb | 21
-rw-r--r--  app/services/pod_logs/kubernetes_service.rb | 19
-rw-r--r--  app/uploaders/records_uploads.rb | 23
-rw-r--r--  app/workers/concerns/cronjob_queue.rb | 10
-rw-r--r--  changelogs/unreleased/213382-use-not-valid-to-immediately-enforce-a-not-null-constraint.yml | 6
-rw-r--r--  changelogs/unreleased/ak-historical-pods.yml | 5
-rw-r--r--  changelogs/unreleased/issue-211404.yml | 5
-rw-r--r--  changelogs/unreleased/refactor_push_rules.yml | 5
-rw-r--r--  config/routes.rb | 2
-rw-r--r--  db/migrate/20200325104755_add_push_rules_id_to_project_settings.rb | 19
-rw-r--r--  db/migrate/20200325104756_add_push_rules_foreign_key_to_project_settings.rb | 18
-rw-r--r--  db/migrate/20200325104833_add_push_rules_id_to_application_settings.rb | 19
-rw-r--r--  db/migrate/20200325104834_add_push_rules_foreign_key_to_application_settings.rb | 18
-rw-r--r--  db/migrate/20200406165950_add_not_null_constraint_on_file_store_to_lfs_objects.rb | 24
-rw-r--r--  db/migrate/20200406171857_add_not_null_constraint_on_file_store_to_ci_job_artifacts.rb | 24
-rw-r--r--  db/migrate/20200406172135_add_not_null_constraint_on_file_store_to_uploads.rb | 24
-rw-r--r--  db/migrate/20200408133211_add_index_on_route_path_trigram.rb | 18
-rw-r--r--  db/migrate/20200415160722_remove_not_null_lfs_objects_constraint.rb | 19
-rw-r--r--  db/migrate/20200415161021_remove_not_null_ci_job_artifacts_constraint.rb | 19
-rw-r--r--  db/migrate/20200415161206_remove_not_null_uploads_constraint.rb | 19
-rw-r--r--  db/post_migrate/20200325162730_schedule_backfill_push_rules_id_in_projects.rb | 36
-rw-r--r--  db/post_migrate/20200406193427_add_index_to_issues_health_status.rb | 23
-rw-r--r--  db/structure.sql | 44
-rw-r--r--  doc/README.md | 2
-rw-r--r--  doc/administration/geo/replication/datatypes.md | 1
-rw-r--r--  doc/administration/gitaly/praefect.md | 25
-rw-r--r--  doc/administration/packages/index.md | 1
-rw-r--r--  doc/api/packages.md | 4
-rw-r--r--  doc/ci/pipelines/settings.md | 2
-rw-r--r--  doc/install/aws/index.md | 2
-rw-r--r--  doc/user/admin_area/settings/usage_statistics.md | 1
-rw-r--r--  doc/user/application_security/container_scanning/index.md | 50
-rw-r--r--  doc/user/application_security/security_dashboard/index.md | 6
-rw-r--r--  doc/user/clusters/applications.md | 4
-rw-r--r--  doc/user/clusters/img/fluentd_v12_10.png | bin 26758 -> 26438 bytes
-rw-r--r--  doc/user/compliance/license_compliance/index.md | 3
-rw-r--r--  doc/user/group/img/group_activity_analytics_v12_10.png | bin 0 -> 183422 bytes
-rw-r--r--  doc/user/group/index.md | 18
-rw-r--r--  doc/user/packages/index.md | 1
-rw-r--r--  doc/user/packages/pypi_repository/index.md | 84
-rw-r--r--  doc/user/permissions.md | 2
-rw-r--r--  doc/user/project/index.md | 1
-rw-r--r--  doc/user/project/requirements/img/requirement_archive_view_v12_10.png | bin 0 -> 112233 bytes
-rw-r--r--  doc/user/project/requirements/img/requirement_create_view_v12_10.png | bin 0 -> 124402 bytes
-rw-r--r--  doc/user/project/requirements/img/requirement_edit_save_v12_10.png | bin 0 -> 123899 bytes
-rw-r--r--  doc/user/project/requirements/img/requirement_edit_view_v12_10.png | bin 0 -> 118066 bytes
-rw-r--r--  doc/user/project/requirements/img/requirements_archived_list_view_v12_10.png | bin 0 -> 68623 bytes
-rw-r--r--  doc/user/project/requirements/img/requirements_list_view_v12_10.png | bin 0 -> 117250 bytes
-rw-r--r--  doc/user/project/requirements/index.md | 67
-rw-r--r--  doc/user/search/index.md | 4
-rw-r--r--  doc/user/shortcuts.md | 9
-rw-r--r--  lib/api/entities/merge_request_basic.rb | 2
-rw-r--r--  lib/api/helpers/internal_helpers.rb | 19
-rw-r--r--  lib/gitlab/application_context.rb | 4
-rw-r--r--  lib/gitlab/background_migration/backfill_push_rules_id_in_projects.rb | 27
-rw-r--r--  lib/gitlab/cycle_analytics/group_stage_summary.rb | 36
-rw-r--r--  lib/gitlab/cycle_analytics/summary/group/deployment_frequency.rb | 33
-rw-r--r--  lib/gitlab/cycle_analytics/summary_helper.rb | 15
-rw-r--r--  lib/gitlab/data_builder/pipeline.rb | 2
-rw-r--r--  lib/gitlab/elasticsearch/logs.rb | 154
-rw-r--r--  lib/gitlab/elasticsearch/logs/lines.rb | 156
-rw-r--r--  lib/gitlab/elasticsearch/logs/pods.rb | 70
-rw-r--r--  locale/gitlab.pot | 24
-rw-r--r--  package.json | 2
-rw-r--r--  spec/factories/ci/job_artifacts.rb | 2
-rw-r--r--  spec/fixtures/lib/elasticsearch/pods_query.json | 28
-rw-r--r--  spec/fixtures/lib/elasticsearch/pods_response.json | 75
-rw-r--r--  spec/frontend/jira_import/components/jira_import_form_spec.js | 6
-rw-r--r--  spec/frontend/snippets/components/snippet_header_spec.js | 10
-rw-r--r--  spec/frontend/static_site_editor/components/invalid_content_message_spec.js | 23
-rw-r--r--  spec/frontend/static_site_editor/components/publish_toolbar_spec.js | 4
-rw-r--r--  spec/frontend/static_site_editor/components/static_site_editor_spec.js | 14
-rw-r--r--  spec/frontend/vue_shared/components/__snapshots__/clone_dropdown_spec.js.snap | 8
-rw-r--r--  spec/javascripts/filtered_search/filtered_search_manager_spec.js | 102
-rw-r--r--  spec/lib/gitlab/application_context_spec.rb | 12
-rw-r--r--  spec/lib/gitlab/background_migration/backfill_push_rules_id_in_projects_spec.rb | 32
-rw-r--r--  spec/lib/gitlab/cycle_analytics/group_stage_summary_spec.rb | 46
-rw-r--r--  spec/lib/gitlab/data_builder/pipeline_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/elasticsearch/logs/lines_spec.rb (renamed from spec/lib/gitlab/elasticsearch/logs_spec.rb) | 2
-rw-r--r--  spec/lib/gitlab/elasticsearch/logs/pods_spec.rb | 35
-rw-r--r--  spec/migrations/schedule_backfill_push_rules_id_in_projects_spec.rb | 35
-rw-r--r--  spec/models/ci/job_artifact_spec.rb | 21
-rw-r--r--  spec/models/cycle_analytics/group_level_spec.rb | 2
-rw-r--r--  spec/models/merge_request_spec.rb | 66
-rw-r--r--  spec/requests/api/graphql/project/merge_request_spec.rb | 11
-rw-r--r--  spec/requests/api/merge_requests_spec.rb | 8
-rw-r--r--  spec/serializers/discussion_entity_spec.rb | 10
-rw-r--r--  spec/serializers/merge_request_basic_entity_spec.rb | 17
-rw-r--r--  spec/serializers/merge_request_poll_cached_widget_entity_spec.rb | 6
-rw-r--r--  spec/services/pod_logs/base_service_spec.rb | 27
-rw-r--r--  spec/services/pod_logs/elasticsearch_service_spec.rb | 63
-rw-r--r--  spec/services/pod_logs/kubernetes_service_spec.rb | 32
-rw-r--r--  spec/uploaders/records_uploads_spec.rb | 6
-rw-r--r--  spec/workers/concerns/cronjob_queue_spec.rb | 22
-rw-r--r--  yarn.lock | 8
124 files changed, 1861 insertions, 509 deletions
diff --git a/app/assets/javascripts/filtered_search/filtered_search_manager.js b/app/assets/javascripts/filtered_search/filtered_search_manager.js
index 7ea7313f648..724f80f8866 100644
--- a/app/assets/javascripts/filtered_search/filtered_search_manager.js
+++ b/app/assets/javascripts/filtered_search/filtered_search_manager.js
@@ -14,7 +14,13 @@ import FilteredSearchTokenizer from './filtered_search_tokenizer';
import FilteredSearchDropdownManager from './filtered_search_dropdown_manager';
import FilteredSearchVisualTokens from './filtered_search_visual_tokens';
import DropdownUtils from './dropdown_utils';
-import { BACKSPACE_KEY_CODE } from '~/lib/utils/keycodes';
+import {
+ ENTER_KEY_CODE,
+ BACKSPACE_KEY_CODE,
+ DELETE_KEY_CODE,
+ UP_KEY_CODE,
+ DOWN_KEY_CODE,
+} from '~/lib/utils/keycodes';
import { __ } from '~/locale';
export default class FilteredSearchManager {
@@ -176,6 +182,8 @@ export default class FilteredSearchManager {
this.checkForEnterWrapper = this.checkForEnter.bind(this);
this.onClearSearchWrapper = this.onClearSearch.bind(this);
this.checkForBackspaceWrapper = this.checkForBackspace.call(this);
+ this.checkForMetaBackspaceWrapper = this.checkForMetaBackspace.bind(this);
+ this.checkForAltOrCtrlBackspaceWrapper = this.checkForAltOrCtrlBackspace.bind(this);
this.removeSelectedTokenKeydownWrapper = this.removeSelectedTokenKeydown.bind(this);
this.unselectEditTokensWrapper = this.unselectEditTokens.bind(this);
this.editTokenWrapper = this.editToken.bind(this);
@@ -192,6 +200,9 @@ export default class FilteredSearchManager {
this.filteredSearchInput.addEventListener('keyup', this.handleInputVisualTokenWrapper);
this.filteredSearchInput.addEventListener('keydown', this.checkForEnterWrapper);
this.filteredSearchInput.addEventListener('keyup', this.checkForBackspaceWrapper);
+ // e.metaKey only works with keydown, not keyup
+ this.filteredSearchInput.addEventListener('keydown', this.checkForMetaBackspaceWrapper);
+ this.filteredSearchInput.addEventListener('keydown', this.checkForAltOrCtrlBackspaceWrapper);
this.filteredSearchInput.addEventListener('click', this.tokenChange);
this.filteredSearchInput.addEventListener('keyup', this.tokenChange);
this.filteredSearchInput.addEventListener('focus', this.addInputContainerFocusWrapper);
@@ -213,6 +224,8 @@ export default class FilteredSearchManager {
this.filteredSearchInput.removeEventListener('input', this.handleInputPlaceholderWrapper);
this.filteredSearchInput.removeEventListener('keyup', this.handleInputVisualTokenWrapper);
this.filteredSearchInput.removeEventListener('keydown', this.checkForEnterWrapper);
+ this.filteredSearchInput.removeEventListener('keydown', this.checkForMetaBackspaceWrapper);
+ this.filteredSearchInput.removeEventListener('keydown', this.checkForAltOrCtrlBackspaceWrapper);
this.filteredSearchInput.removeEventListener('keyup', this.checkForBackspaceWrapper);
this.filteredSearchInput.removeEventListener('click', this.tokenChange);
this.filteredSearchInput.removeEventListener('keyup', this.tokenChange);
@@ -235,7 +248,11 @@ export default class FilteredSearchManager {
return e => {
// 8 = Backspace Key
// 46 = Delete Key
- if (e.keyCode === 8 || e.keyCode === 46) {
+ // Handled by respective backspace-combination check functions
+ if (e.altKey || e.ctrlKey || e.metaKey) {
+ return;
+ }
+ if (e.keyCode === BACKSPACE_KEY_CODE || e.keyCode === DELETE_KEY_CODE) {
const { lastVisualToken } = FilteredSearchVisualTokens.getLastVisualTokenBeforeInput();
const { tokenName, tokenValue } = DropdownUtils.getVisualTokenValues(lastVisualToken);
const canEdit = tokenName && this.canEdit && this.canEdit(tokenName, tokenValue);
@@ -258,15 +275,31 @@ export default class FilteredSearchManager {
};
}
+ checkForAltOrCtrlBackspace(e) {
+ if ((e.altKey || e.ctrlKey) && e.keyCode === BACKSPACE_KEY_CODE) {
+ // Default to native OS behavior if input value present
+ if (this.filteredSearchInput.value === '') {
+ FilteredSearchVisualTokens.removeLastTokenPartial();
+ }
+ }
+ }
+
+ checkForMetaBackspace(e) {
+ const onlyMeta = e.metaKey && !e.altKey && !e.ctrlKey && !e.shiftKey;
+ if (onlyMeta && e.keyCode === BACKSPACE_KEY_CODE) {
+ this.clearSearch();
+ }
+ }
+
checkForEnter(e) {
- if (e.keyCode === 38 || e.keyCode === 40) {
+ if (e.keyCode === UP_KEY_CODE || e.keyCode === DOWN_KEY_CODE) {
const { selectionStart } = this.filteredSearchInput;
e.preventDefault();
this.filteredSearchInput.setSelectionRange(selectionStart, selectionStart);
}
- if (e.keyCode === 13) {
+ if (e.keyCode === ENTER_KEY_CODE) {
const dropdown = this.dropdownManager.mapping[this.dropdownManager.currentDropdown];
const dropdownEl = dropdown.element;
const activeElements = dropdownEl.querySelectorAll('.droplab-item-active');
@@ -375,7 +408,7 @@ export default class FilteredSearchManager {
removeSelectedTokenKeydown(e) {
// 8 = Backspace Key
// 46 = Delete Key
- if (e.keyCode === 8 || e.keyCode === 46) {
+ if (e.keyCode === BACKSPACE_KEY_CODE || e.keyCode === DELETE_KEY_CODE) {
this.removeSelectedToken();
}
}
diff --git a/app/assets/javascripts/jira_import/components/jira_import_form.vue b/app/assets/javascripts/jira_import/components/jira_import_form.vue
index 4de04efe1b0..26e51c02b41 100644
--- a/app/assets/javascripts/jira_import/components/jira_import_form.vue
+++ b/app/assets/javascripts/jira_import/components/jira_import_form.vue
@@ -1,11 +1,11 @@
<script>
-import { GlAvatar, GlNewButton, GlFormGroup, GlFormSelect, GlLabel } from '@gitlab/ui';
+import { GlAvatar, GlButton, GlFormGroup, GlFormSelect, GlLabel } from '@gitlab/ui';
export default {
name: 'JiraImportForm',
components: {
GlAvatar,
- GlNewButton,
+ GlButton,
GlFormGroup,
GlFormSelect,
GlLabel,
@@ -86,8 +86,8 @@ export default {
</gl-form-group>
<div class="footer-block row-content-block d-flex justify-content-between">
- <gl-new-button category="primary" variant="success">{{ __('Next') }}</gl-new-button>
- <gl-new-button>{{ __('Cancel') }}</gl-new-button>
+ <gl-button category="primary" variant="success">{{ __('Next') }}</gl-button>
+ <gl-button>{{ __('Cancel') }}</gl-button>
</div>
</form>
</div>
diff --git a/app/assets/javascripts/lib/utils/keycodes.js b/app/assets/javascripts/lib/utils/keycodes.js
index 2270d329c24..16bffc5c2cf 100644
--- a/app/assets/javascripts/lib/utils/keycodes.js
+++ b/app/assets/javascripts/lib/utils/keycodes.js
@@ -1,5 +1,6 @@
-export const UP_KEY_CODE = 38;
-export const DOWN_KEY_CODE = 40;
+export const BACKSPACE_KEY_CODE = 8;
export const ENTER_KEY_CODE = 13;
export const ESC_KEY_CODE = 27;
-export const BACKSPACE_KEY_CODE = 8;
+export const UP_KEY_CODE = 38;
+export const DOWN_KEY_CODE = 40;
+export const DELETE_KEY_CODE = 46;
diff --git a/app/assets/javascripts/releases/components/app_edit.vue b/app/assets/javascripts/releases/components/app_edit.vue
index df356c18417..8d68ff02116 100644
--- a/app/assets/javascripts/releases/components/app_edit.vue
+++ b/app/assets/javascripts/releases/components/app_edit.vue
@@ -1,6 +1,6 @@
<script>
import { mapState, mapActions, mapGetters } from 'vuex';
-import { GlNewButton, GlFormInput, GlFormGroup } from '@gitlab/ui';
+import { GlButton, GlFormInput, GlFormGroup } from '@gitlab/ui';
import { escape as esc } from 'lodash';
import { __, sprintf } from '~/locale';
import MarkdownField from '~/vue_shared/components/markdown/field.vue';
@@ -15,7 +15,7 @@ export default {
components: {
GlFormInput,
GlFormGroup,
- GlNewButton,
+ GlButton,
MarkdownField,
AssetLinksForm,
},
@@ -167,7 +167,7 @@ export default {
<asset-links-form v-if="showAssetLinksForm" />
<div class="d-flex pt-3">
- <gl-new-button
+ <gl-button
class="mr-auto js-no-auto-disable"
category="primary"
variant="success"
@@ -176,10 +176,10 @@ export default {
:disabled="isSaveChangesDisabled"
>
{{ __('Save changes') }}
- </gl-new-button>
- <gl-new-button :href="cancelPath" class="js-cancel-button">
+ </gl-button>
+ <gl-button :href="cancelPath" class="js-cancel-button">
{{ __('Cancel') }}
- </gl-new-button>
+ </gl-button>
</div>
</form>
</div>
diff --git a/app/assets/javascripts/releases/components/asset_links_form.vue b/app/assets/javascripts/releases/components/asset_links_form.vue
index 6ca700c2b30..4bdc88f01dd 100644
--- a/app/assets/javascripts/releases/components/asset_links_form.vue
+++ b/app/assets/javascripts/releases/components/asset_links_form.vue
@@ -4,7 +4,7 @@ import {
GlSprintf,
GlLink,
GlFormGroup,
- GlNewButton,
+ GlButton,
GlIcon,
GlTooltipDirective,
GlFormInput,
@@ -12,7 +12,7 @@ import {
export default {
name: 'AssetLinksForm',
- components: { GlSprintf, GlLink, GlFormGroup, GlNewButton, GlIcon, GlFormInput },
+ components: { GlSprintf, GlLink, GlFormGroup, GlButton, GlIcon, GlFormInput },
directives: { GlTooltip: GlTooltipDirective },
computed: {
...mapState('detail', ['release', 'releaseAssetsDocsPath']),
@@ -170,7 +170,7 @@ export default {
</gl-form-group>
<div class="mb-5 mb-sm-3 mt-sm-4 col col-sm-auto">
- <gl-new-button
+ <gl-button
v-gl-tooltip
class="remove-button w-100"
:aria-label="__('Remove asset link')"
@@ -179,16 +179,16 @@ export default {
>
<gl-icon class="mr-1 mr-sm-0 mb-1" :size="16" name="remove" />
<span class="d-inline d-sm-none">{{ __('Remove asset link') }}</span>
- </gl-new-button>
+ </gl-button>
</div>
</div>
- <gl-new-button
+ <gl-button
ref="addAnotherLinkButton"
variant="link"
class="align-self-end mb-5 mb-sm-0"
@click="onAddAnotherClicked"
>
{{ __('Add another link') }}
- </gl-new-button>
+ </gl-button>
</div>
</template>
diff --git a/app/assets/javascripts/snippets/components/snippet_header.vue b/app/assets/javascripts/snippets/components/snippet_header.vue
index 79b191cb25a..30a23b51bc4 100644
--- a/app/assets/javascripts/snippets/components/snippet_header.vue
+++ b/app/assets/javascripts/snippets/components/snippet_header.vue
@@ -9,7 +9,7 @@ import {
GlLoadingIcon,
GlDropdown,
GlDropdownItem,
- GlNewButton,
+ GlButton,
} from '@gitlab/ui';
import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
@@ -28,7 +28,7 @@ export default {
GlDropdown,
GlDropdownItem,
TimeAgoTooltip,
- GlNewButton,
+ GlButton,
},
apollo: {
canCreateSnippet: {
@@ -186,7 +186,7 @@ export default {
<div class="detail-page-header-actions">
<div class="d-none d-sm-flex">
<template v-for="(action, index) in personalSnippetActions">
- <gl-new-button
+ <gl-button
v-if="action.condition"
:key="index"
:disabled="action.disabled"
@@ -197,7 +197,7 @@ export default {
@click="action.click ? action.click() : undefined"
>
{{ action.text }}
- </gl-new-button>
+ </gl-button>
</template>
</div>
<div class="d-block d-sm-none dropdown">
@@ -227,8 +227,8 @@ export default {
</gl-sprintf>
<template #modal-footer>
- <gl-new-button @click="closeDeleteModal">{{ __('Cancel') }}</gl-new-button>
- <gl-new-button
+ <gl-button @click="closeDeleteModal">{{ __('Cancel') }}</gl-button>
+ <gl-button
variant="danger"
category="primary"
:disabled="isDeleting"
@@ -237,7 +237,7 @@ export default {
>
<gl-loading-icon v-if="isDeleting" inline />
{{ __('Delete snippet') }}
- </gl-new-button>
+ </gl-button>
</template>
</gl-modal>
</div>
diff --git a/app/assets/javascripts/static_site_editor/components/invalid_content_message.vue b/app/assets/javascripts/static_site_editor/components/invalid_content_message.vue
new file mode 100644
index 00000000000..51ece15e29f
--- /dev/null
+++ b/app/assets/javascripts/static_site_editor/components/invalid_content_message.vue
@@ -0,0 +1,29 @@
+<script>
+import { GlNewButton } from '@gitlab/ui';
+
+export default {
+ components: {
+ GlNewButton,
+ },
+};
+</script>
+
+<template>
+ <div>
+ <h3>{{ s__('StaticSiteEditor|Incompatible file content') }}</h3>
+ <p>
+ {{
+ s__(
+ 'StaticSiteEditor|The Static Site Editor is currently configured to only edit Markdown content on pages generated from Middleman. Visit the documentation to learn more about configuring your site to use the Static Site Editor.',
+ )
+ }}
+ </p>
+ <div>
+ <gl-new-button
+ ref="documentationButton"
+ href="https://gitlab.com/gitlab-org/project-templates/static-site-editor-middleman"
+ >{{ s__('StaticSiteEditor|View documentation') }}</gl-new-button
+ >
+ </div>
+ </div>
+</template>
diff --git a/app/assets/javascripts/static_site_editor/components/publish_toolbar.vue b/app/assets/javascripts/static_site_editor/components/publish_toolbar.vue
index efb442d4d09..274d2f71749 100644
--- a/app/assets/javascripts/static_site_editor/components/publish_toolbar.vue
+++ b/app/assets/javascripts/static_site_editor/components/publish_toolbar.vue
@@ -1,9 +1,9 @@
<script>
-import { GlNewButton, GlLoadingIcon } from '@gitlab/ui';
+import { GlButton, GlLoadingIcon } from '@gitlab/ui';
export default {
components: {
- GlNewButton,
+ GlButton,
GlLoadingIcon,
},
props: {
@@ -29,16 +29,12 @@ export default {
<div class="d-flex bg-light border-top justify-content-between align-items-center py-3 px-4">
<gl-loading-icon :class="{ invisible: !savingChanges }" size="md" />
<div>
- <gl-new-button v-if="returnUrl" ref="returnUrlLink" :href="returnUrl">{{
+ <gl-button v-if="returnUrl" ref="returnUrlLink" :href="returnUrl">{{
s__('StaticSiteEditor|Return to site')
- }}</gl-new-button>
- <gl-new-button
- variant="success"
- :disabled="!saveable || savingChanges"
- @click="$emit('submit')"
- >
+ }}</gl-button>
+ <gl-button variant="success" :disabled="!saveable || savingChanges" @click="$emit('submit')">
{{ __('Submit Changes') }}
- </gl-new-button>
+ </gl-button>
</div>
</div>
</template>
diff --git a/app/assets/javascripts/static_site_editor/components/saved_changes_message.vue b/app/assets/javascripts/static_site_editor/components/saved_changes_message.vue
index d76c6d9d681..13f66551c00 100644
--- a/app/assets/javascripts/static_site_editor/components/saved_changes_message.vue
+++ b/app/assets/javascripts/static_site_editor/components/saved_changes_message.vue
@@ -1,14 +1,14 @@
<script>
import { isString } from 'lodash';
-import { GlLink, GlNewButton } from '@gitlab/ui';
+import { GlLink, GlButton } from '@gitlab/ui';
const validateUrlAndLabel = value => isString(value.label) && isString(value.url);
export default {
components: {
GlLink,
- GlNewButton,
+ GlButton,
},
props: {
branch: {
@@ -46,16 +46,12 @@ export default {
}}
</p>
<div class="d-flex justify-content-end">
- <gl-new-button ref="returnToSiteButton" :href="returnUrl">{{
+ <gl-button ref="returnToSiteButton" :href="returnUrl">{{
s__('StaticSiteEditor|Return to site')
- }}</gl-new-button>
- <gl-new-button
- ref="mergeRequestButton"
- class="ml-2"
- :href="mergeRequest.url"
- variant="success"
- >{{ s__('StaticSiteEditor|View merge request') }}</gl-new-button
- >
+ }}</gl-button>
+ <gl-button ref="mergeRequestButton" class="ml-2" :href="mergeRequest.url" variant="success">
+ {{ s__('StaticSiteEditor|View merge request') }}
+ </gl-button>
</div>
</div>
diff --git a/app/assets/javascripts/static_site_editor/components/static_site_editor.vue b/app/assets/javascripts/static_site_editor/components/static_site_editor.vue
index 4d912f5c0b5..28e10c3c2cb 100644
--- a/app/assets/javascripts/static_site_editor/components/static_site_editor.vue
+++ b/app/assets/javascripts/static_site_editor/components/static_site_editor.vue
@@ -5,11 +5,13 @@ import { GlSkeletonLoader } from '@gitlab/ui';
import EditArea from './edit_area.vue';
import EditHeader from './edit_header.vue';
import Toolbar from './publish_toolbar.vue';
+import InvalidContentMessage from './invalid_content_message.vue';
export default {
components: {
EditArea,
EditHeader,
+ InvalidContentMessage,
GlSkeletonLoader,
Toolbar,
},
@@ -19,13 +21,16 @@ export default {
'isLoadingContent',
'isSavingChanges',
'isContentLoaded',
+ 'isSupportedContent',
'returnUrl',
'title',
]),
...mapGetters(['contentChanged']),
},
mounted() {
- this.loadContent();
+ if (this.isSupportedContent) {
+ this.loadContent();
+ }
},
methods: {
...mapActions(['loadContent', 'setContent', 'submitChanges']),
@@ -33,30 +38,33 @@ export default {
};
</script>
<template>
- <div class="d-flex justify-content-center h-100 pt-2">
- <div v-if="isLoadingContent" class="w-50 h-50">
- <gl-skeleton-loader :width="500" :height="102">
- <rect width="500" height="16" rx="4" />
- <rect y="20" width="375" height="16" rx="4" />
- <rect x="380" y="20" width="120" height="16" rx="4" />
- <rect y="40" width="250" height="16" rx="4" />
- <rect x="255" y="40" width="150" height="16" rx="4" />
- <rect x="410" y="40" width="90" height="16" rx="4" />
- </gl-skeleton-loader>
- </div>
- <div v-if="isContentLoaded" class="d-flex flex-grow-1 flex-column">
- <edit-header class="w-75 align-self-center py-2" :title="title" />
- <edit-area
- class="w-75 h-100 shadow-none align-self-center"
- :value="content"
- @input="setContent"
- />
- <toolbar
- :return-url="returnUrl"
- :saveable="contentChanged"
- :saving-changes="isSavingChanges"
- @submit="submitChanges"
- />
- </div>
+ <div class="d-flex justify-content-center h-100 pt-2">
+ <template v-if="isSupportedContent">
+ <div v-if="isLoadingContent" class="w-50 h-50">
+ <gl-skeleton-loader :width="500" :height="102">
+ <rect width="500" height="16" rx="4" />
+ <rect y="20" width="375" height="16" rx="4" />
+ <rect x="380" y="20" width="120" height="16" rx="4" />
+ <rect y="40" width="250" height="16" rx="4" />
+ <rect x="255" y="40" width="150" height="16" rx="4" />
+ <rect x="410" y="40" width="90" height="16" rx="4" />
+ </gl-skeleton-loader>
+ </div>
+ <div v-if="isContentLoaded" class="d-flex flex-grow-1 flex-column">
+ <edit-header class="w-75 align-self-center py-2" :title="title" />
+ <edit-area
+ class="w-75 h-100 shadow-none align-self-center"
+ :value="content"
+ @input="setContent"
+ />
+ <toolbar
+ :return-url="returnUrl"
+ :saveable="contentChanged"
+ :saving-changes="isSavingChanges"
+ @submit="submitChanges"
+ />
+ </div>
+ </template>
+ <invalid-content-message v-else class="w-75" />
</div>
</template>
diff --git a/app/assets/javascripts/static_site_editor/index.js b/app/assets/javascripts/static_site_editor/index.js
index c6a883c659a..15d668fd431 100644
--- a/app/assets/javascripts/static_site_editor/index.js
+++ b/app/assets/javascripts/static_site_editor/index.js
@@ -3,10 +3,17 @@ import StaticSiteEditor from './components/static_site_editor.vue';
import createStore from './store';
const initStaticSiteEditor = el => {
- const { projectId, returnUrl, path: sourcePath } = el.dataset;
+ const { projectId, path: sourcePath, returnUrl } = el.dataset;
+ const isSupportedContent = 'isSupportedContent' in el.dataset;
const store = createStore({
- initialState: { projectId, returnUrl, sourcePath, username: window.gon.current_username },
+ initialState: {
+ isSupportedContent,
+ projectId,
+ returnUrl,
+ sourcePath,
+ username: window.gon.current_username,
+ },
});
return new Vue({
diff --git a/app/assets/javascripts/static_site_editor/store/state.js b/app/assets/javascripts/static_site_editor/store/state.js
index 98a84d9f75d..3d9fa9e6303 100644
--- a/app/assets/javascripts/static_site_editor/store/state.js
+++ b/app/assets/javascripts/static_site_editor/store/state.js
@@ -6,6 +6,7 @@ const createState = (initialState = {}) => ({
isLoadingContent: false,
isSavingChanges: false,
+ isSupportedContent: false,
isContentLoaded: false,
diff --git a/app/assets/javascripts/vue_shared/components/clone_dropdown.vue b/app/assets/javascripts/vue_shared/components/clone_dropdown.vue
index 3b9b9f37f52..7826c179889 100644
--- a/app/assets/javascripts/vue_shared/components/clone_dropdown.vue
+++ b/app/assets/javascripts/vue_shared/components/clone_dropdown.vue
@@ -3,7 +3,7 @@ import {
GlNewDropdown,
GlNewDropdownHeader,
GlFormInputGroup,
- GlNewButton,
+ GlButton,
GlIcon,
GlTooltipDirective,
} from '@gitlab/ui';
@@ -15,7 +15,7 @@ export default {
GlNewDropdown,
GlNewDropdownHeader,
GlFormInputGroup,
- GlNewButton,
+ GlButton,
GlIcon,
},
directives: {
@@ -55,13 +55,13 @@ export default {
<div class="mx-3">
<gl-form-input-group :value="sshLink" readonly select-on-click>
<template #append>
- <gl-new-button
+ <gl-button
v-gl-tooltip.hover
:title="$options.copyURLTooltip"
:data-clipboard-text="sshLink"
>
<gl-icon name="copy-to-clipboard" :title="$options.copyURLTooltip" />
- </gl-new-button>
+ </gl-button>
</template>
</gl-form-input-group>
</div>
@@ -73,13 +73,13 @@ export default {
<div class="mx-3">
<gl-form-input-group :value="httpLink" readonly select-on-click>
<template #append>
- <gl-new-button
+ <gl-button
v-gl-tooltip.hover
:title="$options.copyURLTooltip"
:data-clipboard-text="httpLink"
>
<gl-icon name="copy-to-clipboard" :title="$options.copyURLTooltip" />
- </gl-new-button>
+ </gl-button>
</template>
</gl-form-input-group>
</div>
diff --git a/app/finders/autocomplete/routes_finder.rb b/app/finders/autocomplete/routes_finder.rb
new file mode 100644
index 00000000000..b3f2693b273
--- /dev/null
+++ b/app/finders/autocomplete/routes_finder.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+module Autocomplete
+ # Finder that returns a list of routes that match on the `path` attribute.
+ class RoutesFinder
+ attr_reader :current_user, :search
+
+ LIMIT = 20
+
+ def initialize(current_user, params = {})
+ @current_user = current_user
+ @search = params[:search]
+ end
+
+ def execute
+ return [] if @search.blank?
+
+ Route
+ .for_routable(routables)
+ .sort_by_path_length
+ .fuzzy_search(@search, [:path])
+ .limit(LIMIT) # rubocop: disable CodeReuse/ActiveRecord
+ end
+
+ private
+
+ def routables
+ raise NotImplementedError
+ end
+
+ class NamespacesOnly < self
+ def routables
+ return Namespace.all if current_user.admin?
+
+ current_user.namespaces
+ end
+ end
+
+ class ProjectsOnly < self
+ def routables
+ return Project.all if current_user.admin?
+
+ current_user.projects
+ end
+ end
+ end
+end
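Note: a minimal usage sketch of the finder added above. The user lookup and the search term are illustrative only; the 20-record cap and the path matching come from the finder itself.

    # Illustrative console session, not part of the change.
    user   = User.find_by(username: 'root')
    finder = Autocomplete::RoutesFinder::NamespacesOnly.new(user, search: 'gitlab')
    finder.execute
    # => up to 20 Route records whose path fuzzy-matches "gitlab",
    #    shortest paths first; [] when the search term is blank.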
diff --git a/app/graphql/types/merge_request_type.rb b/app/graphql/types/merge_request_type.rb
index 8f6b742a93c..cd4c6b4d46a 100644
--- a/app/graphql/types/merge_request_type.rb
+++ b/app/graphql/types/merge_request_type.rb
@@ -60,7 +60,7 @@ module Types
description: 'Indicates if the source branch of the merge request will be deleted after merge'
field :force_remove_source_branch, GraphQL::BOOLEAN_TYPE, method: :force_remove_source_branch?, null: true,
description: 'Indicates if the project settings will lead to source branch deletion after merge'
- field :merge_status, GraphQL::STRING_TYPE, null: true,
+ field :merge_status, GraphQL::STRING_TYPE, method: :public_merge_status, null: true,
description: 'Status of the merge request'
field :in_progress_merge_commit_sha, GraphQL::STRING_TYPE, null: true,
description: 'Commit SHA of the merge request if merge is in progress'
diff --git a/app/models/application_setting.rb b/app/models/application_setting.rb
index c1e44748304..0aa0216558f 100644
--- a/app/models/application_setting.rb
+++ b/app/models/application_setting.rb
@@ -14,6 +14,7 @@ class ApplicationSetting < ApplicationRecord
add_authentication_token_field :static_objects_external_storage_auth_token
belongs_to :self_monitoring_project, class_name: "Project", foreign_key: 'instance_administration_project_id'
+ belongs_to :push_rule
alias_attribute :self_monitoring_project_id, :instance_administration_project_id
belongs_to :instance_administrators_group, class_name: "Group"
diff --git a/app/models/ci/job_artifact.rb b/app/models/ci/job_artifact.rb
index c4ac10814a9..ef0701b3874 100644
--- a/app/models/ci/job_artifact.rb
+++ b/app/models/ci/job_artifact.rb
@@ -73,14 +73,12 @@ module Ci
validates :file_format, presence: true, unless: :trace?, on: :create
validate :valid_file_format?, unless: :trace?, on: :create
-
before_save :set_size, if: :file_changed?
- before_save :set_file_store, if: ->(job_artifact) { job_artifact.file_store.nil? }
-
- after_save :update_file_store, if: :saved_change_to_file?
update_project_statistics project_statistics_name: :build_artifacts_size
+ after_save :update_file_store, if: :saved_change_to_file?
+
scope :with_files_stored_locally, -> { where(file_store: [nil, ::JobArtifactUploader::Store::LOCAL]) }
scope :with_files_stored_remotely, -> { where(file_store: ::JobArtifactUploader::Store::REMOTE) }
scope :for_sha, ->(sha, project_id) { joins(job: :pipeline).where(ci_pipelines: { sha: sha, project_id: project_id }) }
@@ -228,15 +226,6 @@ module Ci
self.size = file.size
end
- def set_file_store
- self.file_store =
- if JobArtifactUploader.object_store_enabled? && JobArtifactUploader.direct_upload_enabled?
- JobArtifactUploader::Store::REMOTE
- else
- file.object_store
- end
- end
-
def project_destroyed?
# Use job.project to avoid extra DB query for project
job.project.pending_delete?
diff --git a/app/models/diff_discussion.rb b/app/models/diff_discussion.rb
index 93e3ebf7896..f9e2f00b9f3 100644
--- a/app/models/diff_discussion.rb
+++ b/app/models/diff_discussion.rb
@@ -13,6 +13,7 @@ class DiffDiscussion < Discussion
delegate :position,
:original_position,
:change_position,
+ :diff_note_positions,
:on_text?,
:on_image?,
diff --git a/app/models/lfs_object.rb b/app/models/lfs_object.rb
index c5233deaa96..6a86aebae39 100644
--- a/app/models/lfs_object.rb
+++ b/app/models/lfs_object.rb
@@ -17,8 +17,6 @@ class LfsObject < ApplicationRecord
mount_uploader :file, LfsObjectUploader
- before_save :set_file_store, if: ->(lfs_object) { lfs_object.file_store.nil? }
-
after_save :update_file_store, if: :saved_change_to_file?
def self.not_linked_to_project(project)
@@ -57,17 +55,6 @@ class LfsObject < ApplicationRecord
def self.calculate_oid(path)
self.hexdigest(path)
end
-
- private
-
- def set_file_store
- self.file_store =
- if LfsObjectUploader.object_store_enabled? && LfsObjectUploader.direct_upload_enabled?
- LfsObjectUploader::Store::REMOTE
- else
- file.object_store
- end
- end
end
LfsObject.prepend_if_ee('EE::LfsObject')
diff --git a/app/models/merge_request.rb b/app/models/merge_request.rb
index b9acb539404..c47f1af2a73 100644
--- a/app/models/merge_request.rb
+++ b/app/models/merge_request.rb
@@ -167,20 +167,22 @@ class MergeRequest < ApplicationRecord
end
event :mark_as_checking do
- transition [:unchecked, :cannot_be_merged_recheck] => :checking
+ transition unchecked: :checking
+ transition cannot_be_merged_recheck: :cannot_be_merged_rechecking
end
event :mark_as_mergeable do
- transition [:unchecked, :cannot_be_merged_recheck, :checking] => :can_be_merged
+ transition [:unchecked, :cannot_be_merged_recheck, :checking, :cannot_be_merged_rechecking] => :can_be_merged
end
event :mark_as_unmergeable do
- transition [:unchecked, :cannot_be_merged_recheck, :checking] => :cannot_be_merged
+ transition [:unchecked, :cannot_be_merged_recheck, :checking, :cannot_be_merged_rechecking] => :cannot_be_merged
end
state :unchecked
state :cannot_be_merged_recheck
state :checking
+ state :cannot_be_merged_rechecking
state :can_be_merged
state :cannot_be_merged
@@ -189,7 +191,7 @@ class MergeRequest < ApplicationRecord
end
# rubocop: disable CodeReuse/ServiceClass
- after_transition unchecked: :cannot_be_merged do |merge_request, transition|
+ after_transition [:unchecked, :checking] => :cannot_be_merged do |merge_request, transition|
if merge_request.notify_conflict?
NotificationService.new.merge_request_unmergeable(merge_request)
TodoService.new.merge_request_became_unmergeable(merge_request)
@@ -202,6 +204,12 @@ class MergeRequest < ApplicationRecord
end
end
+ # Returns current merge_status except it returns `cannot_be_merged_rechecking` as `checking`
+ # to avoid exposing unnecessary internal state
+ def public_merge_status
+ cannot_be_merged_rechecking? ? 'checking' : merge_status
+ end
+
validates :source_project, presence: true, unless: [:allow_broken, :importing?, :closed_without_fork?]
validates :source_branch, presence: true
validates :target_project, presence: true
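Note: a sketch of the new cannot_be_merged_rechecking flow and the public_merge_status accessor added above; setting merge_status directly is for illustration only.

    merge_request.update!(merge_status: 'cannot_be_merged_recheck')
    merge_request.mark_as_checking!
    merge_request.merge_status        # => "cannot_be_merged_rechecking" (internal state)
    merge_request.public_merge_status # => "checking", what serializers and the API expose
    merge_request.mark_as_mergeable!
    merge_request.merge_status        # => "can_be_merged"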
diff --git a/app/models/note.rb b/app/models/note.rb
index e6ad7c2227f..a2a711c987f 100644
--- a/app/models/note.rb
+++ b/app/models/note.rb
@@ -125,7 +125,7 @@ class Note < ApplicationRecord
scope :inc_author, -> { includes(:author) }
scope :inc_relations_for_view, -> do
includes(:project, { author: :status }, :updated_by, :resolved_by, :award_emoji,
- { system_note_metadata: :description_version }, :note_diff_file, :suggestions)
+ { system_note_metadata: :description_version }, :note_diff_file, :diff_note_positions, :suggestions)
end
scope :with_notes_filter, -> (notes_filter) do
diff --git a/app/models/project_setting.rb b/app/models/project_setting.rb
index 37e4a7be770..7c93faf3928 100644
--- a/app/models/project_setting.rb
+++ b/app/models/project_setting.rb
@@ -9,3 +9,5 @@ class ProjectSetting < ApplicationRecord
where(primary_key => safe_find_or_create_by(attrs))
end
end
+
+ProjectSetting.prepend_if_ee('EE::ProjectSetting')
diff --git a/app/models/route.rb b/app/models/route.rb
index 91ea2966013..63a0461807b 100644
--- a/app/models/route.rb
+++ b/app/models/route.rb
@@ -2,9 +2,9 @@
class Route < ApplicationRecord
include CaseSensitivity
+ include Gitlab::SQL::Pattern
belongs_to :source, polymorphic: true # rubocop:disable Cop/PolymorphicAssociations
-
validates :source, presence: true
validates :path,
@@ -19,6 +19,8 @@ class Route < ApplicationRecord
after_update :rename_descendants
scope :inside_path, -> (path) { where('routes.path LIKE ?', "#{sanitize_sql_like(path)}/%") }
+ scope :for_routable, -> (routable) { where(source: routable) }
+ scope :sort_by_path_length, -> { order('LENGTH(routes.path)', :path) }
def rename_descendants
return unless saved_change_to_path? || saved_change_to_name?
diff --git a/app/serializers/discussion_entity.rb b/app/serializers/discussion_entity.rb
index e302672042e..77881eaba0c 100644
--- a/app/serializers/discussion_entity.rb
+++ b/app/serializers/discussion_entity.rb
@@ -20,6 +20,14 @@ class DiscussionEntity < Grape::Entity
discussion_path(discussion)
end
+ expose :positions, if: -> (d, _) { display_merge_ref_discussions?(d) } do |discussion|
+ discussion.diff_note_positions.map(&:position)
+ end
+
+ expose :line_codes, if: -> (d, _) { display_merge_ref_discussions?(d) } do |discussion|
+ discussion.diff_note_positions.map(&:line_code)
+ end
+
expose :individual_note?, as: :individual_note
expose :resolvable do |discussion|
discussion.resolvable?
@@ -59,4 +67,11 @@ class DiscussionEntity < Grape::Entity
def current_user
request.current_user
end
+
+ def display_merge_ref_discussions?(discussion)
+ return unless discussion.diff_discussion?
+ return if discussion.legacy_diff_discussion?
+
+ Feature.enabled?(:merge_ref_head_comments, discussion.project)
+ end
end
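Note: rough sketch of when the two new fields appear; the discussion variable is illustrative.

    # Mirrors display_merge_ref_discussions? above:
    Feature.enabled?(:merge_ref_head_comments, discussion.project)
    # When true, and the discussion is a non-legacy diff discussion, the serialized
    # hash gains :positions and :line_codes, one entry per stored DiffNotePosition.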
diff --git a/app/serializers/merge_request_basic_entity.rb b/app/serializers/merge_request_basic_entity.rb
index 973e971b4c0..82baf4a4a78 100644
--- a/app/serializers/merge_request_basic_entity.rb
+++ b/app/serializers/merge_request_basic_entity.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
class MergeRequestBasicEntity < Grape::Entity
- expose :merge_status
+ expose :public_merge_status, as: :merge_status
expose :merge_error
expose :state
expose :source_branch_exists?, as: :source_branch_exists
diff --git a/app/serializers/merge_request_poll_cached_widget_entity.rb b/app/serializers/merge_request_poll_cached_widget_entity.rb
index 2f8eb6650e8..72f629b3507 100644
--- a/app/serializers/merge_request_poll_cached_widget_entity.rb
+++ b/app/serializers/merge_request_poll_cached_widget_entity.rb
@@ -6,7 +6,7 @@ class MergeRequestPollCachedWidgetEntity < IssuableEntity
expose :merge_commit_sha
expose :short_merge_commit_sha
expose :merge_error
- expose :merge_status
+ expose :public_merge_status, as: :merge_status
expose :merge_user_id
expose :source_branch
expose :source_project_id
diff --git a/app/serializers/route_entity.rb b/app/serializers/route_entity.rb
new file mode 100644
index 00000000000..158fda5e00e
--- /dev/null
+++ b/app/serializers/route_entity.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+class RouteEntity < Grape::Entity
+ expose :id
+ expose :source_id
+ expose :source_type
+ expose :path
+end
diff --git a/app/serializers/route_serializer.rb b/app/serializers/route_serializer.rb
new file mode 100644
index 00000000000..0b187588301
--- /dev/null
+++ b/app/serializers/route_serializer.rb
@@ -0,0 +1,5 @@
+# frozen_string_literal: true
+
+class RouteSerializer < BaseSerializer
+ entity RouteEntity
+end
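Note: how the new finder, entity and serializer plausibly fit together; the controller context (current_user, params) is hypothetical, only the classes themselves are part of this diff.

    # Hypothetical autocomplete action: serialize finder results for the frontend.
    routes = Autocomplete::RoutesFinder::ProjectsOnly.new(current_user, search: params[:search]).execute
    RouteSerializer.new.represent(routes)
    # => [{ id: 1, source_id: 42, source_type: "Project", path: "group/project" }, ...]  (values illustrative)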
diff --git a/app/services/pod_logs/base_service.rb b/app/services/pod_logs/base_service.rb
index 8cc8fb913a2..2451ab8e0ce 100644
--- a/app/services/pod_logs/base_service.rb
+++ b/app/services/pod_logs/base_service.rb
@@ -62,13 +62,11 @@ module PodLogs
end
def get_raw_pods(result)
- result[:raw_pods] = cluster.kubeclient.get_pods(namespace: namespace)
-
- success(result)
+ raise NotImplementedError
end
def get_pod_names(result)
- result[:pods] = result[:raw_pods].map(&:metadata).map(&:name)
+ result[:pods] = result[:raw_pods].map { |p| p[:name] }
success(result)
end
diff --git a/app/services/pod_logs/elasticsearch_service.rb b/app/services/pod_logs/elasticsearch_service.rb
index 0a5185999ab..aac0fa424ca 100644
--- a/app/services/pod_logs/elasticsearch_service.rb
+++ b/app/services/pod_logs/elasticsearch_service.rb
@@ -23,6 +23,23 @@ module PodLogs
super + %i(cursor)
end
+ def get_raw_pods(result)
+ client = cluster&.application_elastic_stack&.elasticsearch_client
+ return error(_('Unable to connect to Elasticsearch')) unless client
+
+ result[:raw_pods] = ::Gitlab::Elasticsearch::Logs::Pods.new(client).pods(namespace)
+
+ success(result)
+ rescue Elasticsearch::Transport::Transport::ServerError => e
+ ::Gitlab::ErrorTracking.track_exception(e)
+
+ error(_('Elasticsearch returned status code: %{status_code}') % {
+ # ServerError is the parent class of exceptions named after HTTP status codes, eg: "Elasticsearch::Transport::Transport::Errors::NotFound"
+ # there is no method on the exception other than the class name to determine the type of error encountered.
+ status_code: e.class.name.split('::').last
+ })
+ end
+
def check_times(result)
result[:start_time] = params['start_time'] if params.key?('start_time') && Time.iso8601(params['start_time'])
result[:end_time] = params['end_time'] if params.key?('end_time') && Time.iso8601(params['end_time'])
@@ -48,7 +65,7 @@ module PodLogs
client = cluster&.application_elastic_stack&.elasticsearch_client
return error(_('Unable to connect to Elasticsearch')) unless client
- response = ::Gitlab::Elasticsearch::Logs.new(client).pod_logs(
+ response = ::Gitlab::Elasticsearch::Logs::Lines.new(client).pod_logs(
namespace,
pod_name: result[:pod_name],
container_name: result[:container_name],
@@ -69,7 +86,7 @@ module PodLogs
# there is no method on the exception other than the class name to determine the type of error encountered.
status_code: e.class.name.split('::').last
})
- rescue ::Gitlab::Elasticsearch::Logs::InvalidCursor
+ rescue ::Gitlab::Elasticsearch::Logs::Lines::InvalidCursor
error(_('Invalid cursor value provided'))
end
end
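Note: sketch of the split performed above; Logs::Lines keeps the existing log query while the new Logs::Pods backs get_raw_pods. Namespace and return values are illustrative.

    client = cluster.application_elastic_stack.elasticsearch_client
    ::Gitlab::Elasticsearch::Logs::Pods.new(client).pods('production')
    # => [{ name: "runner-abc", container_names: ["runner", "helper"] }, ...]
    # KubernetesService#get_raw_pods below builds the same { name:, container_names: }
    # shape, so check_pod_name/check_container_name can consume either source.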
diff --git a/app/services/pod_logs/kubernetes_service.rb b/app/services/pod_logs/kubernetes_service.rb
index 31e26912c73..0a8072a9037 100644
--- a/app/services/pod_logs/kubernetes_service.rb
+++ b/app/services/pod_logs/kubernetes_service.rb
@@ -21,6 +21,17 @@ module PodLogs
private
+ def get_raw_pods(result)
+ result[:raw_pods] = cluster.kubeclient.get_pods(namespace: namespace).map do |pod|
+ {
+ name: pod.metadata.name,
+ container_names: pod.spec.containers.map(&:name)
+ }
+ end
+
+ success(result)
+ end
+
def check_pod_name(result)
# If pod_name is not received as parameter, get the pod logs of the first
# pod of this namespace.
@@ -43,11 +54,11 @@ module PodLogs
end
def check_container_name(result)
- pod_details = result[:raw_pods].find { |p| p.metadata.name == result[:pod_name] }
- containers = pod_details.spec.containers.map(&:name)
+ pod_details = result[:raw_pods].find { |p| p[:name] == result[:pod_name] }
+ container_names = pod_details[:container_names]
# select first container if not specified
- result[:container_name] ||= containers.first
+ result[:container_name] ||= container_names.first
unless result[:container_name]
return error(_('No containers available'))
@@ -58,7 +69,7 @@ module PodLogs
' %{max_length} chars' % { max_length: K8S_NAME_MAX_LENGTH }))
end
- unless containers.include?(result[:container_name])
+ unless container_names.include?(result[:container_name])
return error(_('Container does not exist'))
end
diff --git a/app/uploaders/records_uploads.rb b/app/uploaders/records_uploads.rb
index 427314a87bb..967fcdc704e 100644
--- a/app/uploaders/records_uploads.rb
+++ b/app/uploaders/records_uploads.rb
@@ -56,31 +56,10 @@ module RecordsUploads
size: file.size,
path: upload_path,
model: model,
- mount_point: mounted_as,
- store: initial_store
+ mount_point: mounted_as
)
end
- def initial_store
- if immediately_remote_stored?
- ::ObjectStorage::Store::REMOTE
- else
- ::ObjectStorage::Store::LOCAL
- end
- end
-
- def immediately_remote_stored?
- object_storage_available? && direct_upload_enabled?
- end
-
- def object_storage_available?
- self.class.ancestors.include?(ObjectStorage::Concern)
- end
-
- def direct_upload_enabled?
- self.class.object_store_enabled? && self.class.direct_upload_enabled?
- end
-
# Before removing an attachment, destroy any Upload records at the same path
#
# Called `before :remove`
diff --git a/app/workers/concerns/cronjob_queue.rb b/app/workers/concerns/cronjob_queue.rb
index 25ee4539cab..955387b5ad4 100644
--- a/app/workers/concerns/cronjob_queue.rb
+++ b/app/workers/concerns/cronjob_queue.rb
@@ -10,4 +10,14 @@ module CronjobQueue
sidekiq_options retry: false
worker_context project: nil, namespace: nil, user: nil
end
+
+ class_methods do
+ # Cronjobs never get scheduled with arguments, so this is safe to
+ # override
+ def context_for_arguments(_args)
+ return if Gitlab::ApplicationContext.current_context_include?('meta.caller_id')
+
+ Gitlab::ApplicationContext.new(caller_id: "Cronjob")
+ end
+ end
end
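Note: sketch of the effect of context_for_arguments above; the worker class is hypothetical. Cron workers fall back to a "Cronjob" caller_id unless one is already present in the application context.

    class SomeScheduledWorker # hypothetical cron worker
      include ApplicationWorker
      include CronjobQueue
    end

    SomeScheduledWorker.context_for_arguments([])
    # => Gitlab::ApplicationContext with caller_id: "Cronjob"
    # => nil when 'meta.caller_id' is already set in the current context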
diff --git a/changelogs/unreleased/213382-use-not-valid-to-immediately-enforce-a-not-null-constraint.yml b/changelogs/unreleased/213382-use-not-valid-to-immediately-enforce-a-not-null-constraint.yml
deleted file mode 100644
index 066c8e2c45c..00000000000
--- a/changelogs/unreleased/213382-use-not-valid-to-immediately-enforce-a-not-null-constraint.yml
+++ /dev/null
@@ -1,6 +0,0 @@
----
-title: Use NOT VALID to enforce a NOT NULL constraint on file_store to ci_job_artifacts,
- lfs_objects and uploads tables
-merge_request: 28946
-author:
-type: fixed
diff --git a/changelogs/unreleased/ak-historical-pods.yml b/changelogs/unreleased/ak-historical-pods.yml
new file mode 100644
index 00000000000..8c8e781223f
--- /dev/null
+++ b/changelogs/unreleased/ak-historical-pods.yml
@@ -0,0 +1,5 @@
+---
+title: Gather historical pod list from Elasticsearch
+merge_request: 29168
+author:
+type: added
diff --git a/changelogs/unreleased/issue-211404.yml b/changelogs/unreleased/issue-211404.yml
new file mode 100644
index 00000000000..f9af3adbe03
--- /dev/null
+++ b/changelogs/unreleased/issue-211404.yml
@@ -0,0 +1,5 @@
+---
+title: Added support for single-token deletion via option/ctrl-backspace or search-filter clearing via command-backspace in filtered search
+merge_request: 28295
+author: James Becker
+type: added
diff --git a/changelogs/unreleased/refactor_push_rules.yml b/changelogs/unreleased/refactor_push_rules.yml
new file mode 100644
index 00000000000..9afb0761689
--- /dev/null
+++ b/changelogs/unreleased/refactor_push_rules.yml
@@ -0,0 +1,5 @@
+---
+title: Refactor push rules and add push_rule_id columns in project settings and application settings
+merge_request: 28286
+author:
+type: added
diff --git a/config/routes.rb b/config/routes.rb
index c4a1f693048..eefd3bd78f2 100644
--- a/config/routes.rb
+++ b/config/routes.rb
@@ -53,6 +53,8 @@ Rails.application.routes.draw do
Gitlab.ee do
get '/autocomplete/project_groups' => 'autocomplete#project_groups'
+ get '/autocomplete/project_routes' => 'autocomplete#project_routes'
+ get '/autocomplete/namespace_routes' => 'autocomplete#namespace_routes'
end
# Sign up
diff --git a/db/migrate/20200325104755_add_push_rules_id_to_project_settings.rb b/db/migrate/20200325104755_add_push_rules_id_to_project_settings.rb
new file mode 100644
index 00000000000..93de736436b
--- /dev/null
+++ b/db/migrate/20200325104755_add_push_rules_id_to_project_settings.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+class AddPushRulesIdToProjectSettings < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ def up
+ with_lock_retries do
+ add_column :project_settings, :push_rule_id, :bigint
+ end
+ end
+
+ def down
+ with_lock_retries do
+ remove_column :project_settings, :push_rule_id
+ end
+ end
+end
diff --git a/db/migrate/20200325104756_add_push_rules_foreign_key_to_project_settings.rb b/db/migrate/20200325104756_add_push_rules_foreign_key_to_project_settings.rb
new file mode 100644
index 00000000000..41ad8d73b4c
--- /dev/null
+++ b/db/migrate/20200325104756_add_push_rules_foreign_key_to_project_settings.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+class AddPushRulesForeignKeyToProjectSettings < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+ disable_ddl_transaction!
+
+ def up
+ add_concurrent_index :project_settings, :push_rule_id, unique: true
+ add_concurrent_foreign_key :project_settings, :push_rules, column: :push_rule_id, on_delete: :cascade
+ end
+
+ def down
+ remove_foreign_key_if_exists :project_settings, column: :push_rule_id
+ remove_concurrent_index :project_settings, :push_rule_id
+ end
+end
diff --git a/db/migrate/20200325104833_add_push_rules_id_to_application_settings.rb b/db/migrate/20200325104833_add_push_rules_id_to_application_settings.rb
new file mode 100644
index 00000000000..4650f1734c0
--- /dev/null
+++ b/db/migrate/20200325104833_add_push_rules_id_to_application_settings.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+class AddPushRulesIdToApplicationSettings < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ def up
+ with_lock_retries do
+ add_column :application_settings, :push_rule_id, :bigint
+ end
+ end
+
+ def down
+ with_lock_retries do
+ remove_column :application_settings, :push_rule_id
+ end
+ end
+end
diff --git a/db/migrate/20200325104834_add_push_rules_foreign_key_to_application_settings.rb b/db/migrate/20200325104834_add_push_rules_foreign_key_to_application_settings.rb
new file mode 100644
index 00000000000..5263250833d
--- /dev/null
+++ b/db/migrate/20200325104834_add_push_rules_foreign_key_to_application_settings.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+class AddPushRulesForeignKeyToApplicationSettings < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+ disable_ddl_transaction!
+
+ def up
+ add_concurrent_index :application_settings, :push_rule_id, unique: true
+ add_concurrent_foreign_key :application_settings, :push_rules, column: :push_rule_id, on_delete: :nullify
+ end
+
+ def down
+ remove_concurrent_index :application_settings, :push_rule_id
+ remove_foreign_key_if_exists :application_settings, column: :push_rule_id
+ end
+end
diff --git a/db/migrate/20200406165950_add_not_null_constraint_on_file_store_to_lfs_objects.rb b/db/migrate/20200406165950_add_not_null_constraint_on_file_store_to_lfs_objects.rb
deleted file mode 100644
index 78b5832fea4..00000000000
--- a/db/migrate/20200406165950_add_not_null_constraint_on_file_store_to_lfs_objects.rb
+++ /dev/null
@@ -1,24 +0,0 @@
-# frozen_string_literal: true
-
-class AddNotNullConstraintOnFileStoreToLfsObjects < ActiveRecord::Migration[6.0]
- include Gitlab::Database::MigrationHelpers
-
- CONSTRAINT_NAME = 'lfs_objects_file_store_not_null'
- DOWNTIME = false
-
- def up
- with_lock_retries do
- execute <<~SQL
- ALTER TABLE lfs_objects ADD CONSTRAINT #{CONSTRAINT_NAME} CHECK (file_store IS NOT NULL) NOT VALID;
- SQL
- end
- end
-
- def down
- with_lock_retries do
- execute <<~SQL
- ALTER TABLE lfs_objects DROP CONSTRAINT IF EXISTS #{CONSTRAINT_NAME};
- SQL
- end
- end
-end
diff --git a/db/migrate/20200406171857_add_not_null_constraint_on_file_store_to_ci_job_artifacts.rb b/db/migrate/20200406171857_add_not_null_constraint_on_file_store_to_ci_job_artifacts.rb
deleted file mode 100644
index 1d44e5c17b3..00000000000
--- a/db/migrate/20200406171857_add_not_null_constraint_on_file_store_to_ci_job_artifacts.rb
+++ /dev/null
@@ -1,24 +0,0 @@
-# frozen_string_literal: true
-
-class AddNotNullConstraintOnFileStoreToCiJobArtifacts < ActiveRecord::Migration[6.0]
- include Gitlab::Database::MigrationHelpers
-
- CONSTRAINT_NAME = 'ci_job_artifacts_file_store_not_null'
- DOWNTIME = false
-
- def up
- with_lock_retries do
- execute <<~SQL
- ALTER TABLE ci_job_artifacts ADD CONSTRAINT #{CONSTRAINT_NAME} CHECK (file_store IS NOT NULL) NOT VALID;
- SQL
- end
- end
-
- def down
- with_lock_retries do
- execute <<~SQL
- ALTER TABLE ci_job_artifacts DROP CONSTRAINT IF EXISTS #{CONSTRAINT_NAME};
- SQL
- end
- end
-end
diff --git a/db/migrate/20200406172135_add_not_null_constraint_on_file_store_to_uploads.rb b/db/migrate/20200406172135_add_not_null_constraint_on_file_store_to_uploads.rb
deleted file mode 100644
index aa498ba9c89..00000000000
--- a/db/migrate/20200406172135_add_not_null_constraint_on_file_store_to_uploads.rb
+++ /dev/null
@@ -1,24 +0,0 @@
-# frozen_string_literal: true
-
-class AddNotNullConstraintOnFileStoreToUploads < ActiveRecord::Migration[6.0]
- include Gitlab::Database::MigrationHelpers
-
- CONSTRAINT_NAME = 'uploads_store_not_null'
- DOWNTIME = false
-
- def up
- with_lock_retries do
- execute <<~SQL
- ALTER TABLE uploads ADD CONSTRAINT #{CONSTRAINT_NAME} CHECK (store IS NOT NULL) NOT VALID;
- SQL
- end
- end
-
- def down
- with_lock_retries do
- execute <<~SQL
- ALTER TABLE uploads DROP CONSTRAINT IF EXISTS #{CONSTRAINT_NAME};
- SQL
- end
- end
-end
diff --git a/db/migrate/20200408133211_add_index_on_route_path_trigram.rb b/db/migrate/20200408133211_add_index_on_route_path_trigram.rb
new file mode 100644
index 00000000000..3329252bbd3
--- /dev/null
+++ b/db/migrate/20200408133211_add_index_on_route_path_trigram.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+class AddIndexOnRoutePathTrigram < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+ INDEX_NAME = 'index_routes_on_path_trigram'
+
+ disable_ddl_transaction!
+
+ def up
+ add_concurrent_index :routes, :path, name: INDEX_NAME, using: :gin, opclass: { path: :gin_trgm_ops }
+ end
+
+ def down
+ remove_concurrent_index_by_name(:routes, INDEX_NAME)
+ end
+end
diff --git a/db/migrate/20200415160722_remove_not_null_lfs_objects_constraint.rb b/db/migrate/20200415160722_remove_not_null_lfs_objects_constraint.rb
new file mode 100644
index 00000000000..58cfa8e8969
--- /dev/null
+++ b/db/migrate/20200415160722_remove_not_null_lfs_objects_constraint.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+class RemoveNotNullLfsObjectsConstraint < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ def up
+ with_lock_retries do
+ execute <<~SQL
+ ALTER TABLE lfs_objects DROP CONSTRAINT IF EXISTS lfs_objects_file_store_not_null;
+ SQL
+ end
+ end
+
+ def down
+ # No-op
+ end
+end
diff --git a/db/migrate/20200415161021_remove_not_null_ci_job_artifacts_constraint.rb b/db/migrate/20200415161021_remove_not_null_ci_job_artifacts_constraint.rb
new file mode 100644
index 00000000000..65430c180ce
--- /dev/null
+++ b/db/migrate/20200415161021_remove_not_null_ci_job_artifacts_constraint.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+class RemoveNotNullCiJobArtifactsConstraint < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ def up
+ with_lock_retries do
+ execute <<~SQL
+ ALTER TABLE ci_job_artifacts DROP CONSTRAINT IF EXISTS ci_job_artifacts_file_store_not_null;
+ SQL
+ end
+ end
+
+ def down
+ # No-op
+ end
+end
diff --git a/db/migrate/20200415161206_remove_not_null_uploads_constraint.rb b/db/migrate/20200415161206_remove_not_null_uploads_constraint.rb
new file mode 100644
index 00000000000..23f202bac7e
--- /dev/null
+++ b/db/migrate/20200415161206_remove_not_null_uploads_constraint.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+class RemoveNotNullUploadsConstraint < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ def up
+ with_lock_retries do
+ execute <<~SQL
+ ALTER TABLE uploads DROP CONSTRAINT IF EXISTS uploads_store_not_null;
+ SQL
+ end
+ end
+
+ def down
+ # No-op
+ end
+end
diff --git a/db/post_migrate/20200325162730_schedule_backfill_push_rules_id_in_projects.rb b/db/post_migrate/20200325162730_schedule_backfill_push_rules_id_in_projects.rb
new file mode 100644
index 00000000000..b25b3365e12
--- /dev/null
+++ b/db/post_migrate/20200325162730_schedule_backfill_push_rules_id_in_projects.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+class ScheduleBackfillPushRulesIdInProjects < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ disable_ddl_transaction!
+
+ MIGRATION = 'BackfillPushRulesIdInProjects'.freeze
+ BATCH_SIZE = 1_000
+
+ class PushRules < ActiveRecord::Base
+ include EachBatch
+
+ self.table_name = 'push_rules'
+ end
+
+ def up
+ # Update one record that is connected to the instance
+ value_to_be_updated_to = ScheduleBackfillPushRulesIdInProjects::PushRules.find_by(is_sample: true)&.id
+
+ execute "UPDATE application_settings SET push_rule_id = #{value_to_be_updated_to}" if value_to_be_updated_to
+
+ ApplicationSetting.expire
+
+ queue_background_migration_jobs_by_range_at_intervals(ScheduleBackfillPushRulesIdInProjects::PushRules,
+ MIGRATION,
+ 5.minutes,
+ batch_size: BATCH_SIZE)
+ end
+
+ def down
+ execute "UPDATE application_settings SET push_rule_id = NULL"
+
+ ApplicationSetting.expire
+ end
+end
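
For context, `queue_background_migration_jobs_by_range_at_intervals` walks the `push_rules` table in batches and enqueues one delayed Sidekiq job per id range. A simplified sketch of roughly what the helper does with the arguments passed above (not the actual `MigrationHelpers` implementation):

```ruby
# Simplified sketch only: one BackgroundMigrationWorker job per
# BATCH_SIZE-sized id range, each scheduled 5 minutes after the previous one.
delay_interval = 5.minutes

ScheduleBackfillPushRulesIdInProjects::PushRules.each_batch(of: BATCH_SIZE) do |batch, index|
  start_id, end_id = batch.pluck(Arel.sql('MIN(id)'), Arel.sql('MAX(id)')).first

  BackgroundMigrationWorker.perform_in(index * delay_interval, MIGRATION, [start_id, end_id])
end
```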
diff --git a/db/post_migrate/20200406193427_add_index_to_issues_health_status.rb b/db/post_migrate/20200406193427_add_index_to_issues_health_status.rb
new file mode 100644
index 00000000000..83baf5b6d75
--- /dev/null
+++ b/db/post_migrate/20200406193427_add_index_to_issues_health_status.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+class AddIndexToIssuesHealthStatus < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+ INDEX_NAME = 'idx_issues_on_health_status_not_null'
+
+ disable_ddl_transaction!
+
+ def up
+ add_concurrent_index(
+ :issues,
+ :health_status,
+ where: 'health_status IS NOT NULL',
+ name: INDEX_NAME
+ )
+ end
+
+ def down
+ remove_concurrent_index_by_name(:issues, INDEX_NAME)
+ end
+end
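
Because this is a partial index, only rows with a non-NULL `health_status` are indexed, which keeps the index small on instances where most issues have no health status set. A hedged sketch of a query it can serve (the `Issue` model call is illustrative; compare the `issues_with_health_status` usage-ping counter added later in this diff):

```ruby
# Counting issues with a health status only has to read
# idx_issues_on_health_status_not_null, skipping the far more numerous rows
# where health_status is NULL.
Issue.where.not(health_status: nil).count
```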
diff --git a/db/structure.sql b/db/structure.sql
index 28b0010eb30..56efa356123 100644
--- a/db/structure.sql
+++ b/db/structure.sql
@@ -399,7 +399,8 @@ CREATE TABLE public.application_settings (
namespace_storage_size_limit bigint DEFAULT 0 NOT NULL,
seat_link_enabled boolean DEFAULT true NOT NULL,
container_expiration_policies_enable_historic_entries boolean DEFAULT false NOT NULL,
- issues_create_limit integer DEFAULT 300 NOT NULL
+ issues_create_limit integer DEFAULT 300 NOT NULL,
+ push_rule_id bigint
);
CREATE SEQUENCE public.application_settings_id_seq
@@ -5025,7 +5026,8 @@ ALTER SEQUENCE public.project_repository_states_id_seq OWNED BY public.project_r
CREATE TABLE public.project_settings (
project_id integer NOT NULL,
created_at timestamp with time zone NOT NULL,
- updated_at timestamp with time zone NOT NULL
+ updated_at timestamp with time zone NOT NULL,
+ push_rule_id bigint
);
CREATE TABLE public.project_statistics (
@@ -7697,9 +7699,6 @@ ALTER TABLE ONLY public.ci_daily_report_results
ALTER TABLE ONLY public.ci_group_variables
ADD CONSTRAINT ci_group_variables_pkey PRIMARY KEY (id);
-ALTER TABLE public.ci_job_artifacts
- ADD CONSTRAINT ci_job_artifacts_file_store_not_null CHECK ((file_store IS NOT NULL)) NOT VALID;
-
ALTER TABLE ONLY public.ci_job_artifacts
ADD CONSTRAINT ci_job_artifacts_pkey PRIMARY KEY (id);
@@ -8057,9 +8056,6 @@ ALTER TABLE ONLY public.ldap_group_links
ALTER TABLE ONLY public.lfs_file_locks
ADD CONSTRAINT lfs_file_locks_pkey PRIMARY KEY (id);
-ALTER TABLE public.lfs_objects
- ADD CONSTRAINT lfs_objects_file_store_not_null CHECK ((file_store IS NOT NULL)) NOT VALID;
-
ALTER TABLE ONLY public.lfs_objects
ADD CONSTRAINT lfs_objects_pkey PRIMARY KEY (id);
@@ -8453,9 +8449,6 @@ ALTER TABLE ONLY public.u2f_registrations
ALTER TABLE ONLY public.uploads
ADD CONSTRAINT uploads_pkey PRIMARY KEY (id);
-ALTER TABLE public.uploads
- ADD CONSTRAINT uploads_store_not_null CHECK ((store IS NOT NULL)) NOT VALID;
-
ALTER TABLE ONLY public.user_agent_details
ADD CONSTRAINT user_agent_details_pkey PRIMARY KEY (id);
@@ -8573,6 +8566,8 @@ CREATE UNIQUE INDEX design_management_designs_versions_uniqueness ON public.desi
CREATE INDEX design_user_mentions_on_design_id_and_note_id_index ON public.design_user_mentions USING btree (design_id, note_id);
+CREATE INDEX dev_index_route_on_path_trigram ON public.routes USING gin (path public.gin_trgm_ops);
+
CREATE UNIQUE INDEX epic_user_mentions_on_epic_id_and_note_id_index ON public.epic_user_mentions USING btree (epic_id, note_id);
CREATE UNIQUE INDEX epic_user_mentions_on_epic_id_index ON public.epic_user_mentions USING btree (epic_id) WHERE (note_id IS NULL);
@@ -8585,6 +8580,8 @@ CREATE UNIQUE INDEX idx_environment_merge_requests_unique_index ON public.deploy
CREATE INDEX idx_geo_con_rep_updated_events_on_container_repository_id ON public.geo_container_repository_updated_events USING btree (container_repository_id);
+CREATE INDEX idx_issues_on_health_status_not_null ON public.issues USING btree (health_status) WHERE (health_status IS NOT NULL);
+
CREATE INDEX idx_issues_on_project_id_and_created_at_and_id_and_state_id ON public.issues USING btree (project_id, created_at, id, state_id);
CREATE INDEX idx_issues_on_project_id_and_due_date_and_id_and_state_id ON public.issues USING btree (project_id, due_date, id, state_id) WHERE (due_date IS NOT NULL);
@@ -8677,6 +8674,8 @@ CREATE INDEX index_application_settings_on_file_template_project_id ON public.ap
CREATE INDEX index_application_settings_on_instance_administrators_group_id ON public.application_settings USING btree (instance_administrators_group_id);
+CREATE UNIQUE INDEX index_application_settings_on_push_rule_id ON public.application_settings USING btree (push_rule_id);
+
CREATE INDEX index_application_settings_on_usage_stats_set_by_user_id ON public.application_settings USING btree (usage_stats_set_by_user_id);
CREATE INDEX index_applicationsettings_on_instance_administration_project_id ON public.application_settings USING btree (instance_administration_project_id);
@@ -9891,6 +9890,8 @@ CREATE INDEX index_project_repositories_on_shard_id ON public.project_repositori
CREATE UNIQUE INDEX index_project_repository_states_on_project_id ON public.project_repository_states USING btree (project_id);
+CREATE UNIQUE INDEX index_project_settings_on_push_rule_id ON public.project_settings USING btree (push_rule_id);
+
CREATE INDEX index_project_statistics_on_namespace_id ON public.project_statistics USING btree (namespace_id);
CREATE UNIQUE INDEX index_project_statistics_on_project_id ON public.project_statistics USING btree (project_id);
@@ -10117,6 +10118,8 @@ CREATE UNIQUE INDEX index_routes_on_path ON public.routes USING btree (path);
CREATE INDEX index_routes_on_path_text_pattern_ops ON public.routes USING btree (path varchar_pattern_ops);
+CREATE INDEX index_routes_on_path_trigram ON public.routes USING gin (path public.gin_trgm_ops);
+
CREATE UNIQUE INDEX index_routes_on_source_type_and_source_id ON public.routes USING btree (source_type, source_id);
CREATE INDEX index_saml_providers_on_group_id ON public.saml_providers USING btree (group_id);
@@ -10636,6 +10639,9 @@ ALTER TABLE ONLY public.epics
ALTER TABLE ONLY public.ci_pipelines
ADD CONSTRAINT fk_3d34ab2e06 FOREIGN KEY (pipeline_schedule_id) REFERENCES public.ci_pipeline_schedules(id) ON DELETE SET NULL;
+ALTER TABLE ONLY public.project_settings
+ ADD CONSTRAINT fk_413a953e20 FOREIGN KEY (push_rule_id) REFERENCES public.push_rules(id) ON DELETE CASCADE;
+
ALTER TABLE ONLY public.ci_pipeline_schedule_variables
ADD CONSTRAINT fk_41c35fda51 FOREIGN KEY (pipeline_schedule_id) REFERENCES public.ci_pipeline_schedules(id) ON DELETE CASCADE;
@@ -10687,6 +10693,9 @@ ALTER TABLE ONLY public.merge_requests
ALTER TABLE ONLY public.ci_builds
ADD CONSTRAINT fk_6661f4f0e8 FOREIGN KEY (resource_group_id) REFERENCES public.ci_resource_groups(id) ON DELETE SET NULL;
+ALTER TABLE ONLY public.application_settings
+ ADD CONSTRAINT fk_693b8795e4 FOREIGN KEY (push_rule_id) REFERENCES public.push_rules(id) ON DELETE SET NULL;
+
ALTER TABLE ONLY public.merge_requests
ADD CONSTRAINT fk_6a5165a692 FOREIGN KEY (milestone_id) REFERENCES public.milestones(id) ON DELETE SET NULL;
@@ -13113,9 +13122,14 @@ COPY "schema_migrations" (version) FROM STDIN;
20200323134519
20200324093258
20200324115359
+20200325104755
+20200325104756
+20200325104833
+20200325104834
20200325111432
20200325152327
20200325160952
+20200325162730
20200325183636
20200326114443
20200326122700
@@ -13143,15 +13157,17 @@ COPY "schema_migrations" (version) FROM STDIN;
20200406102111
20200406102120
20200406135648
-20200406165950
-20200406171857
-20200406172135
20200406192059
+20200406193427
20200407094005
20200407094923
20200408110856
+20200408133211
20200408153842
20200408175424
20200409211607
+20200415160722
+20200415161021
+20200415161206
\.
diff --git a/doc/README.md b/doc/README.md
index 6b863436ce2..427967039cd 100644
--- a/doc/README.md
+++ b/doc/README.md
@@ -367,7 +367,7 @@ The following documentation relates to the DevOps **Secure** stage:
| [Group Security Dashboard](user/application_security/security_dashboard/index.md#group-security-dashboard) **(ULTIMATE)** | View vulnerabilities in all the projects in a group and its subgroups. |
| [Instance Security Dashboard](user/application_security/security_dashboard/index.md#instance-security-dashboard) **(ULTIMATE)** | View vulnerabilities in all the projects you're interested in. |
| [License Compliance](user/compliance/license_compliance/index.md) **(ULTIMATE)** | Search your project's dependencies for their licenses. |
-| [Pipeline Security Dashboard](user/application_security/security_dashboard/index.md#pipeline-security-dashboard) **(ULTIMATE)** | View the security reports for your project's pipelines. |
+| [Pipeline Security](user/application_security/security_dashboard/index.md#pipeline-security) **(ULTIMATE)** | View the security reports for your project's pipelines. |
| [Project Security Dashboard](user/application_security/security_dashboard/index.md#project-security-dashboard) **(ULTIMATE)** | View the latest security reports for your project. |
| [Static Application Security Testing (SAST)](user/application_security/sast/index.md) **(ULTIMATE)** | Analyze source code for known vulnerabilities. |
diff --git a/doc/administration/geo/replication/datatypes.md b/doc/administration/geo/replication/datatypes.md
index a1f511fe2a5..3431df3ed1f 100644
--- a/doc/administration/geo/replication/datatypes.md
+++ b/doc/administration/geo/replication/datatypes.md
@@ -145,6 +145,7 @@ successfully, you must replicate their data using some other means.
| [Maven Repository](../../../user/packages/maven_repository/index.md) | [No](https://gitlab.com/groups/gitlab-org/-/epics/2346) | No | |
| [Conan Repository](../../../user/packages/conan_repository/index.md) | [No](https://gitlab.com/groups/gitlab-org/-/epics/2346) | No | |
| [NuGet Repository](../../../user/packages/nuget_repository/index.md) | [No](https://gitlab.com/groups/gitlab-org/-/epics/2346) | No | |
+| [PyPi Repository](../../../user/packages/pypi_repository/index.md) | [No](https://gitlab.com/groups/gitlab-org/-/epics/2554) | No | |
| [External merge request diffs](../../merge_request_diffs.md) | [No](https://gitlab.com/gitlab-org/gitlab/issues/33817) | No | |
| Content in object storage | **Yes** | No | |
diff --git a/doc/administration/gitaly/praefect.md b/doc/administration/gitaly/praefect.md
index d1d0c358dc6..c16d705db1d 100644
--- a/doc/administration/gitaly/praefect.md
+++ b/doc/administration/gitaly/praefect.md
@@ -697,6 +697,31 @@ during a failover. Follow issue
It is likely that we will implement support for Consul, and a cloud native
strategy in the future.
+## Identifying Impact of a Primary Node Failure
+
+When a primary Gitaly node fails, there is a chance of data loss. Data loss can occur if there were outstanding replication jobs that the secondaries did not manage to process before the failure. The Praefect `dataloss` subcommand helps identify these cases by counting the number of dead replication jobs for each repository within a given timeframe.
+
+```shell
+sudo /opt/gitlab/embedded/bin/praefect -config /var/opt/gitlab/praefect/config.toml dataloss -from <rfc3339-time> -to <rfc3339-time>
+```
+
+If the timeframe is not specified, dead replication jobs from the last six hours are counted:
+
+```shell
+sudo /opt/gitlab/embedded/bin/praefect -config /var/opt/gitlab/praefect/config.toml dataloss
+
+Failed replication jobs between [2020-01-02 00:00:00 +0000 UTC, 2020-01-02 06:00:00 +0000 UTC):
+example/repository-1: 1 jobs
+example/repository-2: 4 jobs
+example/repository-3: 2 jobs
+```
+
+To specify a timeframe in UTC, run:
+
+```shell
+sudo /opt/gitlab/embedded/bin/praefect -config /var/opt/gitlab/praefect/config.toml dataloss -from 2020-01-02T00:00:00+00:00 -to 2020-01-02T00:02:00+00:00
+```
+
## Backend Node Recovery
When a Praefect backend node fails and is no longer able to
diff --git a/doc/administration/packages/index.md b/doc/administration/packages/index.md
index d14726d33de..f826741d66f 100644
--- a/doc/administration/packages/index.md
+++ b/doc/administration/packages/index.md
@@ -8,6 +8,7 @@ The Packages feature allows GitLab to act as a repository for the following:
| Software repository | Description | Available in GitLab version |
| ------------------- | ----------- | --------------------------- |
+| [PyPi Repository](../../user/packages/pypi_repository/index.md) | The GitLab PyPi Repository enables every project in GitLab to have its own space to store [PyPi](https://pypi.org/) packages. | 12.10+ |
| [NuGet Repository](../../user/packages/nuget_repository/index.md) | The GitLab NuGet Repository enables every project in GitLab to have its own space to store [NuGet](https://www.nuget.org/) packages. | 12.8+ |
| [Conan Repository](../../user/packages/conan_repository/index.md) | The GitLab Conan Repository enables every project in GitLab to have its own space to store [Conan](https://conan.io/) packages. | 12.4+ |
| [Maven Repository](../../user/packages/maven_repository/index.md) | The GitLab Maven Repository enables every project in GitLab to have its own space to store [Maven](https://maven.apache.org/) packages. | 11.3+ |
diff --git a/doc/api/packages.md b/doc/api/packages.md
index 31fc2863708..8671de006d2 100644
--- a/doc/api/packages.md
+++ b/doc/api/packages.md
@@ -20,7 +20,7 @@ GET /projects/:id/packages
| `id` | integer/string | yes | ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) |
| `order_by`| string | no | The field to use as order. One of `created_at` (default), `name`, `version`, or `type`. |
| `sort` | string | no | The direction of the order, either `asc` (default) for ascending order or `desc` for descending order. |
-| `package_type` | string | no | Filter the returned packages by type. One of `conan`, `maven`, `npm` or `nuget`. (_Introduced in GitLab 12.9_)
+| `package_type` | string | no | Filter the returned packages by type. One of `conan`, `maven`, `npm`, `pypi` or `nuget`. (_Introduced in GitLab 12.9_)
| `package_name` | string | no | Filter the project packages with a fuzzy search by name. (_Introduced in GitLab 12.9_)
```shell
@@ -67,7 +67,7 @@ GET /groups/:id/packages
| `exclude_subgroups` | boolean | false | If the parameter is included as true, packages from projects from subgroups are not listed. Default is `false`. |
| `order_by`| string | no | The field to use as order. One of `created_at` (default), `name`, `version`, `type`, or `project_path`. |
| `sort` | string | no | The direction of the order, either `asc` (default) for ascending order or `desc` for descending order. |
-| `package_type` | string | no | Filter the returned packages by type. One of `conan`, `maven`, `npm` or `nuget`. (_Introduced in GitLab 12.9_) |
+| `package_type` | string | no | Filter the returned packages by type. One of `conan`, `maven`, `npm`, `pypi` or `nuget`. (_Introduced in GitLab 12.9_) |
```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/api/v4/groups/:id/packages?exclude_subgroups=true
diff --git a/doc/ci/pipelines/settings.md b/doc/ci/pipelines/settings.md
index bddf64f397e..7ac9ba6a7dd 100644
--- a/doc/ci/pipelines/settings.md
+++ b/doc/ci/pipelines/settings.md
@@ -160,7 +160,7 @@ This also determines the visibility of these related features:
- Job output logs
- Job artifacts
-- The [pipeline security dashboard](../../user/application_security/security_dashboard/index.md#pipeline-security-dashboard) **(ULTIMATE)**
+- The [pipeline security dashboard](../../user/application_security/security_dashboard/index.md#pipeline-security) **(ULTIMATE)**
If **Public pipelines** is enabled (default):
diff --git a/doc/install/aws/index.md b/doc/install/aws/index.md
index d510dff82dd..48de5e274b0 100644
--- a/doc/install/aws/index.md
+++ b/doc/install/aws/index.md
@@ -4,8 +4,6 @@ type: howto
# Installing GitLab HA on Amazon Web Services (AWS)
-DANGER: **Danger:** This guide is under review and the steps below will be revised and updated in due time. For more detail, please see [this epic](https://gitlab.com/groups/gitlab-org/-/epics/912).
-
This page offers a walkthrough of a common HA (Highly Available) configuration
for GitLab on AWS. You should customize it to accommodate your needs.
diff --git a/doc/user/admin_area/settings/usage_statistics.md b/doc/user/admin_area/settings/usage_statistics.md
index f28bab6ad86..7869f7de1b6 100644
--- a/doc/user/admin_area/settings/usage_statistics.md
+++ b/doc/user/admin_area/settings/usage_statistics.md
@@ -222,6 +222,7 @@ but commented out to help encourage others to add to it in the future. -->
|issues_with_associated_zoom_link|counts||
|issues_using_zoom_quick_actions|counts||
|issues_with_embedded_grafana_charts_approx|counts||
+|issues_with_health_status|counts||
|keys|counts||
|label_lists|counts||
|lfs_objects|counts||
diff --git a/doc/user/application_security/container_scanning/index.md b/doc/user/application_security/container_scanning/index.md
index 27b22fb925c..85d2ac6e2e0 100644
--- a/doc/user/application_security/container_scanning/index.md
+++ b/doc/user/application_security/container_scanning/index.md
@@ -212,11 +212,46 @@ If you want to whitelist specific vulnerabilities, you'll need to:
### Running Container Scanning in an offline environment
-Container Scanning can be executed on an offline GitLab Ultimate installation by using the following process:
+For self-managed GitLab instances in an environment with limited, restricted, or intermittent access
+to external resources through the internet, some adjustments are required for the Container Scanning job to
+successfully run. For more information, see [Offline environments](../offline_deployments/index.md).
-1. Host the following Docker images on a [local Docker container registry](../../packages/container_registry/index.md):
- - [arminc/clair-db vulnerabilities database](https://hub.docker.com/r/arminc/clair-db)
- - GitLab klar analyzer: `registry.gitlab.com/gitlab-org/security-products/analyzers/klar`
+#### Requirements for offline Container Scanning
+
+To use Container Scanning in an offline environment, you need:
+
+- GitLab Runner with the [`docker` or `kubernetes` executor](#requirements).
+- To configure a local Docker Container Registry with copies of the Container Scanning [analyzer](https://gitlab.com/gitlab-org/security-products/analyzers/klar) images, found in the [Container Scanning container registry](https://gitlab.com/gitlab-org/security-products/analyzers/klar/container_registry).
+
+NOTE: **Note:**
+GitLab Runner has a [default `pull policy` of `always`](https://docs.gitlab.com/runner/executors/docker.html#using-the-always-pull-policy),
+meaning the runner may try to pull remote images even if a local copy is available. Set GitLab
+Runner's [`pull_policy` to `if-not-present`](https://docs.gitlab.com/runner/executors/docker.html#using-the-if-not-present-pull-policy)
+in an offline environment if you prefer using only locally available Docker images.
+
+#### Make GitLab Container Scanning analyzer images available inside your Docker registry
+
+For Container Scanning, import and host the following images from `registry.gitlab.com` to your
+offline [local Docker container registry](../../packages/container_registry/index.md):
+
+- [arminc/clair-db vulnerabilities database](https://hub.docker.com/r/arminc/clair-db)
+- GitLab klar analyzer: `registry.gitlab.com/gitlab-org/security-products/analyzers/klar`
+
+The process for importing Docker images into a local offline Docker registry depends on
+**your network security policy**. Please consult your IT staff to find an accepted and approved
+process by which external resources can be imported or temporarily accessed.
+
+Note that these scanners are [updated periodically](../index.md#maintenance-and-update-of-the-vulnerabilities-database)
+with new definitions, so consider if you are able to make periodic updates yourself.
+You can read more specific steps on how to do this [below](#automating-container-scanning-vulnerability-database-updates-with-a-pipeline).
+
+For details on saving and transporting Docker images as a file, see Docker's documentation on
+[`docker save`](https://docs.docker.com/engine/reference/commandline/save/), [`docker load`](https://docs.docker.com/engine/reference/commandline/load/),
+[`docker export`](https://docs.docker.com/engine/reference/commandline/export/), and [`docker import`](https://docs.docker.com/engine/reference/commandline/import/).
+
+#### Set Container Scanning CI job variables to use local Container Scanner analyzers
+
+Container Scanning can be executed on an offline GitLab Ultimate installation using the following process:
1. [Override the container scanning template](#overriding-the-container-scanning-template) in your `.gitlab-ci.yml` file to refer to the Docker images hosted on your local Docker container registry:
@@ -234,7 +269,12 @@ Container Scanning can be executed on an offline GitLab Ultimate installation by
self-signed certificate, then you must set `DOCKER_INSECURE: "true"` in the above
`container_scanning` section of your `.gitlab-ci.yml`.
-It may be worthwhile to set up a [scheduled pipeline](../../../ci/pipelines/schedules.md) to automatically build a new version of the vulnerabilities database on a preset schedule. You can use the following `.gitlab-yml.ci` as a template:
+#### Automating Container Scanning vulnerability database updates with a pipeline
+
+It can be worthwhile to set up a [scheduled pipeline](../../../ci/pipelines/schedules.md) to
+automatically build a new version of the vulnerabilities database on a preset schedule. Automating
+this with a pipeline means you won't have to do it manually each time. You can use the following
+`.gitlab-ci.yml` as a template:
```yaml
image: docker:stable
diff --git a/doc/user/application_security/security_dashboard/index.md b/doc/user/application_security/security_dashboard/index.md
index 1eef6b9b696..50104d4c5be 100644
--- a/doc/user/application_security/security_dashboard/index.md
+++ b/doc/user/application_security/security_dashboard/index.md
@@ -34,13 +34,13 @@ To use the instance, group, project, or pipeline security dashboard:
1. [GitLab Runner](https://docs.gitlab.com/runner/) 11.5 or newer must be used.
If you're using the shared Runners on GitLab.com, this is already the case.
-## Pipeline Security Dashboard
+## Pipeline Security
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/13496) in [GitLab Ultimate](https://about.gitlab.com/pricing/) 12.3.
-At the pipeline level, the Security Dashboard displays the vulnerabilities present in the branch of the project the pipeline was run against.
+At the pipeline level, the Security section displays the vulnerabilities present in the branch of the project the pipeline was run against.
-Visit the page for any pipeline which has run any of the [supported reports](#supported-reports). Click the **Security** tab to view the Security Dashboard.
+Visit the page for any pipeline which has run any of the [supported reports](#supported-reports). Click the **Security** tab to view the Security findings.
![Pipeline Security Dashboard](img/pipeline_security_dashboard_v12_6.png)
diff --git a/doc/user/clusters/applications.md b/doc/user/clusters/applications.md
index cc7b5dcd5fb..df220f556a9 100644
--- a/doc/user/clusters/applications.md
+++ b/doc/user/clusters/applications.md
@@ -553,12 +553,12 @@ To enable Fluentd:
1. Navigate to **{cloud-gear}** **Operations > Kubernetes** and click
**Applications**. You will be prompted to enter a host, port and protocol
where the WAF logs will be sent to via syslog.
-1. Provide the host domain name or URL in **SIEM URL or Host**.
+1. Provide the host domain name or URL in **SIEM Hostname**.
1. Provide the host port number in **SIEM Port**.
1. Select a **SIEM Protocol**.
1. Check **Send ModSecurity Logs**. If you do not select this checkbox, the **Install**
button is disabled.
-1. Click **Install**.
+1. Click **Save changes**.
![Fluentd input fields](img/fluentd_v12_10.png)
diff --git a/doc/user/clusters/img/fluentd_v12_10.png b/doc/user/clusters/img/fluentd_v12_10.png
index 7593f99ab51..e8c5c832020 100644
--- a/doc/user/clusters/img/fluentd_v12_10.png
+++ b/doc/user/clusters/img/fluentd_v12_10.png
Binary files differ
diff --git a/doc/user/compliance/license_compliance/index.md b/doc/user/compliance/license_compliance/index.md
index 9fcc9acf5ea..57c37158799 100644
--- a/doc/user/compliance/license_compliance/index.md
+++ b/doc/user/compliance/license_compliance/index.md
@@ -324,6 +324,9 @@ process:
Additional [configuration](#using-private-maven-repos) may be needed for connecting to private Maven
repositories.
+Exact name matches are required for [project policies](#project-policies-for-license-compliance)
+when running in an offline environment ([see related issue](https://gitlab.com/gitlab-org/gitlab/-/issues/212388)).
+
## Project policies for License Compliance
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/5940) in [GitLab Ultimate](https://about.gitlab.com/pricing/) 11.4.
diff --git a/doc/user/group/img/group_activity_analytics_v12_10.png b/doc/user/group/img/group_activity_analytics_v12_10.png
new file mode 100644
index 00000000000..2202db15d2f
--- /dev/null
+++ b/doc/user/group/img/group_activity_analytics_v12_10.png
Binary files differ
diff --git a/doc/user/group/index.md b/doc/user/group/index.md
index fdcc4105620..cd92a785370 100644
--- a/doc/user/group/index.md
+++ b/doc/user/group/index.md
@@ -226,6 +226,24 @@ To change this setting for a specific group:
To change this setting globally, see [Default project creation protection](../admin_area/settings/visibility_and_access_controls.md#default-project-creation-protection).
+## Viewing group details
+
+A group's **Details** page includes tabs for:
+
+- Subgroups and projects.
+- Shared projects.
+- Archived projects.
+
+As [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/207164) in GitLab [Starter](https://about.gitlab.com/pricing/) 12.10 as a [beta feature](https://about.gitlab.com/handbook/product/#beta), it also shows the number of the following items created in the last 90 days: **(STARTER)**
+
+- Merge requests.
+- Issues.
+- Members.
+
+These Group Activity Analytics can be enabled with the `group_activity_analytics` [feature flag](../../development/feature_flags/development.md#enabling-a-feature-flag-in-development).
+
+![Recent Group Activity](img/group_activity_analytics_v12_10.png)
+
## Viewing group activity
A group's **Activity** page displays the most recent actions taken in a group, including:
diff --git a/doc/user/packages/index.md b/doc/user/packages/index.md
index 78ddc06173c..66ca4295782 100644
--- a/doc/user/packages/index.md
+++ b/doc/user/packages/index.md
@@ -14,6 +14,7 @@ The Packages feature allows GitLab to act as a repository for the following:
| [Maven Repository](maven_repository/index.md) **(PREMIUM)** | The GitLab Maven Repository enables every project in GitLab to have its own space to store [Maven](https://maven.apache.org/) packages. | 11.3+ |
| [NPM Registry](npm_registry/index.md) **(PREMIUM)** | The GitLab NPM Registry enables every project in GitLab to have its own space to store [NPM](https://www.npmjs.com/) packages. | 11.7+ |
| [NuGet Repository](nuget_repository/index.md) **(PREMIUM)** | The GitLab NuGet Repository will enable every project in GitLab to have its own space to store [NuGet](https://www.nuget.org/) packages. | 12.8+ |
+| [PyPi Repository](pypi_repository/index.md) **(PREMIUM)** | The GitLab PyPi Repository will enable every project in GitLab to have its own space to store [PyPi](https://pypi.org/) packages. | 12.10+ |
## Suggested contributions
diff --git a/doc/user/packages/pypi_repository/index.md b/doc/user/packages/pypi_repository/index.md
new file mode 100644
index 00000000000..11d7b828813
--- /dev/null
+++ b/doc/user/packages/pypi_repository/index.md
@@ -0,0 +1,84 @@
+# GitLab PyPi Repository **(PREMIUM)**
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/208747) in [GitLab Premium](https://about.gitlab.com/pricing/) 12.10.
+
+With the GitLab PyPi Repository, every project can have its own space to store PyPi packages.
+
+The GitLab PyPi Repository works with:
+
+- [pip](https://pypi.org/project/pip/)
+- [twine](https://pypi.org/project/twine/)
+
+## Setting up your development environment
+
+You will need a recent version of [pip](https://pypi.org/project/pip/) and [twine](https://pypi.org/project/twine/).
+
+## Enabling the PyPi Repository
+
+NOTE: **Note:**
+This option is available only if your GitLab administrator has
+[enabled support for the Package Registry](../../../administration/packages/index.md). **(PREMIUM ONLY)**
+
+After the PyPi Repository is enabled, it will be available for all new projects
+by default. To enable it for existing projects, or if you want to disable it:
+
+1. Navigate to your project's **Settings > General > Permissions**.
+1. Find the Packages feature and enable or disable it.
+1. Click on **Save changes** for the changes to take effect.
+
+You should then be able to see the **Packages** section on the left sidebar.
+
+## Adding the GitLab PyPi Repository as a source
+
+You will need the following:
+
+- A personal access token. You can generate a [personal access token](../../../user/profile/personal_access_tokens.md) with the scope set to `api` for repository authentication.
+- A suitable name for your source.
+- Your project ID, which can be found on the home page of your project.
+
+Edit your `~/.pypirc` file and add the following:
+
+```ini
+[gitlab]
+repository = https://gitlab.com/api/v4/projects/<project_id>/packages/pypi
+username = __token__
+password = <your personal access token>
+```
+
+## Uploading packages
+
+When uploading packages, note that:
+
+- The maximum allowed size is 50 megabytes.
+- If you upload the same package with the same version multiple times, each consecutive upload
+ is saved as a separate file. When installing a package, GitLab will serve the most recent file.
+- Packages uploaded to GitLab are not displayed in your project's packages UI
+  immediately. It can take up to 10 minutes to process a package.
+
+### Upload packages with Twine
+
+This section assumes that your project is properly built and you already [created a PyPi package with setuptools](https://packaging.python.org/tutorials/packaging-projects/).
+Upload your package using the following command:
+
+```shell
+python -m twine upload --repository <source_name> dist/<package_file>
+```
+
+Where:
+
+- `<package_file>` is your package filename, ending in `.tar.gz` or `.whl`.
+- `<source_name>` is the [source name used during setup](#adding-the-gitlab-pypi-repository-as-a-source).
+
+## Install packages
+
+Install the latest version of a package using the following command:
+
+```shell
+pip install --index-url https://__token__:<personal_access_token>@gitlab.com/api/v4/projects/<project_id>/packages/pypi/simple --no-deps <package_name>
+```
+
+Where:
+
+- `<package_name>` is the package name.
+- `<personal_access_token>` is your personal access token.
+- `<project_id>` is your project ID.
diff --git a/doc/user/permissions.md b/doc/user/permissions.md
index 59867f492b8..8bb2c82e136 100644
--- a/doc/user/permissions.md
+++ b/doc/user/permissions.md
@@ -85,6 +85,7 @@ The following table depicts the various user permission levels in a project.
| View project statistics | | ✓ | ✓ | ✓ | ✓ |
| View Error Tracking list | | ✓ | ✓ | ✓ | ✓ |
| Create new merge request | | ✓ | ✓ | ✓ | ✓ |
+| View requirements **(ULTIMATE)** | | ✓ | ✓ | ✓ | ✓ |
| Pull from [Conan repository](packages/conan_repository/index.md), [Maven repository](packages/maven_repository/index.md), or [NPM registry](packages/npm_registry/index.md) **(PREMIUM)** | | ✓ | ✓ | ✓ | ✓ |
| Publish to [Conan repository](packages/conan_repository/index.md), [Maven repository](packages/maven_repository/index.md), or [NPM registry](packages/npm_registry/index.md) **(PREMIUM)** | | | ✓ | ✓ | ✓ |
| Upload [Design Management](project/issues/design_management.md) files | | | ✓ | ✓ | ✓ |
@@ -118,6 +119,7 @@ The following table depicts the various user permission levels in a project.
| Create and edit wiki pages | | | ✓ | ✓ | ✓ |
| Rewrite/remove Git tags | | | ✓ | ✓ | ✓ |
| Manage Feature Flags **(PREMIUM)** | | | ✓ | ✓ | ✓ |
+| Manage requirements **(ULTIMATE)** | | | ✓ | ✓ | ✓ |
| Use environment terminals | | | | ✓ | ✓ |
| Run Web IDE's Interactive Web Terminals **(ULTIMATE ONLY)** | | | | ✓ | ✓ |
| Add new team members | | | | ✓ | ✓ |
diff --git a/doc/user/project/index.md b/doc/user/project/index.md
index 99050f823c5..56df93e1dae 100644
--- a/doc/user/project/index.md
+++ b/doc/user/project/index.md
@@ -102,6 +102,7 @@ When you create a project in GitLab, you'll have access to a large number of
- [Code owners](code_owners.md): specify code owners for certain files **(STARTER)**
- [License Compliance](../compliance/license_compliance/index.md): approve and blacklist licenses for projects. **(ULTIMATE)**
- [Dependency List](../application_security/dependency_list/index.md): view project dependencies. **(ULTIMATE)**
+- [Requirements](requirements/index.md): create criteria to check your products against. **(ULTIMATE)**
### Project integrations
diff --git a/doc/user/project/requirements/img/requirement_archive_view_v12_10.png b/doc/user/project/requirements/img/requirement_archive_view_v12_10.png
new file mode 100644
index 00000000000..b3a52caba6c
--- /dev/null
+++ b/doc/user/project/requirements/img/requirement_archive_view_v12_10.png
Binary files differ
diff --git a/doc/user/project/requirements/img/requirement_create_view_v12_10.png b/doc/user/project/requirements/img/requirement_create_view_v12_10.png
new file mode 100644
index 00000000000..ecb08fe8a8b
--- /dev/null
+++ b/doc/user/project/requirements/img/requirement_create_view_v12_10.png
Binary files differ
diff --git a/doc/user/project/requirements/img/requirement_edit_save_v12_10.png b/doc/user/project/requirements/img/requirement_edit_save_v12_10.png
new file mode 100644
index 00000000000..6cf7db361b8
--- /dev/null
+++ b/doc/user/project/requirements/img/requirement_edit_save_v12_10.png
Binary files differ
diff --git a/doc/user/project/requirements/img/requirement_edit_view_v12_10.png b/doc/user/project/requirements/img/requirement_edit_view_v12_10.png
new file mode 100644
index 00000000000..5251e7eae1e
--- /dev/null
+++ b/doc/user/project/requirements/img/requirement_edit_view_v12_10.png
Binary files differ
diff --git a/doc/user/project/requirements/img/requirements_archived_list_view_v12_10.png b/doc/user/project/requirements/img/requirements_archived_list_view_v12_10.png
new file mode 100644
index 00000000000..a5487b46894
--- /dev/null
+++ b/doc/user/project/requirements/img/requirements_archived_list_view_v12_10.png
Binary files differ
diff --git a/doc/user/project/requirements/img/requirements_list_view_v12_10.png b/doc/user/project/requirements/img/requirements_list_view_v12_10.png
new file mode 100644
index 00000000000..cee1f3781f6
--- /dev/null
+++ b/doc/user/project/requirements/img/requirements_list_view_v12_10.png
Binary files differ
diff --git a/doc/user/project/requirements/index.md b/doc/user/project/requirements/index.md
new file mode 100644
index 00000000000..8f4ec7bbbed
--- /dev/null
+++ b/doc/user/project/requirements/index.md
@@ -0,0 +1,67 @@
+---
+type: reference, howto
+---
+
+# Requirements **(ULTIMATE)**
+
+> [Introduced](https://gitlab.com/groups/gitlab-org/-/epics/2703) in [GitLab Ultimate](https://about.gitlab.com/pricing/) 12.10.
+
+Requirements allow you to create criteria to check your products against. They
+can be based on users, stakeholders, system, software, or anything else you
+find important to capture.
+
+![requirements list view](img/requirements_list_view_v12_10.png)
+
+## Create a requirement
+
+A paginated list of requirements is available in each project, and there you
+can create a new requirement.
+
+To create a requirement:
+
+1. From your project page, go to **{requirements}** **Requirements**.
+1. Click **New requirement**.
+1. Enter a descriptive title and click **Create requirement**.
+
+You will see the newly created requirement at the top of the list, as the requirements
+list is sorted by creation date in descending order.
+
+![requirement create view](img/requirement_create_view_v12_10.png)
+
+## Edit a requirement
+
+You can edit a requirement (if you have the necessary privileges) from the requirements
+list page.
+
+To edit a requirement:
+
+1. From the requirements list, click the **Edit** (**{pencil}**) button.
+1. Update the title in the text input field.
+1. Click **Save changes**.
+
+![requirement edit view](img/requirement_edit_view_v12_10.png)
+
+The requirements list shows the new title immediately.
+
+![requirement edit saved](img/requirement_edit_save_v12_10.png)
+
+## Archive a requirement
+
+You can archive an open requirement (if you have the necessary privileges) while
+you're in the **Open** tab.
+
+From the requirements list page, click the **Archive** (**{archive}**) button.
+
+![requirement archive view](img/requirement_archive_view_v12_10.png)
+
+As soon as a requirement is archived, it no longer appears in the **Open** tab.
+
+## Reopen a requirement
+
+You can view the list of archived requirements in the **Archived** tab.
+
+![archived requirements list](img/requirements_archived_list_view_v12_10.png)
+
+To reopen an archived requirement, click the **Reopen** button.
+
+As soon as a requirement is reopened, it no longer appears in the **Archived** tab.
diff --git a/doc/user/search/index.md b/doc/user/search/index.md
index 2166e8ddbd5..28d098291b0 100644
--- a/doc/user/search/index.md
+++ b/doc/user/search/index.md
@@ -98,7 +98,9 @@ You can view recent searches by clicking on the little arrow-clock icon, which i
## Removing search filters
-Individual filters can be removed by clicking on the filter's (x) button or backspacing. The entire search filter can be cleared by clicking on the search box's (x) button.
+Individual filters can be removed by clicking on the filter's (x) button or backspacing. The entire search filter can be cleared by clicking on the search box's (x) button or via <kbd>⌘</kbd> (Mac) + <kbd>⌫</kbd>.
+
+To delete filter tokens one at a time, the <kbd>⌥</kbd> (Mac) / <kbd>Ctrl</kbd> + <kbd>⌫</kbd> keyboard combination can be used.
## Filtering with multiple filters of the same type
diff --git a/doc/user/shortcuts.md b/doc/user/shortcuts.md
index dcc4753a794..fa466fdb3b9 100644
--- a/doc/user/shortcuts.md
+++ b/doc/user/shortcuts.md
@@ -127,6 +127,15 @@ This shortcut is available when viewing a [wiki page](project/wiki/index.md):
| ----------------- | ----------- |
| <kbd>e</kbd> | Edit wiki page. |
+### Filtered Search
+
+These shortcuts are available when using a [filtered search input](search/index.md):
+
+| Keyboard Shortcut | Description |
+| ----------------------------------------------------- | ----------- |
+| <kbd>⌘</kbd> (Mac) + <kbd>⌫</kbd> | Clear entire search filter. |
+| <kbd>⌥</kbd> (Mac) / <kbd>Ctrl</kbd> + <kbd>⌫</kbd> | Clear one token at a time. |
+
## Epics **(ULTIMATE)**
These shortcuts are available when viewing [Epics](group/epics/index.md):
diff --git a/lib/api/entities/merge_request_basic.rb b/lib/api/entities/merge_request_basic.rb
index 8cec2c1a97e..4610220e4f6 100644
--- a/lib/api/entities/merge_request_basic.rb
+++ b/lib/api/entities/merge_request_basic.rb
@@ -52,7 +52,7 @@ module API
# information.
expose :merge_status do |merge_request|
merge_request.check_mergeability(async: true)
- merge_request.merge_status
+ merge_request.public_merge_status
end
expose :diff_head_sha, as: :sha
expose :merge_commit_sha
diff --git a/lib/api/helpers/internal_helpers.rb b/lib/api/helpers/internal_helpers.rb
index f7aabc8ce4f..31272c537a3 100644
--- a/lib/api/helpers/internal_helpers.rb
+++ b/lib/api/helpers/internal_helpers.rb
@@ -3,7 +3,7 @@
module API
module Helpers
module InternalHelpers
- attr_reader :redirected_path, :container
+ attr_reader :redirected_path
delegate :wiki?, to: :repo_type
@@ -11,15 +11,22 @@ module API
@actor ||= Support::GitAccessActor.from_params(params)
end
+ # rubocop:disable Gitlab/ModuleWithInstanceVariables
def repo_type
- set_project unless defined?(@repo_type) # rubocop:disable Gitlab/ModuleWithInstanceVariables
- @repo_type # rubocop:disable Gitlab/ModuleWithInstanceVariables
+ parse_repo_path unless defined?(@repo_type)
+ @repo_type
end
def project
- set_project unless defined?(@project) # rubocop:disable Gitlab/ModuleWithInstanceVariables
- @project # rubocop:disable Gitlab/ModuleWithInstanceVariables
+ parse_repo_path unless defined?(@project)
+ @project
+ end
+
+ def container
+ parse_repo_path unless defined?(@container)
+ @container
end
+ # rubocop:enable Gitlab/ModuleWithInstanceVariables
def access_checker_for(actor, protocol)
access_checker_klass.new(actor.key_or_user, container, protocol,
@@ -79,7 +86,7 @@ module API
end
# rubocop:disable Gitlab/ModuleWithInstanceVariables
- def set_project
+ def parse_repo_path
@container, @project, @repo_type, @redirected_path =
if params[:gl_repository]
Gitlab::GlRepository.parse(params[:gl_repository])
diff --git a/lib/gitlab/application_context.rb b/lib/gitlab/application_context.rb
index 60a50e97998..a3feda9bb59 100644
--- a/lib/gitlab/application_context.rb
+++ b/lib/gitlab/application_context.rb
@@ -25,6 +25,10 @@ module Gitlab
Labkit::Context.push(application_context.to_lazy_hash)
end
+ def self.current_context_include?(attribute_name)
+ Labkit::Context.current.to_h.include?(Labkit::Context.log_key(attribute_name))
+ end
+
def initialize(**args)
unknown_attributes = args.keys - APPLICATION_ATTRIBUTES.map(&:name)
raise ArgumentError, "#{unknown_attributes} are not known keys" if unknown_attributes.any?
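
The new `current_context_include?` predicate lets callers check whether a given attribute is already present in the current Labkit context before pushing or recomputing it. A hedged sketch, with `:project` used purely as an illustrative attribute name:

```ruby
# Hedged sketch; :project stands in for any context attribute Labkit tracks.
if Gitlab::ApplicationContext.current_context_include?(:project)
  # The current Labkit context already carries a project entry, so a caller
  # can skip pushing or recomputing it.
end
```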
diff --git a/lib/gitlab/background_migration/backfill_push_rules_id_in_projects.rb b/lib/gitlab/background_migration/backfill_push_rules_id_in_projects.rb
new file mode 100644
index 00000000000..9b9ef70424a
--- /dev/null
+++ b/lib/gitlab/background_migration/backfill_push_rules_id_in_projects.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BackgroundMigration
+    # Class that backfills push_rule_id in project_settings
+    # for each existing push rule
+ class BackfillPushRulesIdInProjects
+ # Temporary AR table for push rules
+ class ProjectSetting < ActiveRecord::Base
+ self.table_name = 'project_settings'
+ end
+
+ def perform(start_id, stop_id)
+ ProjectSetting.connection.execute(<<~SQL)
+ UPDATE project_settings ps1
+ SET push_rule_id = pr.id
+ FROM project_settings ps2
+ INNER JOIN push_rules pr
+ ON ps2.project_id = pr.project_id
+ WHERE pr.is_sample = false
+ AND pr.id BETWEEN #{start_id} AND #{stop_id}
+ AND ps1.project_id = ps2.project_id
+ SQL
+ end
+ end
+ end
+end
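
When one of the batches scheduled by `ScheduleBackfillPushRulesIdInProjects` runs, Sidekiq instantiates this class and calls `perform` with the queued id range. A hedged sketch of running a single batch by hand, for example from a Rails console; the id range is illustrative:

```ruby
# Process push_rules ids 1..1_000 immediately instead of waiting for the
# scheduled BackgroundMigrationWorker job.
Gitlab::BackgroundMigration::BackfillPushRulesIdInProjects.new.perform(1, 1_000)
```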
diff --git a/lib/gitlab/cycle_analytics/group_stage_summary.rb b/lib/gitlab/cycle_analytics/group_stage_summary.rb
index 26eaaf7df83..09b33d01846 100644
--- a/lib/gitlab/cycle_analytics/group_stage_summary.rb
+++ b/lib/gitlab/cycle_analytics/group_stage_summary.rb
@@ -12,14 +12,42 @@ module Gitlab
end
def data
- [serialize(Summary::Group::Issue.new(group: group, current_user: current_user, options: options)),
- serialize(Summary::Group::Deploy.new(group: group, options: options))]
+ [issue_stats,
+ deploy_stats,
+ deployment_frequency_stats]
end
private
- def serialize(summary_object)
- AnalyticsSummarySerializer.new.represent(summary_object)
+ def issue_stats
+ serialize(
+ Summary::Group::Issue.new(
+ group: group, current_user: current_user, options: options)
+ )
+ end
+
+ def deployments_summary
+ @deployments_summary ||=
+ Summary::Group::Deploy.new(group: group, options: options)
+ end
+
+ def deploy_stats
+ serialize deployments_summary
+ end
+
+ def deployment_frequency_stats
+ serialize(
+ Summary::Group::DeploymentFrequency.new(
+ deployments: deployments_summary.value,
+ group: group,
+ options: options),
+ with_unit: true
+ )
+ end
+
+ def serialize(summary_object, with_unit: false)
+ AnalyticsSummarySerializer.new.represent(
+ summary_object, with_unit: with_unit)
end
end
end
diff --git a/lib/gitlab/cycle_analytics/summary/group/deployment_frequency.rb b/lib/gitlab/cycle_analytics/summary/group/deployment_frequency.rb
new file mode 100644
index 00000000000..9fbbbb5a1ec
--- /dev/null
+++ b/lib/gitlab/cycle_analytics/summary/group/deployment_frequency.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module CycleAnalytics
+ module Summary
+ module Group
+ class DeploymentFrequency < Group::Base
+ include GroupProjectsProvider
+ include SummaryHelper
+
+ def initialize(deployments:, group:, options:)
+ @deployments = deployments
+
+ super(group: group, options: options)
+ end
+
+ def title
+ _('Deployment Frequency')
+ end
+
+ def value
+ @value ||=
+ frequency(@deployments, options[:from], options[:to] || Time.now)
+ end
+
+ def unit
+ _('per day')
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/cycle_analytics/summary_helper.rb b/lib/gitlab/cycle_analytics/summary_helper.rb
new file mode 100644
index 00000000000..2eb8f2b2664
--- /dev/null
+++ b/lib/gitlab/cycle_analytics/summary_helper.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module CycleAnalytics
+ module SummaryHelper
+ def frequency(count, from, to)
+ (count / days(from, to)).round(1)
+ end
+
+ def days(from, to)
+ [(to.end_of_day - from.beginning_of_day) / (24 * 60 * 60), 1].max
+ end
+ end
+ end
+end
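
A minimal worked example of the helper, assuming ActiveSupport is loaded for `beginning_of_day`/`end_of_day`; the class name and numbers are illustrative:

```ruby
# 30 deployments over a 10-day window come out to 3.0 per day. The `days`
# clamp to a minimum of 1 keeps sub-day windows from inflating the frequency.
class FrequencyExample
  include Gitlab::CycleAnalytics::SummaryHelper
end

from = Time.utc(2020, 1, 1)
to   = Time.utc(2020, 1, 10)

FrequencyExample.new.frequency(30, from, to) # => 3.0
```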
diff --git a/lib/gitlab/data_builder/pipeline.rb b/lib/gitlab/data_builder/pipeline.rb
index 8e699de8164..14facd6b1d4 100644
--- a/lib/gitlab/data_builder/pipeline.rb
+++ b/lib/gitlab/data_builder/pipeline.rb
@@ -45,7 +45,7 @@ module Gitlab
target_branch: merge_request.target_branch,
target_project_id: merge_request.target_project_id,
state: merge_request.state,
- merge_status: merge_request.merge_status,
+ merge_status: merge_request.public_merge_status,
url: Gitlab::UrlBuilder.build(merge_request)
}
end
diff --git a/lib/gitlab/elasticsearch/logs.rb b/lib/gitlab/elasticsearch/logs.rb
deleted file mode 100644
index 3b6d1d0286a..00000000000
--- a/lib/gitlab/elasticsearch/logs.rb
+++ /dev/null
@@ -1,154 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module Elasticsearch
- class Logs
- InvalidCursor = Class.new(RuntimeError)
-
- # How many log lines to fetch in a query
- LOGS_LIMIT = 500
-
- def initialize(client)
- @client = client
- end
-
- def pod_logs(namespace, pod_name: nil, container_name: nil, search: nil, start_time: nil, end_time: nil, cursor: nil)
- query = { bool: { must: [] } }.tap do |q|
- filter_pod_name(q, pod_name)
- filter_namespace(q, namespace)
- filter_container_name(q, container_name)
- filter_search(q, search)
- filter_times(q, start_time, end_time)
- end
-
- body = build_body(query, cursor)
- response = @client.search body: body
-
- format_response(response)
- end
-
- private
-
- def build_body(query, cursor = nil)
- body = {
- query: query,
- # reverse order so we can query N-most recent records
- sort: [
- { "@timestamp": { order: :desc } },
- { "offset": { order: :desc } }
- ],
- # only return these fields in the response
- _source: ["@timestamp", "message", "kubernetes.pod.name"],
- # fixed limit for now, we should support paginated queries
- size: ::Gitlab::Elasticsearch::Logs::LOGS_LIMIT
- }
-
- unless cursor.nil?
- body[:search_after] = decode_cursor(cursor)
- end
-
- body
- end
-
- def filter_pod_name(query, pod_name)
- # We can filter by "all pods" with a null pod_name
- return if pod_name.nil?
-
- query[:bool][:must] << {
- match_phrase: {
- "kubernetes.pod.name" => {
- query: pod_name
- }
- }
- }
- end
-
- def filter_namespace(query, namespace)
- query[:bool][:must] << {
- match_phrase: {
- "kubernetes.namespace" => {
- query: namespace
- }
- }
- }
- end
-
- def filter_container_name(query, container_name)
- # A pod can contain multiple containers.
- # By default we return logs from every container
- return if container_name.nil?
-
- query[:bool][:must] << {
- match_phrase: {
- "kubernetes.container.name" => {
- query: container_name
- }
- }
- }
- end
-
- def filter_search(query, search)
- return if search.nil?
-
- query[:bool][:must] << {
- simple_query_string: {
- query: search,
- fields: [:message],
- default_operator: :and
- }
- }
- end
-
- def filter_times(query, start_time, end_time)
- return unless start_time || end_time
-
- time_range = { range: { :@timestamp => {} } }.tap do |tr|
- tr[:range][:@timestamp][:gte] = start_time if start_time
- tr[:range][:@timestamp][:lt] = end_time if end_time
- end
-
- query[:bool][:filter] = [time_range]
- end
-
- def format_response(response)
- results = response.fetch("hits", {}).fetch("hits", [])
- last_result = results.last
- results = results.map do |hit|
- {
- timestamp: hit["_source"]["@timestamp"],
- message: hit["_source"]["message"],
- pod: hit["_source"]["kubernetes"]["pod"]["name"]
- }
- end
-
- # we queried for the N-most recent records but we want them ordered oldest to newest
- {
- logs: results.reverse,
- cursor: last_result.nil? ? nil : encode_cursor(last_result["sort"])
- }
- end
-
- # we want to hide the implementation details of the search_after parameter from the frontend
- # behind a single easily transmitted value
- def encode_cursor(obj)
- obj.join(',')
- end
-
- def decode_cursor(obj)
- cursor = obj.split(',').map(&:to_i)
-
- unless valid_cursor(cursor)
- raise InvalidCursor, "invalid cursor format"
- end
-
- cursor
- end
-
- def valid_cursor(cursor)
- cursor.instance_of?(Array) &&
- cursor.length == 2 &&
- cursor.map {|i| i.instance_of?(Integer)}.reduce(:&)
- end
- end
- end
-end
diff --git a/lib/gitlab/elasticsearch/logs/lines.rb b/lib/gitlab/elasticsearch/logs/lines.rb
new file mode 100644
index 00000000000..fb32a6c9fcd
--- /dev/null
+++ b/lib/gitlab/elasticsearch/logs/lines.rb
@@ -0,0 +1,156 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Elasticsearch
+ module Logs
+ class Lines
+ InvalidCursor = Class.new(RuntimeError)
+
+ # How many log lines to fetch in a query
+ LOGS_LIMIT = 500
+
+ def initialize(client)
+ @client = client
+ end
+
+ def pod_logs(namespace, pod_name: nil, container_name: nil, search: nil, start_time: nil, end_time: nil, cursor: nil)
+ query = { bool: { must: [] } }.tap do |q|
+ filter_pod_name(q, pod_name)
+ filter_namespace(q, namespace)
+ filter_container_name(q, container_name)
+ filter_search(q, search)
+ filter_times(q, start_time, end_time)
+ end
+
+ body = build_body(query, cursor)
+ response = @client.search body: body
+
+ format_response(response)
+ end
+
+ private
+
+ def build_body(query, cursor = nil)
+ body = {
+ query: query,
+ # reverse order so we can query N-most recent records
+ sort: [
+ { "@timestamp": { order: :desc } },
+ { "offset": { order: :desc } }
+ ],
+ # only return these fields in the response
+ _source: ["@timestamp", "message", "kubernetes.pod.name"],
+ # fixed limit for now, we should support paginated queries
+ size: ::Gitlab::Elasticsearch::Logs::Lines::LOGS_LIMIT
+ }
+
+ unless cursor.nil?
+ body[:search_after] = decode_cursor(cursor)
+ end
+
+ body
+ end
+
+ def filter_pod_name(query, pod_name)
+ # We can filter by "all pods" with a null pod_name
+ return if pod_name.nil?
+
+ query[:bool][:must] << {
+ match_phrase: {
+ "kubernetes.pod.name" => {
+ query: pod_name
+ }
+ }
+ }
+ end
+
+ def filter_namespace(query, namespace)
+ query[:bool][:must] << {
+ match_phrase: {
+ "kubernetes.namespace" => {
+ query: namespace
+ }
+ }
+ }
+ end
+
+ def filter_container_name(query, container_name)
+ # A pod can contain multiple containers.
+ # By default we return logs from every container
+ return if container_name.nil?
+
+ query[:bool][:must] << {
+ match_phrase: {
+ "kubernetes.container.name" => {
+ query: container_name
+ }
+ }
+ }
+ end
+
+ def filter_search(query, search)
+ return if search.nil?
+
+ query[:bool][:must] << {
+ simple_query_string: {
+ query: search,
+ fields: [:message],
+ default_operator: :and
+ }
+ }
+ end
+
+ def filter_times(query, start_time, end_time)
+ return unless start_time || end_time
+
+ time_range = { range: { :@timestamp => {} } }.tap do |tr|
+ tr[:range][:@timestamp][:gte] = start_time if start_time
+ tr[:range][:@timestamp][:lt] = end_time if end_time
+ end
+
+ query[:bool][:filter] = [time_range]
+ end
+
+ def format_response(response)
+ results = response.fetch("hits", {}).fetch("hits", [])
+ last_result = results.last
+ results = results.map do |hit|
+ {
+ timestamp: hit["_source"]["@timestamp"],
+ message: hit["_source"]["message"],
+ pod: hit["_source"]["kubernetes"]["pod"]["name"]
+ }
+ end
+
+ # we queried for the N-most recent records but we want them ordered oldest to newest
+ {
+ logs: results.reverse,
+ cursor: last_result.nil? ? nil : encode_cursor(last_result["sort"])
+ }
+ end
+
+ # we want to hide the implementation details of the search_after parameter from the frontend
+ # behind a single easily transmitted value
+ def encode_cursor(obj)
+ obj.join(',')
+ end
+
+ def decode_cursor(obj)
+ cursor = obj.split(',').map(&:to_i)
+
+ unless valid_cursor(cursor)
+ raise InvalidCursor, "invalid cursor format"
+ end
+
+ cursor
+ end
+
+ def valid_cursor(cursor)
+ cursor.instance_of?(Array) &&
+ cursor.length == 2 &&
+ cursor.map {|i| i.instance_of?(Integer)}.reduce(:&)
+ end
+ end
+ end
+ end
+end
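
A hedged usage sketch of the extracted `Lines` class; the Elasticsearch client, namespace, pod name, and cursor value are all placeholders. The cursor is the comma-joined `search_after` pair returned by a previous call:

```ruby
# Illustrative client; any Elasticsearch::Client configured for the cluster
# holding the pod logs would do.
client = Elasticsearch::Client.new(url: 'http://localhost:9200')

lines  = Gitlab::Elasticsearch::Logs::Lines.new(client)
result = lines.pod_logs(
  'production',
  pod_name: 'web-1234',
  search: 'error',
  cursor: '1585912793512,1462' # "<timestamp sort>,<offset sort>" from a previous page
)

result[:logs]   # => oldest-to-newest array of { timestamp:, message:, pod: }
result[:cursor] # => pass back in to fetch the next (older) page
```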
diff --git a/lib/gitlab/elasticsearch/logs/pods.rb b/lib/gitlab/elasticsearch/logs/pods.rb
new file mode 100644
index 00000000000..66499ae956a
--- /dev/null
+++ b/lib/gitlab/elasticsearch/logs/pods.rb
@@ -0,0 +1,70 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Elasticsearch
+ module Logs
+ class Pods
+ # How many items to fetch in a query
+ PODS_LIMIT = 500
+ CONTAINERS_LIMIT = 500
+
+ def initialize(client)
+ @client = client
+ end
+
+ def pods(namespace)
+ body = build_body(namespace)
+ response = @client.search body: body
+
+ format_response(response)
+ end
+
+ private
+
+ def build_body(namespace)
+ {
+ aggs: {
+ pods: {
+ aggs: {
+ containers: {
+ terms: {
+ field: 'kubernetes.container.name',
+ size: ::Gitlab::Elasticsearch::Logs::Pods::CONTAINERS_LIMIT
+ }
+ }
+ },
+ terms: {
+ field: 'kubernetes.pod.name',
+ size: ::Gitlab::Elasticsearch::Logs::Pods::PODS_LIMIT
+ }
+ }
+ },
+ query: {
+ bool: {
+ must: {
+ match_phrase: {
+ "kubernetes.namespace": namespace
+ }
+ }
+ }
+ },
+ # don't populate hits, only the aggregation is needed
+ size: 0
+ }
+ end
+
+ def format_response(response)
+ results = response.dig("aggregations", "pods", "buckets") || []
+ results.map do |bucket|
+ {
+ name: bucket["key"],
+ container_names: (bucket.dig("containers", "buckets") || []).map do |cbucket|
+ cbucket["key"]
+ end
+ }
+ end
+ end
+ end
+ end
+ end
+end
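
A hedged usage sketch of the new `Pods` class, reusing the illustrative client from the previous sketch; the namespace matches the `pods_query.json` fixture added below:

```ruby
# Given a response shaped like the pods_response.json fixture, the aggregation
# buckets are flattened into pod name / container name pairs.
pods = Gitlab::Elasticsearch::Logs::Pods.new(client)
pods.pods('autodevops-deploy-9-production')
# => [
#      { name: "runner-gitlab-runner-7bbfb5dcb5-p6smb", container_names: ["runner-gitlab-runner"] },
#      { name: "elastic-stack-elasticsearch-master-1",  container_names: ["elasticsearch", "chown", "sysctl"] },
#      ...
#    ]
```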
diff --git a/locale/gitlab.pot b/locale/gitlab.pot
index 49a064da6f8..f2395581caa 100644
--- a/locale/gitlab.pot
+++ b/locale/gitlab.pot
@@ -6082,6 +6082,9 @@ msgstr ""
msgid "Created"
msgstr ""
+msgid "Created %{timestamp}"
+msgstr ""
+
msgid "Created At"
msgstr ""
@@ -6957,6 +6960,9 @@ msgstr ""
msgid "Deploying to"
msgstr ""
+msgid "Deployment Frequency"
+msgstr ""
+
msgid "Deployment|API"
msgstr ""
@@ -14350,6 +14356,9 @@ msgstr ""
msgid "PackageRegistry|Learn how to %{noPackagesLinkStart}publish and share your packages%{noPackagesLinkEnd} with GitLab."
msgstr ""
+msgid "PackageRegistry|Manually Published"
+msgstr ""
+
msgid "PackageRegistry|Maven"
msgstr ""
@@ -14410,6 +14419,9 @@ msgstr ""
msgid "PackageRegistry|npm"
msgstr ""
+msgid "PackageRegistry|published by %{author}"
+msgstr ""
+
msgid "PackageRegistry|yarn"
msgstr ""
@@ -19478,6 +19490,9 @@ msgstr ""
msgid "StaticSiteEditor|Could not create merge request."
msgstr ""
+msgid "StaticSiteEditor|Incompatible file content"
+msgstr ""
+
msgid "StaticSiteEditor|Return to site"
msgstr ""
@@ -19490,9 +19505,15 @@ msgstr ""
msgid "StaticSiteEditor|Summary of changes"
msgstr ""
+msgid "StaticSiteEditor|The Static Site Editor is currently configured to only edit Markdown content on pages generated from Middleman. Visit the documentation to learn more about configuring your site to use the Static Site Editor."
+msgstr ""
+
msgid "StaticSiteEditor|Update %{sourcePath} file"
msgstr ""
+msgid "StaticSiteEditor|View documentation"
+msgstr ""
+
msgid "StaticSiteEditor|View merge request"
msgstr ""
@@ -25117,6 +25138,9 @@ msgstr ""
msgid "pending removal"
msgstr ""
+msgid "per day"
+msgstr ""
+
msgid "pipeline"
msgstr ""
diff --git a/package.json b/package.json
index a48b2468ea8..5ddbe10df04 100644
--- a/package.json
+++ b/package.json
@@ -40,7 +40,7 @@
"@babel/preset-env": "^7.8.4",
"@gitlab/at.js": "1.5.5",
"@gitlab/svgs": "1.119.0",
- "@gitlab/ui": "11.2.1",
+ "@gitlab/ui": "12.0.0",
"@gitlab/visual-review-tools": "1.5.1",
"@sentry/browser": "^5.10.2",
"@sourcegraph/code-host-integration": "0.0.36",
diff --git a/spec/factories/ci/job_artifacts.rb b/spec/factories/ci/job_artifacts.rb
index a259c5142fc..82383cfa2b0 100644
--- a/spec/factories/ci/job_artifacts.rb
+++ b/spec/factories/ci/job_artifacts.rb
@@ -13,7 +13,7 @@ FactoryBot.define do
end
trait :remote_store do
- file_store { JobArtifactUploader::Store::REMOTE }
+ file_store { JobArtifactUploader::Store::REMOTE}
end
after :build do |artifact|
diff --git a/spec/fixtures/lib/elasticsearch/pods_query.json b/spec/fixtures/lib/elasticsearch/pods_query.json
new file mode 100644
index 00000000000..90d162b871a
--- /dev/null
+++ b/spec/fixtures/lib/elasticsearch/pods_query.json
@@ -0,0 +1,28 @@
+{
+ "aggs": {
+ "pods": {
+ "aggs": {
+ "containers": {
+ "terms": {
+ "field": "kubernetes.container.name",
+ "size": 500
+ }
+ }
+ },
+ "terms": {
+ "field": "kubernetes.pod.name",
+ "size": 500
+ }
+ }
+ },
+ "query": {
+ "bool": {
+ "must": {
+ "match_phrase": {
+ "kubernetes.namespace": "autodevops-deploy-9-production"
+ }
+ }
+ }
+ },
+ "size": 0
+}
diff --git a/spec/fixtures/lib/elasticsearch/pods_response.json b/spec/fixtures/lib/elasticsearch/pods_response.json
new file mode 100644
index 00000000000..d923f914d7c
--- /dev/null
+++ b/spec/fixtures/lib/elasticsearch/pods_response.json
@@ -0,0 +1,75 @@
+{
+ "took": 8540,
+ "timed_out": false,
+ "_shards": {
+ "total": 153,
+ "successful": 153,
+ "skipped": 0,
+ "failed": 0
+ },
+ "hits": {
+ "total": 62143,
+ "max_score": 0.0,
+ "hits": [
+
+ ]
+ },
+ "aggregations": {
+ "pods": {
+ "doc_count_error_upper_bound": 0,
+ "sum_other_doc_count": 0,
+ "buckets": [
+ {
+ "key": "runner-gitlab-runner-7bbfb5dcb5-p6smb",
+ "doc_count": 19795,
+ "containers": {
+ "doc_count_error_upper_bound": 0,
+ "sum_other_doc_count": 0,
+ "buckets": [
+ {
+ "key": "runner-gitlab-runner",
+ "doc_count": 19795
+ }
+ ]
+ }
+ },
+ {
+ "key": "elastic-stack-elasticsearch-master-1",
+ "doc_count": 13185,
+ "containers": {
+ "doc_count_error_upper_bound": 0,
+ "sum_other_doc_count": 0,
+ "buckets": [
+ {
+ "key": "elasticsearch",
+ "doc_count": 13158
+ },
+ {
+ "key": "chown",
+ "doc_count": 24
+ },
+ {
+ "key": "sysctl",
+ "doc_count": 3
+ }
+ ]
+ }
+ },
+ {
+ "key": "ingress-nginx-ingress-controller-76449bcc8d-8qgl6",
+ "doc_count": 3437,
+ "containers": {
+ "doc_count_error_upper_bound": 0,
+ "sum_other_doc_count": 0,
+ "buckets": [
+ {
+ "key": "nginx-ingress-controller",
+ "doc_count": 3437
+ }
+ ]
+ }
+ }
+ ]
+ }
+ }
+}
diff --git a/spec/frontend/jira_import/components/jira_import_form_spec.js b/spec/frontend/jira_import/components/jira_import_form_spec.js
index 315ccccd991..3215ff26bdd 100644
--- a/spec/frontend/jira_import/components/jira_import_form_spec.js
+++ b/spec/frontend/jira_import/components/jira_import_form_spec.js
@@ -1,4 +1,4 @@
-import { GlAvatar, GlNewButton, GlFormSelect, GlLabel } from '@gitlab/ui';
+import { GlAvatar, GlButton, GlFormSelect, GlLabel } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import JiraImportForm from '~/jira_import/components/jira_import_form.vue';
@@ -44,7 +44,7 @@ describe('JiraImportForm', () => {
it('shows a Next button', () => {
const nextButton = wrapper
- .findAll(GlNewButton)
+ .findAll(GlButton)
.at(0)
.text();
@@ -53,7 +53,7 @@ describe('JiraImportForm', () => {
it('shows a Cancel button', () => {
const cancelButton = wrapper
- .findAll(GlNewButton)
+ .findAll(GlButton)
.at(1)
.text();
diff --git a/spec/frontend/snippets/components/snippet_header_spec.js b/spec/frontend/snippets/components/snippet_header_spec.js
index 1b67c08e5a4..16a66c70d6a 100644
--- a/spec/frontend/snippets/components/snippet_header_spec.js
+++ b/spec/frontend/snippets/components/snippet_header_spec.js
@@ -1,7 +1,7 @@
import SnippetHeader from '~/snippets/components/snippet_header.vue';
import DeleteSnippetMutation from '~/snippets/mutations/deleteSnippet.mutation.graphql';
import { ApolloMutation } from 'vue-apollo';
-import { GlNewButton, GlModal } from '@gitlab/ui';
+import { GlButton, GlModal } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
describe('Snippet header component', () => {
@@ -89,7 +89,7 @@ describe('Snippet header component', () => {
updateSnippet: false,
},
});
- expect(wrapper.findAll(GlNewButton).length).toEqual(0);
+ expect(wrapper.findAll(GlButton).length).toEqual(0);
createComponent({
permissions: {
@@ -97,7 +97,7 @@ describe('Snippet header component', () => {
updateSnippet: false,
},
});
- expect(wrapper.findAll(GlNewButton).length).toEqual(1);
+ expect(wrapper.findAll(GlButton).length).toEqual(1);
createComponent({
permissions: {
@@ -105,7 +105,7 @@ describe('Snippet header component', () => {
updateSnippet: true,
},
});
- expect(wrapper.findAll(GlNewButton).length).toEqual(2);
+ expect(wrapper.findAll(GlButton).length).toEqual(2);
createComponent({
permissions: {
@@ -117,7 +117,7 @@ describe('Snippet header component', () => {
canCreateSnippet: true,
});
return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.findAll(GlNewButton).length).toEqual(3);
+ expect(wrapper.findAll(GlButton).length).toEqual(3);
});
});
diff --git a/spec/frontend/static_site_editor/components/invalid_content_message_spec.js b/spec/frontend/static_site_editor/components/invalid_content_message_spec.js
new file mode 100644
index 00000000000..7e699e9451c
--- /dev/null
+++ b/spec/frontend/static_site_editor/components/invalid_content_message_spec.js
@@ -0,0 +1,23 @@
+import { shallowMount } from '@vue/test-utils';
+
+import InvalidContentMessage from '~/static_site_editor/components/invalid_content_message.vue';
+
+describe('~/static_site_editor/components/invalid_content_message.vue', () => {
+ let wrapper;
+ const findDocumentationButton = () => wrapper.find({ ref: 'documentationButton' });
+ const documentationUrl =
+ 'https://gitlab.com/gitlab-org/project-templates/static-site-editor-middleman';
+
+ beforeEach(() => {
+ wrapper = shallowMount(InvalidContentMessage);
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders the configuration button link', () => {
+ expect(findDocumentationButton().exists()).toBe(true);
+ expect(findDocumentationButton().attributes('href')).toBe(documentationUrl);
+ });
+});
diff --git a/spec/frontend/static_site_editor/components/publish_toolbar_spec.js b/spec/frontend/static_site_editor/components/publish_toolbar_spec.js
index f00fc38430f..82eb12d4c4d 100644
--- a/spec/frontend/static_site_editor/components/publish_toolbar_spec.js
+++ b/spec/frontend/static_site_editor/components/publish_toolbar_spec.js
@@ -1,5 +1,5 @@
import { shallowMount } from '@vue/test-utils';
-import { GlNewButton, GlLoadingIcon } from '@gitlab/ui';
+import { GlButton, GlLoadingIcon } from '@gitlab/ui';
import PublishToolbar from '~/static_site_editor/components/publish_toolbar.vue';
@@ -18,7 +18,7 @@ describe('Static Site Editor Toolbar', () => {
};
const findReturnUrlLink = () => wrapper.find({ ref: 'returnUrlLink' });
- const findSaveChangesButton = () => wrapper.find(GlNewButton);
+ const findSaveChangesButton = () => wrapper.find(GlButton);
const findLoadingIndicator = () => wrapper.find(GlLoadingIcon);
beforeEach(() => {
diff --git a/spec/frontend/static_site_editor/components/static_site_editor_spec.js b/spec/frontend/static_site_editor/components/static_site_editor_spec.js
index d427df9bd4b..19a86de9cd8 100644
--- a/spec/frontend/static_site_editor/components/static_site_editor_spec.js
+++ b/spec/frontend/static_site_editor/components/static_site_editor_spec.js
@@ -8,6 +8,7 @@ import createState from '~/static_site_editor/store/state';
import StaticSiteEditor from '~/static_site_editor/components/static_site_editor.vue';
import EditArea from '~/static_site_editor/components/edit_area.vue';
import EditHeader from '~/static_site_editor/components/edit_header.vue';
+import InvalidContentMessage from '~/static_site_editor/components/invalid_content_message.vue';
import PublishToolbar from '~/static_site_editor/components/publish_toolbar.vue';
import { sourceContent, sourceContentTitle } from '../mock_data';
@@ -29,7 +30,10 @@ describe('StaticSiteEditor', () => {
submitChangesActionMock = jest.fn();
store = new Vuex.Store({
- state: createState(initialState),
+ state: createState({
+ isSupportedContent: true,
+ ...initialState,
+ }),
getters: {
contentChanged: () => false,
...getters,
@@ -62,6 +66,7 @@ describe('StaticSiteEditor', () => {
const findEditArea = () => wrapper.find(EditArea);
const findEditHeader = () => wrapper.find(EditHeader);
+ const findInvalidContentMessage = () => wrapper.find(InvalidContentMessage);
const findPublishToolbar = () => wrapper.find(PublishToolbar);
const findSkeletonLoader = () => wrapper.find(GlSkeletonLoader);
@@ -151,6 +156,13 @@ describe('StaticSiteEditor', () => {
expect(findPublishToolbar().props('savingChanges')).toBe(true);
});
+ it('displays invalid content message when content is not supported', () => {
+ buildStore({ initialState: { isSupportedContent: false } });
+ buildWrapper();
+
+ expect(findInvalidContentMessage().exists()).toBe(true);
+ });
+
it('dispatches load content action', () => {
expect(loadContentActionMock).toHaveBeenCalled();
});
diff --git a/spec/frontend/vue_shared/components/__snapshots__/clone_dropdown_spec.js.snap b/spec/frontend/vue_shared/components/__snapshots__/clone_dropdown_spec.js.snap
index d837c793784..4cd03a690e9 100644
--- a/spec/frontend/vue_shared/components/__snapshots__/clone_dropdown_spec.js.snap
+++ b/spec/frontend/vue_shared/components/__snapshots__/clone_dropdown_spec.js.snap
@@ -42,7 +42,7 @@ exports[`Clone Dropdown Button rendering matches the snapshot 1`] = `
<b-input-group-append-stub
tag="div"
>
- <gl-new-button-stub
+ <gl-button-stub
category="tertiary"
data-clipboard-text="ssh://foo.bar"
icon=""
@@ -55,7 +55,7 @@ exports[`Clone Dropdown Button rendering matches the snapshot 1`] = `
size="16"
title="Copy URL"
/>
- </gl-new-button-stub>
+ </gl-button-stub>
</b-input-group-append-stub>
</b-input-group-stub>
</div>
@@ -92,7 +92,7 @@ exports[`Clone Dropdown Button rendering matches the snapshot 1`] = `
<b-input-group-append-stub
tag="div"
>
- <gl-new-button-stub
+ <gl-button-stub
category="tertiary"
data-clipboard-text="http://foo.bar"
icon=""
@@ -105,7 +105,7 @@ exports[`Clone Dropdown Button rendering matches the snapshot 1`] = `
size="16"
title="Copy URL"
/>
- </gl-new-button-stub>
+ </gl-button-stub>
</b-input-group-append-stub>
</b-input-group-stub>
</div>
diff --git a/spec/javascripts/filtered_search/filtered_search_manager_spec.js b/spec/javascripts/filtered_search/filtered_search_manager_spec.js
index e5d1d1d690e..d0b54a16747 100644
--- a/spec/javascripts/filtered_search/filtered_search_manager_spec.js
+++ b/spec/javascripts/filtered_search/filtered_search_manager_spec.js
@@ -8,6 +8,7 @@ import FilteredSearchVisualTokens from '~/filtered_search/filtered_search_visual
import FilteredSearchDropdownManager from '~/filtered_search/filtered_search_dropdown_manager';
import FilteredSearchManager from '~/filtered_search/filtered_search_manager';
import FilteredSearchSpecHelper from '../helpers/filtered_search_spec_helper';
+import { BACKSPACE_KEY_CODE, DELETE_KEY_CODE } from '~/lib/utils/keycodes';
describe('Filtered Search Manager', function() {
let input;
@@ -17,16 +18,35 @@ describe('Filtered Search Manager', function() {
const placeholder = 'Search or filter results...';
function dispatchBackspaceEvent(element, eventType) {
- const backspaceKey = 8;
const event = new Event(eventType);
- event.keyCode = backspaceKey;
+ event.keyCode = BACKSPACE_KEY_CODE;
element.dispatchEvent(event);
}
function dispatchDeleteEvent(element, eventType) {
- const deleteKey = 46;
const event = new Event(eventType);
- event.keyCode = deleteKey;
+ event.keyCode = DELETE_KEY_CODE;
+ element.dispatchEvent(event);
+ }
+
+ function dispatchAltBackspaceEvent(element, eventType) {
+ const event = new Event(eventType);
+ event.altKey = true;
+ event.keyCode = BACKSPACE_KEY_CODE;
+ element.dispatchEvent(event);
+ }
+
+ function dispatchCtrlBackspaceEvent(element, eventType) {
+ const event = new Event(eventType);
+ event.ctrlKey = true;
+ event.keyCode = BACKSPACE_KEY_CODE;
+ element.dispatchEvent(event);
+ }
+
+ function dispatchMetaBackspaceEvent(element, eventType) {
+ const event = new Event(eventType);
+ event.metaKey = true;
+ event.keyCode = BACKSPACE_KEY_CODE;
element.dispatchEvent(event);
}
@@ -299,6 +319,80 @@ describe('Filtered Search Manager', function() {
});
});
+ describe('checkForAltOrCtrlBackspace', () => {
+ beforeEach(() => {
+ initializeManager();
+ spyOn(FilteredSearchVisualTokens, 'removeLastTokenPartial').and.callThrough();
+ });
+
+ describe('tokens and no input', () => {
+ beforeEach(() => {
+ tokensContainer.innerHTML = FilteredSearchSpecHelper.createTokensContainerHTML(
+ FilteredSearchSpecHelper.createFilterVisualTokenHTML('label', '=', '~bug'),
+ );
+ });
+
+ it('removes last token via alt-backspace', () => {
+ dispatchAltBackspaceEvent(input, 'keydown');
+
+ expect(FilteredSearchVisualTokens.removeLastTokenPartial).toHaveBeenCalled();
+ });
+
+ it('removes last token via ctrl-backspace', () => {
+ dispatchCtrlBackspaceEvent(input, 'keydown');
+
+ expect(FilteredSearchVisualTokens.removeLastTokenPartial).toHaveBeenCalled();
+ });
+ });
+
+ describe('tokens and input', () => {
+ beforeEach(() => {
+ tokensContainer.innerHTML = FilteredSearchSpecHelper.createTokensContainerHTML(
+ FilteredSearchSpecHelper.createFilterVisualTokenHTML('label', '=', '~bug'),
+ );
+ });
+
+ it('does not remove token or change input via alt-backspace when there is existing input', () => {
+ input = manager.filteredSearchInput;
+ input.value = 'text';
+ dispatchAltBackspaceEvent(input, 'keydown');
+
+ expect(FilteredSearchVisualTokens.removeLastTokenPartial).not.toHaveBeenCalled();
+ expect(input.value).toEqual('text');
+ });
+
+ it('does not remove token or change input via ctrl-backspace when there is existing input', () => {
+ input = manager.filteredSearchInput;
+ input.value = 'text';
+ dispatchCtrlBackspaceEvent(input, 'keydown');
+
+ expect(FilteredSearchVisualTokens.removeLastTokenPartial).not.toHaveBeenCalled();
+ expect(input.value).toEqual('text');
+ });
+ });
+ });
+
+ describe('checkForMetaBackspace', () => {
+ beforeEach(() => {
+ initializeManager();
+ });
+
+ beforeEach(() => {
+ tokensContainer.innerHTML = FilteredSearchSpecHelper.createTokensContainerHTML(
+ FilteredSearchSpecHelper.createFilterVisualTokenHTML('label', '=', '~bug'),
+ );
+ });
+
+ it('removes all tokens and input', () => {
+ spyOn(FilteredSearchManager.prototype, 'clearSearch').and.callThrough();
+ dispatchMetaBackspaceEvent(input, 'keydown');
+
+ expect(manager.clearSearch).toHaveBeenCalled();
+ expect(manager.filteredSearchInput.value).toEqual('');
+ expect(DropdownUtils.getSearchQuery()).toEqual('');
+ });
+ });
+
describe('removeToken', () => {
beforeEach(() => {
initializeManager();
diff --git a/spec/lib/gitlab/application_context_spec.rb b/spec/lib/gitlab/application_context_spec.rb
index 0903ca6f9e8..6674ea059a0 100644
--- a/spec/lib/gitlab/application_context_spec.rb
+++ b/spec/lib/gitlab/application_context_spec.rb
@@ -42,6 +42,18 @@ describe Gitlab::ApplicationContext do
end
end
+ describe '.current_context_include?' do
+ it 'returns true if the key was present in the context' do
+ described_class.with_context(caller_id: "Hello") do
+ expect(described_class.current_context_include?(:caller_id)).to be(true)
+ end
+ end
+
+ it 'returns false if the key was not present in the current context' do
+ expect(described_class.current_context_include?(:caller_id)).to be(false)
+ end
+ end
+
describe '#to_lazy_hash' do
let(:user) { build(:user) }
let(:project) { build(:project) }
diff --git a/spec/lib/gitlab/background_migration/backfill_push_rules_id_in_projects_spec.rb b/spec/lib/gitlab/background_migration/backfill_push_rules_id_in_projects_spec.rb
new file mode 100644
index 00000000000..f150ed4bd2e
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_push_rules_id_in_projects_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::BackgroundMigration::BackfillPushRulesIdInProjects, :migration, schema: 2020_03_25_162730 do
+ let(:push_rules) { table(:push_rules) }
+ let(:projects) { table(:projects) }
+ let(:project_settings) { table(:project_settings) }
+ let(:namespace) { table(:namespaces).create(name: 'user', path: 'user') }
+
+ subject { described_class.new }
+
+ describe '#perform' do
+ it 'creates new project push_rules for all push rules in the range' do
+ project_1 = projects.create(id: 1, namespace_id: namespace.id)
+ project_2 = projects.create(id: 2, namespace_id: namespace.id)
+ project_3 = projects.create(id: 3, namespace_id: namespace.id)
+ project_settings_1 = project_settings.create(project_id: project_1.id)
+ project_settings_2 = project_settings.create(project_id: project_2.id)
+ project_settings_3 = project_settings.create(project_id: project_3.id)
+ push_rule_1 = push_rules.create(id: 5, is_sample: false, project_id: project_1.id)
+ push_rule_2 = push_rules.create(id: 6, is_sample: false, project_id: project_2.id)
+ push_rules.create(id: 8, is_sample: false, project_id: 3)
+
+ subject.perform(5, 7)
+
+ expect(project_settings_1.reload.push_rule_id).to eq(push_rule_1.id)
+ expect(project_settings_2.reload.push_rule_id).to eq(push_rule_2.id)
+ expect(project_settings_3.reload.push_rule_id).to be_nil
+ end
+ end
+end
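
The spec drives `BackfillPushRulesIdInProjects#perform(5, 7)` and expects `project_settings.push_rule_id` to be filled in only for push rules whose ids fall inside that range. The background migration itself is not part of this excerpt; a rough, hypothetical sketch of the kind of update it exercises:

    # Hypothetical sketch of what the spec above exercises; not the actual
    # GitLab implementation, which is not shown in this diff.
    class BackfillPushRulesIdInProjects
      def perform(start_id, stop_id)
        ActiveRecord::Base.connection.execute(<<~SQL)
          UPDATE project_settings ps
          SET push_rule_id = pr.id
          FROM push_rules pr
          WHERE pr.project_id = ps.project_id
            AND pr.id BETWEEN #{Integer(start_id)} AND #{Integer(stop_id)}
        SQL
      end
    end
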
diff --git a/spec/lib/gitlab/cycle_analytics/group_stage_summary_spec.rb b/spec/lib/gitlab/cycle_analytics/group_stage_summary_spec.rb
index 664009f140f..3de65529e99 100644
--- a/spec/lib/gitlab/cycle_analytics/group_stage_summary_spec.rb
+++ b/spec/lib/gitlab/cycle_analytics/group_stage_summary_spec.rb
@@ -127,4 +127,50 @@ describe Gitlab::CycleAnalytics::GroupStageSummary do
end
end
end
+
+ describe '#deployment_frequency' do
+ let(:from) { 6.days.ago }
+ let(:to) { nil }
+
+ subject do
+ described_class.new(group, options: {
+ from: from,
+ to: to,
+ current_user: user
+ }).data.third
+ end
+
+ it 'includes the unit: `per day`' do
+ expect(subject[:unit]).to eq(_('per day'))
+ end
+
+ before do
+ Timecop.freeze(5.days.ago) do
+ create(:deployment, :success, project: project)
+ end
+ end
+
+ context 'when `to` is nil' do
+ it 'includes range until now' do
+ # 1 deployment over 7 days
+ expect(subject[:value]).to eq(0.1)
+ end
+ end
+
+ context 'when `to` is given' do
+ let(:from) { 10.days.ago }
+ let(:to) { 10.days.from_now }
+
+ before do
+ Timecop.freeze(5.days.from_now) do
+ create(:deployment, :success, project: project)
+ end
+ end
+
+ it 'returns deployment frequency within `from` and `to` range' do
+ # 2 deployments over 20 days
+ expect(subject[:value]).to eq(0.1)
+ end
+ end
+ end
end
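
Both new examples expect a value of 0.1, which is consistent with the deployment count divided by the number of days in the range, rounded to one decimal place. The formula and rounding are assumptions here, since the summary implementation is outside this excerpt:

    # Assumed formula: deployments per day over the requested range, one decimal place.
    def deployment_frequency(deployment_count, days)
      (deployment_count.to_f / days).round(1)
    end

    deployment_frequency(1, 7)  # => 0.1  (the "when `to` is nil" example)
    deployment_frequency(2, 20) # => 0.1  (the "when `to` is given" example)
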
diff --git a/spec/lib/gitlab/data_builder/pipeline_spec.rb b/spec/lib/gitlab/data_builder/pipeline_spec.rb
index da22da8de0f..519f5873d75 100644
--- a/spec/lib/gitlab/data_builder/pipeline_spec.rb
+++ b/spec/lib/gitlab/data_builder/pipeline_spec.rb
@@ -83,7 +83,7 @@ describe Gitlab::DataBuilder::Pipeline do
expect(merge_request_attrs[:target_branch]).to eq(merge_request.target_branch)
expect(merge_request_attrs[:target_project_id]).to eq(merge_request.target_project_id)
expect(merge_request_attrs[:state]).to eq(merge_request.state)
- expect(merge_request_attrs[:merge_status]).to eq(merge_request.merge_status)
+ expect(merge_request_attrs[:merge_status]).to eq(merge_request.public_merge_status)
expect(merge_request_attrs[:url]).to eq("http://localhost/#{merge_request.target_project.full_path}/-/merge_requests/#{merge_request.iid}")
end
end
diff --git a/spec/lib/gitlab/elasticsearch/logs_spec.rb b/spec/lib/gitlab/elasticsearch/logs/lines_spec.rb
index 6b9d1dbef99..8b6a19fa2c5 100644
--- a/spec/lib/gitlab/elasticsearch/logs_spec.rb
+++ b/spec/lib/gitlab/elasticsearch/logs/lines_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe Gitlab::Elasticsearch::Logs do
+describe Gitlab::Elasticsearch::Logs::Lines do
let(:client) { Elasticsearch::Transport::Client }
let(:es_message_1) { { timestamp: "2019-12-13T14:35:34.034Z", pod: "production-6866bc8974-m4sk4", message: "10.8.2.1 - - [25/Oct/2019:08:03:22 UTC] \"GET / HTTP/1.1\" 200 13" } }
diff --git a/spec/lib/gitlab/elasticsearch/logs/pods_spec.rb b/spec/lib/gitlab/elasticsearch/logs/pods_spec.rb
new file mode 100644
index 00000000000..0a4ab0780c5
--- /dev/null
+++ b/spec/lib/gitlab/elasticsearch/logs/pods_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Elasticsearch::Logs::Pods do
+ let(:client) { Elasticsearch::Transport::Client }
+
+ let(:es_query) { JSON.parse(fixture_file('lib/elasticsearch/pods_query.json'), symbolize_names: true) }
+ let(:es_response) { JSON.parse(fixture_file('lib/elasticsearch/pods_response.json')) }
+ let(:namespace) { "autodevops-deploy-9-production" }
+
+ subject { described_class.new(client) }
+
+ describe '#pods' do
+ it 'returns the pods' do
+ expect(client).to receive(:search).with(body: es_query).and_return(es_response)
+
+ result = subject.pods(namespace)
+ expect(result).to eq([
+ {
+ name: "runner-gitlab-runner-7bbfb5dcb5-p6smb",
+ container_names: %w[runner-gitlab-runner]
+ },
+ {
+ name: "elastic-stack-elasticsearch-master-1",
+ container_names: %w[elasticsearch chown sysctl]
+ },
+ {
+ name: "ingress-nginx-ingress-controller-76449bcc8d-8qgl6",
+ container_names: %w[nginx-ingress-controller]
+ }
+ ])
+ end
+ end
+end
diff --git a/spec/migrations/schedule_backfill_push_rules_id_in_projects_spec.rb b/spec/migrations/schedule_backfill_push_rules_id_in_projects_spec.rb
new file mode 100644
index 00000000000..77648f5c64a
--- /dev/null
+++ b/spec/migrations/schedule_backfill_push_rules_id_in_projects_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require Rails.root.join('db', 'post_migrate', '20200325162730_schedule_backfill_push_rules_id_in_projects.rb')
+
+describe ScheduleBackfillPushRulesIdInProjects do
+ let(:push_rules) { table(:push_rules) }
+
+ it 'adds global rule association to application settings' do
+ application_settings = table(:application_settings)
+ setting = application_settings.create!
+ sample_rule = push_rules.create!(is_sample: true)
+
+ Sidekiq::Testing.fake! do
+ disable_migrations_output { migrate! }
+ end
+
+ setting.reload
+ expect(setting.push_rule_id).to eq(sample_rule.id)
+ end
+
+ it 'schedules worker to migrate project push rules' do
+ rule_1 = push_rules.create!
+ rule_2 = push_rules.create!
+
+ Sidekiq::Testing.fake! do
+ disable_migrations_output { migrate! }
+
+ expect(BackgroundMigrationWorker.jobs.size).to eq(1)
+ expect(described_class::MIGRATION)
+ .to be_scheduled_delayed_migration(5.minutes, rule_1.id, rule_2.id)
+ end
+ end
+end
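
The spec pins down two behaviours of the post-deployment migration: the instance-wide application settings get pointed at the sample (global) push rule, and exactly one delayed BackgroundMigrationWorker job is scheduled 5 minutes out covering the push rule id range. The real migration likely uses GitLab's batching helpers; the following is only a minimal sketch of those two behaviours under that assumption:

    # Minimal sketch; the actual migration in this MR may schedule batches differently.
    class ScheduleBackfillPushRulesIdInProjects < ActiveRecord::Migration[6.0]
      MIGRATION = 'BackfillPushRulesIdInProjects'

      def up
        # Point the instance-wide settings at the sample (global) push rule.
        execute <<~SQL
          UPDATE application_settings
          SET push_rule_id = (SELECT id FROM push_rules WHERE is_sample = true LIMIT 1)
        SQL

        # Queue one delayed background job covering the full push_rules id range.
        bounds = select_one('SELECT MIN(id) AS min_id, MAX(id) AS max_id FROM push_rules')
        return unless bounds['min_id']

        BackgroundMigrationWorker.perform_in(5.minutes, MIGRATION, [bounds['min_id'], bounds['max_id']])
      end
    end
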
diff --git a/spec/models/ci/job_artifact_spec.rb b/spec/models/ci/job_artifact_spec.rb
index 80b619ed2b1..6f6ff3704b4 100644
--- a/spec/models/ci/job_artifact_spec.rb
+++ b/spec/models/ci/job_artifact_spec.rb
@@ -349,13 +349,16 @@ describe Ci::JobArtifact do
end
describe 'file is being stored' do
- context 'when object has nil store' do
- it 'is stored locally' do
- subject = build(:ci_job_artifact, :archive, file_store: nil)
+ subject { create(:ci_job_artifact, :archive) }
- subject.save
+ context 'when object has nil store' do
+ before do
+ subject.update_column(:file_store, nil)
+ subject.reload
+ end
- expect(subject.file_store).to be(ObjectStorage::Store::LOCAL)
+ it 'is stored locally' do
+ expect(subject.file_store).to be(nil)
expect(subject.file).to be_file_storage
expect(subject.file.object_store).to eq(ObjectStorage::Store::LOCAL)
end
@@ -363,10 +366,6 @@ describe Ci::JobArtifact do
context 'when existing object has local store' do
it 'is stored locally' do
- subject = build(:ci_job_artifact, :archive)
-
- subject.save
-
expect(subject.file_store).to be(ObjectStorage::Store::LOCAL)
expect(subject.file).to be_file_storage
expect(subject.file.object_store).to eq(ObjectStorage::Store::LOCAL)
@@ -380,10 +379,6 @@ describe Ci::JobArtifact do
context 'when file is stored' do
it 'is stored remotely' do
- subject = build(:ci_job_artifact, :archive)
-
- subject.save
-
expect(subject.file_store).to eq(ObjectStorage::Store::REMOTE)
expect(subject.file).not_to be_file_storage
expect(subject.file.object_store).to eq(ObjectStorage::Store::REMOTE)
diff --git a/spec/models/cycle_analytics/group_level_spec.rb b/spec/models/cycle_analytics/group_level_spec.rb
index 1f410a7c539..5ba0f078df1 100644
--- a/spec/models/cycle_analytics/group_level_spec.rb
+++ b/spec/models/cycle_analytics/group_level_spec.rb
@@ -38,7 +38,7 @@ describe CycleAnalytics::GroupLevel do
end
it 'returns medians for each stage for a specific group' do
- expect(subject.summary.map { |summary| summary[:value] }).to contain_exactly(1, 1)
+ expect(subject.summary.map { |summary| summary[:value] }).to contain_exactly(0.1, 1, 1)
end
end
end
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index 50bb194ef71..52cd31ee65f 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -2335,6 +2335,21 @@ describe MergeRequest do
end
end
+ describe "#public_merge_status" do
+ using RSpec::Parameterized::TableSyntax
+ subject { build(:merge_request, merge_status: status) }
+
+ where(:status, :public_status) do
+ 'cannot_be_merged_rechecking' | 'checking'
+ 'checking' | 'checking'
+ 'cannot_be_merged' | 'cannot_be_merged'
+ end
+
+ with_them do
+ it { expect(subject.public_merge_status).to eq(public_status) }
+ end
+ end
+
describe "#head_pipeline_active? " do
it do
is_expected
@@ -3226,20 +3241,51 @@ describe MergeRequest do
expect(notification_service).to receive(:merge_request_unmergeable).with(subject).once
expect(todo_service).to receive(:merge_request_became_unmergeable).with(subject).once
- subject.mark_as_unmergeable
- subject.mark_as_unchecked
- subject.mark_as_unmergeable
+ subject.mark_as_unmergeable!
+
+ subject.mark_as_unchecked!
+ subject.mark_as_unmergeable!
+ end
+
+ it 'notifies conflict, but does not notify again if rechecking still results in cannot_be_merged with async mergeability check' do
+ expect(notification_service).to receive(:merge_request_unmergeable).with(subject).once
+ expect(todo_service).to receive(:merge_request_became_unmergeable).with(subject).once
+
+ subject.mark_as_checking!
+ subject.mark_as_unmergeable!
+
+ subject.mark_as_unchecked!
+ subject.mark_as_checking!
+ subject.mark_as_unmergeable!
end
it 'notifies conflict, whenever newly unmergeable' do
expect(notification_service).to receive(:merge_request_unmergeable).with(subject).twice
expect(todo_service).to receive(:merge_request_became_unmergeable).with(subject).twice
- subject.mark_as_unmergeable
- subject.mark_as_unchecked
- subject.mark_as_mergeable
- subject.mark_as_unchecked
- subject.mark_as_unmergeable
+ subject.mark_as_unmergeable!
+
+ subject.mark_as_unchecked!
+ subject.mark_as_mergeable!
+
+ subject.mark_as_unchecked!
+ subject.mark_as_unmergeable!
+ end
+
+ it 'notifies conflict, whenever newly unmergeable with async mergeability check' do
+ expect(notification_service).to receive(:merge_request_unmergeable).with(subject).twice
+ expect(todo_service).to receive(:merge_request_became_unmergeable).with(subject).twice
+
+ subject.mark_as_checking!
+ subject.mark_as_unmergeable!
+
+ subject.mark_as_unchecked!
+ subject.mark_as_checking!
+ subject.mark_as_mergeable!
+
+ subject.mark_as_unchecked!
+ subject.mark_as_checking!
+ subject.mark_as_unmergeable!
end
it 'does not notify whenever merge request is newly unmergeable due to other reasons' do
@@ -3248,7 +3294,7 @@ describe MergeRequest do
expect(notification_service).not_to receive(:merge_request_unmergeable)
expect(todo_service).not_to receive(:merge_request_became_unmergeable)
- subject.mark_as_unmergeable
+ subject.mark_as_unmergeable!
end
end
end
@@ -3261,7 +3307,7 @@ describe MergeRequest do
expect(notification_service).not_to receive(:merge_request_unmergeable)
expect(todo_service).not_to receive(:merge_request_became_unmergeable)
- subject.mark_as_unmergeable
+ subject.mark_as_unmergeable!
end
end
end
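
The new `#public_merge_status` examples show the transient `cannot_be_merged_rechecking` state being collapsed into `checking`, so API consumers never see the internal recheck state. A minimal sketch of that mapping, assuming a plain substitution on top of `merge_status` rather than GitLab's actual model code:

    # Minimal sketch of the mapping the table-syntax spec above asserts.
    def public_merge_status(merge_status)
      merge_status == 'cannot_be_merged_rechecking' ? 'checking' : merge_status
    end

    public_merge_status('cannot_be_merged_rechecking') # => "checking"
    public_merge_status('checking')                    # => "checking"
    public_merge_status('cannot_be_merged')            # => "cannot_be_merged"
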
diff --git a/spec/requests/api/graphql/project/merge_request_spec.rb b/spec/requests/api/graphql/project/merge_request_spec.rb
index a1b3111ff71..8d8c31c335d 100644
--- a/spec/requests/api/graphql/project/merge_request_spec.rb
+++ b/spec/requests/api/graphql/project/merge_request_spec.rb
@@ -130,4 +130,15 @@ describe 'getting merge request information nested in a project' do
expect(merge_requests_graphql_data.size).to eq 2
end
end
+
+ context 'when merge request is cannot_be_merged_rechecking' do
+ before do
+ merge_request.update!(merge_status: 'cannot_be_merged_rechecking')
+ end
+
+ it 'returns checking' do
+ post_graphql(query, current_user: current_user)
+ expect(merge_request_graphql_data['mergeStatus']).to eq('checking')
+ end
+ end
end
diff --git a/spec/requests/api/merge_requests_spec.rb b/spec/requests/api/merge_requests_spec.rb
index a8543c8e282..40d6f171116 100644
--- a/spec/requests/api/merge_requests_spec.rb
+++ b/spec/requests/api/merge_requests_spec.rb
@@ -1060,6 +1060,14 @@ describe API::MergeRequests do
expect(json_response['user']['can_merge']).to be_falsy
end
+ it 'returns `checking` as its merge_status instead of `cannot_be_merged_rechecking`' do
+ merge_request.update!(merge_status: 'cannot_be_merged_rechecking')
+
+ get api("/projects/#{project.id}/merge_requests/#{merge_request.iid}", user)
+
+ expect(json_response['merge_status']).to eq 'checking'
+ end
+
context 'when merge request is unchecked' do
before do
merge_request.mark_as_unchecked!
diff --git a/spec/serializers/discussion_entity_spec.rb b/spec/serializers/discussion_entity_spec.rb
index 4adf1dc5994..b441fd08b98 100644
--- a/spec/serializers/discussion_entity_spec.rb
+++ b/spec/serializers/discussion_entity_spec.rb
@@ -73,9 +73,19 @@ describe DiscussionEntity do
:diff_file,
:truncated_diff_lines,
:position,
+ :positions,
+ :line_codes,
:line_code,
:active
)
end
+
+ context 'diff_head_compare feature is disabled' do
+ it 'does not expose positions and line_codes attributes' do
+ stub_feature_flags(merge_ref_head_comments: false)
+
+ expect(subject.keys).not_to include(:positions, :line_codes)
+ end
+ end
end
end
diff --git a/spec/serializers/merge_request_basic_entity_spec.rb b/spec/serializers/merge_request_basic_entity_spec.rb
new file mode 100644
index 00000000000..53ba66a79ac
--- /dev/null
+++ b/spec/serializers/merge_request_basic_entity_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe MergeRequestBasicEntity do
+ let(:resource) { build(:merge_request) }
+
+ subject do
+ described_class.new(resource).as_json
+ end
+
+ it 'has public_merge_status as merge_status' do
+ expect(resource).to receive(:public_merge_status).and_return('checking')
+
+ expect(subject[:merge_status]).to eq 'checking'
+ end
+end
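
This entity spec expects the serialized `merge_status` field to come from `public_merge_status`. With Grape::Entity, which GitLab serializers build on, that is typically a one-line `expose ... as:` declaration; the entity file itself is not shown in this excerpt, so the following is an assumed sketch:

    require 'grape_entity'

    # Assumed shape of the serializer change exercised by the spec above.
    class MergeRequestBasicEntity < Grape::Entity
      expose :public_merge_status, as: :merge_status
    end

    # MergeRequestBasicEntity.new(merge_request).as_json[:merge_status] # => "checking"
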
diff --git a/spec/serializers/merge_request_poll_cached_widget_entity_spec.rb b/spec/serializers/merge_request_poll_cached_widget_entity_spec.rb
index 685abbf7e6c..9f96e5711a4 100644
--- a/spec/serializers/merge_request_poll_cached_widget_entity_spec.rb
+++ b/spec/serializers/merge_request_poll_cached_widget_entity_spec.rb
@@ -19,6 +19,12 @@ describe MergeRequestPollCachedWidgetEntity do
is_expected.to include(:target_branch_sha)
end
+ it 'has public_merge_status as merge_status' do
+ expect(resource).to receive(:public_merge_status).and_return('checking')
+
+ expect(subject[:merge_status]).to eq 'checking'
+ end
+
describe 'diverged_commits_count' do
context 'when MR open and its diverging' do
it 'returns diverged commits count' do
diff --git a/spec/services/pod_logs/base_service_spec.rb b/spec/services/pod_logs/base_service_spec.rb
index fb53321352b..3ec5dc68c60 100644
--- a/spec/services/pod_logs/base_service_spec.rb
+++ b/spec/services/pod_logs/base_service_spec.rb
@@ -13,10 +13,16 @@ describe ::PodLogs::BaseService do
let(:container_name) { 'container-0' }
let(:params) { {} }
let(:raw_pods) do
- JSON.parse([
- kube_pod(name: pod_name),
- kube_pod(name: pod_name_2)
- ].to_json, object_class: OpenStruct)
+ [
+ {
+ name: pod_name,
+ container_names: %w(container-0-0 container-0-1)
+ },
+ {
+ name: pod_name_2,
+ container_names: %w(container-1-0 container-1-1)
+ }
+ ]
end
subject { described_class.new(cluster, namespace, params: params) }
@@ -99,19 +105,6 @@ describe ::PodLogs::BaseService do
end
end
- describe '#get_raw_pods' do
- let(:service) { create(:cluster_platform_kubernetes, :configured) }
-
- it 'returns success with passthrough k8s response' do
- stub_kubeclient_pods(namespace)
-
- result = subject.send(:get_raw_pods, {})
-
- expect(result[:status]).to eq(:success)
- expect(result[:raw_pods].first).to be_a(Kubeclient::Resource)
- end
- end
-
describe '#get_pod_names' do
it 'returns success with a list of pods' do
result = subject.send(:get_pod_names, raw_pods: raw_pods)
diff --git a/spec/services/pod_logs/elasticsearch_service_spec.rb b/spec/services/pod_logs/elasticsearch_service_spec.rb
index 39aa910d878..e3efce1134b 100644
--- a/spec/services/pod_logs/elasticsearch_service_spec.rb
+++ b/spec/services/pod_logs/elasticsearch_service_spec.rb
@@ -21,8 +21,63 @@ describe ::PodLogs::ElasticsearchService do
]
end
+ let(:raw_pods) do
+ [
+ {
+ name: pod_name,
+ container_names: [container_name, "#{container_name}-1"]
+ }
+ ]
+ end
+
subject { described_class.new(cluster, namespace, params: params) }
+ describe '#get_raw_pods' do
+ before do
+ create(:clusters_applications_elastic_stack, :installed, cluster: cluster)
+ end
+
+ it 'returns success with elasticsearch response' do
+ allow_any_instance_of(::Clusters::Applications::ElasticStack)
+ .to receive(:elasticsearch_client)
+ .and_return(Elasticsearch::Transport::Client.new)
+ allow_any_instance_of(::Gitlab::Elasticsearch::Logs::Pods)
+ .to receive(:pods)
+ .with(namespace)
+ .and_return(raw_pods)
+
+ result = subject.send(:get_raw_pods, {})
+
+ expect(result[:status]).to eq(:success)
+ expect(result[:raw_pods]).to eq(raw_pods)
+ end
+
+ it 'returns an error when ES is unreachable' do
+ allow_any_instance_of(::Clusters::Applications::ElasticStack)
+ .to receive(:elasticsearch_client)
+ .and_return(nil)
+
+ result = subject.send(:get_raw_pods, {})
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Unable to connect to Elasticsearch')
+ end
+
+ it 'handles server errors from elasticsearch' do
+ allow_any_instance_of(::Clusters::Applications::ElasticStack)
+ .to receive(:elasticsearch_client)
+ .and_return(Elasticsearch::Transport::Client.new)
+ allow_any_instance_of(::Gitlab::Elasticsearch::Logs::Pods)
+ .to receive(:pods)
+ .and_raise(Elasticsearch::Transport::Transport::Errors::ServiceUnavailable.new)
+
+ result = subject.send(:get_raw_pods, {})
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Elasticsearch returned status code: ServiceUnavailable')
+ end
+ end
+
describe '#check_times' do
context 'with start and end provided and valid' do
let(:params) do
@@ -168,7 +223,7 @@ describe ::PodLogs::ElasticsearchService do
allow_any_instance_of(::Clusters::Applications::ElasticStack)
.to receive(:elasticsearch_client)
.and_return(Elasticsearch::Transport::Client.new)
- allow_any_instance_of(::Gitlab::Elasticsearch::Logs)
+ allow_any_instance_of(::Gitlab::Elasticsearch::Logs::Lines)
.to receive(:pod_logs)
.with(namespace, pod_name: pod_name, container_name: container_name, search: search, start_time: start_time, end_time: end_time, cursor: cursor)
.and_return({ logs: expected_logs, cursor: expected_cursor })
@@ -195,7 +250,7 @@ describe ::PodLogs::ElasticsearchService do
allow_any_instance_of(::Clusters::Applications::ElasticStack)
.to receive(:elasticsearch_client)
.and_return(Elasticsearch::Transport::Client.new)
- allow_any_instance_of(::Gitlab::Elasticsearch::Logs)
+ allow_any_instance_of(::Gitlab::Elasticsearch::Logs::Lines)
.to receive(:pod_logs)
.and_raise(Elasticsearch::Transport::Transport::Errors::ServiceUnavailable.new)
@@ -209,9 +264,9 @@ describe ::PodLogs::ElasticsearchService do
allow_any_instance_of(::Clusters::Applications::ElasticStack)
.to receive(:elasticsearch_client)
.and_return(Elasticsearch::Transport::Client.new)
- allow_any_instance_of(::Gitlab::Elasticsearch::Logs)
+ allow_any_instance_of(::Gitlab::Elasticsearch::Logs::Lines)
.to receive(:pod_logs)
- .and_raise(::Gitlab::Elasticsearch::Logs::InvalidCursor.new)
+ .and_raise(::Gitlab::Elasticsearch::Logs::Lines::InvalidCursor.new)
result = subject.send(:pod_logs, result_arg)
diff --git a/spec/services/pod_logs/kubernetes_service_spec.rb b/spec/services/pod_logs/kubernetes_service_spec.rb
index ff0554bbe5c..da89c7ee117 100644
--- a/spec/services/pod_logs/kubernetes_service_spec.rb
+++ b/spec/services/pod_logs/kubernetes_service_spec.rb
@@ -20,14 +20,36 @@ describe ::PodLogs::KubernetesService do
end
let(:raw_pods) do
- JSON.parse([
- kube_pod(name: pod_name),
- kube_pod(name: pod_name_2, container_name: container_name_2)
- ].to_json, object_class: OpenStruct)
+ [
+ {
+ name: pod_name,
+ container_names: [container_name, "#{container_name}-1"]
+ },
+ {
+ name: pod_name_2,
+ container_names: [container_name_2, "#{container_name_2}-1"]
+ }
+ ]
end
subject { described_class.new(cluster, namespace, params: params) }
+ describe '#get_raw_pods' do
+ let(:service) { create(:cluster_platform_kubernetes, :configured) }
+
+ it 'returns success with passthrough k8s response' do
+ stub_kubeclient_pods(namespace)
+
+ result = subject.send(:get_raw_pods, {})
+
+ expect(result[:status]).to eq(:success)
+ expect(result[:raw_pods]).to eq([{
+ name: 'kube-pod',
+ container_names: %w(container-0 container-0-1)
+ }])
+ end
+ end
+
describe '#pod_logs' do
let(:result_arg) do
{
@@ -233,7 +255,7 @@ describe ::PodLogs::KubernetesService do
end
it 'returns error if container_name was not specified and there are no containers on the pod' do
- raw_pods.first.spec.containers = []
+ raw_pods.first[:container_names] = []
result = subject.send(:check_container_name,
pod_name: pod_name,
diff --git a/spec/uploaders/records_uploads_spec.rb b/spec/uploaders/records_uploads_spec.rb
index 140595e58ad..71eff23c77c 100644
--- a/spec/uploaders/records_uploads_spec.rb
+++ b/spec/uploaders/records_uploads_spec.rb
@@ -78,8 +78,7 @@ describe RecordsUploads do
path: File.join('uploads', 'rails_sample.jpg'),
size: 512.kilobytes,
model: build_stubbed(:user),
- uploader: uploader.class.to_s,
- store: ::ObjectStorage::Store::LOCAL
+ uploader: uploader.class.to_s
)
uploader.upload = existing
@@ -99,8 +98,7 @@ describe RecordsUploads do
path: File.join('uploads', 'rails_sample.jpg'),
size: 512.kilobytes,
model: project,
- uploader: uploader.class.to_s,
- store: ::ObjectStorage::Store::LOCAL
+ uploader: uploader.class.to_s
)
uploader.store!(upload_fixture('rails_sample.jpg'))
diff --git a/spec/workers/concerns/cronjob_queue_spec.rb b/spec/workers/concerns/cronjob_queue_spec.rb
index ea3b7bad2e1..0cea67bf116 100644
--- a/spec/workers/concerns/cronjob_queue_spec.rb
+++ b/spec/workers/concerns/cronjob_queue_spec.rb
@@ -14,6 +14,10 @@ describe CronjobQueue do
end
end
+ before do
+ stub_const("DummyWorker", worker)
+ end
+
it 'sets the queue name of a worker' do
expect(worker.sidekiq_options['queue'].to_s).to eq('cronjob:dummy')
end
@@ -29,4 +33,22 @@ describe CronjobQueue do
expect(worker_context[:root_namespace]).to be_nil
expect(worker_context[:project]).to be_nil
end
+
+ it 'gets scheduled with caller_id set to Cronjob' do
+ worker.perform_async
+
+ job = worker.jobs.last
+
+ expect(job).to include('meta.caller_id' => 'Cronjob')
+ end
+
+ it 'does not set the caller_id if there was already one in the context' do
+ Gitlab::ApplicationContext.with_context(caller_id: 'already set') do
+ worker.perform_async
+ end
+
+ job = worker.jobs.last
+
+ expect(job).to include('meta.caller_id' => 'already set')
+ end
end
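
The two added examples pin down the scheduling behaviour: jobs enqueued by cron get `meta.caller_id` set to `Cronjob`, unless the surrounding application context already carries a `caller_id`. A sketch of that decision, reusing the `current_context_include?` helper tested earlier in this diff; `schedule_cron_worker` is a hypothetical wrapper, not the actual concern code:

    # Sketch only: one way to get the behaviour the spec asserts.
    def schedule_cron_worker(worker_class, *args)
      if Gitlab::ApplicationContext.current_context_include?(:caller_id)
        # An explicit caller_id (e.g. "already set") wins over the cron default.
        worker_class.perform_async(*args)
      else
        Gitlab::ApplicationContext.with_context(caller_id: 'Cronjob') do
          worker_class.perform_async(*args)
        end
      end
    end
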
diff --git a/yarn.lock b/yarn.lock
index 81bbf8a59e3..280bc27d183 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -786,10 +786,10 @@
resolved "https://registry.yarnpkg.com/@gitlab/svgs/-/svgs-1.119.0.tgz#ff35c160b1e726f4f1702a4a152712fb669cde4e"
integrity sha512-bI+kewDLJy1N0//BpUPMx3h5AqG6lfIiV53lxQP9ttn8j/jhyxigZXq1wZ901PI4/ALv9IY1DOSSuLouK4sJsQ==
-"@gitlab/ui@11.2.1":
- version "11.2.1"
- resolved "https://registry.yarnpkg.com/@gitlab/ui/-/ui-11.2.1.tgz#c031c92204eb2a6036ae3c7482387f66c75524fd"
- integrity sha512-AhDPYmrBtaW6Qu+inSjoMCWx+Ou3CVIhhGAEjyVsAG7rSOilevVMZui2IlSb6fPtLcXS8F78DTWjp3R1YgxxbQ==
+"@gitlab/ui@12.0.0":
+ version "12.0.0"
+ resolved "https://registry.yarnpkg.com/@gitlab/ui/-/ui-12.0.0.tgz#595ebaaf3d4213dcd5a9707384e1c98ea7ba41b6"
+ integrity sha512-5c2zRivIkPgTZi3+kfF8vbP2lTrwTSN1pBP1NaPe74sYw8yEe5ULCjAWKikh3ROUmsRs7+u5A/Wh3j1qaZ2/VA==
dependencies:
"@babel/standalone" "^7.0.0"
"@gitlab/vue-toasted" "^1.3.0"