Welcome to mirror list, hosted at ThFree Co, Russian Federation.

gitlab.com/gitlab-org/gitlab-foss.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorGitLab Bot <gitlab-bot@gitlab.com>2020-05-22 12:08:09 +0300
committerGitLab Bot <gitlab-bot@gitlab.com>2020-05-22 12:08:09 +0300
commit4a3ba3e5f261eb09e6b2b4fd44373e7a1c454a72 (patch)
tree1a94467252ebcc5575c7de6a3590360ce05b9967
parent707c0eca50cf9a5290806ce637be30f4c7288def (diff)
Add latest changes from gitlab-org/gitlab@master
-rw-r--r--.rubocop.yml5
-rw-r--r--app/assets/javascripts/ide/components/repo_editor.vue15
-rw-r--r--app/assets/javascripts/ide/stores/actions/file.js4
-rw-r--r--app/assets/javascripts/ide/stores/utils.js4
-rw-r--r--app/assets/javascripts/ide/utils.js18
-rw-r--r--app/assets/stylesheets/page_bundles/ide.scss2
-rw-r--r--app/controllers/projects/artifacts_controller.rb12
-rw-r--r--app/helpers/application_settings_helper.rb2
-rw-r--r--app/models/application_setting.rb7
-rw-r--r--app/models/application_setting_implementation.rb4
-rw-r--r--app/models/board_group_recent_visit.rb2
-rw-r--r--app/models/board_project_recent_visit.rb2
-rw-r--r--app/models/ci/build.rb16
-rw-r--r--app/models/ci/job_artifact.rb4
-rw-r--r--app/models/ci/pipeline.rb4
-rw-r--r--app/models/ci/runner.rb4
-rw-r--r--app/models/clusters/applications/prometheus.rb2
-rw-r--r--app/models/commit_status.rb6
-rw-r--r--app/models/concerns/each_batch.rb2
-rw-r--r--app/models/concerns/has_status.rb2
-rw-r--r--app/models/concerns/noteable.rb2
-rw-r--r--app/models/concerns/prometheus_adapter.rb2
-rw-r--r--app/models/concerns/resolvable_note.rb4
-rw-r--r--app/models/deployment.rb2
-rw-r--r--app/models/environment.rb2
-rw-r--r--app/models/issue/metrics.rb4
-rw-r--r--app/models/jira_import_state.rb2
-rw-r--r--app/models/license_template.rb2
-rw-r--r--app/models/member.rb2
-rw-r--r--app/models/namespace.rb2
-rw-r--r--app/models/pages_domain.rb8
-rw-r--r--app/models/pages_domain_acme_order.rb2
-rw-r--r--app/models/project.rb6
-rw-r--r--app/models/prometheus_alert_event.rb6
-rw-r--r--app/models/remote_mirror.rb12
-rw-r--r--app/models/repository.rb2
-rw-r--r--app/models/route.rb2
-rw-r--r--app/models/self_managed_prometheus_alert_event.rb6
-rw-r--r--app/models/snippet_input_action.rb36
-rw-r--r--app/models/snippet_input_action_collection.rb25
-rw-r--r--app/models/todo.rb2
-rw-r--r--app/models/user.rb8
-rw-r--r--app/models/wiki_page/meta.rb2
-rw-r--r--app/models/wiki_page/slug.rb4
-rw-r--r--app/services/groups/transfer_service.rb9
-rw-r--r--app/services/projects/prometheus/alerts/create_events_service.rb8
-rw-r--r--app/services/snippets/base_service.rb33
-rw-r--r--app/services/snippets/create_service.rb31
-rw-r--r--app/services/snippets/update_service.rb2
-rw-r--r--app/services/spam/spam_constants.rb22
-rw-r--r--app/services/spam/spam_verdict_service.rb57
-rw-r--r--app/views/admin/application_settings/_spam.html.haml9
-rw-r--r--app/workers/incident_management/process_prometheus_alert_worker.rb16
-rw-r--r--changelogs/unreleased/118613-spam-api-call.yml5
-rw-r--r--changelogs/unreleased/exclude-server-fields-from-exceptions-log.yml5
-rw-r--r--changelogs/unreleased/fix-group-transfer-to-subgroup.yml5
-rw-r--r--changelogs/unreleased/fj-add-snippet-files-param-to-snippet-create-service.yml5
-rw-r--r--changelogs/unreleased/sh-fix-artifacts-download-404.yml5
-rw-r--r--db/migrate/20200508050301_add_spam_check_endpoint_to_application_settings.rb34
-rw-r--r--db/structure.sql4
-rw-r--r--doc/administration/logs.md11
-rw-r--r--doc/api/settings.md2
-rw-r--r--doc/ci/junit_test_reports.md8
-rw-r--r--doc/ci/large_repositories/index.md10
-rw-r--r--doc/ci/yaml/README.md37
-rw-r--r--doc/development/README.md45
-rw-r--r--doc/development/database/index.md48
-rw-r--r--doc/development/database/strings_and_the_text_data_type.md288
-rw-r--r--doc/development/migration_style_guide.md6
-rw-r--r--doc/install/requirements.md2
-rw-r--r--doc/user/markdown.md8
-rw-r--r--doc/user/snippets.md6
-rw-r--r--lib/api/search.rb3
-rw-r--r--lib/api/settings.rb4
-rw-r--r--lib/gitlab/alerting/alert.rb4
-rw-r--r--lib/gitlab/exception_log_formatter.rb2
-rw-r--r--locale/gitlab.pot36
-rw-r--r--spec/controllers/projects/artifacts_controller_spec.rb18
-rw-r--r--spec/factories/alert_management/alerts.rb7
-rw-r--r--spec/features/admin/admin_settings_spec.rb8
-rw-r--r--spec/features/projects/artifacts/user_downloads_artifacts_spec.rb6
-rw-r--r--spec/frontend/ide/components/repo_editor_spec.js28
-rw-r--r--spec/frontend/ide/stores/utils_spec.js25
-rw-r--r--spec/frontend/ide/utils_spec.js43
-rw-r--r--spec/lib/gitlab/alerting/alert_spec.rb4
-rw-r--r--spec/lib/gitlab/cleanup/orphan_lfs_file_references_spec.rb45
-rw-r--r--spec/models/application_setting_spec.rb24
-rw-r--r--spec/models/broadcast_message_spec.rb6
-rw-r--r--spec/models/ci/build_spec.rb10
-rw-r--r--spec/models/ci/job_artifact_spec.rb4
-rw-r--r--spec/models/ci/pipeline_schedule_spec.rb2
-rw-r--r--spec/models/ci/pipeline_spec.rb2
-rw-r--r--spec/models/ci/runner_spec.rb2
-rw-r--r--spec/models/clusters/applications/prometheus_spec.rb8
-rw-r--r--spec/models/commit_status_spec.rb4
-rw-r--r--spec/models/concerns/each_batch_spec.rb2
-rw-r--r--spec/models/concerns/issuable_spec.rb6
-rw-r--r--spec/models/concerns/milestoneish_spec.rb4
-rw-r--r--spec/models/concerns/resolvable_discussion_spec.rb2
-rw-r--r--spec/models/concerns/sortable_spec.rb2
-rw-r--r--spec/models/deployment_spec.rb8
-rw-r--r--spec/models/environment_spec.rb2
-rw-r--r--spec/models/event_spec.rb6
-rw-r--r--spec/models/instance_configuration_spec.rb2
-rw-r--r--spec/models/issue/metrics_spec.rb10
-rw-r--r--spec/models/issue_spec.rb12
-rw-r--r--spec/models/iteration_spec.rb12
-rw-r--r--spec/models/jira_import_state_spec.rb2
-rw-r--r--spec/models/member_spec.rb6
-rw-r--r--spec/models/members/project_member_spec.rb2
-rw-r--r--spec/models/merge_request_diff_commit_spec.rb2
-rw-r--r--spec/models/merge_request_spec.rb2
-rw-r--r--spec/models/milestone_spec.rb32
-rw-r--r--spec/models/note_spec.rb2
-rw-r--r--spec/models/pages_domain_spec.rb8
-rw-r--r--spec/models/project_spec.rb4
-rw-r--r--spec/models/project_wiki_spec.rb4
-rw-r--r--spec/models/prometheus_alert_event_spec.rb4
-rw-r--r--spec/models/remote_mirror_spec.rb24
-rw-r--r--spec/models/repository_spec.rb14
-rw-r--r--spec/models/snippet_input_action_collection_spec.rb37
-rw-r--r--spec/models/snippet_input_action_spec.rb45
-rw-r--r--spec/models/todo_spec.rb2
-rw-r--r--spec/models/user_spec.rb8
-rw-r--r--spec/requests/api/settings_spec.rb17
-rw-r--r--spec/services/alert_management/create_alert_issue_service_spec.rb6
-rw-r--r--spec/services/groups/transfer_service_spec.rb9
-rw-r--r--spec/services/projects/prometheus/alerts/create_events_service_spec.rb2
-rw-r--r--spec/services/snippets/create_service_spec.rb59
-rw-r--r--spec/services/spam/spam_verdict_service_spec.rb245
-rw-r--r--spec/support/shared_contexts/spam_constants.rb1
-rw-r--r--spec/workers/incident_management/process_alert_worker_spec.rb6
-rw-r--r--spec/workers/incident_management/process_prometheus_alert_worker_spec.rb3
133 files changed, 1513 insertions, 379 deletions
diff --git a/.rubocop.yml b/.rubocop.yml
index c9e8e44bd7c..4dad1fb7a18 100644
--- a/.rubocop.yml
+++ b/.rubocop.yml
@@ -435,4 +435,7 @@ Rails/TimeZone:
- 'ee/app/services/**/*'
- 'ee/spec/controllers/**/*'
- 'ee/spec/services/**/*'
-
+ - 'app/models/**/*'
+ - 'spec/models/**/*'
+ - 'ee/app/models/**/*'
+ - 'ee/spec/models/**/*'
diff --git a/app/assets/javascripts/ide/components/repo_editor.vue b/app/assets/javascripts/ide/components/repo_editor.vue
index c72a8b2b0d0..2a8ed076ae2 100644
--- a/app/assets/javascripts/ide/components/repo_editor.vue
+++ b/app/assets/javascripts/ide/components/repo_editor.vue
@@ -14,6 +14,7 @@ import Editor from '../lib/editor';
import FileTemplatesBar from './file_templates/bar.vue';
import { __ } from '~/locale';
import { extractMarkdownImagesFromEntries } from '../stores/utils';
+import { addFinalNewline } from '../utils';
export default {
components: {
@@ -31,6 +32,7 @@ export default {
return {
content: '',
images: {},
+ addFinalNewline: true,
};
},
computed: {
@@ -247,13 +249,14 @@ export default {
this.model.onChange(model => {
const { file } = model;
+ if (!file.active) return;
- if (file.active) {
- this.changeFileContent({
- path: file.path,
- content: model.getModel().getValue(),
- });
- }
+ const monacoModel = model.getModel();
+ const content = monacoModel.getValue();
+ this.changeFileContent({
+ path: file.path,
+ content: this.addFinalNewline ? addFinalNewline(content, monacoModel.getEOL()) : content,
+ });
});
// Handle Cursor Position
diff --git a/app/assets/javascripts/ide/stores/actions/file.js b/app/assets/javascripts/ide/stores/actions/file.js
index da7d4a44bde..03146e8a315 100644
--- a/app/assets/javascripts/ide/stores/actions/file.js
+++ b/app/assets/javascripts/ide/stores/actions/file.js
@@ -4,7 +4,7 @@ import eventHub from '../../eventhub';
import service from '../../services';
import * as types from '../mutation_types';
import router from '../../ide_router';
-import { addFinalNewlineIfNeeded, setPageTitleForFile } from '../utils';
+import { setPageTitleForFile } from '../utils';
import { viewerTypes, stageKeys } from '../../constants';
export const closeFile = ({ commit, state, dispatch }, file) => {
@@ -152,7 +152,7 @@ export const changeFileContent = ({ commit, state, getters }, { path, content })
const file = state.entries[path];
commit(types.UPDATE_FILE_CONTENT, {
path,
- content: addFinalNewlineIfNeeded(content),
+ content,
});
const indexOfChangedFile = state.changedFiles.findIndex(f => f.path === path);
diff --git a/app/assets/javascripts/ide/stores/utils.js b/app/assets/javascripts/ide/stores/utils.js
index 56671142bd4..34148df821f 100644
--- a/app/assets/javascripts/ide/stores/utils.js
+++ b/app/assets/javascripts/ide/stores/utils.js
@@ -272,10 +272,6 @@ export const pathsAreEqual = (a, b) => {
return cleanA === cleanB;
};
-// if the contents of a file dont end with a newline, this function adds a newline
-export const addFinalNewlineIfNeeded = content =>
- content.charAt(content.length - 1) !== '\n' ? `${content}\n` : content;
-
export function extractMarkdownImagesFromEntries(mdFile, entries) {
/**
* Regex to identify an image tag in markdown, like:
diff --git a/app/assets/javascripts/ide/utils.js b/app/assets/javascripts/ide/utils.js
index 7573bfd963c..64be1f59887 100644
--- a/app/assets/javascripts/ide/utils.js
+++ b/app/assets/javascripts/ide/utils.js
@@ -76,3 +76,21 @@ export function registerLanguages(def, ...defs) {
}
export const otherSide = side => (side === SIDE_RIGHT ? SIDE_LEFT : SIDE_RIGHT);
+
+export function addFinalNewline(content, eol = '\n') {
+ return content.slice(-eol.length) !== eol ? `${content}${eol}` : content;
+}
+
+export function getPathParents(path) {
+ const pathComponents = path.split('/');
+ const paths = [];
+ while (pathComponents.length) {
+ pathComponents.pop();
+
+ let parentPath = pathComponents.join('/');
+ if (parentPath.startsWith('/')) parentPath = parentPath.slice(1);
+ if (parentPath) paths.push(parentPath);
+ }
+
+ return paths;
+}
diff --git a/app/assets/stylesheets/page_bundles/ide.scss b/app/assets/stylesheets/page_bundles/ide.scss
index 2de319b51bb..295af0311ae 100644
--- a/app/assets/stylesheets/page_bundles/ide.scss
+++ b/app/assets/stylesheets/page_bundles/ide.scss
@@ -899,7 +899,7 @@ $ide-commit-header-height: 48px;
@include ide-trace-view();
svg {
- --svg-status-bg: var(--ide-background, $white);
+ --svg-status-bg: var(--ide-background, #{$white});
}
.empty-state {
diff --git a/app/controllers/projects/artifacts_controller.rb b/app/controllers/projects/artifacts_controller.rb
index b8663bc59f2..fef3c6cf424 100644
--- a/app/controllers/projects/artifacts_controller.rb
+++ b/app/controllers/projects/artifacts_controller.rb
@@ -113,7 +113,7 @@ class Projects::ArtifactsController < Projects::ApplicationController
def build
@build ||= begin
- build = build_from_id || build_from_ref
+ build = build_from_id || build_from_sha || build_from_ref
build&.present(current_user: current_user)
end
end
@@ -127,7 +127,8 @@ class Projects::ArtifactsController < Projects::ApplicationController
project.builds.find_by_id(params[:job_id]) if params[:job_id]
end
- def build_from_ref
+ def build_from_sha
+ return if params[:job].blank?
return unless @ref_name
commit = project.commit(@ref_name)
@@ -136,6 +137,13 @@ class Projects::ArtifactsController < Projects::ApplicationController
project.latest_successful_build_for_sha(params[:job], commit.id)
end
+ def build_from_ref
+ return if params[:job].blank?
+ return unless @ref_name
+
+ project.latest_successful_build_for_ref(params[:job], @ref_name)
+ end
+
def artifacts_file
@artifacts_file ||= build&.artifacts_file_for_type(params[:file_type] || :archive)
end
diff --git a/app/helpers/application_settings_helper.rb b/app/helpers/application_settings_helper.rb
index b9f0e3582df..d3bdac5be69 100644
--- a/app/helpers/application_settings_helper.rb
+++ b/app/helpers/application_settings_helper.rb
@@ -261,6 +261,8 @@ module ApplicationSettingsHelper
:sourcegraph_enabled,
:sourcegraph_url,
:sourcegraph_public_only,
+ :spam_check_endpoint_enabled,
+ :spam_check_endpoint_url,
:terminal_max_session_time,
:terms,
:throttle_authenticated_api_enabled,
diff --git a/app/models/application_setting.rb b/app/models/application_setting.rb
index b29d6731b08..e88d9ba4e63 100644
--- a/app/models/application_setting.rb
+++ b/app/models/application_setting.rb
@@ -301,6 +301,13 @@ class ApplicationSetting < ApplicationRecord
numericality: { greater_than: 0, less_than_or_equal_to: 10 },
if: :external_authorization_service_enabled
+ validates :spam_check_endpoint_url,
+ addressable_url: true, allow_blank: true
+
+ validates :spam_check_endpoint_url,
+ presence: true,
+ if: :spam_check_endpoint_enabled
+
validates :external_auth_client_key,
presence: true,
if: -> (setting) { setting.external_auth_client_cert.present? }
diff --git a/app/models/application_setting_implementation.rb b/app/models/application_setting_implementation.rb
index 221e4d5e0c6..69292fd11fb 100644
--- a/app/models/application_setting_implementation.rb
+++ b/app/models/application_setting_implementation.rb
@@ -115,6 +115,8 @@ module ApplicationSettingImplementation
sourcegraph_enabled: false,
sourcegraph_url: nil,
sourcegraph_public_only: true,
+ spam_check_endpoint_enabled: false,
+ spam_check_endpoint_url: nil,
minimum_password_length: DEFAULT_MINIMUM_PASSWORD_LENGTH,
namespace_storage_size_limit: 0,
terminal_max_session_time: 0,
@@ -151,7 +153,7 @@ module ApplicationSettingImplementation
snowplow_app_id: nil,
snowplow_iglu_registry_url: nil,
custom_http_clone_url_root: nil,
- productivity_analytics_start_date: Time.now,
+ productivity_analytics_start_date: Time.current,
snippet_size_limit: 50.megabytes
}
end
diff --git a/app/models/board_group_recent_visit.rb b/app/models/board_group_recent_visit.rb
index 2f1cd830791..979f0e1ab92 100644
--- a/app/models/board_group_recent_visit.rb
+++ b/app/models/board_group_recent_visit.rb
@@ -14,7 +14,7 @@ class BoardGroupRecentVisit < ApplicationRecord
def self.visited!(user, board)
visit = find_or_create_by(user: user, group: board.group, board: board)
- visit.touch if visit.updated_at < Time.now
+ visit.touch if visit.updated_at < Time.current
rescue ActiveRecord::RecordNotUnique
retry
end
diff --git a/app/models/board_project_recent_visit.rb b/app/models/board_project_recent_visit.rb
index 236d88e909c..509c8f97b83 100644
--- a/app/models/board_project_recent_visit.rb
+++ b/app/models/board_project_recent_visit.rb
@@ -14,7 +14,7 @@ class BoardProjectRecentVisit < ApplicationRecord
def self.visited!(user, board)
visit = find_or_create_by(user: user, project: board.project, board: board)
- visit.touch if visit.updated_at < Time.now
+ visit.touch if visit.updated_at < Time.current
rescue ActiveRecord::RecordNotUnique
retry
end
diff --git a/app/models/ci/build.rb b/app/models/ci/build.rb
index 645b87ce68c..d6f6515ed23 100644
--- a/app/models/ci/build.rb
+++ b/app/models/ci/build.rb
@@ -137,8 +137,8 @@ module Ci
.includes(:metadata, :job_artifacts_metadata)
end
- scope :with_artifacts_not_expired, ->() { with_downloadable_artifacts.where('artifacts_expire_at IS NULL OR artifacts_expire_at > ?', Time.now) }
- scope :with_expired_artifacts, ->() { with_downloadable_artifacts.where('artifacts_expire_at < ?', Time.now) }
+ scope :with_artifacts_not_expired, ->() { with_downloadable_artifacts.where('artifacts_expire_at IS NULL OR artifacts_expire_at > ?', Time.current) }
+ scope :with_expired_artifacts, ->() { with_downloadable_artifacts.where('artifacts_expire_at < ?', Time.current) }
scope :last_month, ->() { where('created_at > ?', Date.today - 1.month) }
scope :manual_actions, ->() { where(when: :manual, status: COMPLETED_STATUSES + %i[manual]) }
scope :scheduled_actions, ->() { where(when: :delayed, status: COMPLETED_STATUSES + %i[scheduled]) }
@@ -259,7 +259,7 @@ module Ci
end
before_transition any => :waiting_for_resource do |build|
- build.waiting_for_resource_at = Time.now
+ build.waiting_for_resource_at = Time.current
end
before_transition on: :enqueue_waiting_for_resource do |build|
@@ -713,7 +713,7 @@ module Ci
end
def needs_touch?
- Time.now - updated_at > 15.minutes.to_i
+ Time.current - updated_at > 15.minutes.to_i
end
def valid_token?(token)
@@ -776,11 +776,11 @@ module Ci
end
def artifacts_expired?
- artifacts_expire_at && artifacts_expire_at < Time.now
+ artifacts_expire_at && artifacts_expire_at < Time.current
end
def artifacts_expire_in
- artifacts_expire_at - Time.now if artifacts_expire_at
+ artifacts_expire_at - Time.current if artifacts_expire_at
end
def artifacts_expire_in=(value)
@@ -993,7 +993,7 @@ module Ci
end
def update_erased!(user = nil)
- self.update(erased_by: user, erased_at: Time.now, artifacts_expire_at: nil)
+ self.update(erased_by: user, erased_at: Time.current, artifacts_expire_at: nil)
end
def unscoped_project
@@ -1026,7 +1026,7 @@ module Ci
end
def has_expiring_artifacts?
- artifacts_expire_at.present? && artifacts_expire_at > Time.now
+ artifacts_expire_at.present? && artifacts_expire_at > Time.current
end
def job_jwt_variables
diff --git a/app/models/ci/job_artifact.rb b/app/models/ci/job_artifact.rb
index 9aa8526a9cb..e764bdd9133 100644
--- a/app/models/ci/job_artifact.rb
+++ b/app/models/ci/job_artifact.rb
@@ -148,7 +148,7 @@ module Ci
where(file_type: types)
end
- scope :expired, -> (limit) { where('expire_at < ?', Time.now).limit(limit) }
+ scope :expired, -> (limit) { where('expire_at < ?', Time.current).limit(limit) }
scope :locked, -> { where(locked: true) }
scope :unlocked, -> { where(locked: [false, nil]) }
@@ -244,7 +244,7 @@ module Ci
end
def expire_in
- expire_at - Time.now if expire_at
+ expire_at - Time.current if expire_at
end
def expire_in=(value)
diff --git a/app/models/ci/pipeline.rb b/app/models/ci/pipeline.rb
index 5db1635f64d..2b072b22454 100644
--- a/app/models/ci/pipeline.rb
+++ b/app/models/ci/pipeline.rb
@@ -163,11 +163,11 @@ module Ci
# Create a separate worker for each new operation
before_transition [:created, :waiting_for_resource, :preparing, :pending] => :running do |pipeline|
- pipeline.started_at = Time.now
+ pipeline.started_at = Time.current
end
before_transition any => [:success, :failed, :canceled] do |pipeline|
- pipeline.finished_at = Time.now
+ pipeline.finished_at = Time.current
pipeline.update_duration
end
diff --git a/app/models/ci/runner.rb b/app/models/ci/runner.rb
index d4e9217ff9f..03ff952b435 100644
--- a/app/models/ci/runner.rb
+++ b/app/models/ci/runner.rb
@@ -273,7 +273,7 @@ module Ci
def update_cached_info(values)
values = values&.slice(:version, :revision, :platform, :architecture, :ip_address) || {}
- values[:contacted_at] = Time.now
+ values[:contacted_at] = Time.current
cache_attributes(values)
@@ -309,7 +309,7 @@ module Ci
real_contacted_at = read_attribute(:contacted_at)
real_contacted_at.nil? ||
- (Time.now - real_contacted_at) >= contacted_at_max_age
+ (Time.current - real_contacted_at) >= contacted_at_max_age
end
def tag_constraints
diff --git a/app/models/clusters/applications/prometheus.rb b/app/models/clusters/applications/prometheus.rb
index 3183318690c..46ca0d13b65 100644
--- a/app/models/clusters/applications/prometheus.rb
+++ b/app/models/clusters/applications/prometheus.rb
@@ -37,7 +37,7 @@ module Clusters
end
after_transition any => :updating do |application|
- application.update(last_update_started_at: Time.now)
+ application.update(last_update_started_at: Time.current)
end
end
diff --git a/app/models/commit_status.rb b/app/models/commit_status.rb
index 8f152280b51..5622a53bb5d 100644
--- a/app/models/commit_status.rb
+++ b/app/models/commit_status.rb
@@ -136,15 +136,15 @@ class CommitStatus < ApplicationRecord
end
before_transition [:created, :waiting_for_resource, :preparing, :skipped, :manual, :scheduled] => :pending do |commit_status|
- commit_status.queued_at = Time.now
+ commit_status.queued_at = Time.current
end
before_transition [:created, :preparing, :pending] => :running do |commit_status|
- commit_status.started_at = Time.now
+ commit_status.started_at = Time.current
end
before_transition any => [:success, :failed, :canceled] do |commit_status|
- commit_status.finished_at = Time.now
+ commit_status.finished_at = Time.current
end
before_transition any => :failed do |commit_status, transition|
diff --git a/app/models/concerns/each_batch.rb b/app/models/concerns/each_batch.rb
index 6314b46a7e3..af5f4e30d06 100644
--- a/app/models/concerns/each_batch.rb
+++ b/app/models/concerns/each_batch.rb
@@ -17,7 +17,7 @@ module EachBatch
# Example:
#
# User.each_batch do |relation|
- # relation.update_all(updated_at: Time.now)
+ # relation.update_all(updated_at: Time.current)
# end
#
# The supplied block is also passed an optional batch index:
diff --git a/app/models/concerns/has_status.rb b/app/models/concerns/has_status.rb
index b80f8c2bbb2..904de3878e2 100644
--- a/app/models/concerns/has_status.rb
+++ b/app/models/concerns/has_status.rb
@@ -160,7 +160,7 @@ module HasStatus
if started_at && finished_at
finished_at - started_at
elsif started_at
- Time.now - started_at
+ Time.current - started_at
end
end
end
diff --git a/app/models/concerns/noteable.rb b/app/models/concerns/noteable.rb
index 933a0b167e2..183b902dd37 100644
--- a/app/models/concerns/noteable.rb
+++ b/app/models/concerns/noteable.rb
@@ -24,7 +24,7 @@ module Noteable
# The timestamp of the note (e.g. the :created_at or :updated_at attribute if provided via
# API call)
def system_note_timestamp
- @system_note_timestamp || Time.now # rubocop:disable Gitlab/ModuleWithInstanceVariables
+ @system_note_timestamp || Time.current # rubocop:disable Gitlab/ModuleWithInstanceVariables
end
attr_writer :system_note_timestamp
diff --git a/app/models/concerns/prometheus_adapter.rb b/app/models/concerns/prometheus_adapter.rb
index 761a151a474..adb6a59e11c 100644
--- a/app/models/concerns/prometheus_adapter.rb
+++ b/app/models/concerns/prometheus_adapter.rb
@@ -44,7 +44,7 @@ module PrometheusAdapter
{
success: true,
data: data,
- last_update: Time.now.utc
+ last_update: Time.current.utc
}
rescue Gitlab::PrometheusClient::Error => err
{ success: false, result: err.message }
diff --git a/app/models/concerns/resolvable_note.rb b/app/models/concerns/resolvable_note.rb
index 2d2d5fb7168..4e8a1bb643e 100644
--- a/app/models/concerns/resolvable_note.rb
+++ b/app/models/concerns/resolvable_note.rb
@@ -23,7 +23,7 @@ module ResolvableNote
class_methods do
# This method must be kept in sync with `#resolve!`
def resolve!(current_user)
- unresolved.update_all(resolved_at: Time.now, resolved_by_id: current_user.id)
+ unresolved.update_all(resolved_at: Time.current, resolved_by_id: current_user.id)
end
# This method must be kept in sync with `#unresolve!`
@@ -57,7 +57,7 @@ module ResolvableNote
return false unless resolvable?
return false if resolved?
- self.resolved_at = Time.now
+ self.resolved_at = Time.current
self.resolved_by = current_user
self.resolved_by_push = resolved_by_push
diff --git a/app/models/deployment.rb b/app/models/deployment.rb
index ba65acff7f3..aa3e3a8f66d 100644
--- a/app/models/deployment.rb
+++ b/app/models/deployment.rb
@@ -64,7 +64,7 @@ class Deployment < ApplicationRecord
end
before_transition any => [:success, :failed, :canceled] do |deployment|
- deployment.finished_at = Time.now
+ deployment.finished_at = Time.current
end
after_transition any => :success do |deployment|
diff --git a/app/models/environment.rb b/app/models/environment.rb
index 21044771bbb..8dae2d760f5 100644
--- a/app/models/environment.rb
+++ b/app/models/environment.rb
@@ -339,7 +339,7 @@ class Environment < ApplicationRecord
end
def auto_stop_in
- auto_stop_at - Time.now if auto_stop_at
+ auto_stop_at - Time.current if auto_stop_at
end
def auto_stop_in=(value)
diff --git a/app/models/issue/metrics.rb b/app/models/issue/metrics.rb
index d4e51dcfbca..a5e1957c096 100644
--- a/app/models/issue/metrics.rb
+++ b/app/models/issue/metrics.rb
@@ -11,11 +11,11 @@ class Issue::Metrics < ApplicationRecord
def record!
if issue.milestone_id.present? && self.first_associated_with_milestone_at.blank?
- self.first_associated_with_milestone_at = Time.now
+ self.first_associated_with_milestone_at = Time.current
end
if issue_assigned_to_list_label? && self.first_added_to_board_at.blank?
- self.first_added_to_board_at = Time.now
+ self.first_added_to_board_at = Time.current
end
self.save
diff --git a/app/models/jira_import_state.rb b/app/models/jira_import_state.rb
index 92147794e88..9737ebcf8fa 100644
--- a/app/models/jira_import_state.rb
+++ b/app/models/jira_import_state.rb
@@ -47,7 +47,7 @@ class JiraImportState < ApplicationRecord
after_transition initial: :scheduled do |state, _|
state.run_after_commit do
job_id = Gitlab::JiraImport::Stage::StartImportWorker.perform_async(project.id)
- state.update(jid: job_id, scheduled_at: Time.now) if job_id
+ state.update(jid: job_id, scheduled_at: Time.current) if job_id
end
end
diff --git a/app/models/license_template.rb b/app/models/license_template.rb
index 73e403f98b4..bd24259984b 100644
--- a/app/models/license_template.rb
+++ b/app/models/license_template.rb
@@ -39,7 +39,7 @@ class LicenseTemplate
end
# Populate placeholders in the LicenseTemplate content
- def resolve!(project_name: nil, fullname: nil, year: Time.now.year.to_s)
+ def resolve!(project_name: nil, fullname: nil, year: Time.current.year.to_s)
# Ensure the string isn't shared with any other instance of LicenseTemplate
new_content = content.dup
new_content.gsub!(YEAR_TEMPLATE_REGEX, year) if year.present?
diff --git a/app/models/member.rb b/app/models/member.rb
index 791073da095..f2926d32d47 100644
--- a/app/models/member.rb
+++ b/app/models/member.rb
@@ -320,7 +320,7 @@ class Member < ApplicationRecord
return false unless invite?
self.invite_token = nil
- self.invite_accepted_at = Time.now.utc
+ self.invite_accepted_at = Time.current.utc
self.user = new_user
diff --git a/app/models/namespace.rb b/app/models/namespace.rb
index 8116f7a256f..f82edc72794 100644
--- a/app/models/namespace.rb
+++ b/app/models/namespace.rb
@@ -277,7 +277,7 @@ class Namespace < ApplicationRecord
end
def has_parent?
- parent.present?
+ parent_id.present? || parent.present?
end
def root_ancestor
diff --git a/app/models/pages_domain.rb b/app/models/pages_domain.rb
index da5e4012f05..856496f0941 100644
--- a/app/models/pages_domain.rb
+++ b/app/models/pages_domain.rb
@@ -49,11 +49,11 @@ class PagesDomain < ApplicationRecord
after_update :update_daemon, if: :saved_change_to_pages_config?
after_destroy :update_daemon
- scope :enabled, -> { where('enabled_until >= ?', Time.now ) }
+ scope :enabled, -> { where('enabled_until >= ?', Time.current ) }
scope :needs_verification, -> do
verified_at = arel_table[:verified_at]
enabled_until = arel_table[:enabled_until]
- threshold = Time.now + VERIFICATION_THRESHOLD
+ threshold = Time.current + VERIFICATION_THRESHOLD
where(verified_at.eq(nil).or(enabled_until.eq(nil).or(enabled_until.lt(threshold))))
end
@@ -69,7 +69,7 @@ class PagesDomain < ApplicationRecord
from_union([user_provided, certificate_not_valid, certificate_expiring])
end
- scope :for_removal, -> { where("remove_at < ?", Time.now) }
+ scope :for_removal, -> { where("remove_at < ?", Time.current) }
scope :with_logging_info, -> { includes(project: [:namespace, :route]) }
@@ -141,7 +141,7 @@ class PagesDomain < ApplicationRecord
def expired?
return false unless x509
- current = Time.new
+ current = Time.current
current < x509.not_before || x509.not_after < current
end
diff --git a/app/models/pages_domain_acme_order.rb b/app/models/pages_domain_acme_order.rb
index 63d7fbc8206..411456cc237 100644
--- a/app/models/pages_domain_acme_order.rb
+++ b/app/models/pages_domain_acme_order.rb
@@ -3,7 +3,7 @@
class PagesDomainAcmeOrder < ApplicationRecord
belongs_to :pages_domain
- scope :expired, -> { where("expires_at < ?", Time.now) }
+ scope :expired, -> { where("expires_at < ?", Time.current) }
validates :pages_domain, presence: true
validates :expires_at, presence: true
diff --git a/app/models/project.rb b/app/models/project.rb
index c9e6a8a49e8..c657e76c86f 100644
--- a/app/models/project.rb
+++ b/app/models/project.rb
@@ -506,6 +506,10 @@ class Project < ApplicationRecord
left_outer_joins(:pages_metadatum)
.where(project_pages_metadata: { project_id: nil })
end
+ scope :with_api_entity_associations, -> {
+ preload(:project_feature, :route, :tags,
+ group: :ip_restrictions, namespace: [:route, :owner])
+ }
enum auto_cancel_pending_pipelines: { disabled: 0, enabled: 1 }
@@ -1036,7 +1040,7 @@ class Project < ApplicationRecord
remote_mirrors.stuck.update_all(
update_status: :failed,
last_error: _('The remote mirror took to long to complete.'),
- last_update_at: Time.now
+ last_update_at: Time.current
)
end
diff --git a/app/models/prometheus_alert_event.rb b/app/models/prometheus_alert_event.rb
index 7e61f6d5e3c..25f58a0b9d5 100644
--- a/app/models/prometheus_alert_event.rb
+++ b/app/models/prometheus_alert_event.rb
@@ -34,10 +34,4 @@ class PrometheusAlertEvent < ApplicationRecord
def self.status_value_for(name)
state_machines[:status].states[name].value
end
-
- def self.payload_key_for(gitlab_alert_id, started_at)
- plain = [gitlab_alert_id, started_at].join('/')
-
- Digest::SHA1.hexdigest(plain)
- end
end
diff --git a/app/models/remote_mirror.rb b/app/models/remote_mirror.rb
index 8e7612e63c8..8b15d481c1b 100644
--- a/app/models/remote_mirror.rb
+++ b/app/models/remote_mirror.rb
@@ -68,13 +68,13 @@ class RemoteMirror < ApplicationRecord
after_transition any => :started do |remote_mirror, _|
Gitlab::Metrics.add_event(:remote_mirrors_running)
- remote_mirror.update(last_update_started_at: Time.now)
+ remote_mirror.update(last_update_started_at: Time.current)
end
after_transition started: :finished do |remote_mirror, _|
Gitlab::Metrics.add_event(:remote_mirrors_finished)
- timestamp = Time.now
+ timestamp = Time.current
remote_mirror.update!(
last_update_at: timestamp,
last_successful_update_at: timestamp,
@@ -86,7 +86,7 @@ class RemoteMirror < ApplicationRecord
after_transition started: :failed do |remote_mirror|
Gitlab::Metrics.add_event(:remote_mirrors_failed)
- remote_mirror.update(last_update_at: Time.now)
+ remote_mirror.update(last_update_at: Time.current)
remote_mirror.run_after_commit do
RemoteMirrorNotificationWorker.perform_async(remote_mirror.id)
@@ -144,9 +144,9 @@ class RemoteMirror < ApplicationRecord
return unless sync?
if recently_scheduled?
- RepositoryUpdateRemoteMirrorWorker.perform_in(backoff_delay, self.id, Time.now)
+ RepositoryUpdateRemoteMirrorWorker.perform_in(backoff_delay, self.id, Time.current)
else
- RepositoryUpdateRemoteMirrorWorker.perform_async(self.id, Time.now)
+ RepositoryUpdateRemoteMirrorWorker.perform_async(self.id, Time.current)
end
end
@@ -261,7 +261,7 @@ class RemoteMirror < ApplicationRecord
def recently_scheduled?
return false unless self.last_update_started_at
- self.last_update_started_at >= Time.now - backoff_delay
+ self.last_update_started_at >= Time.current - backoff_delay
end
def reset_fields
diff --git a/app/models/repository.rb b/app/models/repository.rb
index 2673033ff1f..16b4fdabf48 100644
--- a/app/models/repository.rb
+++ b/app/models/repository.rb
@@ -1171,7 +1171,7 @@ class Repository
if target
target.committed_date
else
- Time.now
+ Time.current
end
end
end
diff --git a/app/models/route.rb b/app/models/route.rb
index 63a0461807b..706589e79b8 100644
--- a/app/models/route.rb
+++ b/app/models/route.rb
@@ -42,7 +42,7 @@ class Route < ApplicationRecord
old_path = route.path
# Callbacks must be run manually
- route.update_columns(attributes.merge(updated_at: Time.now))
+ route.update_columns(attributes.merge(updated_at: Time.current))
# We are not calling route.delete_conflicting_redirects here, in hopes
# of avoiding deadlocks. The parent (self, in this method) already
diff --git a/app/models/self_managed_prometheus_alert_event.rb b/app/models/self_managed_prometheus_alert_event.rb
index d2d4a5c37d4..cf26563e92d 100644
--- a/app/models/self_managed_prometheus_alert_event.rb
+++ b/app/models/self_managed_prometheus_alert_event.rb
@@ -15,10 +15,4 @@ class SelfManagedPrometheusAlertEvent < ApplicationRecord
yield event if block_given?
end
end
-
- def self.payload_key_for(started_at, alert_name, query_expression)
- plain = [started_at, alert_name, query_expression].join('/')
-
- Digest::SHA1.hexdigest(plain)
- end
end
diff --git a/app/models/snippet_input_action.rb b/app/models/snippet_input_action.rb
new file mode 100644
index 00000000000..50de1b0c04e
--- /dev/null
+++ b/app/models/snippet_input_action.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+class SnippetInputAction
+ include ActiveModel::Validations
+
+ ACTIONS = %w[create update delete move].freeze
+
+ ACTIONS.each do |action_const|
+ define_method "#{action_const}_action?" do
+ action == action_const
+ end
+ end
+
+ attr_reader :action, :previous_path, :file_path, :content
+
+ validates :action, inclusion: { in: ACTIONS, message: "%{value} is not a valid action" }
+ validates :previous_path, presence: true, if: :move_action?
+ validates :file_path, presence: true
+ validates :content, presence: true, if: :create_action?
+
+ def initialize(action: nil, previous_path: nil, file_path: nil, content: nil)
+ @action = action
+ @previous_path = previous_path
+ @file_path = file_path
+ @content = content
+ end
+
+ def to_commit_action
+ {
+ action: action&.to_sym,
+ previous_path: previous_path,
+ file_path: file_path,
+ content: content
+ }
+ end
+end
diff --git a/app/models/snippet_input_action_collection.rb b/app/models/snippet_input_action_collection.rb
new file mode 100644
index 00000000000..bf20bce5b0a
--- /dev/null
+++ b/app/models/snippet_input_action_collection.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+class SnippetInputActionCollection
+ include Gitlab::Utils::StrongMemoize
+
+ attr_reader :actions
+
+ delegate :empty?, to: :actions
+
+ def initialize(actions = [])
+ @actions = actions.map { |action| SnippetInputAction.new(action) }
+ end
+
+ def to_commit_actions
+ strong_memoize(:commit_actions) do
+ actions.map { |action| action.to_commit_action }
+ end
+ end
+
+ def valid?
+ strong_memoize(:valid) do
+ actions.all?(&:valid?)
+ end
+ end
+end
diff --git a/app/models/todo.rb b/app/models/todo.rb
index dc42551f0ab..b5ca2ec7466 100644
--- a/app/models/todo.rb
+++ b/app/models/todo.rb
@@ -110,7 +110,7 @@ class Todo < ApplicationRecord
base = where.not(state: new_state).except(:order)
ids = base.pluck(:id)
- base.update_all(state: new_state, updated_at: Time.now)
+ base.update_all(state: new_state, updated_at: Time.current)
ids
end
diff --git a/app/models/user.rb b/app/models/user.rb
index b2d3978551e..1ba5b9cdf71 100644
--- a/app/models/user.rb
+++ b/app/models/user.rb
@@ -688,7 +688,7 @@ class User < ApplicationRecord
@reset_token, enc = Devise.token_generator.generate(self.class, :reset_password_token)
self.reset_password_token = enc
- self.reset_password_sent_at = Time.now.utc
+ self.reset_password_sent_at = Time.current.utc
@reset_token
end
@@ -1126,7 +1126,7 @@ class User < ApplicationRecord
if !Gitlab.config.ldap.enabled
false
elsif ldap_user?
- !last_credential_check_at || (last_credential_check_at + ldap_sync_time) < Time.now
+ !last_credential_check_at || (last_credential_check_at + ldap_sync_time) < Time.current
else
false
end
@@ -1373,7 +1373,7 @@ class User < ApplicationRecord
def contributed_projects
events = Event.select(:project_id)
.contributions.where(author_id: self)
- .where("created_at > ?", Time.now - 1.year)
+ .where("created_at > ?", Time.current - 1.year)
.distinct
.reorder(nil)
@@ -1646,7 +1646,7 @@ class User < ApplicationRecord
end
def password_expired?
- !!(password_expires_at && password_expires_at < Time.now)
+ !!(password_expires_at && password_expires_at < Time.current)
end
def can_be_deactivated?
diff --git a/app/models/wiki_page/meta.rb b/app/models/wiki_page/meta.rb
index 474968122b1..215d84dc463 100644
--- a/app/models/wiki_page/meta.rb
+++ b/app/models/wiki_page/meta.rb
@@ -120,7 +120,7 @@ class WikiPage
end
def insert_slugs(strings, is_new, canonical_slug)
- creation = Time.now.utc
+ creation = Time.current.utc
slug_attrs = strings.map do |slug|
{
diff --git a/app/models/wiki_page/slug.rb b/app/models/wiki_page/slug.rb
index 246fa8d6442..c1725d34921 100644
--- a/app/models/wiki_page/slug.rb
+++ b/app/models/wiki_page/slug.rb
@@ -16,11 +16,11 @@ class WikiPage
scope :canonical, -> { where(canonical: true) }
def update_columns(attrs = {})
- super(attrs.reverse_merge(updated_at: Time.now.utc))
+ super(attrs.reverse_merge(updated_at: Time.current.utc))
end
def self.update_all(attrs = {})
- super(attrs.reverse_merge(updated_at: Time.now.utc))
+ super(attrs.reverse_merge(updated_at: Time.current.utc))
end
end
end
diff --git a/app/services/groups/transfer_service.rb b/app/services/groups/transfer_service.rb
index fe3ab884302..3f4de659c78 100644
--- a/app/services/groups/transfer_service.rb
+++ b/app/services/groups/transfer_service.rb
@@ -45,6 +45,7 @@ module Groups
raise_transfer_error(:invalid_policies) unless valid_policies?
raise_transfer_error(:namespace_with_same_path) if namespace_with_same_path?
raise_transfer_error(:group_contains_images) if group_projects_contain_registry_images?
+ raise_transfer_error(:cannot_transfer_to_subgroup) if transfer_to_subgroup?
end
def group_is_already_root?
@@ -55,6 +56,11 @@ module Groups
@new_parent_group && @new_parent_group.id == @group.parent_id
end
+ def transfer_to_subgroup?
+ @new_parent_group && \
+ @group.self_and_descendants.pluck_primary_key.include?(@new_parent_group.id)
+ end
+
def valid_policies?
return false unless can?(current_user, :admin_group, @group)
@@ -125,7 +131,8 @@ module Groups
group_is_already_root: s_('TransferGroup|Group is already a root group.'),
same_parent_as_current: s_('TransferGroup|Group is already associated to the parent group.'),
invalid_policies: s_("TransferGroup|You don't have enough permissions."),
- group_contains_images: s_('TransferGroup|Cannot update the path because there are projects under this group that contain Docker images in their Container Registry. Please remove the images from your projects first and try again.')
+ group_contains_images: s_('TransferGroup|Cannot update the path because there are projects under this group that contain Docker images in their Container Registry. Please remove the images from your projects first and try again.'),
+ cannot_transfer_to_subgroup: s_('TransferGroup|Cannot transfer group to one of its subgroup.')
}.freeze
end
end
diff --git a/app/services/projects/prometheus/alerts/create_events_service.rb b/app/services/projects/prometheus/alerts/create_events_service.rb
index a29240947ff..4fcf841314b 100644
--- a/app/services/projects/prometheus/alerts/create_events_service.rb
+++ b/app/services/projects/prometheus/alerts/create_events_service.rb
@@ -40,17 +40,13 @@ module Projects
def create_managed_prometheus_alert_event(parsed_alert)
alert = find_alert(parsed_alert.metric_id)
- payload_key = PrometheusAlertEvent.payload_key_for(parsed_alert.metric_id, parsed_alert.starts_at_raw)
-
- event = PrometheusAlertEvent.find_or_initialize_by_payload_key(parsed_alert.project, alert, payload_key)
+ event = PrometheusAlertEvent.find_or_initialize_by_payload_key(parsed_alert.project, alert, parsed_alert.gitlab_fingerprint)
set_status(parsed_alert, event)
end
def create_self_managed_prometheus_alert_event(parsed_alert)
- payload_key = SelfManagedPrometheusAlertEvent.payload_key_for(parsed_alert.starts_at_raw, parsed_alert.title, parsed_alert.full_query)
-
- event = SelfManagedPrometheusAlertEvent.find_or_initialize_by_payload_key(parsed_alert.project, payload_key) do |event|
+ event = SelfManagedPrometheusAlertEvent.find_or_initialize_by_payload_key(parsed_alert.project, parsed_alert.gitlab_fingerprint) do |event|
event.environment = parsed_alert.environment
event.title = parsed_alert.title
event.query_expression = parsed_alert.full_query
diff --git a/app/services/snippets/base_service.rb b/app/services/snippets/base_service.rb
index 81d12997335..60ba9ff6683 100644
--- a/app/services/snippets/base_service.rb
+++ b/app/services/snippets/base_service.rb
@@ -6,12 +6,13 @@ module Snippets
CreateRepositoryError = Class.new(StandardError)
- attr_reader :uploaded_files
+ attr_reader :uploaded_assets, :snippet_files
def initialize(project, user = nil, params = {})
super
- @uploaded_files = Array(@params.delete(:files).presence)
+ @uploaded_assets = Array(@params.delete(:files).presence)
+ @snippet_files = SnippetInputActionCollection.new(Array(@params.delete(:snippet_files).presence))
filter_spam_check_params
end
@@ -22,12 +23,30 @@ module Snippets
Gitlab::VisibilityLevel.allowed_for?(current_user, visibility_level)
end
- def error_forbidden_visibility(snippet)
+ def forbidden_visibility_error(snippet)
deny_visibility_level(snippet)
snippet_error_response(snippet, 403)
end
+ def valid_params?
+ return true if snippet_files.empty?
+
+ (params.keys & [:content, :file_name]).none? && snippet_files.valid?
+ end
+
+ def invalid_params_error(snippet)
+ if snippet_files.valid?
+ [:content, :file_name].each do |key|
+ snippet.errors.add(key, 'and snippet files cannot be used together') if params.key?(key)
+ end
+ else
+ snippet.errors.add(:snippet_files, 'have invalid data')
+ end
+
+ snippet_error_response(snippet, 403)
+ end
+
def snippet_error_response(snippet, http_status)
ServiceResponse.error(
message: snippet.errors.full_messages.to_sentence,
@@ -52,5 +71,13 @@ module Snippets
message
end
+
+ def files_to_commit
+ snippet_files.to_commit_actions.presence || build_actions_from_params
+ end
+
+ def build_actions_from_params
+ raise NotImplementedError
+ end
end
end
diff --git a/app/services/snippets/create_service.rb b/app/services/snippets/create_service.rb
index ed6da3a0ad0..b3af2d91e5c 100644
--- a/app/services/snippets/create_service.rb
+++ b/app/services/snippets/create_service.rb
@@ -5,8 +5,10 @@ module Snippets
def execute
@snippet = build_from_params
+ return invalid_params_error(@snippet) unless valid_params?
+
unless visibility_allowed?(@snippet, @snippet.visibility_level)
- return error_forbidden_visibility(@snippet)
+ return forbidden_visibility_error(@snippet)
end
@snippet.author = current_user
@@ -29,12 +31,23 @@ module Snippets
def build_from_params
if project
- project.snippets.build(params)
+ project.snippets.build(create_params)
else
- PersonalSnippet.new(params)
+ PersonalSnippet.new(create_params)
end
end
+ # If the snippet_files param is present
+ # we need to fill content and file_name from
+ # the model
+ def create_params
+ return params if snippet_files.empty?
+
+ first_file = snippet_files.actions.first
+
+ params.merge(content: first_file.content, file_name: first_file.file_path)
+ end
+
def save_and_commit
snippet_saved = @snippet.save
@@ -75,19 +88,19 @@ module Snippets
message: 'Initial commit'
}
- @snippet.snippet_repository.multi_files_action(current_user, snippet_files, commit_attrs)
- end
-
- def snippet_files
- [{ file_path: params[:file_name], content: params[:content] }]
+ @snippet.snippet_repository.multi_files_action(current_user, files_to_commit, commit_attrs)
end
def move_temporary_files
return unless @snippet.is_a?(PersonalSnippet)
- uploaded_files.each do |file|
+ uploaded_assets.each do |file|
FileMover.new(file, from_model: current_user, to_model: @snippet).execute
end
end
+
+ def build_actions_from_params
+ [{ file_path: params[:file_name], content: params[:content] }]
+ end
end
end
diff --git a/app/services/snippets/update_service.rb b/app/services/snippets/update_service.rb
index 2dc9266dbd0..45b047af3d0 100644
--- a/app/services/snippets/update_service.rb
+++ b/app/services/snippets/update_service.rb
@@ -8,7 +8,7 @@ module Snippets
def execute(snippet)
if visibility_changed?(snippet) && !visibility_allowed?(snippet, visibility_level)
- return error_forbidden_visibility(snippet)
+ return forbidden_visibility_error(snippet)
end
snippet.assign_attributes(params)
diff --git a/app/services/spam/spam_constants.rb b/app/services/spam/spam_constants.rb
index 085bac684c4..26526daa313 100644
--- a/app/services/spam/spam_constants.rb
+++ b/app/services/spam/spam_constants.rb
@@ -2,8 +2,24 @@
module Spam
module SpamConstants
- REQUIRE_RECAPTCHA = :recaptcha
- DISALLOW = :disallow
- ALLOW = :allow
+ REQUIRE_RECAPTCHA = "recaptcha"
+ DISALLOW = "disallow"
+ ALLOW = "allow"
+ BLOCK_USER = "block"
+
+ SUPPORTED_VERDICTS = {
+ BLOCK_USER => {
+ priority: 1
+ },
+ DISALLOW => {
+ priority: 2
+ },
+ REQUIRE_RECAPTCHA => {
+ priority: 3
+ },
+ ALLOW => {
+ priority: 4
+ }
+ }.freeze
end
end
diff --git a/app/services/spam/spam_verdict_service.rb b/app/services/spam/spam_verdict_service.rb
index 2b4d5f4a984..ef026d7f9af 100644
--- a/app/services/spam/spam_verdict_service.rb
+++ b/app/services/spam/spam_verdict_service.rb
@@ -5,13 +5,31 @@ module Spam
include AkismetMethods
include SpamConstants
- def initialize(target:, request:, options:)
+ def initialize(target:, request:, options:, verdict_params: {})
@target = target
@request = request
@options = options
+ @verdict_params = assemble_verdict_params(verdict_params)
end
def execute
+ external_spam_check_result = spam_verdict
+ akismet_result = akismet_verdict
+
+ # filter out anything we don't recognise, including nils.
+ valid_results = [external_spam_check_result, akismet_result].compact.select { |r| SUPPORTED_VERDICTS.key?(r) }
+ # Treat nils - such as service unavailable - as ALLOW
+ return ALLOW unless valid_results.any?
+
+ # Favour the most restrictive result.
+ valid_results.min_by { |v| SUPPORTED_VERDICTS[v][:priority] }
+ end
+
+ private
+
+ attr_reader :target, :request, :options, :verdict_params
+
+ def akismet_verdict
if akismet.spam?
Gitlab::Recaptcha.enabled? ? REQUIRE_RECAPTCHA : DISALLOW
else
@@ -19,8 +37,41 @@ module Spam
end
end
- private
+ def spam_verdict
+ return unless Gitlab::CurrentSettings.spam_check_endpoint_enabled
+ return if endpoint_url.blank?
+
+ result = Gitlab::HTTP.try_get(endpoint_url, verdict_params)
+ return unless result
+
+ begin
+ json_result = Gitlab::Json.parse(result).with_indifferent_access
+ # @TODO metrics/logging
+ # Expecting:
+ # error: (string or nil)
+ # result: (string or nil)
+ verdict = json_result[:verdict]
+ return unless SUPPORTED_VERDICTS.include?(verdict)
- attr_reader :target, :request, :options
+ # @TODO log if json_result[:error]
+
+ json_result[:verdict]
+ rescue
+ # @TODO log
+ ALLOW
+ end
+ end
+
+ def assemble_verdict_params(params)
+ return {} unless endpoint_url
+
+ params.merge({
+ user_id: target.author_id
+ })
+ end
+
+ def endpoint_url
+ @endpoint_url ||= Gitlab::CurrentSettings.current_application_settings.spam_check_endpoint_url
+ end
end
end
diff --git a/app/views/admin/application_settings/_spam.html.haml b/app/views/admin/application_settings/_spam.html.haml
index f0a19075115..ab9368e723e 100644
--- a/app/views/admin/application_settings/_spam.html.haml
+++ b/app/views/admin/application_settings/_spam.html.haml
@@ -62,4 +62,13 @@
.form-text.text-muted
How many seconds an IP will be counted towards the limit
+ .form-group
+ .form-check
+ = f.check_box :spam_check_endpoint_enabled, class: 'form-check-input'
+ = f.label :spam_check_endpoint_enabled, _('Enable Spam Check via external API endpoint'), class: 'form-check-label'
+ .form-text.text-muted= _('Define custom rules for what constitutes spam, independent of Akismet')
+ .form-group
+ = f.label :spam_check_endpoint_url, _('URL of the external Spam Check endpoint'), class: 'label-bold'
+ = f.text_field :spam_check_endpoint_url, class: 'form-control'
+
= f.submit 'Save changes', class: "btn btn-success"
diff --git a/app/workers/incident_management/process_prometheus_alert_worker.rb b/app/workers/incident_management/process_prometheus_alert_worker.rb
index 768e049c60e..e405bc2c2d2 100644
--- a/app/workers/incident_management/process_prometheus_alert_worker.rb
+++ b/app/workers/incident_management/process_prometheus_alert_worker.rb
@@ -41,23 +41,11 @@ module IncidentManagement
end
def find_gitlab_managed_event(alert)
- payload_key = payload_key_for_alert(alert)
-
- PrometheusAlertEvent.find_by_payload_key(payload_key)
+ PrometheusAlertEvent.find_by_payload_key(alert.gitlab_fingerprint)
end
def find_self_managed_event(alert)
- payload_key = payload_key_for_alert(alert)
-
- SelfManagedPrometheusAlertEvent.find_by_payload_key(payload_key)
- end
-
- def payload_key_for_alert(alert)
- if alert.gitlab_managed?
- PrometheusAlertEvent.payload_key_for(alert.metric_id, alert.starts_at_raw)
- else
- SelfManagedPrometheusAlertEvent.payload_key_for(alert.starts_at_raw, alert.title, alert.full_query)
- end
+ SelfManagedPrometheusAlertEvent.find_by_payload_key(alert.gitlab_fingerprint)
end
def create_issue(project, alert)
diff --git a/changelogs/unreleased/118613-spam-api-call.yml b/changelogs/unreleased/118613-spam-api-call.yml
new file mode 100644
index 00000000000..68d4aa27071
--- /dev/null
+++ b/changelogs/unreleased/118613-spam-api-call.yml
@@ -0,0 +1,5 @@
+---
+title: SpamVerdictService can call external spam check endpoint
+merge_request: 31449
+author:
+type: added
diff --git a/changelogs/unreleased/exclude-server-fields-from-exceptions-log.yml b/changelogs/unreleased/exclude-server-fields-from-exceptions-log.yml
new file mode 100644
index 00000000000..2fac0330b5c
--- /dev/null
+++ b/changelogs/unreleased/exclude-server-fields-from-exceptions-log.yml
@@ -0,0 +1,5 @@
+---
+title: Exclude extra.server fields from exceptions_json.log
+merge_request: 32770
+author:
+type: changed
diff --git a/changelogs/unreleased/fix-group-transfer-to-subgroup.yml b/changelogs/unreleased/fix-group-transfer-to-subgroup.yml
new file mode 100644
index 00000000000..41294ceca36
--- /dev/null
+++ b/changelogs/unreleased/fix-group-transfer-to-subgroup.yml
@@ -0,0 +1,5 @@
+---
+title: Fix group transfer service to deny moving group to its subgroup
+merge_request: 31495
+author: Abhisek Datta
+type: fixed
diff --git a/changelogs/unreleased/fj-add-snippet-files-param-to-snippet-create-service.yml b/changelogs/unreleased/fj-add-snippet-files-param-to-snippet-create-service.yml
new file mode 100644
index 00000000000..cd3a5ead9e6
--- /dev/null
+++ b/changelogs/unreleased/fj-add-snippet-files-param-to-snippet-create-service.yml
@@ -0,0 +1,5 @@
+---
+title: Allow the snippet create service to accept an array of files
+merge_request: 32649
+author:
+type: changed
diff --git a/changelogs/unreleased/sh-fix-artifacts-download-404.yml b/changelogs/unreleased/sh-fix-artifacts-download-404.yml
new file mode 100644
index 00000000000..37615394344
--- /dev/null
+++ b/changelogs/unreleased/sh-fix-artifacts-download-404.yml
@@ -0,0 +1,5 @@
+---
+title: Fix 404s downloading build artifacts
+merge_request: 32741
+author:
+type: fixed
diff --git a/db/migrate/20200508050301_add_spam_check_endpoint_to_application_settings.rb b/db/migrate/20200508050301_add_spam_check_endpoint_to_application_settings.rb
new file mode 100644
index 00000000000..d30b8abbbc3
--- /dev/null
+++ b/db/migrate/20200508050301_add_spam_check_endpoint_to_application_settings.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+class AddSpamCheckEndpointToApplicationSettings < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ unless column_exists?(:application_settings, :spam_check_endpoint_url)
+ add_column :application_settings, :spam_check_endpoint_url, :text
+ end
+
+ add_text_limit :application_settings, :spam_check_endpoint_url, 255
+
+ unless column_exists?(:application_settings, :spam_check_endpoint_enabled)
+ add_column :application_settings, :spam_check_endpoint_enabled, :boolean, null: false, default: false
+ end
+ end
+
+ def down
+ remove_column_if_exists :spam_check_endpoint_url
+ remove_column_if_exists :spam_check_endpoint_enabled
+ end
+
+ private
+
+ def remove_column_if_exists(column)
+ return unless column_exists?(:application_settings, column)
+
+ remove_column :application_settings, column
+ end
+end
diff --git a/db/structure.sql b/db/structure.sql
index 17e525e3ed3..052a98d3df3 100644
--- a/db/structure.sql
+++ b/db/structure.sql
@@ -441,7 +441,10 @@ CREATE TABLE public.application_settings (
container_registry_vendor text DEFAULT ''::text NOT NULL,
container_registry_version text DEFAULT ''::text NOT NULL,
container_registry_features text[] DEFAULT '{}'::text[] NOT NULL,
+ spam_check_endpoint_url text,
+ spam_check_endpoint_enabled boolean DEFAULT false NOT NULL,
CONSTRAINT check_d03919528d CHECK ((char_length(container_registry_vendor) <= 255)),
+ CONSTRAINT check_d820146492 CHECK ((char_length(spam_check_endpoint_url) <= 255)),
CONSTRAINT check_e5aba18f02 CHECK ((char_length(container_registry_version) <= 255))
);
@@ -13880,6 +13883,7 @@ COPY "schema_migrations" (version) FROM STDIN;
20200506125731
20200506154421
20200507221434
+20200508050301
20200508091106
20200511080113
20200511083541
diff --git a/doc/administration/logs.md b/doc/administration/logs.md
index 4483d0d41ce..c679b69ef8f 100644
--- a/doc/administration/logs.md
+++ b/doc/administration/logs.md
@@ -728,17 +728,6 @@ Each line contains a JSON line that can be ingested by Elasticsearch. For exampl
"severity": "ERROR",
"time": "2019-12-17T11:49:29.485Z",
"correlation_id": "AbDVUrrTvM1",
- "extra.server": {
- "os": {
- "name": "Darwin",
- "version": "Darwin Kernel Version 19.2.0",
- "build": "19.2.0",
- },
- "runtime": {
- "name": "ruby",
- "version": "ruby 2.6.5p114 (2019-10-01 revision 67812) [x86_64-darwin18]"
- }
- },
"extra.project_id": 55,
"extra.relation_key": "milestones",
"extra.relation_index": 1,
diff --git a/doc/api/settings.md b/doc/api/settings.md
index f63d126742a..323e9e5714a 100644
--- a/doc/api/settings.md
+++ b/doc/api/settings.md
@@ -335,6 +335,8 @@ are listed in the descriptions of the relevant settings.
| `sourcegraph_enabled` | boolean | no | Enables Sourcegraph integration. Default is `false`. **If enabled, requires** `sourcegraph_url`. |
| `sourcegraph_url` | string | required by: `sourcegraph_enabled` | The Sourcegraph instance URL for integration. |
| `sourcegraph_public_only` | boolean | no | Blocks Sourcegraph from being loaded on private and internal projects. Default is `true`. |
+| `spam_check_endpoint_enabled` | boolean | no | Enables Spam Check via external API endpoint. Default is `false`. |
+| `spam_check_endpoint_url` | string | no | URL of the external Spam Check service endpoint. |
| `terminal_max_session_time` | integer | no | Maximum time for web terminal websocket connection (in seconds). Set to `0` for unlimited time. |
| `terms` | text | required by: `enforce_terms` | (**Required by:** `enforce_terms`) Markdown content for the ToS. |
| `throttle_authenticated_api_enabled` | boolean | no | (**If enabled, requires:** `throttle_authenticated_api_period_in_seconds` and `throttle_authenticated_api_requests_per_period`) Enable authenticated API request rate limit. Helps reduce request volume (for example, from crawlers or abusive bots). |
diff --git a/doc/ci/junit_test_reports.md b/doc/ci/junit_test_reports.md
index 58a013ecd9d..50be2689adf 100644
--- a/doc/ci/junit_test_reports.md
+++ b/doc/ci/junit_test_reports.md
@@ -215,14 +215,6 @@ Test:
- ./**/*test-result.xml
```
-## Limitations
-
-Currently, the following tools might not work because their XML formats are unsupported in GitLab.
-
-|Case|Tool|Issue|
-|---|---|---|
-|`<testcase>` does not have `classname` attribute|ESlint, sass-lint|<https://gitlab.com/gitlab-org/gitlab-foss/-/issues/50964>|
-
## Viewing JUnit test reports on GitLab
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/24792) in GitLab 12.5.
diff --git a/doc/ci/large_repositories/index.md b/doc/ci/large_repositories/index.md
index b4059fc252b..affcb8f4df9 100644
--- a/doc/ci/large_repositories/index.md
+++ b/doc/ci/large_repositories/index.md
@@ -114,6 +114,16 @@ For exact parameters accepted by
for [`git clean`](https://git-scm.com/docs/git-clean). The available parameters
are dependent on Git version.
+## Git fetch extra flags
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/4142) in GitLab Runner 13.1.
+
+[`GIT_FETCH_EXTRA_FLAGS`](../yaml/README.md#git-fetch-extra-flags) allows you
+to modify `git fetch` behavior by passing extra flags.
+
+See the [`GIT_FETCH_EXTRA_FLAGS` documentation](../yaml/README.md#git-fetch-extra-flags)
+for more information.
+
## Fork-based workflow
> Introduced in GitLab Runner 11.10.
diff --git a/doc/ci/yaml/README.md b/doc/ci/yaml/README.md
index eaba3887cc8..e00d0ab1cf8 100644
--- a/doc/ci/yaml/README.md
+++ b/doc/ci/yaml/README.md
@@ -3459,6 +3459,43 @@ script:
- ls -al cache/
```
+### Git fetch extra flags
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/4142) in GitLab Runner 13.1.
+
+The `GIT_FETCH_EXTRA_FLAGS` variable is used to control the behavior of
+`git fetch`. You can set it globally or per-job in the [`variables`](#variables) section.
+
+`GIT_FETCH_EXTRA_FLAGS` accepts all possible options of the [`git fetch`](https://git-scm.com/docs/git-fetch) command, but please note that `GIT_FETCH_EXTRA_FLAGS` flags will be appended after the default flags that can't be modified.
+
+The default flags are:
+
+- [GIT_DEPTH](#shallow-cloning).
+- The list of [refspecs](https://git-scm.com/book/en/v2/Git-Internals-The-Refspec).
+- A remote called `origin`.
+
+If `GIT_FETCH_EXTRA_FLAGS` is:
+
+- Not specified, `git fetch` flags default to `--prune --quiet` along with the default flags.
+- Given the value `none`, `git fetch` is executed only with the default flags.
+
+For example, the default flags are `--prune --quiet`, so you can make `git fetch` more verbose by overriding this with just `--prune`:
+
+```yaml
+variables:
+ GIT_FETCH_EXTRA_FLAGS: --prune
+script:
+ - ls -al cache/
+```
+
+The configuration above will result in `git fetch` being called this way:
+
+```shell
+git fetch origin $REFSPECS --depth 50 --prune
+```
+
+Where `$REFSPECS` is a value provided to the Runner internally by GitLab.
+
### Job stages attempts
> Introduced in GitLab, it requires GitLab Runner v1.9+.
diff --git a/doc/development/README.md b/doc/development/README.md
index 22cc21e12b9..458f5cc2350 100644
--- a/doc/development/README.md
+++ b/doc/development/README.md
@@ -113,50 +113,7 @@ Complementary reads:
## Database guides
-### Tooling
-
-- [Understanding EXPLAIN plans](understanding_explain_plans.md)
-- [explain.depesz.com](https://explain.depesz.com/) for visualizing the output
- of `EXPLAIN`
-- [pgFormatter](http://sqlformat.darold.net/) a PostgreSQL SQL syntax beautifier
-
-### Migrations
-
-- [What requires downtime?](what_requires_downtime.md)
-- [SQL guidelines](sql.md) for working with SQL queries
-- [Migrations style guide](migration_style_guide.md) for creating safe SQL migrations
-- [Testing Rails migrations](testing_guide/testing_migrations_guide.md) guide
-- [Post deployment migrations](post_deployment_migrations.md)
-- [Background migrations](background_migrations.md)
-- [Swapping tables](swapping_tables.md)
-- [Deleting migrations](deleting_migrations.md)
-
-### Debugging
-
-- Tracing the source of an SQL query using query comments with [Marginalia](database_query_comments.md)
-- Tracing the source of an SQL query in Rails console using [Verbose Query Logs](https://guides.rubyonrails.org/debugging_rails_applications.html#verbose-query-logs)
-
-### Best practices
-
-- [Adding database indexes](adding_database_indexes.md)
-- [Foreign keys & associations](foreign_keys.md)
-- [Single table inheritance](single_table_inheritance.md)
-- [Polymorphic associations](polymorphic_associations.md)
-- [Serializing data](serializing_data.md)
-- [Hash indexes](hash_indexes.md)
-- [Storing SHA1 hashes as binary](sha1_as_binary.md)
-- [Iterating tables in batches](iterating_tables_in_batches.md)
-- [Insert into tables in batches](insert_into_tables_in_batches.md)
-- [Ordering table columns](ordering_table_columns.md)
-- [Verifying database capabilities](verifying_database_capabilities.md)
-- [Database Debugging and Troubleshooting](database_debugging.md)
-- [Query Count Limits](query_count_limits.md)
-- [Creating enums](creating_enums.md)
-
-### Case studies
-
-- [Database case study: Filtering by label](filtering_by_label.md)
-- [Database case study: Namespaces storage statistics](namespaces_storage_statistics.md)
+See [database guidelines](database/index.md).
## Integration guides
diff --git a/doc/development/database/index.md b/doc/development/database/index.md
new file mode 100644
index 00000000000..8935e5f6efa
--- /dev/null
+++ b/doc/development/database/index.md
@@ -0,0 +1,48 @@
+# Database guides
+
+## Tooling
+
+- [Understanding EXPLAIN plans](../understanding_explain_plans.md)
+- [explain.depesz.com](https://explain.depesz.com/) for visualizing the output
+ of `EXPLAIN`
+- [pgFormatter](http://sqlformat.darold.net/) a PostgreSQL SQL syntax beautifier
+
+## Migrations
+
+- [What requires downtime?](../what_requires_downtime.md)
+- [SQL guidelines](../sql.md) for working with SQL queries
+- [Migrations style guide](../migration_style_guide.md) for creating safe SQL migrations
+- [Testing Rails migrations](../testing_guide/testing_migrations_guide.md) guide
+- [Post deployment migrations](../post_deployment_migrations.md)
+- [Background migrations](../background_migrations.md)
+- [Swapping tables](../swapping_tables.md)
+- [Deleting migrations](../deleting_migrations.md)
+
+## Debugging
+
+- Tracing the source of an SQL query using query comments with [Marginalia](../database_query_comments.md)
+- Tracing the source of an SQL query in Rails console using [Verbose Query Logs](https://guides.rubyonrails.org/debugging_rails_applications.html#verbose-query-logs)
+
+## Best practices
+
+- [Adding database indexes](../adding_database_indexes.md)
+- [Foreign keys & associations](../foreign_keys.md)
+- [Adding a foreign key constraint to an existing column](add_foreign_key_to_existing_column.md)
+- [Strings and the Text data type](strings_and_the_text_data_type.md)
+- [Single table inheritance](../single_table_inheritance.md)
+- [Polymorphic associations](../polymorphic_associations.md)
+- [Serializing data](../serializing_data.md)
+- [Hash indexes](../hash_indexes.md)
+- [Storing SHA1 hashes as binary](../sha1_as_binary.md)
+- [Iterating tables in batches](../iterating_tables_in_batches.md)
+- [Insert into tables in batches](../insert_into_tables_in_batches.md)
+- [Ordering table columns](../ordering_table_columns.md)
+- [Verifying database capabilities](../verifying_database_capabilities.md)
+- [Database Debugging and Troubleshooting](../database_debugging.md)
+- [Query Count Limits](../query_count_limits.md)
+- [Creating enums](../creating_enums.md)
+
+## Case studies
+
+- [Database case study: Filtering by label](../filtering_by_label.md)
+- [Database case study: Namespaces storage statistics](../namespaces_storage_statistics.md)
diff --git a/doc/development/database/strings_and_the_text_data_type.md b/doc/development/database/strings_and_the_text_data_type.md
new file mode 100644
index 00000000000..67e7f896b61
--- /dev/null
+++ b/doc/development/database/strings_and_the_text_data_type.md
@@ -0,0 +1,288 @@
+# Strings and the Text data type
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/30453) in GitLab 13.0.
+
+When adding new columns that will be used to store strings or other textual information:
+
+1. We always use the `text` data type instead of the `string` data type.
+1. `text` columns should always have a limit set by using the `add_text_limit` migration helper.
+
+The `text` data type cannot be defined with a limit, so `add_text_limit` is enforcing that by
+adding a [check constraint](https://www.postgresql.org/docs/11/ddl-constraints.html) on the
+column and then validating it at a followup step.
+
+## Background info
+
+The reason we always want to use `text` instead of `string` is that `string` columns have the
+disadvantage that if you want to update their limit, you have to run an `ALTER TABLE ...` command.
+
+While a limit is added, the `ALTER TABLE ...` command requires an `EXCLUSIVE LOCK` on the table, which
+is held throughout the process of updating the column and while validating all existing records, a
+process that can take a while for large tables.
+
+On the other hand, texts are [more or less equivalent to strings](https://www.depesz.com/2010/03/02/charx-vs-varcharx-vs-varchar-vs-text/) in PostgreSQL,
+while having the additional advantage that adding a limit on an existing column or updating their
+limit does not require the very costly `EXCLUSIVE LOCK` to be held throughout the validation phase.
+We can start by updating the constraint with the valid option off, which requires an `EXCLUSIVE LOCK`
+but only for updating the declaration of the columns. We can then validate it at a later step using
+`VALIDATE CONSTRAINT`, which requires only a `SHARE UPDATE EXCLUSIVE LOCK` (only conflicts with other
+validations and index creation while it allows reads and writes).
+
+## Create a new table with text columns
+
+When adding a new table, the limits for all text columns should be added in the same migration as
+the table creation.
+
+For example, consider a migration that creates a table with two text columns,
+**db/migrate/20200401000001_create_db_guides.rb**:
+
+```ruby
+class CreateDbGuides < ActiveRecord::Migration[6.0]
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ unless table_exists?(:db_guides)
+ create_table :db_guides do |t|
+ t.bigint :stars, default: 0, null: false
+ t.text :title
+ t.text :notes
+ end
+ end
+
+ # The following add the constraints and validate them immediately (no data in the table)
+ add_text_limit :db_guides, :title, 128
+ add_text_limit :db_guides, :notes, 1024
+ end
+
+ def down
+ # No need to drop the constraints, drop_table takes care of everything
+ drop_table :db_guides
+ end
+end
+```
+
+Adding a check constraint requires an exclusive lock while the `ALTER TABLE` that adds it is running.
+As we don't want the exclusive lock to be held for the duration of a transaction, `add_text_limit`
+must always run in a migration with `disable_ddl_transaction!`.
+
+Also, note that we have to add a check that the table exists so that the migration can be repeated
+in case of a failure.
+
+## Add a text column to an existing table
+
+Adding a column to an existing table requires an exclusive lock for that table. Even though that lock
+is held for a brief amount of time, the time `add_column` needs to complete its execution can vary
+depending on how frequently the table is accessed. For example, acquiring an exclusive lock for a very
+frequently accessed table may take minutes in GitLab.com and requires the use of `with_lock_retries`.
+
+For these reasons, it is advised to add the text limit in a separate migration from the `add_column` one.
+
+For example, consider a migration that adds a new text column `extended_title` to table `sprints`,
+**db/migrate/20200501000001_add_extended_title_to_sprints.rb**:
+
+```ruby
+class AddExtendedTitleToSprints < ActiveRecord::Migration[6.0]
+ DOWNTIME = false
+
+ # rubocop:disable Migration/AddLimitToTextColumns
+ # limit is added in 20200501000002_add_text_limit_to_sprints_extended_title
+ def change
+ add_column :sprints, :extended_title, :text
+ end
+ # rubocop:enable Migration/AddLimitToTextColumns
+end
+```
+
+A second migration should follow the first one with a limit added to `extended_title`,
+**db/migrate/20200501000002_add_text_limit_to_sprints_extended_title.rb**:
+
+```ruby
+class AddTextLimitToSprintsExtendedTitle < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ add_text_limit :sprints, :extended_title, 512
+ end
+
+ def down
+ # Down is required as `add_text_limit` is not reversible
+ remove_text_limit :sprints, :extended_title
+ end
+end
+```
+
+## Add a text limit constraint to an existing column
+
+Adding text limits to existing database columns requires multiple steps split into at least two different releases:
+
+1. Release `N.M` (current release)
+
+ - Add a post-deployment migration to add the limit to the text column with `validate: false`.
+ - Add a post-deployment migration to fix the existing records.
+
+ NOTE: **Note:**
+ Depending on the size of the table, a background migration for cleanup could be required in the next release.
+ See [text limit constraints on large tables](strings_and_the_text_data_type.md#text-limit-constraints-on-large-tables) for more information.
+
+ - Create an issue for the next milestone to validate the text limit.
+
+1. Release `N.M+1` (next release)
+
+ - Validate the text limit using a post-deployment migration.
+
+### Example
+
+Let's assume we want to add a `1024` limit to `issues.title_html` for a given release milestone,
+such as 13.0.
+
+Issues is a pretty busy and large table with more than 25 million rows, so we don't want to lock all
+other processes that try to access it while running the update.
+
+Also, after checking our production database, we know that there are `issues` with more characters in
+their title than the 1024 character limit, so we can not add and validate the constraint in one step.
+
+NOTE: **Note:**
+Even if we did not have any record with a title larger than the provided limit, another
+instance of GitLab could have such records, so we would follow the same process either way.
+
+#### Prevent new invalid records (current release)
+
+We first add the limit as a `NOT VALID` check constraint to the table, which enforces consistency when
+new records are inserted or current records are updated.
+
+In the example above, the existing issues with more than 1024 characters in their title will not be
+affected, and you'll still be able to update records in the `issues` table. However, when you'd try
+to update the `title_html` with a title that has more than 1024 characters, the constraint causes
+a database error.
+
+Adding or removing a constraint to an existing attribute requires that any application changes are
+deployed _first_, [otherwise servers still in the old version of the application may try to update the
+attribute with invalid values](../multi_version_compatibility.md#ci-artifact-uploads-were-failing).
+For these reasons, `add_text_limit` should run in a post-deployment migration.
+
+Still in our example, for the 13.0 milestone (current), consider that the following validation
+has been added to model `Issue`:
+
+```ruby
+validates :title_html, length: { maximum: 1024 }
+```
+
+We can also update the database in the same milestone by adding the text limit with `validate: false`
+in a post-deployment migration,
+**db/post_migrate/20200501000001_add_text_limit_migration.rb**:
+
+```ruby
+class AddTextLimitMigration < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ # This will add the constraint WITHOUT validating it
+ add_text_limit :issues, :title_html, 1024, validate: false
+ end
+
+ def down
+ # Down is required as `add_text_limit` is not reversible
+ remove_text_limit :issues, :title_html
+ end
+end
+```
+
+#### Data migration to fix existing records (current release)
+
+The approach here depends on the data volume and the cleanup strategy. The number of records that must
+be fixed on GitLab.com is a nice indicator that will help us decide whether to use a post-deployment
+migration or a background data migration:
+
+- If the data volume is less than `1,000` records, then the data migration can be executed within the post-migration.
+- If the data volume is higher than `1,000` records, it's advised to create a background migration.
+
+When unsure about which option to use, please contact the Database team for advice.
+
+Back to our example, the issues table is considerably large and frequently accessed, so we are going
+to add a background migration for the 13.0 milestone (current),
+**db/post_migrate/20200501000002_schedule_cap_title_length_on_issues.rb**:
+
+```ruby
+class ScheduleCapTitleLengthOnIssues < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ # Info on how many records will be affected on GitLab.com
+ # time each batch needs to run on average, etc ...
+ BATCH_SIZE = 5000
+ DELAY_INTERVAL = 2.minutes.to_i
+
+ # Background migration will update issues whose title is longer than 1024 limit
+ ISSUES_BACKGROUND_MIGRATION = 'CapTitleLengthOnIssues'.freeze
+
+ disable_ddl_transaction!
+
+ class Issue < ActiveRecord::Base
+ include EachBatch
+
+ self.table_name = 'issues'
+ end
+
+ def up
+ queue_background_migration_jobs_by_range_at_intervals(
+ Issue.where('char_length(title_html) > 1024'),
+      ISSUES_BACKGROUND_MIGRATION,
+ DELAY_INTERVAL,
+ batch_size: BATCH_SIZE
+ )
+ end
+
+ def down
+ # no-op : the part of the title_html after the limit is lost forever
+ end
+end
+```
+
+To keep this guide short, we skipped the definition of the background migration and only
+provided a high-level example of the post-deployment migration that is used to schedule the batches.
+You can find more information in the [background migrations](../background_migrations.md) guide.
+
+#### Validate the text limit (next release)
+
+Validating the text limit will scan the whole table and make sure that each record is correct.
+
+Still in our example, for the 13.1 milestone (next), we run the `validate_text_limit` migration
+helper in a final post-deployment migration,
+**db/post_migrate/20200601000001_validate_text_limit_migration.rb**:
+
+```ruby
+class ValidateTextLimitMigration < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ validate_text_limit :issues, :title_html
+ end
+
+ def down
+ # no-op
+ end
+end
+```
+
+## Text limit constraints on large tables
+
+If you have to clean up a text column for a really [large table](https://gitlab.com/gitlab-org/gitlab/-/blob/master/rubocop/migration_helpers.rb#L12)
+(for example, the `artifacts` in `ci_builds`), your background migration will go on for a while and
+it will need an additional [background migration cleaning up](../background_migrations.md#cleaning-up)
+in the release after adding the data migration.
+
+In that rare case you will need 3 releases end-to-end:
+
+1. Release `N.M` - Add the text limit and the background migration to fix the existing records.
+1. Release `N.M+1` - Cleanup the background migration.
+1. Release `N.M+2` - Validate the text limit.
diff --git a/doc/development/migration_style_guide.md b/doc/development/migration_style_guide.md
index 0db214e1401..46fc94a4b2d 100644
--- a/doc/development/migration_style_guide.md
+++ b/doc/development/migration_style_guide.md
@@ -727,6 +727,12 @@ Rails migration example:
add_column(:projects, :foo, :integer, default: 10, limit: 8)
```
+## Strings and the Text data type
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/30453) in GitLab 13.0.
+
+See the [text data type](database/strings_and_the_text_data_type.md) style guide for more information.
+
## Timestamp column type
By default, Rails uses the `timestamp` data type that stores timestamp data
diff --git a/doc/install/requirements.md b/doc/install/requirements.md
index f094e143a41..ba25f73441d 100644
--- a/doc/install/requirements.md
+++ b/doc/install/requirements.md
@@ -126,7 +126,7 @@ available when needed.
Our [Memory Team](https://about.gitlab.com/handbook/engineering/development/enablement/memory/) is actively working to reduce the memory requirement.
-NOTE: **Note:** The 25 workers of Sidekiq will show up as separate processes in your process overview (such as `top` or `htop`) but they share the same RAM allocation since Sidekiq is a multithreaded application. Please see the section below about Unicorn workers for information about how many you need for those.
+NOTE: **Note:** The 25 workers of Sidekiq will show up as separate processes in your process overview (such as `top` or `htop`). However, they share the same RAM allocation, as Sidekiq is a multi-threaded application. See the section below about Unicorn workers for information about how many you need for those.
## Database
diff --git a/doc/user/markdown.md b/doc/user/markdown.md
index 78c4e2eb139..0baa0ec0620 100644
--- a/doc/user/markdown.md
+++ b/doc/user/markdown.md
@@ -76,7 +76,7 @@ NOTE: **Note:** We will flag any significant differences between Redcarpet and C
If you have a large volume of Markdown files, it can be tedious to determine
if they will display correctly or not. You can use the
[diff_redcarpet_cmark](https://gitlab.com/digitalmoksha/diff_redcarpet_cmark)
-tool (not an officially supported product) to generate a list of files, and the
+tool (not an officially supported product) to generate a list of files and the
differences between how RedCarpet and CommonMark render the files. It can give
an indication if anything needs to be changed - often nothing will need
to change.
@@ -253,7 +253,7 @@ Consult the [Emoji Cheat Sheet](https://www.webfx.com/tools/emoji-cheat-sheet/)
> **Note:** The emoji example above uses hard-coded images for this documentation. The emoji,
when rendered within GitLab, may appear different depending on the OS and browser used.
-Most emoji are natively supported on macOS, Windows, iOS, Android and will fallback to image-based emoji where there is lack of support.
+Most emoji are natively supported on macOS, Windows, iOS, Android, and will fall back on image-based emoji where there is no support.
NOTE: **Note:** On Linux, you can download [Noto Color Emoji](https://www.google.com/get/noto/help/emoji/)
to get full native emoji support. Ubuntu 18.04 (like many modern Linux distributions) has
@@ -272,7 +272,7 @@ in a box at the top of the document, before the rendered HTML content. To view a
you can toggle between the source and rendered version of a [GitLab documentation file](https://gitlab.com/gitlab-org/gitlab/blob/master/doc/README.md).
In GitLab, front matter is only used in Markdown files and wiki pages, not the other
-places where Markdown formatting is supported. It must be at the very top of the document,
+places where Markdown formatting is supported. It must be at the very top of the document
and must be between delimiters, as explained below.
The following delimiters are supported:
@@ -405,7 +405,7 @@ GFM recognizes special GitLab related references. For example, you can easily re
an issue, a commit, a team member, or even the whole team within a project. GFM will turn
that reference into a link so you can navigate between them easily.
-Additionally, GFM recognizes certain cross-project references, and also has a shorthand
+Additionally, GFM recognizes certain cross-project references and also has a shorthand
version to reference other projects from the same namespace.
GFM will recognize the following:
diff --git a/doc/user/snippets.md b/doc/user/snippets.md
index 4be79822bab..68e5c5ac92c 100644
--- a/doc/user/snippets.md
+++ b/doc/user/snippets.md
@@ -147,8 +147,8 @@ snippet was created using the GitLab web interface the original line ending is W
> Introduced in GitLab 10.8.
-Public snippets can not only be shared, but also embedded on any website. This
-allows us to reuse a GitLab snippet in multiple places and any change to the source
+Public snippets can not only be shared, but also embedded on any website. With
+this, you can reuse a GitLab snippet in multiple places and any change to the source
is automatically reflected in the embedded snippet.
To embed a snippet, first make sure that:
@@ -172,6 +172,6 @@ Here's how an embedded snippet looks like:
<script src="https://gitlab.com/gitlab-org/gitlab-foss/snippets/1717978.js"></script>
-Embedded snippets are displayed with a header that shows the file name is defined,
+Embedded snippets are displayed with a header that shows the file name if it's defined,
the snippet size, a link to GitLab, and the actual snippet content. Actions in
the header allow users to see the snippet in raw format and download it.
diff --git a/lib/api/search.rb b/lib/api/search.rb
index a9f19a5d781..4dcc710fdb9 100644
--- a/lib/api/search.rb
+++ b/lib/api/search.rb
@@ -21,7 +21,8 @@ module API
}.freeze
SCOPE_PRELOAD_METHOD = {
- merge_requests: :with_api_entity_associations
+ merge_requests: :with_api_entity_associations,
+ projects: :with_api_entity_associations
}.freeze
def search(additional_params = {})
diff --git a/lib/api/settings.rb b/lib/api/settings.rb
index e3a8f0671ef..0056927fec9 100644
--- a/lib/api/settings.rb
+++ b/lib/api/settings.rb
@@ -132,6 +132,10 @@ module API
given sourcegraph_enabled: ->(val) { val } do
requires :sourcegraph_url, type: String, desc: 'The configured Sourcegraph instance URL'
end
+ optional :spam_check_endpoint_enabled, type: Boolean, desc: 'Enable Spam Check via external API endpoint'
+ given spam_check_endpoint_enabled: ->(val) { val } do
+ requires :spam_check_endpoint_url, type: String, desc: 'The URL of the external Spam Check service endpoint'
+ end
optional :terminal_max_session_time, type: Integer, desc: 'Maximum time for web terminal websocket connection (in seconds). Set to 0 for unlimited time.'
optional :usage_ping_enabled, type: Boolean, desc: 'Every week GitLab will report license usage back to GitLab, Inc.'
optional :instance_statistics_visibility_private, type: Boolean, desc: 'When set to `true` Instance statistics will only be available to admins'
diff --git a/lib/gitlab/alerting/alert.rb b/lib/gitlab/alerting/alert.rb
index d859ca89418..8674bd437d4 100644
--- a/lib/gitlab/alerting/alert.rb
+++ b/lib/gitlab/alerting/alert.rb
@@ -121,9 +121,9 @@ module Gitlab
def plain_gitlab_fingerprint
if gitlab_managed?
- [metric_id, starts_at].join('/')
+ [metric_id, starts_at_raw].join('/')
else # self managed
- [starts_at, title, full_query].join('/')
+ [starts_at_raw, title, full_query].join('/')
end
end
diff --git a/lib/gitlab/exception_log_formatter.rb b/lib/gitlab/exception_log_formatter.rb
index 92d55213cc2..9fcacd61318 100644
--- a/lib/gitlab/exception_log_formatter.rb
+++ b/lib/gitlab/exception_log_formatter.rb
@@ -12,6 +12,8 @@ module Gitlab
'exception.message' => exception.message
)
+ payload.delete('extra.server')
+
if exception.backtrace
payload['exception.backtrace'] = Gitlab::BacktraceCleaner.clean_backtrace(exception.backtrace)
end
diff --git a/locale/gitlab.pot b/locale/gitlab.pot
index 96d1606176c..a68418e539d 100644
--- a/locale/gitlab.pot
+++ b/locale/gitlab.pot
@@ -6933,6 +6933,9 @@ msgstr ""
msgid "Define a custom pattern with cron syntax"
msgstr ""
+msgid "Define custom rules for what constitutes spam, independent of Akismet"
+msgstr ""
+
msgid "Define environments in the deploy stage(s) in <code>.gitlab-ci.yml</code> to track deployments here."
msgstr ""
@@ -8055,6 +8058,9 @@ msgstr ""
msgid "Enable Seat Link"
msgstr ""
+msgid "Enable Spam Check via external API endpoint"
+msgstr ""
+
msgid "Enable access to Grafana"
msgstr ""
@@ -14080,6 +14086,9 @@ msgstr ""
msgid "Network"
msgstr ""
+msgid "NetworkPolicies|Enabled"
+msgstr ""
+
msgid "NetworkPolicies|Environment does not have deployment platform"
msgstr ""
@@ -14089,6 +14098,18 @@ msgstr ""
msgid "NetworkPolicies|Kubernetes error: %{error}"
msgstr ""
+msgid "NetworkPolicies|Last modified"
+msgstr ""
+
+msgid "NetworkPolicies|Name"
+msgstr ""
+
+msgid "NetworkPolicies|No policies detected"
+msgstr ""
+
+msgid "NetworkPolicies|Policies are a specification of how groups of pods are allowed to communicate with each other network endpoints."
+msgstr ""
+
msgid "NetworkPolicies|Policy %{policyName} was successfully changed"
msgstr ""
@@ -14098,6 +14119,9 @@ msgstr ""
msgid "NetworkPolicies|Something went wrong, unable to fetch policies"
msgstr ""
+msgid "NetworkPolicies|Status"
+msgstr ""
+
msgid "Never"
msgstr ""
@@ -22347,9 +22371,15 @@ msgstr ""
msgid "ThreatMonitoring|Operations Per Second"
msgstr ""
+msgid "ThreatMonitoring|Overview"
+msgstr ""
+
msgid "ThreatMonitoring|Packet Activity"
msgstr ""
+msgid "ThreatMonitoring|Policies"
+msgstr ""
+
msgid "ThreatMonitoring|Requests"
msgstr ""
@@ -22885,6 +22915,9 @@ msgstr ""
msgid "Transfer project"
msgstr ""
+msgid "TransferGroup|Cannot transfer group to one of its subgroup."
+msgstr ""
+
msgid "TransferGroup|Cannot update the path because there are projects under this group that contain Docker images in their Container Registry. Please remove the images from your projects first and try again."
msgstr ""
@@ -23062,6 +23095,9 @@ msgstr ""
msgid "URL must start with %{codeStart}http://%{codeEnd}, %{codeStart}https://%{codeEnd}, or %{codeStart}ftp://%{codeEnd}"
msgstr ""
+msgid "URL of the external Spam Check endpoint"
+msgstr ""
+
msgid "URL of the external storage that will serve the repository static objects (e.g. archives, blobs, ...)."
msgstr ""
diff --git a/spec/controllers/projects/artifacts_controller_spec.rb b/spec/controllers/projects/artifacts_controller_spec.rb
index be616b566dd..4c815a5b40c 100644
--- a/spec/controllers/projects/artifacts_controller_spec.rb
+++ b/spec/controllers/projects/artifacts_controller_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
describe Projects::ArtifactsController do
+ include RepoHelpers
+
let(:user) { project.owner }
let_it_be(:project) { create(:project, :repository, :public) }
@@ -481,6 +483,22 @@ describe Projects::ArtifactsController do
expect(response).to redirect_to(path)
end
end
+
+ context 'with a failed pipeline on an updated master' do
+ before do
+ create_file_in_repo(project, 'master', 'master', 'test.txt', 'This is test')
+
+ create(:ci_pipeline,
+ project: project,
+ sha: project.commit.sha,
+ ref: project.default_branch,
+ status: 'failed')
+
+ get :latest_succeeded, params: params_from_ref(project.default_branch)
+ end
+
+ it_behaves_like 'redirect to the job'
+ end
end
end
end
diff --git a/spec/factories/alert_management/alerts.rb b/spec/factories/alert_management/alerts.rb
index 01f40a7a465..a23d04dcbe0 100644
--- a/spec/factories/alert_management/alerts.rb
+++ b/spec/factories/alert_management/alerts.rb
@@ -8,6 +8,13 @@ FactoryBot.define do
title { FFaker::Lorem.sentence }
started_at { Time.current }
+ trait :with_validation_errors do
+ after(:create) do |alert|
+ too_many_hosts = Array.new(AlertManagement::Alert::HOSTS_MAX_LENGTH + 1) { |_| 'host' }
+ alert.update_columns(hosts: too_many_hosts)
+ end
+ end
+
trait :with_issue do
issue
end
diff --git a/spec/features/admin/admin_settings_spec.rb b/spec/features/admin/admin_settings_spec.rb
index 7ec3c2abb51..f11e48f4849 100644
--- a/spec/features/admin/admin_settings_spec.rb
+++ b/spec/features/admin/admin_settings_spec.rb
@@ -282,11 +282,13 @@ describe 'Admin updates settings', :clean_gitlab_redis_shared_state, :do_not_moc
visit reporting_admin_application_settings_path
page.within('.as-spam') do
- check 'Enable reCAPTCHA'
- check 'Enable reCAPTCHA for login'
fill_in 'reCAPTCHA Site Key', with: 'key'
fill_in 'reCAPTCHA Private Key', with: 'key'
+ check 'Enable reCAPTCHA'
+ check 'Enable reCAPTCHA for login'
fill_in 'IPs per user', with: 15
+ check 'Enable Spam Check via external API endpoint'
+ fill_in 'URL of the external Spam Check endpoint', with: 'https://www.example.com/spamcheck'
click_button 'Save changes'
end
@@ -294,6 +296,8 @@ describe 'Admin updates settings', :clean_gitlab_redis_shared_state, :do_not_moc
expect(current_settings.recaptcha_enabled).to be true
expect(current_settings.login_recaptcha_protection_enabled).to be true
expect(current_settings.unique_ips_limit_per_user).to eq(15)
+ expect(current_settings.spam_check_endpoint_enabled).to be true
+ expect(current_settings.spam_check_endpoint_url).to eq 'https://www.example.com/spamcheck'
end
end
diff --git a/spec/features/projects/artifacts/user_downloads_artifacts_spec.rb b/spec/features/projects/artifacts/user_downloads_artifacts_spec.rb
index 3cbf276c02d..78ff89799ad 100644
--- a/spec/features/projects/artifacts/user_downloads_artifacts_spec.rb
+++ b/spec/features/projects/artifacts/user_downloads_artifacts_spec.rb
@@ -32,5 +32,11 @@ describe "User downloads artifacts" do
it_behaves_like "downloading"
end
+
+ context "via SHA" do
+ let(:url) { latest_succeeded_project_artifacts_path(project, "#{pipeline.sha}/download", job: job.name) }
+
+ it_behaves_like "downloading"
+ end
end
end
diff --git a/spec/frontend/ide/components/repo_editor_spec.js b/spec/frontend/ide/components/repo_editor_spec.js
index af29e172332..913c4f3747d 100644
--- a/spec/frontend/ide/components/repo_editor_spec.js
+++ b/spec/frontend/ide/components/repo_editor_spec.js
@@ -283,15 +283,25 @@ describe('RepoEditor', () => {
expect(vm.model.events.size).toBe(2);
});
- it('updates state when model content changed', done => {
- vm.model.setValue('testing 123\n');
-
- setImmediate(() => {
- expect(vm.file.content).toBe('testing 123\n');
-
- done();
- });
- });
+ it.each`
+ insertFinalNewline | input | eol | output
+ ${true} | ${'testing 123\n'} | ${'\n'} | ${'testing 123\n'}
+ ${true} | ${'testing 123'} | ${'\n'} | ${'testing 123\n'}
+ ${false} | ${'testing 123'} | ${'\n'} | ${'testing 123'}
+ ${true} | ${'testing 123'} | ${'\r\n'} | ${'testing 123\r\n'}
+ ${false} | ${'testing 123'} | ${'\r\n'} | ${'testing 123'}
+ `(
+ 'updates state with "$output" if `this.insertFinalNewline` is $insertFinalNewline',
+ ({ insertFinalNewline, input, eol, output }) => {
+ jest.spyOn(vm.model.getModel(), 'getEOL').mockReturnValue(eol);
+
+ vm.addFinalNewline = insertFinalNewline;
+
+ vm.model.setValue(input);
+
+ expect(vm.file.content).toBe(output);
+ },
+ );
it('sets head model as staged file', () => {
jest.spyOn(vm.editor, 'createModel');
diff --git a/spec/frontend/ide/stores/utils_spec.js b/spec/frontend/ide/stores/utils_spec.js
index b87f6c1f05a..da9ce916017 100644
--- a/spec/frontend/ide/stores/utils_spec.js
+++ b/spec/frontend/ide/stores/utils_spec.js
@@ -661,31 +661,6 @@ describe('Multi-file store utils', () => {
});
});
- describe('addFinalNewlineIfNeeded', () => {
- it('adds a newline if it doesnt already exist', () => {
- [
- {
- input: 'some text',
- output: 'some text\n',
- },
- {
- input: 'some text\n',
- output: 'some text\n',
- },
- {
- input: 'some text\n\n',
- output: 'some text\n\n',
- },
- {
- input: 'some\n text',
- output: 'some\n text\n',
- },
- ].forEach(({ input, output }) => {
- expect(utils.addFinalNewlineIfNeeded(input)).toEqual(output);
- });
- });
- });
-
describe('extractMarkdownImagesFromEntries', () => {
let mdFile;
let entries;
diff --git a/spec/frontend/ide/utils_spec.js b/spec/frontend/ide/utils_spec.js
index f689314567a..4ae440ff09e 100644
--- a/spec/frontend/ide/utils_spec.js
+++ b/spec/frontend/ide/utils_spec.js
@@ -1,4 +1,10 @@
-import { isTextFile, registerLanguages, trimPathComponents } from '~/ide/utils';
+import {
+ isTextFile,
+ registerLanguages,
+ trimPathComponents,
+ addFinalNewline,
+ getPathParents,
+} from '~/ide/utils';
import { languages } from 'monaco-editor';
describe('WebIDE utils', () => {
@@ -148,4 +154,39 @@ describe('WebIDE utils', () => {
]);
});
});
+
+ describe('addFinalNewline', () => {
+ it.each`
+ input | output
+ ${'some text'} | ${'some text\n'}
+ ${'some text\n'} | ${'some text\n'}
+ ${'some text\n\n'} | ${'some text\n\n'}
+ ${'some\n text'} | ${'some\n text\n'}
+ `('adds a newline if it doesnt already exist for input: $input', ({ input, output }) => {
+ expect(addFinalNewline(input)).toEqual(output);
+ });
+
+ it.each`
+ input | output
+ ${'some text'} | ${'some text\r\n'}
+ ${'some text\r\n'} | ${'some text\r\n'}
+ ${'some text\n'} | ${'some text\n\r\n'}
+ ${'some text\r\n\r\n'} | ${'some text\r\n\r\n'}
+ ${'some\r\n text'} | ${'some\r\n text\r\n'}
+ `('works with CRLF newline style; input: $input', ({ input, output }) => {
+ expect(addFinalNewline(input, '\r\n')).toEqual(output);
+ });
+ });
+
+ describe('getPathParents', () => {
+ it.each`
+ path | parents
+ ${'foo/bar/baz/index.md'} | ${['foo/bar/baz', 'foo/bar', 'foo']}
+ ${'foo/bar/baz'} | ${['foo/bar', 'foo']}
+ ${'index.md'} | ${[]}
+ ${'path with/spaces to/something.md'} | ${['path with/spaces to', 'path with']}
+ `('gets all parent directory names for path: $path', ({ path, parents }) => {
+ expect(getPathParents(path)).toEqual(parents);
+ });
+ });
});
diff --git a/spec/lib/gitlab/alerting/alert_spec.rb b/spec/lib/gitlab/alerting/alert_spec.rb
index a0582515f3d..d582ff6f32a 100644
--- a/spec/lib/gitlab/alerting/alert_spec.rb
+++ b/spec/lib/gitlab/alerting/alert_spec.rb
@@ -253,7 +253,7 @@ describe Gitlab::Alerting::Alert do
include_context 'gitlab alert'
it 'returns a fingerprint' do
- plain_fingerprint = [alert.metric_id, alert.starts_at].join('/')
+ plain_fingerprint = [alert.metric_id, alert.starts_at_raw].join('/')
is_expected.to eq(Digest::SHA1.hexdigest(plain_fingerprint))
end
@@ -263,7 +263,7 @@ describe Gitlab::Alerting::Alert do
include_context 'full query'
it 'returns a fingerprint' do
- plain_fingerprint = [alert.starts_at, alert.title, alert.full_query].join('/')
+ plain_fingerprint = [alert.starts_at_raw, alert.title, alert.full_query].join('/')
is_expected.to eq(Digest::SHA1.hexdigest(plain_fingerprint))
end
diff --git a/spec/lib/gitlab/cleanup/orphan_lfs_file_references_spec.rb b/spec/lib/gitlab/cleanup/orphan_lfs_file_references_spec.rb
new file mode 100644
index 00000000000..01cc0b30784
--- /dev/null
+++ b/spec/lib/gitlab/cleanup/orphan_lfs_file_references_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Cleanup::OrphanLfsFileReferences do
+ let(:null_logger) { Logger.new('/dev/null') }
+ let(:project) { create(:project, :repository, lfs_enabled: true) }
+ let(:lfs_object) { create(:lfs_object) }
+
+ let!(:invalid_reference) { create(:lfs_objects_project, project: project, lfs_object: lfs_object) }
+
+ before do
+ allow(null_logger).to receive(:info)
+
+ allow(Gitlab.config.lfs).to receive(:enabled).and_return(true)
+
+ # Create a valid reference
+ oid = project.repository.gitaly_blob_client.get_all_lfs_pointers.first.lfs_oid
+ lfs_object2 = create(:lfs_object, oid: oid)
+ create(:lfs_objects_project, project: project, lfs_object: lfs_object2)
+ end
+
+ context 'dry run' do
+ it 'prints messages and does not delete references' do
+ expect(null_logger).to receive(:info).with("[DRY RUN] Looking for orphan LFS files for project #{project.name_with_namespace}")
+ expect(null_logger).to receive(:info).with("[DRY RUN] Found invalid references: 1")
+
+ expect { described_class.new(project, logger: null_logger).run! }
+ .not_to change { project.lfs_objects.count }
+ end
+ end
+
+ context 'regular run' do
+ it 'prints messages and deletes invalid reference' do
+ expect(null_logger).to receive(:info).with("Looking for orphan LFS files for project #{project.name_with_namespace}")
+ expect(null_logger).to receive(:info).with("Removed invalid references: 1")
+ expect(ProjectCacheWorker).to receive(:perform_async).with(project.id, [], [:lfs_objects_size])
+
+ expect { described_class.new(project, logger: null_logger, dry_run: false).run! }
+ .to change { project.lfs_objects.count }.from(2).to(1)
+
+ expect(LfsObjectsProject.exists?(invalid_reference.id)).to be_falsey
+ end
+ end
+end
diff --git a/spec/models/application_setting_spec.rb b/spec/models/application_setting_spec.rb
index 64308af38f9..8d963d41fce 100644
--- a/spec/models/application_setting_spec.rb
+++ b/spec/models/application_setting_spec.rb
@@ -152,6 +152,30 @@ describe ApplicationSetting do
end
end
+ describe 'spam_check_endpoint' do
+ context 'when spam_check_endpoint is enabled' do
+ before do
+ setting.spam_check_endpoint_enabled = true
+ end
+
+ it { is_expected.to allow_value('https://example.org/spam_check').for(:spam_check_endpoint_url) }
+ it { is_expected.not_to allow_value('nonsense').for(:spam_check_endpoint_url) }
+ it { is_expected.not_to allow_value(nil).for(:spam_check_endpoint_url) }
+ it { is_expected.not_to allow_value('').for(:spam_check_endpoint_url) }
+ end
+
+ context 'when spam_check_endpoint is NOT enabled' do
+ before do
+ setting.spam_check_endpoint_enabled = false
+ end
+
+ it { is_expected.to allow_value('https://example.org/spam_check').for(:spam_check_endpoint_url) }
+ it { is_expected.not_to allow_value('nonsense').for(:spam_check_endpoint_url) }
+ it { is_expected.to allow_value(nil).for(:spam_check_endpoint_url) }
+ it { is_expected.to allow_value('').for(:spam_check_endpoint_url) }
+ end
+ end
+
context 'when snowplow is enabled' do
before do
setting.snowplow_enabled = true
diff --git a/spec/models/broadcast_message_spec.rb b/spec/models/broadcast_message_spec.rb
index 127faa5e8e2..8032f913d86 100644
--- a/spec/models/broadcast_message_spec.rb
+++ b/spec/models/broadcast_message_spec.rb
@@ -66,7 +66,7 @@ describe BroadcastMessage do
end
it 'expires the value if a broadcast message has ended', :request_store do
- message = create(:broadcast_message, broadcast_type: broadcast_type, ends_at: Time.now.utc + 1.day)
+ message = create(:broadcast_message, broadcast_type: broadcast_type, ends_at: Time.current.utc + 1.day)
expect(subject.call).to match_array([message])
expect(described_class.cache).to receive(:expire).and_call_original
@@ -87,8 +87,8 @@ describe BroadcastMessage do
future = create(
:broadcast_message,
- starts_at: Time.now + 10.minutes,
- ends_at: Time.now + 20.minutes,
+ starts_at: Time.current + 10.minutes,
+ ends_at: Time.current + 20.minutes,
broadcast_type: broadcast_type
)
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index c99a11b7401..2065e132de7 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -626,7 +626,7 @@ describe Ci::Build do
context 'is expired' do
before do
- build.update(artifacts_expire_at: Time.now - 7.days)
+ build.update(artifacts_expire_at: Time.current - 7.days)
end
it { is_expected.to be_truthy }
@@ -634,7 +634,7 @@ describe Ci::Build do
context 'is not expired' do
before do
- build.update(artifacts_expire_at: Time.now + 7.days)
+ build.update(artifacts_expire_at: Time.current + 7.days)
end
it { is_expected.to be_falsey }
@@ -661,13 +661,13 @@ describe Ci::Build do
it { is_expected.to be_nil }
context 'when artifacts_expire_at is specified' do
- let(:expire_at) { Time.now + 7.days }
+ let(:expire_at) { Time.current + 7.days }
before do
build.artifacts_expire_at = expire_at
end
- it { is_expected.to be_within(5).of(expire_at - Time.now) }
+ it { is_expected.to be_within(5).of(expire_at - Time.current) }
end
end
@@ -1795,7 +1795,7 @@ describe Ci::Build do
end
describe '#keep_artifacts!' do
- let(:build) { create(:ci_build, artifacts_expire_at: Time.now + 7.days) }
+ let(:build) { create(:ci_build, artifacts_expire_at: Time.current + 7.days) }
subject { build.keep_artifacts! }
diff --git a/spec/models/ci/job_artifact_spec.rb b/spec/models/ci/job_artifact_spec.rb
index 4cdc74d7a41..953931f48a7 100644
--- a/spec/models/ci/job_artifact_spec.rb
+++ b/spec/models/ci/job_artifact_spec.rb
@@ -363,13 +363,13 @@ describe Ci::JobArtifact do
it { is_expected.to be_nil }
context 'when expire_at is specified' do
- let(:expire_at) { Time.now + 7.days }
+ let(:expire_at) { Time.current + 7.days }
before do
artifact.expire_at = expire_at
end
- it { is_expected.to be_within(5).of(expire_at - Time.now) }
+ it { is_expected.to be_within(5).of(expire_at - Time.current) }
end
end
diff --git a/spec/models/ci/pipeline_schedule_spec.rb b/spec/models/ci/pipeline_schedule_spec.rb
index 9a10c7629b2..4ba70552f01 100644
--- a/spec/models/ci/pipeline_schedule_spec.rb
+++ b/spec/models/ci/pipeline_schedule_spec.rb
@@ -118,7 +118,7 @@ describe Ci::PipelineSchedule do
let(:pipeline_schedule) { create(:ci_pipeline_schedule, :every_minute) }
it "updates next_run_at to the sidekiq worker's execution time" do
- Timecop.freeze(Time.parse("2019-06-01 12:18:00+0000")) do
+ Timecop.freeze(Time.zone.parse("2019-06-01 12:18:00+0000")) do
expect(pipeline_schedule.next_run_at).to eq(cron_worker_next_run_at)
end
end
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index 4f53b6b4418..687ae71cdab 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -1079,7 +1079,7 @@ describe Ci::Pipeline, :mailer do
end
describe 'state machine' do
- let(:current) { Time.now.change(usec: 0) }
+ let(:current) { Time.current.change(usec: 0) }
let(:build) { create_build('build1', queued_at: 0) }
let(:build_b) { create_build('build2', queued_at: 0) }
let(:build_c) { create_build('build3', queued_at: 0) }
diff --git a/spec/models/ci/runner_spec.rb b/spec/models/ci/runner_spec.rb
index 8b6a4fa6ade..f741d2d9acf 100644
--- a/spec/models/ci/runner_spec.rb
+++ b/spec/models/ci/runner_spec.rb
@@ -605,7 +605,7 @@ describe Ci::Runner do
context 'when database was updated recently' do
before do
- runner.contacted_at = Time.now
+ runner.contacted_at = Time.current
end
it 'updates cache' do
diff --git a/spec/models/clusters/applications/prometheus_spec.rb b/spec/models/clusters/applications/prometheus_spec.rb
index ce341e67c14..1ed9e207b6b 100644
--- a/spec/models/clusters/applications/prometheus_spec.rb
+++ b/spec/models/clusters/applications/prometheus_spec.rb
@@ -47,7 +47,7 @@ describe Clusters::Applications::Prometheus do
it 'sets last_update_started_at to now' do
Timecop.freeze do
- expect { subject.make_updating }.to change { subject.reload.last_update_started_at }.to be_within(1.second).of(Time.now)
+ expect { subject.make_updating }.to change { subject.reload.last_update_started_at }.to be_within(1.second).of(Time.current)
end
end
end
@@ -347,14 +347,14 @@ describe Clusters::Applications::Prometheus do
describe '#updated_since?' do
let(:cluster) { create(:cluster) }
let(:prometheus_app) { build(:clusters_applications_prometheus, cluster: cluster) }
- let(:timestamp) { Time.now - 5.minutes }
+ let(:timestamp) { Time.current - 5.minutes }
around do |example|
Timecop.freeze { example.run }
end
before do
- prometheus_app.last_update_started_at = Time.now
+ prometheus_app.last_update_started_at = Time.current
end
context 'when app does not have status failed' do
@@ -363,7 +363,7 @@ describe Clusters::Applications::Prometheus do
end
it 'returns false when last update started before the timestamp' do
- expect(prometheus_app.updated_since?(Time.now + 5.minutes)).to be false
+ expect(prometheus_app.updated_since?(Time.current + 5.minutes)).to be false
end
end
diff --git a/spec/models/commit_status_spec.rb b/spec/models/commit_status_spec.rb
index 05d3329215a..8528a78241d 100644
--- a/spec/models/commit_status_spec.rb
+++ b/spec/models/commit_status_spec.rb
@@ -235,7 +235,7 @@ describe CommitStatus do
context 'if the building process has started' do
before do
- commit_status.started_at = Time.now - 1.minute
+ commit_status.started_at = Time.current - 1.minute
commit_status.finished_at = nil
end
@@ -708,7 +708,7 @@ describe CommitStatus do
end
describe '#enqueue' do
- let!(:current_time) { Time.new(2018, 4, 5, 14, 0, 0) }
+ let!(:current_time) { Time.zone.local(2018, 4, 5, 14, 0, 0) }
before do
allow(Time).to receive(:now).and_return(current_time)
diff --git a/spec/models/concerns/each_batch_spec.rb b/spec/models/concerns/each_batch_spec.rb
index 294fde4f8e6..ee3d9aea505 100644
--- a/spec/models/concerns/each_batch_spec.rb
+++ b/spec/models/concerns/each_batch_spec.rb
@@ -44,7 +44,7 @@ describe EachBatch do
end
it 'allows updating of the yielded relations' do
- time = Time.now
+ time = Time.current
model.each_batch do |relation|
relation.update_all(updated_at: time)
diff --git a/spec/models/concerns/issuable_spec.rb b/spec/models/concerns/issuable_spec.rb
index 24908785320..59398e797c1 100644
--- a/spec/models/concerns/issuable_spec.rb
+++ b/spec/models/concerns/issuable_spec.rb
@@ -422,7 +422,7 @@ describe Issuable do
context 'total_time_spent is updated' do
before do
- issue.spend_time(duration: 2, user_id: user.id, spent_at: Time.now)
+ issue.spend_time(duration: 2, user_id: user.id, spent_at: Time.current)
issue.save
expect(Gitlab::HookData::IssuableBuilder)
.to receive(:new).with(issue).and_return(builder)
@@ -572,8 +572,8 @@ describe Issuable do
second_priority = create(:label, project: project, priority: 2)
no_priority = create(:label, project: project)
- first_milestone = create(:milestone, project: project, due_date: Time.now)
- second_milestone = create(:milestone, project: project, due_date: Time.now + 1.month)
+ first_milestone = create(:milestone, project: project, due_date: Time.current)
+ second_milestone = create(:milestone, project: project, due_date: Time.current + 1.month)
third_milestone = create(:milestone, project: project)
# The issues here are ordered by label priority, to ensure that we don't
diff --git a/spec/models/concerns/milestoneish_spec.rb b/spec/models/concerns/milestoneish_spec.rb
index 81f173cd23a..8c43a12aa15 100644
--- a/spec/models/concerns/milestoneish_spec.rb
+++ b/spec/models/concerns/milestoneish_spec.rb
@@ -290,13 +290,13 @@ describe Milestone, 'Milestoneish' do
end
it 'shows 0 if start_date is a future' do
- milestone = build_stubbed(:milestone, start_date: Time.now + 2.days)
+ milestone = build_stubbed(:milestone, start_date: Time.current + 2.days)
expect(milestone.elapsed_days).to eq(0)
end
it 'shows correct amount of days' do
- milestone = build_stubbed(:milestone, start_date: Time.now - 2.days)
+ milestone = build_stubbed(:milestone, start_date: Time.current - 2.days)
expect(milestone.elapsed_days).to eq(2)
end
diff --git a/spec/models/concerns/resolvable_discussion_spec.rb b/spec/models/concerns/resolvable_discussion_spec.rb
index 9ea01ca9002..f1d5b1cd041 100644
--- a/spec/models/concerns/resolvable_discussion_spec.rb
+++ b/spec/models/concerns/resolvable_discussion_spec.rb
@@ -536,7 +536,7 @@ describe Discussion, ResolvableDiscussion do
describe "#last_resolved_note" do
let(:current_user) { create(:user) }
- let(:time) { Time.now.utc }
+ let(:time) { Time.current.utc }
before do
Timecop.freeze(time - 1.second) do
diff --git a/spec/models/concerns/sortable_spec.rb b/spec/models/concerns/sortable_spec.rb
index 18ac4d19938..a1fe5c0928d 100644
--- a/spec/models/concerns/sortable_spec.rb
+++ b/spec/models/concerns/sortable_spec.rb
@@ -91,7 +91,7 @@ describe Sortable do
Group.all.order_by(order).map(&:name)
end
- let!(:ref_time) { Time.parse('2018-05-01 00:00:00') }
+ let!(:ref_time) { Time.zone.parse('2018-05-01 00:00:00') }
let!(:group1) { create(:group, name: 'aa', id: 1, created_at: ref_time - 15.seconds, updated_at: ref_time) }
let!(:group2) { create(:group, name: 'AAA', id: 2, created_at: ref_time - 10.seconds, updated_at: ref_time - 5.seconds) }
let!(:group3) { create(:group, name: 'BB', id: 3, created_at: ref_time - 5.seconds, updated_at: ref_time - 10.seconds) }
diff --git a/spec/models/deployment_spec.rb b/spec/models/deployment_spec.rb
index 6eb006a5d67..ac2a4c9877d 100644
--- a/spec/models/deployment_spec.rb
+++ b/spec/models/deployment_spec.rb
@@ -114,7 +114,7 @@ describe Deployment do
deployment.succeed!
expect(deployment).to be_success
- expect(deployment.finished_at).to be_like_time(Time.now)
+ expect(deployment.finished_at).to be_like_time(Time.current)
end
end
@@ -141,7 +141,7 @@ describe Deployment do
deployment.drop!
expect(deployment).to be_failed
- expect(deployment.finished_at).to be_like_time(Time.now)
+ expect(deployment.finished_at).to be_like_time(Time.current)
end
end
@@ -161,7 +161,7 @@ describe Deployment do
deployment.cancel!
expect(deployment).to be_canceled
- expect(deployment.finished_at).to be_like_time(Time.now)
+ expect(deployment.finished_at).to be_like_time(Time.current)
end
end
@@ -587,7 +587,7 @@ describe Deployment do
Timecop.freeze do
deploy.update_status('success')
- expect(deploy.read_attribute(:finished_at)).to eq(Time.now)
+ expect(deploy.read_attribute(:finished_at)).to eq(Time.current)
end
end
end
diff --git a/spec/models/environment_spec.rb b/spec/models/environment_spec.rb
index c0b2a4ae984..b93da518b68 100644
--- a/spec/models/environment_spec.rb
+++ b/spec/models/environment_spec.rb
@@ -1061,7 +1061,7 @@ describe Environment, :use_clean_rails_memory_store_caching do
end
context 'when time window arguments are provided' do
- let(:metric_params) { [1552642245.067, Time.now] }
+ let(:metric_params) { [1552642245.067, Time.current] }
it 'queries with the expected parameters' do
expect(environment.prometheus_adapter)
diff --git a/spec/models/event_spec.rb b/spec/models/event_spec.rb
index ac89f8fe9e1..e800619e5e2 100644
--- a/spec/models/event_spec.rb
+++ b/spec/models/event_spec.rb
@@ -37,7 +37,7 @@ describe Event do
project.reload
- expect(project.last_repository_updated_at).to be_within(1.minute).of(Time.now)
+ expect(project.last_repository_updated_at).to be_within(1.minute).of(Time.current)
end
end
@@ -665,7 +665,7 @@ describe Event do
context 'when a project was updated less than 1 hour ago' do
it 'does not update the project' do
- project.update(last_activity_at: Time.now)
+ project.update(last_activity_at: Time.current)
expect(project).not_to receive(:update_column)
.with(:last_activity_at, a_kind_of(Time))
@@ -682,7 +682,7 @@ describe Event do
project.reload
- expect(project.last_activity_at).to be_within(1.minute).of(Time.now)
+ expect(project.last_activity_at).to be_within(1.minute).of(Time.current)
end
end
end
diff --git a/spec/models/instance_configuration_spec.rb b/spec/models/instance_configuration_spec.rb
index 3e0181b8846..747e9dc2faa 100644
--- a/spec/models/instance_configuration_spec.rb
+++ b/spec/models/instance_configuration_spec.rb
@@ -110,7 +110,7 @@ describe InstanceConfiguration do
end
it 'expires after EXPIRATION_TIME' do
- allow(Time).to receive(:now).and_return(Time.now + described_class::EXPIRATION_TIME)
+ allow(Time).to receive(:now).and_return(Time.current + described_class::EXPIRATION_TIME)
Rails.cache.cleanup
expect(Rails.cache.read(described_class::CACHE_KEY)).to eq(nil)
diff --git a/spec/models/issue/metrics_spec.rb b/spec/models/issue/metrics_spec.rb
index 0d0628277a6..dc22d26e2f9 100644
--- a/spec/models/issue/metrics_spec.rb
+++ b/spec/models/issue/metrics_spec.rb
@@ -23,7 +23,7 @@ describe Issue::Metrics do
describe '.with_first_mention_not_earlier_than' do
subject(:scope) { described_class.with_first_mention_not_earlier_than(timestamp) }
- let(:timestamp) { DateTime.now }
+ let(:timestamp) { DateTime.current }
it 'returns metrics without mentioning in commit or with mentioning after given timestamp' do
issue1 = create(:issue)
@@ -37,7 +37,7 @@ describe Issue::Metrics do
describe "when recording the default set of issue metrics on issue save" do
context "milestones" do
it "records the first time an issue is associated with a milestone" do
- time = Time.now
+ time = Time.current
Timecop.freeze(time) { subject.update(milestone: create(:milestone, project: project)) }
metrics = subject.metrics
@@ -46,7 +46,7 @@ describe Issue::Metrics do
end
it "does not record the second time an issue is associated with a milestone" do
- time = Time.now
+ time = Time.current
Timecop.freeze(time) { subject.update(milestone: create(:milestone, project: project)) }
Timecop.freeze(time + 2.hours) { subject.update(milestone: nil) }
Timecop.freeze(time + 6.hours) { subject.update(milestone: create(:milestone, project: project)) }
@@ -60,7 +60,7 @@ describe Issue::Metrics do
context "list labels" do
it "records the first time an issue is associated with a list label" do
list_label = create(:list).label
- time = Time.now
+ time = Time.current
Timecop.freeze(time) { subject.update(label_ids: [list_label.id]) }
metrics = subject.metrics
@@ -69,7 +69,7 @@ describe Issue::Metrics do
end
it "does not record the second time an issue is associated with a list label" do
- time = Time.now
+ time = Time.current
first_list_label = create(:list).label
Timecop.freeze(time) { subject.update(label_ids: [first_list_label.id]) }
second_list_label = create(:list).label
diff --git a/spec/models/issue_spec.rb b/spec/models/issue_spec.rb
index dd5ff3dbdde..4fcb1ff7eb3 100644
--- a/spec/models/issue_spec.rb
+++ b/spec/models/issue_spec.rb
@@ -176,7 +176,7 @@ describe Issue do
describe '#close' do
subject(:issue) { create(:issue, state: 'opened') }
- it 'sets closed_at to Time.now when an issue is closed' do
+ it 'sets closed_at to Time.current when an issue is closed' do
expect { issue.close }.to change { issue.closed_at }.from(nil)
end
@@ -190,7 +190,7 @@ describe Issue do
describe '#reopen' do
let(:user) { create(:user) }
- let(:issue) { create(:issue, state: 'closed', closed_at: Time.now, closed_by: user) }
+ let(:issue) { create(:issue, state: 'closed', closed_at: Time.current, closed_by: user) }
it 'sets closed_at to nil when an issue is reopend' do
expect { issue.reopen }.to change { issue.closed_at }.to(nil)
@@ -994,7 +994,7 @@ describe Issue do
it_behaves_like 'versioned description'
describe "#previous_updated_at" do
- let_it_be(:updated_at) { Time.new(2012, 01, 06) }
+ let_it_be(:updated_at) { Time.zone.local(2012, 01, 06) }
let_it_be(:issue) { create(:issue, updated_at: updated_at) }
it 'returns updated_at value if updated_at did not change at all' do
@@ -1010,15 +1010,15 @@ describe Issue do
end
it 'returns updated_at value if previous updated_at value is not present' do
- allow(issue).to receive(:previous_changes).and_return({ 'updated_at' => [nil, Time.new(2013, 02, 06)] })
+ allow(issue).to receive(:previous_changes).and_return({ 'updated_at' => [nil, Time.zone.local(2013, 02, 06)] })
expect(issue.previous_updated_at).to eq(updated_at)
end
it 'returns previous updated_at when present' do
- allow(issue).to receive(:previous_changes).and_return({ 'updated_at' => [Time.new(2013, 02, 06), Time.new(2013, 03, 06)] })
+ allow(issue).to receive(:previous_changes).and_return({ 'updated_at' => [Time.zone.local(2013, 02, 06), Time.zone.local(2013, 03, 06)] })
- expect(issue.previous_updated_at).to eq(Time.new(2013, 02, 06))
+ expect(issue.previous_updated_at).to eq(Time.zone.local(2013, 02, 06))
end
end
diff --git a/spec/models/iteration_spec.rb b/spec/models/iteration_spec.rb
index e5b7b746639..51e2c7a03ae 100644
--- a/spec/models/iteration_spec.rb
+++ b/spec/models/iteration_spec.rb
@@ -62,7 +62,7 @@ describe Iteration do
end
context 'when end_date is in range' do
- let(:start_date) { Time.now }
+ let(:start_date) { Time.current }
let(:due_date) { 6.days.from_now }
it 'is not valid' do
@@ -94,7 +94,7 @@ describe Iteration do
describe '#future_date' do
context 'when dates are in the future' do
- let(:start_date) { Time.now }
+ let(:start_date) { Time.current }
let(:due_date) { 1.week.from_now }
it { is_expected.to be_valid }
@@ -111,7 +111,7 @@ describe Iteration do
end
context 'when due_date is in the past' do
- let(:start_date) { Time.now }
+ let(:start_date) { Time.current }
let(:due_date) { 1.week.ago }
it 'is not valid' do
@@ -122,7 +122,7 @@ describe Iteration do
context 'when start_date is over 500 years in the future' do
let(:start_date) { 501.years.from_now }
- let(:due_date) { Time.now }
+ let(:due_date) { Time.current }
it 'is not valid' do
expect(subject).not_to be_valid
@@ -131,7 +131,7 @@ describe Iteration do
end
context 'when due_date is over 500 years in the future' do
- let(:start_date) { Time.now }
+ let(:start_date) { Time.current }
let(:due_date) { 501.years.from_now }
it 'is not valid' do
@@ -143,7 +143,7 @@ describe Iteration do
end
describe '.within_timeframe' do
- let_it_be(:now) { Time.now }
+ let_it_be(:now) { Time.current }
let_it_be(:project) { create(:project, :empty_repo) }
let_it_be(:iteration_1) { create(:iteration, project: project, start_date: now, due_date: 1.day.from_now) }
let_it_be(:iteration_2) { create(:iteration, project: project, start_date: 2.days.from_now, due_date: 3.days.from_now) }
diff --git a/spec/models/jira_import_state_spec.rb b/spec/models/jira_import_state_spec.rb
index 99f9e035205..493e9d2d78c 100644
--- a/spec/models/jira_import_state_spec.rb
+++ b/spec/models/jira_import_state_spec.rb
@@ -124,7 +124,7 @@ describe JiraImportState do
jira_import.schedule
expect(jira_import.jid).to eq('some-job-id')
- expect(jira_import.scheduled_at).to be_within(1.second).of(Time.now)
+ expect(jira_import.scheduled_at).to be_within(1.second).of(Time.current)
end
end
diff --git a/spec/models/member_spec.rb b/spec/models/member_spec.rb
index a8d864ad3f0..7c40bb24b56 100644
--- a/spec/models/member_spec.rb
+++ b/spec/models/member_spec.rb
@@ -494,7 +494,7 @@ describe Member do
end
describe '#accept_request' do
- let(:member) { create(:project_member, requested_at: Time.now.utc) }
+ let(:member) { create(:project_member, requested_at: Time.current.utc) }
it { expect(member.accept_request).to be_truthy }
@@ -518,14 +518,14 @@ describe Member do
end
describe '#request?' do
- subject { create(:project_member, requested_at: Time.now.utc) }
+ subject { create(:project_member, requested_at: Time.current.utc) }
it { is_expected.to be_request }
end
describe '#pending?' do
let(:invited_member) { create(:project_member, invite_email: "user@example.com", user: nil) }
- let(:requester) { create(:project_member, requested_at: Time.now.utc) }
+ let(:requester) { create(:project_member, requested_at: Time.current.utc) }
it { expect(invited_member).to be_invite }
it { expect(requester).to be_pending }
diff --git a/spec/models/members/project_member_spec.rb b/spec/models/members/project_member_spec.rb
index 79c39b81196..98d8de48dfb 100644
--- a/spec/models/members/project_member_spec.rb
+++ b/spec/models/members/project_member_spec.rb
@@ -44,7 +44,7 @@ describe ProjectMember do
let(:maintainer) { create(:project_member, project: project) }
it "creates an expired event when left due to expiry" do
- expired = create(:project_member, project: project, expires_at: Time.now - 6.days)
+ expired = create(:project_member, project: project, expires_at: Time.current - 6.days)
expired.destroy
expect(Event.recent.first.action).to eq(Event::EXPIRED)
end
diff --git a/spec/models/merge_request_diff_commit_spec.rb b/spec/models/merge_request_diff_commit_spec.rb
index 8b51c6fae08..62430b08c5c 100644
--- a/spec/models/merge_request_diff_commit_spec.rb
+++ b/spec/models/merge_request_diff_commit_spec.rb
@@ -73,7 +73,7 @@ describe MergeRequestDiffCommit do
# This commit's date is "Sun Aug 17 07:12:55 292278994 +0000"
[project.commit('ba3343bc4fa403a8dfbfcab7fc1a8c29ee34bd69')]
end
- let(:timestamp) { Time.at((1 << 31) - 1) }
+ let(:timestamp) { Time.zone.at((1 << 31) - 1) }
let(:rows) do
[{
"message": "Weird commit date\n",
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index fc4590f7b22..cc4a12467da 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -2018,7 +2018,7 @@ describe MergeRequest do
describe '#can_be_reverted?' do
context 'when there is no merge_commit for the MR' do
before do
- subject.metrics.update!(merged_at: Time.now.utc)
+ subject.metrics.update!(merged_at: Time.current.utc)
end
it 'returns false' do
diff --git a/spec/models/milestone_spec.rb b/spec/models/milestone_spec.rb
index e51108947a7..178fd5c04d0 100644
--- a/spec/models/milestone_spec.rb
+++ b/spec/models/milestone_spec.rb
@@ -81,7 +81,7 @@ describe Milestone do
end
it_behaves_like 'within_timeframe scope' do
- let_it_be(:now) { Time.now }
+ let_it_be(:now) { Time.current }
let_it_be(:project) { create(:project, :empty_repo) }
let_it_be(:resource_1) { create(:milestone, project: project, start_date: now - 1.day, due_date: now + 1.day) }
let_it_be(:resource_2) { create(:milestone, project: project, start_date: now + 2.days, due_date: now + 3.days) }
@@ -130,7 +130,7 @@ describe Milestone do
describe '#upcoming?' do
it 'returns true when start_date is in the future' do
- milestone = build(:milestone, start_date: Time.now + 1.month)
+ milestone = build(:milestone, start_date: Time.current + 1.month)
expect(milestone.upcoming?).to be_truthy
end
@@ -297,30 +297,30 @@ describe Milestone do
let(:group_3) { create(:group) }
let(:groups) { [group_1, group_2, group_3] }
- let!(:past_milestone_group_1) { create(:milestone, group: group_1, due_date: Time.now - 1.day) }
- let!(:current_milestone_group_1) { create(:milestone, group: group_1, due_date: Time.now + 1.day) }
- let!(:future_milestone_group_1) { create(:milestone, group: group_1, due_date: Time.now + 2.days) }
+ let!(:past_milestone_group_1) { create(:milestone, group: group_1, due_date: Time.current - 1.day) }
+ let!(:current_milestone_group_1) { create(:milestone, group: group_1, due_date: Time.current + 1.day) }
+ let!(:future_milestone_group_1) { create(:milestone, group: group_1, due_date: Time.current + 2.days) }
- let!(:past_milestone_group_2) { create(:milestone, group: group_2, due_date: Time.now - 1.day) }
- let!(:closed_milestone_group_2) { create(:milestone, :closed, group: group_2, due_date: Time.now + 1.day) }
- let!(:current_milestone_group_2) { create(:milestone, group: group_2, due_date: Time.now + 2.days) }
+ let!(:past_milestone_group_2) { create(:milestone, group: group_2, due_date: Time.current - 1.day) }
+ let!(:closed_milestone_group_2) { create(:milestone, :closed, group: group_2, due_date: Time.current + 1.day) }
+ let!(:current_milestone_group_2) { create(:milestone, group: group_2, due_date: Time.current + 2.days) }
- let!(:past_milestone_group_3) { create(:milestone, group: group_3, due_date: Time.now - 1.day) }
+ let!(:past_milestone_group_3) { create(:milestone, group: group_3, due_date: Time.current - 1.day) }
let(:project_1) { create(:project) }
let(:project_2) { create(:project) }
let(:project_3) { create(:project) }
let(:projects) { [project_1, project_2, project_3] }
- let!(:past_milestone_project_1) { create(:milestone, project: project_1, due_date: Time.now - 1.day) }
- let!(:current_milestone_project_1) { create(:milestone, project: project_1, due_date: Time.now + 1.day) }
- let!(:future_milestone_project_1) { create(:milestone, project: project_1, due_date: Time.now + 2.days) }
+ let!(:past_milestone_project_1) { create(:milestone, project: project_1, due_date: Time.current - 1.day) }
+ let!(:current_milestone_project_1) { create(:milestone, project: project_1, due_date: Time.current + 1.day) }
+ let!(:future_milestone_project_1) { create(:milestone, project: project_1, due_date: Time.current + 2.days) }
- let!(:past_milestone_project_2) { create(:milestone, project: project_2, due_date: Time.now - 1.day) }
- let!(:closed_milestone_project_2) { create(:milestone, :closed, project: project_2, due_date: Time.now + 1.day) }
- let!(:current_milestone_project_2) { create(:milestone, project: project_2, due_date: Time.now + 2.days) }
+ let!(:past_milestone_project_2) { create(:milestone, project: project_2, due_date: Time.current - 1.day) }
+ let!(:closed_milestone_project_2) { create(:milestone, :closed, project: project_2, due_date: Time.current + 1.day) }
+ let!(:current_milestone_project_2) { create(:milestone, project: project_2, due_date: Time.current + 2.days) }
- let!(:past_milestone_project_3) { create(:milestone, project: project_3, due_date: Time.now - 1.day) }
+ let!(:past_milestone_project_3) { create(:milestone, project: project_3, due_date: Time.current - 1.day) }
let(:milestone_ids) { described_class.upcoming_ids(projects, groups).map(&:id) }
diff --git a/spec/models/note_spec.rb b/spec/models/note_spec.rb
index 6dd295ca915..d42222db574 100644
--- a/spec/models/note_spec.rb
+++ b/spec/models/note_spec.rb
@@ -285,7 +285,7 @@ describe Note do
end
describe "edited?" do
- let(:note) { build(:note, updated_by_id: nil, created_at: Time.now, updated_at: Time.now + 5.hours) }
+ let(:note) { build(:note, updated_by_id: nil, created_at: Time.current, updated_at: Time.current + 5.hours) }
context "with updated_by" do
it "returns true" do
diff --git a/spec/models/pages_domain_spec.rb b/spec/models/pages_domain_spec.rb
index 54747ddf525..fc7694530d0 100644
--- a/spec/models/pages_domain_spec.rb
+++ b/spec/models/pages_domain_spec.rb
@@ -97,8 +97,8 @@ describe PagesDomain do
it 'saves validity time' do
domain.save
- expect(domain.certificate_valid_not_before).to be_like_time(Time.parse("2020-03-16 14:20:34 UTC"))
- expect(domain.certificate_valid_not_after).to be_like_time(Time.parse("2220-01-28 14:20:34 UTC"))
+ expect(domain.certificate_valid_not_before).to be_like_time(Time.zone.parse("2020-03-16 14:20:34 UTC"))
+ expect(domain.certificate_valid_not_after).to be_like_time(Time.zone.parse("2220-01-28 14:20:34 UTC"))
end
end
@@ -366,7 +366,7 @@ describe PagesDomain do
let_it_be(:domain) { create(:pages_domain) }
where(:attribute, :old_value, :new_value, :update_expected) do
- now = Time.now
+ now = Time.current
future = now + 1.day
:project | nil | :project1 | true
@@ -548,7 +548,7 @@ describe PagesDomain do
it 'does not clear failure on unrelated updates' do
expect do
- domain.update!(verified_at: Time.now)
+ domain.update!(verified_at: Time.current)
end.not_to change { domain.auto_ssl_failed }.from(true)
end
end
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index ec8c03513ce..d4a57e832e2 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -770,7 +770,7 @@ describe Project do
describe 'last_activity_date' do
it 'returns the creation date of the project\'s last event if present' do
- new_event = create(:event, :closed, project: project, created_at: Time.now)
+ new_event = create(:event, :closed, project: project, created_at: Time.current)
project.reload
expect(project.last_activity_at.to_i).to eq(new_event.created_at.to_i)
@@ -3620,7 +3620,7 @@ describe Project do
expect(project).not_to receive(:visibility_level_allowed_as_fork).and_call_original
expect(project).not_to receive(:visibility_level_allowed_by_group).and_call_original
- project.update(updated_at: Time.now)
+ project.update(updated_at: Time.current)
end
end
diff --git a/spec/models/project_wiki_spec.rb b/spec/models/project_wiki_spec.rb
index a4181e3be9a..aff2b248642 100644
--- a/spec/models/project_wiki_spec.rb
+++ b/spec/models/project_wiki_spec.rb
@@ -27,8 +27,8 @@ describe ProjectWiki do
subject.create_page('Test Page', 'This is content')
wiki_container.reload
- expect(wiki_container.last_activity_at).to be_within(1.minute).of(Time.now)
- expect(wiki_container.last_repository_updated_at).to be_within(1.minute).of(Time.now)
+ expect(wiki_container.last_activity_at).to be_within(1.minute).of(Time.current)
+ expect(wiki_container.last_repository_updated_at).to be_within(1.minute).of(Time.current)
end
end
end
diff --git a/spec/models/prometheus_alert_event_spec.rb b/spec/models/prometheus_alert_event_spec.rb
index 040113643dd..85e57cb08c3 100644
--- a/spec/models/prometheus_alert_event_spec.rb
+++ b/spec/models/prometheus_alert_event_spec.rb
@@ -49,7 +49,7 @@ describe PrometheusAlertEvent do
describe 'transaction' do
describe 'fire' do
- let(:started_at) { Time.now }
+ let(:started_at) { Time.current }
context 'when status is none' do
subject { build(:prometheus_alert_event, :none) }
@@ -75,7 +75,7 @@ describe PrometheusAlertEvent do
end
describe 'resolve' do
- let(:ended_at) { Time.now }
+ let(:ended_at) { Time.current }
context 'when firing' do
subject { build(:prometheus_alert_event) }
diff --git a/spec/models/remote_mirror_spec.rb b/spec/models/remote_mirror_spec.rb
index a87cdcf9344..6d163a16e63 100644
--- a/spec/models/remote_mirror_spec.rb
+++ b/spec/models/remote_mirror_spec.rb
@@ -306,7 +306,7 @@ describe RemoteMirror, :mailer do
context 'when it did not update in the last minute' do
it 'schedules a RepositoryUpdateRemoteMirrorWorker to run now' do
- expect(RepositoryUpdateRemoteMirrorWorker).to receive(:perform_async).with(remote_mirror.id, Time.now)
+ expect(RepositoryUpdateRemoteMirrorWorker).to receive(:perform_async).with(remote_mirror.id, Time.current)
remote_mirror.sync
end
@@ -314,9 +314,9 @@ describe RemoteMirror, :mailer do
context 'when it did update in the last minute' do
it 'schedules a RepositoryUpdateRemoteMirrorWorker to run in the next minute' do
- remote_mirror.last_update_started_at = Time.now - 30.seconds
+ remote_mirror.last_update_started_at = Time.current - 30.seconds
- expect(RepositoryUpdateRemoteMirrorWorker).to receive(:perform_in).with(RemoteMirror::PROTECTED_BACKOFF_DELAY, remote_mirror.id, Time.now)
+ expect(RepositoryUpdateRemoteMirrorWorker).to receive(:perform_in).with(RemoteMirror::PROTECTED_BACKOFF_DELAY, remote_mirror.id, Time.current)
remote_mirror.sync
end
@@ -330,7 +330,7 @@ describe RemoteMirror, :mailer do
context 'when it did not update in the last 5 minutes' do
it 'schedules a RepositoryUpdateRemoteMirrorWorker to run now' do
- expect(RepositoryUpdateRemoteMirrorWorker).to receive(:perform_async).with(remote_mirror.id, Time.now)
+ expect(RepositoryUpdateRemoteMirrorWorker).to receive(:perform_async).with(remote_mirror.id, Time.current)
remote_mirror.sync
end
@@ -338,9 +338,9 @@ describe RemoteMirror, :mailer do
context 'when it did update within the last 5 minutes' do
it 'schedules a RepositoryUpdateRemoteMirrorWorker to run in the next 5 minutes' do
- remote_mirror.last_update_started_at = Time.now - 30.seconds
+ remote_mirror.last_update_started_at = Time.current - 30.seconds
- expect(RepositoryUpdateRemoteMirrorWorker).to receive(:perform_in).with(RemoteMirror::UNPROTECTED_BACKOFF_DELAY, remote_mirror.id, Time.now)
+ expect(RepositoryUpdateRemoteMirrorWorker).to receive(:perform_in).with(RemoteMirror::UNPROTECTED_BACKOFF_DELAY, remote_mirror.id, Time.current)
remote_mirror.sync
end
@@ -377,9 +377,9 @@ describe RemoteMirror, :mailer do
let(:remote_mirror) { create(:project, :repository, :remote_mirror).remote_mirrors.first }
it 'resets all the columns when URL changes' do
- remote_mirror.update(last_error: Time.now,
- last_update_at: Time.now,
- last_successful_update_at: Time.now,
+ remote_mirror.update(last_error: Time.current,
+ last_update_at: Time.current,
+ last_successful_update_at: Time.current,
update_status: 'started',
error_notification_sent: true)
@@ -394,14 +394,14 @@ describe RemoteMirror, :mailer do
describe '#updated_since?' do
let(:remote_mirror) { create(:project, :repository, :remote_mirror).remote_mirrors.first }
- let(:timestamp) { Time.now - 5.minutes }
+ let(:timestamp) { Time.current - 5.minutes }
around do |example|
Timecop.freeze { example.run }
end
before do
- remote_mirror.update(last_update_started_at: Time.now)
+ remote_mirror.update(last_update_started_at: Time.current)
end
context 'when remote mirror does not have status failed' do
@@ -410,7 +410,7 @@ describe RemoteMirror, :mailer do
end
it 'returns false when last update started before the timestamp' do
- expect(remote_mirror.updated_since?(Time.now + 5.minutes)).to be false
+ expect(remote_mirror.updated_since?(Time.current + 5.minutes)).to be false
end
end
diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb
index be626dd6e32..a616a63e724 100644
--- a/spec/models/repository_spec.rb
+++ b/spec/models/repository_spec.rb
@@ -88,8 +88,8 @@ describe Repository do
subject { repository.tags_sorted_by('updated_desc').map(&:name) }
before do
- double_first = double(committed_date: Time.now)
- double_last = double(committed_date: Time.now - 1.second)
+ double_first = double(committed_date: Time.current)
+ double_last = double(committed_date: Time.current - 1.second)
allow(tag_a).to receive(:dereferenced_target).and_return(double_first)
allow(tag_b).to receive(:dereferenced_target).and_return(double_last)
@@ -103,8 +103,8 @@ describe Repository do
subject { repository.tags_sorted_by('updated_asc').map(&:name) }
before do
- double_first = double(committed_date: Time.now - 1.second)
- double_last = double(committed_date: Time.now)
+ double_first = double(committed_date: Time.current - 1.second)
+ double_last = double(committed_date: Time.current)
allow(tag_a).to receive(:dereferenced_target).and_return(double_last)
allow(tag_b).to receive(:dereferenced_target).and_return(double_first)
@@ -125,8 +125,8 @@ describe Repository do
rugged_repo(repository).tags.create(annotated_tag_name, 'a48e4fc218069f68ef2e769dd8dfea3991362175', options)
- double_first = double(committed_date: Time.now - 1.second)
- double_last = double(committed_date: Time.now)
+ double_first = double(committed_date: Time.current - 1.second)
+ double_last = double(committed_date: Time.current)
allow(tag_a).to receive(:dereferenced_target).and_return(double_last)
allow(tag_b).to receive(:dereferenced_target).and_return(double_first)
@@ -227,7 +227,7 @@ describe Repository do
tree_builder = Rugged::Tree::Builder.new(rugged)
tree_builder.insert({ oid: blob_id, name: "hello\x80world", filemode: 0100644 })
tree_id = tree_builder.write
- user = { email: "jcai@gitlab.com", time: Time.now, name: "John Cai" }
+ user = { email: "jcai@gitlab.com", time: Time.current.to_time, name: "John Cai" }
Rugged::Commit.create(rugged, message: 'some commit message', parents: [rugged.head.target.oid], tree: tree_id, committer: user, author: user)
end
diff --git a/spec/models/snippet_input_action_collection_spec.rb b/spec/models/snippet_input_action_collection_spec.rb
new file mode 100644
index 00000000000..db361ea1f5b
--- /dev/null
+++ b/spec/models/snippet_input_action_collection_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe SnippetInputActionCollection do
+ let(:action_name) { 'create' }
+ let(:action) { { action: action_name, file_path: 'foo', content: 'bar', previous_path: 'foobar' } }
+ let(:data) { [action, action] }
+
+ it { is_expected.to delegate_method(:empty?).to(:actions) }
+
+ describe '#to_commit_actions' do
+ subject { described_class.new(data).to_commit_actions}
+
+ it 'translates all actions to commit actions' do
+ transformed_action = action.merge(action: action_name.to_sym)
+
+ expect(subject).to eq [transformed_action, transformed_action]
+ end
+ end
+
+ describe '#valid?' do
+ subject { described_class.new(data).valid?}
+
+ it 'returns true' do
+ expect(subject).to be true
+ end
+
+ context 'when any of the actions is invalid' do
+ let(:data) { [action, { action: 'foo' }, action]}
+
+ it 'returns false' do
+ expect(subject).to be false
+ end
+ end
+ end
+end
diff --git a/spec/models/snippet_input_action_spec.rb b/spec/models/snippet_input_action_spec.rb
new file mode 100644
index 00000000000..ca0f91d7d27
--- /dev/null
+++ b/spec/models/snippet_input_action_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe SnippetInputAction do
+ describe 'validations' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:action, :file_path, :content, :previous_path, :is_valid) do
+ 'create' | 'foobar' | 'foobar' | 'foobar' | true
+ 'move' | 'foobar' | 'foobar' | 'foobar' | true
+ 'delete' | 'foobar' | 'foobar' | 'foobar' | true
+ 'update' | 'foobar' | 'foobar' | 'foobar' | true
+ 'foo' | 'foobar' | 'foobar' | 'foobar' | false
+ nil | 'foobar' | 'foobar' | 'foobar' | false
+ '' | 'foobar' | 'foobar' | 'foobar' | false
+ 'move' | 'foobar' | 'foobar' | nil | false
+ 'move' | 'foobar' | 'foobar' | '' | false
+ 'create' | 'foobar' | nil | 'foobar' | false
+ 'create' | 'foobar' | '' | 'foobar' | false
+ 'create' | nil | 'foobar' | 'foobar' | false
+ 'create' | '' | 'foobar' | 'foobar' | false
+ end
+
+ with_them do
+ subject { described_class.new(action: action, file_path: file_path, content: content, previous_path: previous_path).valid? }
+
+ specify { is_expected.to be is_valid}
+ end
+ end
+
+ describe '#to_commit_action' do
+ let(:action) { 'create' }
+ let(:file_path) { 'foo' }
+ let(:content) { 'bar' }
+ let(:previous_path) { 'previous_path' }
+ let(:options) { { action: action, file_path: file_path, content: content, previous_path: previous_path } }
+
+ subject { described_class.new(options).to_commit_action }
+
+ it 'transforms attributes to commit action' do
+ expect(subject).to eq(options.merge(action: action.to_sym))
+ end
+ end
+end
diff --git a/spec/models/todo_spec.rb b/spec/models/todo_spec.rb
index e125f58399e..08f0627191a 100644
--- a/spec/models/todo_spec.rb
+++ b/spec/models/todo_spec.rb
@@ -414,7 +414,7 @@ describe Todo do
create(:todo, :pending)
Timecop.freeze(1.day.from_now) do
- expected_update_date = Time.now.utc
+ expected_update_date = Time.current.utc
ids = described_class.update_state(:done)
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index 94a3f6bafea..a90d7893c79 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -1253,7 +1253,7 @@ describe User do
end
it 'is true when sent less than one minute ago' do
- user = build_stubbed(:user, reset_password_sent_at: Time.now)
+ user = build_stubbed(:user, reset_password_sent_at: Time.current)
expect(user.recently_sent_password_reset?).to eq true
end
@@ -2029,7 +2029,7 @@ describe User do
describe '#all_emails' do
let(:user) { create(:user) }
- let!(:email_confirmed) { create :email, user: user, confirmed_at: Time.now }
+ let!(:email_confirmed) { create :email, user: user, confirmed_at: Time.current }
let!(:email_unconfirmed) { create :email, user: user }
context 'when `include_private_email` is true' do
@@ -2058,7 +2058,7 @@ describe User do
let(:user) { create(:user) }
it 'returns only confirmed emails' do
- email_confirmed = create :email, user: user, confirmed_at: Time.now
+ email_confirmed = create :email, user: user, confirmed_at: Time.current
create :email, user: user
expect(user.verified_emails).to contain_exactly(
@@ -2073,7 +2073,7 @@ describe User do
let(:user) { create(:user) }
it 'returns true when the email is verified/confirmed' do
- email_confirmed = create :email, user: user, confirmed_at: Time.now
+ email_confirmed = create :email, user: user, confirmed_at: Time.current
create :email, user: user
user.reload
diff --git a/spec/requests/api/settings_spec.rb b/spec/requests/api/settings_spec.rb
index a5b95bc59a5..0b8c7eef7c1 100644
--- a/spec/requests/api/settings_spec.rb
+++ b/spec/requests/api/settings_spec.rb
@@ -38,6 +38,8 @@ describe API::Settings, 'Settings' do
expect(json_response).not_to have_key('performance_bar_allowed_group_path')
expect(json_response).not_to have_key('performance_bar_enabled')
expect(json_response['snippet_size_limit']).to eq(50.megabytes)
+ expect(json_response['spam_check_endpoint_enabled']).to be_falsey
+ expect(json_response['spam_check_endpoint_url']).to be_nil
end
end
@@ -90,7 +92,9 @@ describe API::Settings, 'Settings' do
push_event_activities_limit: 2,
snippet_size_limit: 5,
issues_create_limit: 300,
- raw_blob_request_limit: 300
+ raw_blob_request_limit: 300,
+ spam_check_endpoint_enabled: true,
+ spam_check_endpoint_url: 'https://example.com/spam_check'
}
expect(response).to have_gitlab_http_status(:ok)
@@ -129,6 +133,8 @@ describe API::Settings, 'Settings' do
expect(json_response['snippet_size_limit']).to eq(5)
expect(json_response['issues_create_limit']).to eq(300)
expect(json_response['raw_blob_request_limit']).to eq(300)
+ expect(json_response['spam_check_endpoint_enabled']).to be_truthy
+ expect(json_response['spam_check_endpoint_url']).to eq('https://example.com/spam_check')
end
end
@@ -390,5 +396,14 @@ describe API::Settings, 'Settings' do
expect(json_response['error']).to eq('sourcegraph_url is missing')
end
end
+
+ context "missing spam_check_endpoint_url value when spam_check_endpoint_enabled is true" do
+ it "returns a blank parameter error message" do
+ put api("/application/settings", admin), params: { spam_check_endpoint_enabled: true }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error']).to eq('spam_check_endpoint_url is missing')
+ end
+ end
end
end
diff --git a/spec/services/alert_management/create_alert_issue_service_spec.rb b/spec/services/alert_management/create_alert_issue_service_spec.rb
index 62afe777165..c8b1eec8cb1 100644
--- a/spec/services/alert_management/create_alert_issue_service_spec.rb
+++ b/spec/services/alert_management/create_alert_issue_service_spec.rb
@@ -94,11 +94,7 @@ RSpec.describe AlertManagement::CreateAlertIssueService do
end
context 'when alert cannot be updated' do
- before do
- # invalidate alert
- too_many_hosts = Array.new(AlertManagement::Alert::HOSTS_MAX_LENGTH + 1) { |_| 'host' }
- alert.update_columns(hosts: too_many_hosts)
- end
+ let(:alert) { create(:alert_management_alert, :with_validation_errors, :triggered, project: project, payload: payload) }
it 'responds with error' do
expect(execute).to be_error
diff --git a/spec/services/groups/transfer_service_spec.rb b/spec/services/groups/transfer_service_spec.rb
index bbf5bbbf814..d7f6bececfe 100644
--- a/spec/services/groups/transfer_service_spec.rb
+++ b/spec/services/groups/transfer_service_spec.rb
@@ -216,6 +216,15 @@ describe Groups::TransferService do
end
end
+ context 'when a group is transferred to its subgroup' do
+ let(:new_parent_group) { create(:group, parent: group) }
+
+ it 'does not execute the transfer' do
+ expect(transfer_service.execute(new_parent_group)).to be_falsy
+ expect(transfer_service.error).to match(/Cannot transfer group to one of its subgroup/)
+ end
+ end
+
context 'when transferring a group with group descendants' do
let!(:subgroup1) { create(:group, :private, parent: group) }
let!(:subgroup2) { create(:group, :internal, parent: group) }
diff --git a/spec/services/projects/prometheus/alerts/create_events_service_spec.rb b/spec/services/projects/prometheus/alerts/create_events_service_spec.rb
index 35f23afd7a2..61236b5bbdb 100644
--- a/spec/services/projects/prometheus/alerts/create_events_service_spec.rb
+++ b/spec/services/projects/prometheus/alerts/create_events_service_spec.rb
@@ -89,9 +89,9 @@ describe Projects::Prometheus::Alerts::CreateEventsService do
context 'with a resolved payload' do
let(:started_at) { truncate_to_second(Time.current) }
let(:ended_at) { started_at + 1 }
- let(:payload_key) { PrometheusAlertEvent.payload_key_for(alert.prometheus_metric_id, utc_rfc3339(started_at)) }
let(:resolved_event) { alert_payload(status: 'resolved', started_at: started_at, ended_at: ended_at) }
let(:alerts_payload) { { 'alerts' => [resolved_event] } }
+ let(:payload_key) { Gitlab::Alerting::Alert.new(project: project, payload: resolved_event).gitlab_fingerprint }
context 'with a matching firing event' do
before do
diff --git a/spec/services/snippets/create_service_spec.rb b/spec/services/snippets/create_service_spec.rb
index 786fc3ec8dd..fa8cbc87563 100644
--- a/spec/services/snippets/create_service_spec.rb
+++ b/spec/services/snippets/create_service_spec.rb
@@ -109,7 +109,7 @@ describe Snippets::CreateService do
expect(snippet.repository.exists?).to be_truthy
end
- it 'commit the files to the repository' do
+ it 'commits the files to the repository' do
subject
blob = snippet.repository.blob_at('master', base_opts[:file_name])
@@ -230,6 +230,61 @@ describe Snippets::CreateService do
end
end
+ shared_examples 'when snippet_files param is present' do
+ let(:file_path) { 'snippet_file_path.rb' }
+ let(:content) { 'snippet_content' }
+ let(:snippet_files) { [{ action: 'create', file_path: file_path, content: content }] }
+ let(:base_opts) do
+ {
+ title: 'Test snippet',
+ visibility_level: Gitlab::VisibilityLevel::PRIVATE,
+ snippet_files: snippet_files
+ }
+ end
+
+ it 'creates a snippet with the provided attributes' do
+ expect(snippet.title).to eq(opts[:title])
+ expect(snippet.visibility_level).to eq(opts[:visibility_level])
+ expect(snippet.file_name).to eq(file_path)
+ expect(snippet.content).to eq(content)
+ end
+
+ it 'commit the files to the repository' do
+ subject
+
+ blob = snippet.repository.blob_at('master', file_path)
+
+ expect(blob.data).to eq content
+ end
+
+ context 'when content or file_name params are present' do
+ let(:extra_opts) { { content: 'foo', file_name: 'path' } }
+
+ it 'a validation error is raised' do
+ response = subject
+ snippet = response.payload[:snippet]
+
+ expect(response).to be_error
+ expect(snippet.errors.full_messages_for(:content)).to eq ['Content and snippet files cannot be used together']
+ expect(snippet.errors.full_messages_for(:file_name)).to eq ['File name and snippet files cannot be used together']
+ expect(snippet.repository.exists?).to be_falsey
+ end
+ end
+
+ context 'when snippet_files param is invalid' do
+ let(:snippet_files) { [{ action: 'invalid_action', file_path: 'snippet_file_path.rb', content: 'snippet_content' }] }
+
+ it 'a validation error is raised' do
+ response = subject
+ snippet = response.payload[:snippet]
+
+ expect(response).to be_error
+ expect(snippet.errors.full_messages_for(:snippet_files)).to eq ['Snippet files have invalid data']
+ expect(snippet.repository.exists?).to be_falsey
+ end
+ end
+ end
+
context 'when ProjectSnippet' do
let_it_be(:project) { create(:project) }
@@ -244,6 +299,7 @@ describe Snippets::CreateService do
it_behaves_like 'an error service response when save fails'
it_behaves_like 'creates repository and files'
it_behaves_like 'after_save callback to store_mentions', ProjectSnippet
+ it_behaves_like 'when snippet_files param is present'
context 'when uploaded files are passed to the service' do
let(:extra_opts) { { files: ['foo'] } }
@@ -270,6 +326,7 @@ describe Snippets::CreateService do
it_behaves_like 'an error service response when save fails'
it_behaves_like 'creates repository and files'
it_behaves_like 'after_save callback to store_mentions', PersonalSnippet
+ it_behaves_like 'when snippet_files param is present'
context 'when the snippet description contains files' do
include FileMoverHelpers
diff --git a/spec/services/spam/spam_verdict_service_spec.rb b/spec/services/spam/spam_verdict_service_spec.rb
index 93460a5e7d7..2568b708ad8 100644
--- a/spec/services/spam/spam_verdict_service_spec.rb
+++ b/spec/services/spam/spam_verdict_service_spec.rb
@@ -25,41 +25,256 @@ describe Spam::SpamVerdictService do
subject { service.execute }
before do
- allow_next_instance_of(Spam::AkismetService) do |service|
- allow(service).to receive(:spam?).and_return(spam_verdict)
+ allow(service).to receive(:akismet_verdict).and_return(nil)
+ allow(service).to receive(:spam_verdict_verdict).and_return(nil)
+ end
+
+ context 'if all services return nil' do
+ it 'renders ALLOW verdict' do
+ expect(subject).to eq ALLOW
end
end
- context 'if Akismet considers it spam' do
- let(:spam_verdict) { true }
+ context 'if only one service returns a verdict' do
+ context 'and it is supported' do
+ before do
+ allow(service).to receive(:akismet_verdict).and_return(DISALLOW)
+ end
- context 'if reCAPTCHA is enabled' do
+ it 'renders that verdict' do
+ expect(subject).to eq DISALLOW
+ end
+ end
+
+ context 'and it is unexpected' do
before do
- stub_application_setting(recaptcha_enabled: true)
+ allow(service).to receive(:akismet_verdict).and_return("unexpected")
end
- it 'requires reCAPTCHA' do
- expect(subject).to eq REQUIRE_RECAPTCHA
+ it 'allows' do
+ expect(subject).to eq ALLOW
end
end
+ end
- context 'if reCAPTCHA is not enabled' do
+ context 'if more than one service returns a verdict' do
+ context 'and they are supported' do
before do
- stub_application_setting(recaptcha_enabled: false)
+ allow(service).to receive(:akismet_verdict).and_return(DISALLOW)
+ allow(service).to receive(:spam_verdict).and_return(BLOCK_USER)
end
- it 'disallows the change' do
- expect(subject).to eq DISALLOW
+ it 'renders the more restrictive verdict' do
+ expect(subject).to eq BLOCK_USER
+ end
+ end
+
+ context 'and one is supported' do
+ before do
+ allow(service).to receive(:akismet_verdict).and_return('nonsense')
+ allow(service).to receive(:spam_verdict).and_return(BLOCK_USER)
+ end
+
+ it 'renders the more restrictive verdict' do
+ expect(subject).to eq BLOCK_USER
+ end
+ end
+
+ context 'and one is supported' do
+ before do
+ allow(service).to receive(:akismet_verdict).and_return('nonsense')
+ allow(service).to receive(:spam_verdict).and_return(BLOCK_USER)
+ end
+
+ it 'renders the more restrictive verdict' do
+ expect(subject).to eq BLOCK_USER
+ end
+ end
+
+ context 'and none are supported' do
+ before do
+ allow(service).to receive(:akismet_verdict).and_return('nonsense')
+ allow(service).to receive(:spam_verdict).and_return('rubbish')
+ end
+
+ it 'renders the more restrictive verdict' do
+ expect(subject).to eq ALLOW
+ end
+ end
+ end
+ end
+
+ describe '#akismet_verdict' do
+ subject { service.send(:akismet_verdict) }
+
+ context 'if Akismet is enabled' do
+ before do
+ stub_application_setting(akismet_enabled: true)
+ allow_next_instance_of(Spam::AkismetService) do |service|
+ allow(service).to receive(:spam?).and_return(akismet_result)
+ end
+ end
+
+ context 'if Akismet considers it spam' do
+ let(:akismet_result) { true }
+
+ context 'if reCAPTCHA is enabled' do
+ before do
+ stub_application_setting(recaptcha_enabled: true)
+ end
+
+ it 'returns require reCAPTCHA verdict' do
+ expect(subject).to eq REQUIRE_RECAPTCHA
+ end
+ end
+
+ context 'if reCAPTCHA is not enabled' do
+ before do
+ stub_application_setting(recaptcha_enabled: false)
+ end
+
+ it 'renders disallow verdict' do
+ expect(subject).to eq DISALLOW
+ end
+ end
+ end
+
+ context 'if Akismet does not consider it spam' do
+ let(:akismet_result) { false }
+
+ it 'renders allow verdict' do
+ expect(subject).to eq ALLOW
end
end
end
- context 'if Akismet does not consider it spam' do
- let(:spam_verdict) { false }
+ context 'if Akismet is not enabled' do
+ before do
+ stub_application_setting(akismet_enabled: false)
+ end
- it 'allows the change' do
+ it 'renders allow verdict' do
expect(subject).to eq ALLOW
end
end
end
+
+ describe '#spam_verdict' do
+ subject { service.send(:spam_verdict) }
+
+ context 'if a Spam Check endpoint enabled and set to a URL' do
+ let(:spam_check_body) { {} }
+ let(:spam_check_http_status) { nil }
+
+ before do
+ stub_application_setting(spam_check_endpoint_enabled: true)
+ stub_application_setting(spam_check_endpoint_url: "http://www.spamcheckurl.com/spam_check")
+ stub_request(:any, /.*spamcheckurl.com.*/).to_return( body: spam_check_body.to_json, status: spam_check_http_status )
+ end
+
+ context 'if the endpoint is accessible' do
+ let(:spam_check_http_status) { 200 }
+ let(:error) { nil }
+ let(:verdict) { nil }
+ let(:spam_check_body) do
+ { verdict: verdict, error: error }
+ end
+
+ context 'the result is a valid verdict' do
+ let(:verdict) { 'allow' }
+
+ it 'returns the verdict' do
+ expect(subject).to eq ALLOW
+ end
+ end
+
+ context 'the verdict is an unexpected string' do
+ let(:verdict) { 'this is fine' }
+
+ it 'returns nil' do
+ expect(subject).to be_nil
+ end
+ end
+
+ context 'the JSON is malformed' do
+ let(:spam_check_body) { 'this is fine' }
+
+ it 'returns allow' do
+ expect(subject).to eq ALLOW
+ end
+ end
+
+ context 'the verdict is an empty string' do
+ let(:verdict) { '' }
+
+ it 'returns nil' do
+ expect(subject).to be_nil
+ end
+ end
+
+ context 'the verdict is nil' do
+ let(:verdict) { nil }
+
+ it 'returns nil' do
+ expect(subject).to be_nil
+ end
+ end
+
+ context 'there is an error' do
+ let(:error) { "Sorry Dave, I can't do that" }
+
+ it 'returns nil' do
+ expect(subject).to be_nil
+ end
+ end
+
+ context 'the HTTP status is not 200' do
+ let(:spam_check_http_status) { 500 }
+
+ it 'returns nil' do
+ expect(subject).to be_nil
+ end
+ end
+
+ context 'the confused API endpoint returns both an error and a verdict' do
+ let(:verdict) { 'disallow' }
+ let(:error) { 'oh noes!' }
+
+ it 'renders the verdict' do
+ expect(subject).to eq DISALLOW
+ end
+ end
+ end
+
+ context 'if the endpoint times out' do
+ before do
+ stub_request(:any, /.*spamcheckurl.com.*/).to_timeout
+ end
+
+ it 'returns nil' do
+ expect(subject).to be_nil
+ end
+ end
+ end
+
+ context 'if a Spam Check endpoint is not set' do
+ before do
+ stub_application_setting(spam_check_endpoint_url: nil)
+ end
+
+ it 'returns nil' do
+ expect(subject).to be_nil
+ end
+ end
+
+ context 'if Spam Check endpoint is not enabled' do
+ before do
+ stub_application_setting(spam_check_endpoint_enabled: false)
+ end
+
+ it 'returns nil' do
+ expect(subject).to be_nil
+ end
+ end
+ end
end
diff --git a/spec/support/shared_contexts/spam_constants.rb b/spec/support/shared_contexts/spam_constants.rb
index 7aff207dc2e..0eb5e541965 100644
--- a/spec/support/shared_contexts/spam_constants.rb
+++ b/spec/support/shared_contexts/spam_constants.rb
@@ -5,5 +5,6 @@ shared_context 'includes Spam constants' do
stub_const('REQUIRE_RECAPTCHA', Spam::SpamConstants::REQUIRE_RECAPTCHA)
stub_const('DISALLOW', Spam::SpamConstants::DISALLOW)
stub_const('ALLOW', Spam::SpamConstants::ALLOW)
+ stub_const('BLOCK_USER', Spam::SpamConstants::BLOCK_USER)
end
end
diff --git a/spec/workers/incident_management/process_alert_worker_spec.rb b/spec/workers/incident_management/process_alert_worker_spec.rb
index 938e72aa0f0..08acee264f8 100644
--- a/spec/workers/incident_management/process_alert_worker_spec.rb
+++ b/spec/workers/incident_management/process_alert_worker_spec.rb
@@ -70,11 +70,7 @@ describe IncidentManagement::ProcessAlertWorker do
end
context 'when alert cannot be updated' do
- before do
- # invalidate alert
- too_many_hosts = Array.new(AlertManagement::Alert::HOSTS_MAX_LENGTH + 1) { |_| 'host' }
- alert.update_columns(hosts: too_many_hosts)
- end
+ let(:alert) { create(:alert_management_alert, :with_validation_errors, project: project) }
it 'updates AlertManagement::Alert#issue_id' do
expect { subject }.not_to change { alert.reload.issue_id }
diff --git a/spec/workers/incident_management/process_prometheus_alert_worker_spec.rb b/spec/workers/incident_management/process_prometheus_alert_worker_spec.rb
index 5fbc39cad4e..d80365cfba4 100644
--- a/spec/workers/incident_management/process_prometheus_alert_worker_spec.rb
+++ b/spec/workers/incident_management/process_prometheus_alert_worker_spec.rb
@@ -6,7 +6,7 @@ describe IncidentManagement::ProcessPrometheusAlertWorker do
describe '#perform' do
let_it_be(:project) { create(:project) }
let_it_be(:prometheus_alert) { create(:prometheus_alert, project: project) }
- let_it_be(:payload_key) { PrometheusAlertEvent.payload_key_for(prometheus_alert.prometheus_metric_id, prometheus_alert.created_at.rfc3339) }
+ let(:payload_key) { Gitlab::Alerting::Alert.new(project: project, payload: alert_params).gitlab_fingerprint }
let!(:prometheus_alert_event) { create(:prometheus_alert_event, prometheus_alert: prometheus_alert, payload_key: payload_key) }
let(:alert_params) do
@@ -107,7 +107,6 @@ describe IncidentManagement::ProcessPrometheusAlertWorker do
let(:starts_at) { Time.now.rfc3339 }
let!(:prometheus_alert_event) do
- payload_key = SelfManagedPrometheusAlertEvent.payload_key_for(starts_at, alert_name, 'vector(1)')
create(:self_managed_prometheus_alert_event, project: project, payload_key: payload_key)
end