gitlab.com/gitlab-org/gitlab-foss.git
Diffstat (limited to 'app/models')
-rw-r--r--  app/models/ability.rb  12
-rw-r--r--  app/models/active_session.rb  110
-rw-r--r--  app/models/appearance.rb  19
-rw-r--r--  app/models/application_setting.rb  119
-rw-r--r--  app/models/application_setting/term.rb  19
-rw-r--r--  app/models/badge.rb  2
-rw-r--r--  app/models/broadcast_message.rb  6
-rw-r--r--  app/models/ci/artifact_blob.rb  7
-rw-r--r--  app/models/ci/build.rb  216
-rw-r--r--  app/models/ci/build_metadata.rb  35
-rw-r--r--  app/models/ci/build_trace_chunk.rb  180
-rw-r--r--  app/models/ci/group.rb  8
-rw-r--r--  app/models/ci/group_variable.rb  4
-rw-r--r--  app/models/ci/job_artifact.rb  45
-rw-r--r--  app/models/ci/legacy_stage.rb  6
-rw-r--r--  app/models/ci/pipeline.rb  125
-rw-r--r--  app/models/ci/pipeline_schedule_variable.rb  2
-rw-r--r--  app/models/ci/pipeline_variable.rb  2
-rw-r--r--  app/models/ci/runner.rb  132
-rw-r--r--  app/models/ci/runner_namespace.rb  11
-rw-r--r--  app/models/ci/runner_project.rb  4
-rw-r--r--  app/models/ci/stage.rb  53
-rw-r--r--  app/models/ci/variable.rb  2
-rw-r--r--  app/models/clusters/applications/jupyter.rb  92
-rw-r--r--  app/models/clusters/applications/prometheus.rb  8
-rw-r--r--  app/models/clusters/applications/runner.rb  11
-rw-r--r--  app/models/clusters/cluster.rb  14
-rw-r--r--  app/models/clusters/concerns/application_status.rb  2
-rw-r--r--  app/models/clusters/platforms/kubernetes.rb  4
-rw-r--r--  app/models/clusters/providers/gcp.rb  2
-rw-r--r--  app/models/commit.rb  50
-rw-r--r--  app/models/commit_status.rb  8
-rw-r--r--  app/models/concerns/atomic_internal_id.rb  21
-rw-r--r--  app/models/concerns/avatarable.rb  55
-rw-r--r--  app/models/concerns/awardable.rb  14
-rw-r--r--  app/models/concerns/batch_destroy_dependent_associations.rb  28
-rw-r--r--  app/models/concerns/cache_markdown_field.rb  30
-rw-r--r--  app/models/concerns/cacheable_attributes.rb  74
-rw-r--r--  app/models/concerns/chronic_duration_attribute.rb  39
-rw-r--r--  app/models/concerns/deployment_platform.rb  22
-rw-r--r--  app/models/concerns/diff_file.rb  9
-rw-r--r--  app/models/concerns/enum_with_nil.rb  33
-rw-r--r--  app/models/concerns/fast_destroy_all.rb  91
-rw-r--r--  app/models/concerns/group_descendant.rb  15
-rw-r--r--  app/models/concerns/has_status.rb  2
-rw-r--r--  app/models/concerns/has_variable.rb  2
-rw-r--r--  app/models/concerns/iid_routes.rb  9
-rw-r--r--  app/models/concerns/issuable.rb  8
-rw-r--r--  app/models/concerns/milestoneish.rb  8
-rw-r--r--  app/models/concerns/nonatomic_internal_id.rb  22
-rw-r--r--  app/models/concerns/participable.rb  4
-rw-r--r--  app/models/concerns/presentable.rb  8
-rw-r--r--  app/models/concerns/project_features_compatibility.rb  2
-rw-r--r--  app/models/concerns/protected_ref.rb  2
-rw-r--r--  app/models/concerns/protected_ref_access.rb  4
-rw-r--r--  app/models/concerns/reactive_caching.rb  18
-rw-r--r--  app/models/concerns/redis_cacheable.rb  19
-rw-r--r--  app/models/concerns/resolvable_note.rb  2
-rw-r--r--  app/models/concerns/routable.rb  11
-rw-r--r--  app/models/concerns/sha_attribute.rb  30
-rw-r--r--  app/models/concerns/sortable.rb  4
-rw-r--r--  app/models/concerns/storage/legacy_namespace.rb  30
-rw-r--r--  app/models/concerns/time_trackable.rb  4
-rw-r--r--  app/models/concerns/uniquify.rb  22
-rw-r--r--  app/models/concerns/with_uploads.rb  43
-rw-r--r--  app/models/deploy_key.rb  4
-rw-r--r--  app/models/deploy_token.rb  66
-rw-r--r--  app/models/deployment.rb  5
-rw-r--r--  app/models/diff_note.rb  54
-rw-r--r--  app/models/discussion.rb  4
-rw-r--r--  app/models/environment.rb  4
-rw-r--r--  app/models/event.rb  15
-rw-r--r--  app/models/generic_commit_status.rb  2
-rw-r--r--  app/models/group.rb  85
-rw-r--r--  app/models/hooks/project_hook.rb  1
-rw-r--r--  app/models/hooks/system_hook.rb  5
-rw-r--r--  app/models/hooks/web_hook.rb  9
-rw-r--r--  app/models/identity.rb  8
-rw-r--r--  app/models/internal_id.rb  3
-rw-r--r--  app/models/issue.rb  59
-rw-r--r--  app/models/label.rb  19
-rw-r--r--  app/models/lfs_object.rb  17
-rw-r--r--  app/models/list.rb  21
-rw-r--r--  app/models/member.rb  13
-rw-r--r--  app/models/members/group_member.rb  6
-rw-r--r--  app/models/members/project_member.rb  6
-rw-r--r--  app/models/merge_request.rb  122
-rw-r--r--  app/models/merge_request_diff.rb  31
-rw-r--r--  app/models/merge_request_diff_commit.rb  2
-rw-r--r--  app/models/merge_request_diff_file.rb  7
-rw-r--r--  app/models/milestone.rb  12
-rw-r--r--  app/models/namespace.rb  18
-rw-r--r--  app/models/note.rb  20
-rw-r--r--  app/models/note_diff_file.rb  7
-rw-r--r--  app/models/notification_recipient.rb  58
-rw-r--r--  app/models/notification_setting.rb  8
-rw-r--r--  app/models/pages_domain.rb  12
-rw-r--r--  app/models/personal_snippet.rb  1
-rw-r--r--  app/models/project.rb  441
-rw-r--r--  app/models/project_auto_devops.rb  31
-rw-r--r--  app/models/project_ci_cd_setting.rb  16
-rw-r--r--  app/models/project_deploy_token.rb  8
-rw-r--r--  app/models/project_import_data.rb  2
-rw-r--r--  app/models/project_import_state.rb  55
-rw-r--r--  app/models/project_services/bamboo_service.rb  2
-rw-r--r--  app/models/project_services/bugzilla_service.rb  2
-rw-r--r--  app/models/project_services/buildkite_service.rb  2
-rw-r--r--  app/models/project_services/chat_message/base_message.rb  7
-rw-r--r--  app/models/project_services/chat_message/pipeline_message.rb  4
-rw-r--r--  app/models/project_services/chat_notification_service.rb  17
-rw-r--r--  app/models/project_services/custom_issue_tracker_service.rb  2
-rw-r--r--  app/models/project_services/drone_ci_service.rb  4
-rw-r--r--  app/models/project_services/external_wiki_service.rb  2
-rw-r--r--  app/models/project_services/flowdock_service.rb  48
-rw-r--r--  app/models/project_services/gemnasium_service.rb  20
-rw-r--r--  app/models/project_services/gitlab_issue_tracker_service.rb  2
-rw-r--r--  app/models/project_services/hipchat_service.rb  2
-rw-r--r--  app/models/project_services/jira_service.rb  6
-rw-r--r--  app/models/project_services/kubernetes_service.rb  2
-rw-r--r--  app/models/project_services/microsoft_teams_service.rb  2
-rw-r--r--  app/models/project_services/mock_ci_service.rb  2
-rw-r--r--  app/models/project_services/prometheus_service.rb  2
-rw-r--r--  app/models/project_services/redmine_service.rb  2
-rw-r--r--  app/models/project_services/teamcity_service.rb  2
-rw-r--r--  app/models/project_statistics.rb  20
-rw-r--r--  app/models/project_team.rb  2
-rw-r--r--  app/models/project_wiki.rb  12
-rw-r--r--  app/models/protected_branch.rb  9
-rw-r--r--  app/models/redirect_route.rb  28
-rw-r--r--  app/models/remote_mirror.rb  218
-rw-r--r--  app/models/repository.rb  115
-rw-r--r--  app/models/route.rb  26
-rw-r--r--  app/models/sent_notification.rb  4
-rw-r--r--  app/models/service.rb  17
-rw-r--r--  app/models/storage/hashed_project.rb  4
-rw-r--r--  app/models/storage/legacy_project.rb  8
-rw-r--r--  app/models/system_note_metadata.rb  6
-rw-r--r--  app/models/term_agreement.rb  8
-rw-r--r--  app/models/timelog.rb  9
-rw-r--r--  app/models/todo.rb  2
-rw-r--r--  app/models/upload.rb  19
-rw-r--r--  app/models/user.rb  164
-rw-r--r--  app/models/user_callout.rb  3
-rw-r--r--  app/models/wiki_page.rb  18
144 files changed, 3265 insertions, 811 deletions
diff --git a/app/models/ability.rb b/app/models/ability.rb
index 6dae49f38dc..bb600eaccba 100644
--- a/app/models/ability.rb
+++ b/app/models/ability.rb
@@ -10,6 +10,14 @@ class Ability
end
end
+ # Given a list of users and a group this method returns the users that can
+ # read the given group.
+ def users_that_can_read_group(users, group)
+ DeclarativePolicy.subject_scope do
+ users.select { |u| allowed?(u, :read_group, group) }
+ end
+ end
+
# Given a list of users and a snippet this method returns the users that can
# read the given snippet.
def users_that_can_read_personal_snippet(users, snippet)
@@ -46,10 +54,6 @@ class Ability
end
end
- def can_edit_note?(user, note)
- allowed?(user, :edit_note, note)
- end
-
def allowed?(user, action, subject = :global, opts = {})
if subject.is_a?(Hash)
opts, subject = subject, :global
diff --git a/app/models/active_session.rb b/app/models/active_session.rb
new file mode 100644
index 00000000000..b4a86dbb331
--- /dev/null
+++ b/app/models/active_session.rb
@@ -0,0 +1,110 @@
+class ActiveSession
+ include ActiveModel::Model
+
+ attr_accessor :created_at, :updated_at,
+ :session_id, :ip_address,
+ :browser, :os, :device_name, :device_type
+
+ def current?(session)
+ return false if session_id.nil? || session.id.nil?
+
+ session_id == session.id
+ end
+
+ def human_device_type
+ device_type&.titleize
+ end
+
+ def self.set(user, request)
+ Gitlab::Redis::SharedState.with do |redis|
+ session_id = request.session.id
+ client = DeviceDetector.new(request.user_agent)
+ timestamp = Time.current
+
+ active_user_session = new(
+ ip_address: request.ip,
+ browser: client.name,
+ os: client.os_name,
+ device_name: client.device_name,
+ device_type: client.device_type,
+ created_at: user.current_sign_in_at || timestamp,
+ updated_at: timestamp,
+ session_id: session_id
+ )
+
+ redis.pipelined do
+ redis.setex(
+ key_name(user.id, session_id),
+ Settings.gitlab['session_expire_delay'] * 60,
+ Marshal.dump(active_user_session)
+ )
+
+ redis.sadd(
+ lookup_key_name(user.id),
+ session_id
+ )
+ end
+ end
+ end
+
+ def self.list(user)
+ Gitlab::Redis::SharedState.with do |redis|
+ cleaned_up_lookup_entries(redis, user.id).map do |entry|
+ # rubocop:disable Security/MarshalLoad
+ Marshal.load(entry)
+ # rubocop:enable Security/MarshalLoad
+ end
+ end
+ end
+
+ def self.destroy(user, session_id)
+ Gitlab::Redis::SharedState.with do |redis|
+ redis.srem(lookup_key_name(user.id), session_id)
+
+ deleted_keys = redis.del(key_name(user.id, session_id))
+
+ # only allow deleting the devise session if we could actually find a
+ # related active session. this prevents another user from deleting
+ # someone else's session.
+ if deleted_keys > 0
+ redis.del("#{Gitlab::Redis::SharedState::SESSION_NAMESPACE}:#{session_id}")
+ end
+ end
+ end
+
+ def self.cleanup(user)
+ Gitlab::Redis::SharedState.with do |redis|
+ cleaned_up_lookup_entries(redis, user.id)
+ end
+ end
+
+ def self.key_name(user_id, session_id = '*')
+ "#{Gitlab::Redis::SharedState::USER_SESSIONS_NAMESPACE}:#{user_id}:#{session_id}"
+ end
+
+ def self.lookup_key_name(user_id)
+ "#{Gitlab::Redis::SharedState::USER_SESSIONS_LOOKUP_NAMESPACE}:#{user_id}"
+ end
+
+ def self.cleaned_up_lookup_entries(redis, user_id)
+ lookup_key = lookup_key_name(user_id)
+
+ session_ids = redis.smembers(lookup_key)
+
+ entry_keys = session_ids.map { |session_id| key_name(user_id, session_id) }
+ return [] if entry_keys.empty?
+
+ entries = redis.mget(entry_keys)
+
+ session_ids_and_entries = session_ids.zip(entries)
+
+ # remove expired keys.
+ # only the single key entries are automatically expired by redis, the
+ # lookup entries in the set need to be removed manually.
+ session_ids_and_entries.reject { |_session_id, entry| entry }.each do |session_id, _entry|
+ redis.srem(lookup_key, session_id)
+ end
+
+ session_ids_and_entries.select { |_session_id, entry| entry }.map { |_session_id, entry| entry }
+ end
+end
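
The new ActiveSession model is a plain ActiveModel wrapper around per-user session records in Redis: .set writes one expiring entry per session plus a lookup set, .list reads and prunes them, and .destroy removes both the lookup entry and the underlying Devise session key. A minimal usage sketch, assuming a GitLab Rails console where user and request stand for a signed-in user and the current request (variable names are illustrative):

    # Record the session, e.g. from a Warden after_set_user callback.
    ActiveSession.set(user, request)

    # Enumerate the user's active sessions, e.g. for an "active sessions" page.
    ActiveSession.list(user).each do |session|
      puts "#{session.human_device_type} - #{session.browser} on #{session.os} (#{session.ip_address})"
    end

    # Revoke a single session. The Devise session key is only deleted when a
    # matching ActiveSession entry existed, so one user cannot revoke another
    # user's session by guessing an id.
    ActiveSession.destroy(user, session_id)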
diff --git a/app/models/appearance.rb b/app/models/appearance.rb
index dcd14c08f3c..b770aadef0e 100644
--- a/app/models/appearance.rb
+++ b/app/models/appearance.rb
@@ -1,5 +1,8 @@
class Appearance < ActiveRecord::Base
+ include CacheableAttributes
include CacheMarkdownField
+ include ObjectStorage::BackgroundMove
+ include WithUploads
cache_markdown_field :description
cache_markdown_field :new_project_guidelines
@@ -11,19 +14,11 @@ class Appearance < ActiveRecord::Base
mount_uploader :logo, AttachmentUploader
mount_uploader :header_logo, AttachmentUploader
+ mount_uploader :favicon, FaviconUploader
- has_many :uploads, as: :model, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
-
- CACHE_KEY = 'current_appearance'.freeze
-
- after_commit :flush_redis_cache
-
- def self.current
- Rails.cache.fetch(CACHE_KEY) { first }
- end
-
- def flush_redis_cache
- Rails.cache.delete(CACHE_KEY)
+ # Overrides CacheableAttributes.current_without_cache
+ def self.current_without_cache
+ first
end
def single_appearance_row
diff --git a/app/models/application_setting.rb b/app/models/application_setting.rb
index 862933bf127..bddeb8b0352 100644
--- a/app/models/application_setting.rb
+++ b/app/models/application_setting.rb
@@ -1,11 +1,11 @@
class ApplicationSetting < ActiveRecord::Base
+ include CacheableAttributes
include CacheMarkdownField
include TokenAuthenticatable
add_authentication_token_field :runners_registration_token
add_authentication_token_field :health_check_access_token
- CACHE_KEY = 'application_setting.last'.freeze
DOMAIN_LIST_SEPARATOR = %r{\s*[,;]\s* # comma or semicolon, optionally surrounded by whitespace
| # or
\s # any whitespace character
@@ -212,13 +212,7 @@ class ApplicationSetting < ActiveRecord::Base
end
end
- validates_each :disabled_oauth_sign_in_sources do |record, attr, value|
- value&.each do |source|
- unless Devise.omniauth_providers.include?(source.to_sym)
- record.errors.add(attr, "'#{source}' is not an OAuth sign-in source")
- end
- end
- end
+ validate :terms_exist, if: :enforce_terms?
before_validation :ensure_uuid!
@@ -226,41 +220,9 @@ class ApplicationSetting < ActiveRecord::Base
before_save :ensure_health_check_access_token
after_commit do
- Rails.cache.write(CACHE_KEY, self)
- end
-
- def self.current
- ensure_cache_setup
-
- Rails.cache.fetch(CACHE_KEY) do
- ApplicationSetting.last.tap do |settings|
- # do not cache nils
- raise 'missing settings' unless settings
- end
- end
- rescue
- # Fall back to an uncached value if there are any problems (e.g. redis down)
- ApplicationSetting.last
- end
-
- def self.expire
- Rails.cache.delete(CACHE_KEY)
- rescue
- # Gracefully handle when Redis is not available. For example,
- # omnibus may fail here during gitlab:assets:compile.
- end
-
- def self.cached
- value = Rails.cache.read(CACHE_KEY)
- ensure_cache_setup if value.present?
- value
- end
-
- def self.ensure_cache_setup
- # This is a workaround for a Rails bug that causes attribute methods not
- # to be loaded when read from cache: https://github.com/rails/rails/issues/27348
- ApplicationSetting.define_attribute_methods
+ reset_memoized_terms
end
+ after_commit :expire_performance_bar_allowed_user_ids_cache, if: -> { previous_changes.key?('performance_bar_allowed_group_id') }
def self.defaults
{
@@ -331,7 +293,8 @@ class ApplicationSetting < ActiveRecord::Base
gitaly_timeout_fast: 10,
gitaly_timeout_medium: 30,
gitaly_timeout_default: 55,
- allow_local_requests_from_hooks_and_services: false
+ allow_local_requests_from_hooks_and_services: false,
+ mirror_available: true
}
end
@@ -359,6 +322,11 @@ class ApplicationSetting < ActiveRecord::Base
::Gitlab::Database.cached_column_exists?(:application_settings, :sidekiq_throttling_enabled)
end
+ def disabled_oauth_sign_in_sources=(sources)
+ sources = (sources || []).map(&:to_s) & Devise.omniauth_providers.map(&:to_s)
+ super(sources)
+ end
+
def domain_whitelist_raw
self.domain_whitelist&.join("\n")
end
@@ -389,17 +357,6 @@ class ApplicationSetting < ActiveRecord::Base
Array(read_attribute(:repository_storages))
end
- # DEPRECATED
- # repository_storage is still required in the API. Remove in 9.0
- # Still used in API v3
- def repository_storage
- repository_storages.first
- end
-
- def repository_storage=(value)
- self.repository_storages = [value]
- end
-
def default_project_visibility=(level)
super(Gitlab::VisibilityLevel.level_value(level))
end
@@ -416,31 +373,6 @@ class ApplicationSetting < ActiveRecord::Base
super(levels.map { |level| Gitlab::VisibilityLevel.level_value(level) })
end
- def performance_bar_allowed_group_id=(group_full_path)
- group_full_path = nil if group_full_path.blank?
-
- if group_full_path.nil?
- if group_full_path != performance_bar_allowed_group_id
- super(group_full_path)
- Gitlab::PerformanceBar.expire_allowed_user_ids_cache
- end
-
- return
- end
-
- group = Group.find_by_full_path(group_full_path)
-
- if group
- if group.id != performance_bar_allowed_group_id
- super(group.id)
- Gitlab::PerformanceBar.expire_allowed_user_ids_cache
- end
- else
- super(nil)
- Gitlab::PerformanceBar.expire_allowed_user_ids_cache
- end
- end
-
def performance_bar_allowed_group
Group.find_by_id(performance_bar_allowed_group_id)
end
@@ -450,15 +382,6 @@ class ApplicationSetting < ActiveRecord::Base
performance_bar_allowed_group_id.present?
end
- # - If `enable` is true, we early return since the actual attribute that holds
- # the enabling/disabling is `performance_bar_allowed_group_id`
- # - If `enable` is false, we set `performance_bar_allowed_group_id` to `nil`
- def performance_bar_enabled=(enable)
- return if Gitlab::Utils.to_boolean(enable)
-
- self.performance_bar_allowed_group_id = nil
- end
-
# Choose one of the available repository storage options. Currently all have
# equal weighting.
def pick_repository_storage
@@ -507,6 +430,16 @@ class ApplicationSetting < ActiveRecord::Base
password_authentication_enabled_for_web? || password_authentication_enabled_for_git?
end
+ delegate :terms, to: :latest_terms, allow_nil: true
+ def latest_terms
+ @latest_terms ||= Term.latest
+ end
+
+ def reset_memoized_terms
+ @latest_terms = nil
+ latest_terms
+ end
+
private
def ensure_uuid!
@@ -520,4 +453,14 @@ class ApplicationSetting < ActiveRecord::Base
errors.add(:repository_storages, "can't include: #{invalid.join(", ")}") unless
invalid.empty?
end
+
+ def terms_exist
+ return unless enforce_terms?
+
+ errors.add(:terms, "You need to set terms to be enforced") unless terms.present?
+ end
+
+ def expire_performance_bar_allowed_user_ids_cache
+ Gitlab::PerformanceBar.expire_allowed_user_ids_cache
+ end
end
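
The per-attribute validation on disabled_oauth_sign_in_sources is replaced by a sanitizing writer that intersects the input with Devise.omniauth_providers, and a new terms_exist validation requires terms content whenever enforce_terms? is set. A minimal sketch of the resulting behaviour, assuming a GitLab Rails console where google_oauth2 is a configured provider (the provider names are illustrative):

    setting = ApplicationSetting.current

    # Unknown providers are silently dropped instead of raising a validation error.
    setting.disabled_oauth_sign_in_sources = %w[google_oauth2 not_a_provider]
    setting.disabled_oauth_sign_in_sources  # => ["google_oauth2"]

    # Enforcing terms now requires an ApplicationSetting::Term row with content.
    setting.enforce_terms = true
    setting.valid?  # => false until such a Term has been created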
diff --git a/app/models/application_setting/term.rb b/app/models/application_setting/term.rb
new file mode 100644
index 00000000000..3b1dfe7e4ef
--- /dev/null
+++ b/app/models/application_setting/term.rb
@@ -0,0 +1,19 @@
+class ApplicationSetting
+ class Term < ActiveRecord::Base
+ include CacheMarkdownField
+ has_many :term_agreements
+
+ validates :terms, presence: true
+
+ cache_markdown_field :terms
+
+ def self.latest
+ order(:id).last
+ end
+
+ def accepted_by_user?(user)
+ user.accepted_term_id == id ||
+ term_agreements.accepted.where(user: user).exists?
+ end
+ end
+end
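
ApplicationSetting::Term stores the Markdown-rendered terms of service; Term.latest returns the newest row, and accepted_by_user? checks either the denormalized accepted_term_id on the user or an accepted term_agreements row. A small usage sketch, assuming a GitLab Rails console (variable names are illustrative):

    term = ApplicationSetting::Term.create!(terms: 'Be excellent to each other.')

    ApplicationSetting::Term.latest == term  # => true, the highest id wins

    # True when user.accepted_term_id == term.id or the user has an accepted
    # term_agreements record for this term.
    term.accepted_by_user?(user)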
diff --git a/app/models/badge.rb b/app/models/badge.rb
index f7e10c2ebfc..265c5d872d4 100644
--- a/app/models/badge.rb
+++ b/app/models/badge.rb
@@ -18,7 +18,7 @@ class Badge < ActiveRecord::Base
scope :order_created_at_asc, -> { reorder(created_at: :asc) }
- validates :link_url, :image_url, url_placeholder: { protocols: %w(http https), placeholder_regex: PLACEHOLDERS_REGEX }
+ validates :link_url, :image_url, url: { protocols: %w(http https) }
validates :type, presence: true
def rendered_link_url(project = nil)
diff --git a/app/models/broadcast_message.rb b/app/models/broadcast_message.rb
index 0b561203914..4aa236555cb 100644
--- a/app/models/broadcast_message.rb
+++ b/app/models/broadcast_message.rb
@@ -19,7 +19,7 @@ class BroadcastMessage < ActiveRecord::Base
after_commit :flush_redis_cache
def self.current
- messages = Rails.cache.fetch(CACHE_KEY) { current_and_future_messages.to_a }
+ messages = Rails.cache.fetch(CACHE_KEY, expires_in: cache_expires_in) { current_and_future_messages.to_a }
return messages if messages.empty?
@@ -36,6 +36,10 @@ class BroadcastMessage < ActiveRecord::Base
where('ends_at > :now', now: Time.zone.now).order_id_asc
end
+ def self.cache_expires_in
+ nil
+ end
+
def active?
started? && !ended?
end
diff --git a/app/models/ci/artifact_blob.rb b/app/models/ci/artifact_blob.rb
index ec56cc53aea..760f01f225b 100644
--- a/app/models/ci/artifact_blob.rb
+++ b/app/models/ci/artifact_blob.rb
@@ -36,16 +36,15 @@ module Ci
def external_url(project, job)
return unless external_link?(job)
- full_path_parts = project.full_path_components
- top_level_group = full_path_parts.shift
+ url_project_path = project.full_path.partition('/').last
artifact_path = [
- '-', *full_path_parts, '-',
+ '-', url_project_path, '-',
'jobs', job.id,
'artifacts', path
].join('/')
- "#{pages_config.protocol}://#{top_level_group}.#{pages_config.host}/#{artifact_path}"
+ "#{project.pages_group_url}/#{artifact_path}"
end
def external_link?(job)
diff --git a/app/models/ci/build.rb b/app/models/ci/build.rb
index 1e066b69c6e..41446946a5e 100644
--- a/app/models/ci/build.rb
+++ b/app/models/ci/build.rb
@@ -3,8 +3,10 @@ module Ci
prepend ArtifactMigratable
include TokenAuthenticatable
include AfterCommitQueue
+ include ObjectStorage::BackgroundMove
include Presentable
include Importable
+ include Gitlab::Utils::StrongMemoize
MissingDependenciesError = Class.new(StandardError)
@@ -17,18 +19,26 @@ module Ci
has_one :last_deployment, -> { order('deployments.id DESC') }, as: :deployable, class_name: 'Deployment'
has_many :trace_sections, class_name: 'Ci::BuildTraceSection'
+ has_many :trace_chunks, class_name: 'Ci::BuildTraceChunk', foreign_key: :build_id
- has_many :job_artifacts, class_name: 'Ci::JobArtifact', foreign_key: :job_id, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
+ has_many :job_artifacts, class_name: 'Ci::JobArtifact', foreign_key: :job_id, dependent: :destroy, inverse_of: :job # rubocop:disable Cop/ActiveRecordDependent
has_one :job_artifacts_archive, -> { where(file_type: Ci::JobArtifact.file_types[:archive]) }, class_name: 'Ci::JobArtifact', inverse_of: :job, foreign_key: :job_id
has_one :job_artifacts_metadata, -> { where(file_type: Ci::JobArtifact.file_types[:metadata]) }, class_name: 'Ci::JobArtifact', inverse_of: :job, foreign_key: :job_id
has_one :job_artifacts_trace, -> { where(file_type: Ci::JobArtifact.file_types[:trace]) }, class_name: 'Ci::JobArtifact', inverse_of: :job, foreign_key: :job_id
- # The "environment" field for builds is a String, and is the unexpanded name
+ has_one :metadata, class_name: 'Ci::BuildMetadata'
+ delegate :timeout, to: :metadata, prefix: true, allow_nil: true
+ delegate :gitlab_deploy_token, to: :project
+
+ ##
+ # The "environment" field for builds is a String, and is the unexpanded name!
+ #
def persisted_environment
- @persisted_environment ||= Environment.find_by(
- name: expanded_environment_name,
- project: project
- )
+ return unless has_environment?
+
+ strong_memoize(:persisted_environment) do
+ Environment.find_by(name: expanded_environment_name, project: project)
+ end
end
serialize :options # rubocop:disable Cop/ActiveRecordSerialize
@@ -45,11 +55,18 @@ module Ci
where('(artifacts_file IS NOT NULL AND artifacts_file <> ?) OR EXISTS (?)',
'', Ci::JobArtifact.select(1).where('ci_builds.id = ci_job_artifacts.job_id').archive)
end
+
+ scope :without_archived_trace, ->() do
+ where('NOT EXISTS (?)', Ci::JobArtifact.select(1).where('ci_builds.id = ci_job_artifacts.job_id').trace)
+ end
+
+ scope :with_artifacts_stored_locally, -> { with_artifacts_archive.where(artifacts_file_store: [nil, LegacyArtifactUploader::Store::LOCAL]) }
scope :with_artifacts_not_expired, ->() { with_artifacts_archive.where('artifacts_expire_at IS NULL OR artifacts_expire_at > ?', Time.now) }
scope :with_expired_artifacts, ->() { with_artifacts_archive.where('artifacts_expire_at < ?', Time.now) }
scope :last_month, ->() { where('created_at > ?', Date.today - 1.month) }
scope :manual_actions, ->() { where(when: :manual, status: COMPLETED_STATUSES + [:manual]) }
scope :ref_protected, -> { where(protected: true) }
+ scope :with_live_trace, -> { where('EXISTS (?)', Ci::BuildTraceChunk.where('ci_builds.id = ci_build_trace_chunks.build_id').select(1)) }
scope :matches_tag_ids, -> (tag_ids) do
matcher = ::ActsAsTaggableOn::Tagging
@@ -81,12 +98,13 @@ module Ci
before_save :ensure_token
before_destroy { unscoped_project }
+ before_create :ensure_metadata
after_create unless: :importing? do |build|
run_after_commit { BuildHooksWorker.perform_async(build.id) }
end
- after_commit :update_project_statistics_after_save, on: [:create, :update]
- after_commit :update_project_statistics, on: :destroy
+ after_save :update_project_statistics_after_save, if: :artifacts_size_changed?
+ after_destroy :update_project_statistics_after_destroy, unless: :project_destroyed?
class << self
# This is needed for url_for to work,
@@ -132,6 +150,7 @@ module Ci
after_transition any => [:success] do |build|
build.run_after_commit do
BuildSuccessWorker.perform_async(id)
+ PagesWorker.perform_async(:deploy, id) if build.pages_generator?
end
end
@@ -151,6 +170,14 @@ module Ci
before_transition any => [:running] do |build|
build.validates_dependencies! unless Feature.enabled?('ci_disable_validates_dependencies')
end
+
+ after_transition pending: :running do |build|
+ build.ensure_metadata.update_timeout_state
+ end
+ end
+
+ def ensure_metadata
+ metadata || build_metadata(project: project)
end
def detailed_status(current_user)
@@ -163,8 +190,13 @@ module Ci
pipeline.manual_actions.where.not(name: name)
end
+ def pages_generator?
+ Gitlab.config.pages.enabled &&
+ self.name == 'pages'
+ end
+
def playable?
- action? && (manual? || complete?)
+ action? && (manual? || retryable?)
end
def action?
@@ -198,7 +230,11 @@ module Ci
end
def expanded_environment_name
- ExpandVariables.expand(environment, simple_variables) if environment
+ return unless has_environment?
+
+ strong_memoize(:expanded_environment_name) do
+ ExpandVariables.expand(environment, simple_variables)
+ end
end
def has_environment?
@@ -229,10 +265,6 @@ module Ci
latest_builds.where('stage_idx < ?', stage_idx)
end
- def timeout
- project.build_timeout
- end
-
def triggered_by?(current_user)
user == current_user
end
@@ -248,31 +280,52 @@ module Ci
Gitlab::Utils.slugify(ref.to_s)
end
- # Variables whose value does not depend on environment
- def simple_variables
- variables(environment: nil)
- end
-
- # All variables, including those dependent on environment, which could
- # contain unexpanded variables.
- def variables(environment: persisted_environment)
- collection = Gitlab::Ci::Variables::Collection.new.tap do |variables|
+ ##
+ # Variables in the environment name scope.
+ #
+ def scoped_variables(environment: expanded_environment_name)
+ Gitlab::Ci::Variables::Collection.new.tap do |variables|
variables.concat(predefined_variables)
variables.concat(project.predefined_variables)
variables.concat(pipeline.predefined_variables)
variables.concat(runner.predefined_variables) if runner
- variables.concat(project.deployment_variables(environment: environment)) if has_environment?
+ variables.concat(project.deployment_variables(environment: environment)) if environment
variables.concat(yaml_variables)
variables.concat(user_variables)
- variables.concat(project.group.secret_variables_for(ref, project)) if project.group
- variables.concat(secret_variables(environment: environment))
+ variables.concat(secret_group_variables)
+ variables.concat(secret_project_variables(environment: environment))
variables.concat(trigger_request.user_variables) if trigger_request
variables.concat(pipeline.variables)
variables.concat(pipeline.pipeline_schedule.job_variables) if pipeline.pipeline_schedule
- variables.concat(persisted_environment_variables) if environment
end
+ end
- collection.to_runner_variables
+ ##
+ # Variables that do not depend on the environment name.
+ #
+ def simple_variables
+ strong_memoize(:simple_variables) do
+ scoped_variables(environment: nil).to_runner_variables
+ end
+ end
+
+ ##
+ # All variables, including persisted environment variables.
+ #
+ def variables
+ Gitlab::Ci::Variables::Collection.new
+ .concat(persisted_variables)
+ .concat(scoped_variables)
+ .concat(persisted_environment_variables)
+ .to_runner_variables
+ end
+
+ ##
+ # Regular Ruby hash of scoped variables, without duplicates that are
+ # possible to be present in an array of hashes returned from `variables`.
+ #
+ def scoped_variables_hash
+ scoped_variables.to_hash
end
def features
@@ -361,17 +414,21 @@ module Ci
build_data = Gitlab::DataBuilder::Build.build(self)
project.execute_hooks(build_data.dup, :job_hooks)
project.execute_services(build_data.dup, :job_hooks)
- PagesService.new(build_data).execute
- project.running_or_pending_build_count(force: true)
+ end
+
+ def browsable_artifacts?
+ artifacts_metadata?
end
def artifacts_metadata_entry(path, **options)
- metadata = Gitlab::Ci::Build::Artifacts::Metadata.new(
- artifacts_metadata.path,
- path,
- **options)
+ artifacts_metadata.use_file do |metadata_path|
+ metadata = Gitlab::Ci::Build::Artifacts::Metadata.new(
+ metadata_path,
+ path,
+ **options)
- metadata.to_entry
+ metadata.to_entry
+ end
end
def erase_artifacts!
@@ -434,7 +491,7 @@ module Ci
def user_variables
Gitlab::Ci::Variables::Collection.new.tap do |variables|
- return variables if user.blank?
+ break variables if user.blank?
variables.append(key: 'GITLAB_USER_ID', value: user.id.to_s)
variables.append(key: 'GITLAB_USER_EMAIL', value: user.email)
@@ -443,9 +500,14 @@ module Ci
end
end
- def secret_variables(environment: persisted_environment)
+ def secret_group_variables
+ return [] unless project.group
+
+ project.group.secret_variables_for(ref, project)
+ end
+
+ def secret_project_variables(environment: persisted_environment)
project.secret_variables_for(ref: ref, environment: environment)
- .map(&:to_runner_variable)
end
def steps
@@ -542,24 +604,37 @@ module Ci
CI_REGISTRY_USER = 'gitlab-ci-token'.freeze
+ def persisted_variables
+ Gitlab::Ci::Variables::Collection.new.tap do |variables|
+ break variables unless persisted?
+
+ variables
+ .concat(pipeline.persisted_variables)
+ .append(key: 'CI_JOB_ID', value: id.to_s)
+ .append(key: 'CI_JOB_URL', value: Gitlab::Routing.url_helpers.project_job_url(project, self))
+ .append(key: 'CI_JOB_TOKEN', value: token, public: false)
+ .append(key: 'CI_BUILD_ID', value: id.to_s)
+ .append(key: 'CI_BUILD_TOKEN', value: token, public: false)
+ .append(key: 'CI_REGISTRY_USER', value: CI_REGISTRY_USER)
+ .append(key: 'CI_REGISTRY_PASSWORD', value: token, public: false)
+ .append(key: 'CI_REPOSITORY_URL', value: repo_url, public: false)
+ .concat(deploy_token_variables)
+ end
+ end
+
def predefined_variables
Gitlab::Ci::Variables::Collection.new.tap do |variables|
variables.append(key: 'CI', value: 'true')
variables.append(key: 'GITLAB_CI', value: 'true')
- variables.append(key: 'GITLAB_FEATURES', value: project.namespace.features.join(','))
+ variables.append(key: 'GITLAB_FEATURES', value: project.licensed_features.join(','))
variables.append(key: 'CI_SERVER_NAME', value: 'GitLab')
variables.append(key: 'CI_SERVER_VERSION', value: Gitlab::VERSION)
- variables.append(key: 'CI_SERVER_REVISION', value: Gitlab::REVISION)
- variables.append(key: 'CI_JOB_ID', value: id.to_s)
+ variables.append(key: 'CI_SERVER_REVISION', value: Gitlab.revision)
variables.append(key: 'CI_JOB_NAME', value: name)
variables.append(key: 'CI_JOB_STAGE', value: stage)
- variables.append(key: 'CI_JOB_TOKEN', value: token, public: false)
variables.append(key: 'CI_COMMIT_SHA', value: sha)
variables.append(key: 'CI_COMMIT_REF_NAME', value: ref)
variables.append(key: 'CI_COMMIT_REF_SLUG', value: ref_slug)
- variables.append(key: 'CI_REGISTRY_USER', value: CI_REGISTRY_USER)
- variables.append(key: 'CI_REGISTRY_PASSWORD', value: token, public: false)
- variables.append(key: 'CI_REPOSITORY_URL', value: repo_url, public: false)
variables.append(key: "CI_COMMIT_TAG", value: ref) if tag?
variables.append(key: "CI_PIPELINE_TRIGGERED", value: 'true') if trigger_request
variables.append(key: "CI_JOB_MANUAL", value: 'true') if action?
@@ -567,9 +642,23 @@ module Ci
end
end
+ def legacy_variables
+ Gitlab::Ci::Variables::Collection.new.tap do |variables|
+ variables.append(key: 'CI_BUILD_REF', value: sha)
+ variables.append(key: 'CI_BUILD_BEFORE_SHA', value: before_sha)
+ variables.append(key: 'CI_BUILD_REF_NAME', value: ref)
+ variables.append(key: 'CI_BUILD_REF_SLUG', value: ref_slug)
+ variables.append(key: 'CI_BUILD_NAME', value: name)
+ variables.append(key: 'CI_BUILD_STAGE', value: stage)
+ variables.append(key: "CI_BUILD_TAG", value: ref) if tag?
+ variables.append(key: "CI_BUILD_TRIGGERED", value: 'true') if trigger_request
+ variables.append(key: "CI_BUILD_MANUAL", value: 'true') if action?
+ end
+ end
+
def persisted_environment_variables
Gitlab::Ci::Variables::Collection.new.tap do |variables|
- return variables unless persisted_environment
+ break variables unless persisted? && persisted_environment.present?
variables.concat(persisted_environment.predefined_variables)
@@ -580,19 +669,12 @@ module Ci
end
end
- def legacy_variables
+ def deploy_token_variables
Gitlab::Ci::Variables::Collection.new.tap do |variables|
- variables.append(key: 'CI_BUILD_ID', value: id.to_s)
- variables.append(key: 'CI_BUILD_TOKEN', value: token, public: false)
- variables.append(key: 'CI_BUILD_REF', value: sha)
- variables.append(key: 'CI_BUILD_BEFORE_SHA', value: before_sha)
- variables.append(key: 'CI_BUILD_REF_NAME', value: ref)
- variables.append(key: 'CI_BUILD_REF_SLUG', value: ref_slug)
- variables.append(key: 'CI_BUILD_NAME', value: name)
- variables.append(key: 'CI_BUILD_STAGE', value: stage)
- variables.append(key: "CI_BUILD_TAG", value: ref) if tag?
- variables.append(key: "CI_BUILD_TRIGGERED", value: 'true') if trigger_request
- variables.append(key: "CI_BUILD_MANUAL", value: 'true') if action?
+ break variables unless gitlab_deploy_token
+
+ variables.append(key: 'CI_DEPLOY_USER', value: gitlab_deploy_token.username)
+ variables.append(key: 'CI_DEPLOY_PASSWORD', value: gitlab_deploy_token.token, public: false)
end
end
@@ -606,16 +688,20 @@ module Ci
pipeline.config_processor.build_attributes(name)
end
- def update_project_statistics
- return unless project
+ def update_project_statistics_after_save
+ update_project_statistics(read_attribute(:artifacts_size).to_i - artifacts_size_was.to_i)
+ end
- ProjectCacheWorker.perform_async(project_id, [], [:build_artifacts_size])
+ def update_project_statistics_after_destroy
+ update_project_statistics(-artifacts_size)
end
- def update_project_statistics_after_save
- if previous_changes.include?('artifacts_size')
- update_project_statistics
- end
+ def update_project_statistics(difference)
+ ProjectStatistics.increment_statistic(project_id, :build_artifacts_size, difference)
+ end
+
+ def project_destroyed?
+ project.pending_delete?
end
end
end
diff --git a/app/models/ci/build_metadata.rb b/app/models/ci/build_metadata.rb
new file mode 100644
index 00000000000..96762f8845c
--- /dev/null
+++ b/app/models/ci/build_metadata.rb
@@ -0,0 +1,35 @@
+module Ci
+ # The purpose of this class is to store Build related data that can be disposed.
+ # Data that should be persisted forever, should be stored with Ci::Build model.
+ class BuildMetadata < ActiveRecord::Base
+ extend Gitlab::Ci::Model
+ include Presentable
+ include ChronicDurationAttribute
+
+ self.table_name = 'ci_builds_metadata'
+
+ belongs_to :build, class_name: 'Ci::Build'
+ belongs_to :project
+
+ validates :build, presence: true
+ validates :project, presence: true
+
+ chronic_duration_attr_reader :timeout_human_readable, :timeout
+
+ enum timeout_source: {
+ unknown_timeout_source: 1,
+ project_timeout_source: 2,
+ runner_timeout_source: 3
+ }
+
+ def update_timeout_state
+ return unless build.runner.present?
+
+ project_timeout = project&.build_timeout
+ timeout = [project_timeout, build.runner.maximum_timeout].compact.min
+ timeout_source = timeout < project_timeout ? :runner_timeout_source : :project_timeout_source
+
+ update(timeout: timeout, timeout_source: timeout_source)
+ end
+ end
+end
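
Ci::BuildMetadata moves the effective job timeout off Ci::Build: when a job transitions from pending to running, update_timeout_state records the smaller of the project's build_timeout and the runner's maximum_timeout, and tags where that value came from. A worked sketch of the selection logic, using illustrative numbers:

    # project.build_timeout  = 3600 (1 hour)
    # runner.maximum_timeout = 1800 (30 minutes)
    #
    # timeout        = [3600, 1800].compact.min                              # => 1800
    # timeout_source = 1800 < 3600 ? :runner_timeout_source : :project_timeout_source
    #
    # build.metadata_timeout                 # => 1800 (delegated from Ci::Build)
    # build.metadata.timeout_human_readable  # => e.g. "30m", formatted by ChronicDuration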
diff --git a/app/models/ci/build_trace_chunk.rb b/app/models/ci/build_trace_chunk.rb
new file mode 100644
index 00000000000..4856f10846c
--- /dev/null
+++ b/app/models/ci/build_trace_chunk.rb
@@ -0,0 +1,180 @@
+module Ci
+ class BuildTraceChunk < ActiveRecord::Base
+ include FastDestroyAll
+ extend Gitlab::Ci::Model
+
+ belongs_to :build, class_name: "Ci::Build", foreign_key: :build_id
+
+ default_value_for :data_store, :redis
+
+ WriteError = Class.new(StandardError)
+
+ CHUNK_SIZE = 128.kilobytes
+ CHUNK_REDIS_TTL = 1.week
+ WRITE_LOCK_RETRY = 10
+ WRITE_LOCK_SLEEP = 0.01.seconds
+ WRITE_LOCK_TTL = 1.minute
+
+ enum data_store: {
+ redis: 1,
+ db: 2
+ }
+
+ class << self
+ def redis_data_key(build_id, chunk_index)
+ "gitlab:ci:trace:#{build_id}:chunks:#{chunk_index}"
+ end
+
+ def redis_data_keys
+ redis.pluck(:build_id, :chunk_index).map do |data|
+ redis_data_key(data.first, data.second)
+ end
+ end
+
+ def redis_delete_data(keys)
+ return if keys.empty?
+
+ Gitlab::Redis::SharedState.with do |redis|
+ redis.del(keys)
+ end
+ end
+
+ ##
+ # FastDestroyAll concerns
+ def begin_fast_destroy
+ redis_data_keys
+ end
+
+ ##
+ # FastDestroyAll concerns
+ def finalize_fast_destroy(keys)
+ redis_delete_data(keys)
+ end
+ end
+
+ ##
+ # Data is memoized for optimizing #size and #end_offset
+ def data
+ @data ||= get_data.to_s
+ end
+
+ def truncate(offset = 0)
+ raise ArgumentError, 'Offset is out of range' if offset > size || offset < 0
+ return if offset == size # Skip the following process as it doesn't affect anything
+
+ self.append("", offset)
+ end
+
+ def append(new_data, offset)
+ raise ArgumentError, 'Offset is out of range' if offset > size || offset < 0
+ raise ArgumentError, 'Chunk size overflow' if CHUNK_SIZE < (offset + new_data.bytesize)
+
+ set_data(data.byteslice(0, offset) + new_data)
+ end
+
+ def size
+ data&.bytesize.to_i
+ end
+
+ def start_offset
+ chunk_index * CHUNK_SIZE
+ end
+
+ def end_offset
+ start_offset + size
+ end
+
+ def range
+ (start_offset...end_offset)
+ end
+
+ def use_database!
+ in_lock do
+ break if db?
+ break unless size > 0
+
+ self.update!(raw_data: data, data_store: :db)
+ self.class.redis_delete_data([redis_data_key])
+ end
+ end
+
+ private
+
+ def get_data
+ if redis?
+ redis_data
+ elsif db?
+ raw_data
+ else
+ raise 'Unsupported data store'
+ end&.force_encoding(Encoding::BINARY) # Redis/Database return UTF-8 string as default
+ end
+
+ def set_data(value)
+ raise ArgumentError, 'too much data' if value.bytesize > CHUNK_SIZE
+
+ in_lock do
+ if redis?
+ redis_set_data(value)
+ elsif db?
+ self.raw_data = value
+ else
+ raise 'Unsupported data store'
+ end
+
+ @data = value
+
+ save! if changed?
+ end
+
+ schedule_to_db if full?
+ end
+
+ def schedule_to_db
+ return if db?
+
+ Ci::BuildTraceChunkFlushWorker.perform_async(id)
+ end
+
+ def full?
+ size == CHUNK_SIZE
+ end
+
+ def redis_data
+ Gitlab::Redis::SharedState.with do |redis|
+ redis.get(redis_data_key)
+ end
+ end
+
+ def redis_set_data(data)
+ Gitlab::Redis::SharedState.with do |redis|
+ redis.set(redis_data_key, data, ex: CHUNK_REDIS_TTL)
+ end
+ end
+
+ def redis_data_key
+ self.class.redis_data_key(build_id, chunk_index)
+ end
+
+ def in_lock
+ write_lock_key = "trace_write:#{build_id}:chunks:#{chunk_index}"
+
+ lease = Gitlab::ExclusiveLease.new(write_lock_key, timeout: WRITE_LOCK_TTL)
+ retry_count = 0
+
+ until uuid = lease.try_obtain
+ # Keep trying until we obtain the lease. To prevent hammering Redis too
+ # much we'll wait for a bit between retries.
+ sleep(WRITE_LOCK_SLEEP)
+ break if WRITE_LOCK_RETRY < (retry_count += 1)
+ end
+
+ raise WriteError, 'Failed to obtain write lock' unless uuid
+
+ self.reload if self.persisted?
+ return yield
+ ensure
+ Gitlab::ExclusiveLease.cancel(write_lock_key, uuid)
+ end
+ end
+end
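
Ci::BuildTraceChunk stores a job's live trace as fixed-size 128 KB chunks, buffered in Redis with a one-week TTL and flushed to the database once a chunk fills up; writes are serialised per chunk with an exclusive lease. A minimal sketch of the byte-offset semantics, assuming a GitLab Rails console with Redis available (the build variable is illustrative):

    chunk = Ci::BuildTraceChunk.create!(build: build, chunk_index: 0)

    chunk.append('hello ', 0)   # write at the start of the chunk
    chunk.append('world', 6)    # continue at byte offset 6
    chunk.data                  # => "hello world"
    chunk.size                  # => 11
    chunk.range                 # => (0...11) within the overall trace

    chunk.truncate(5)           # keep only the first 5 bytes
    chunk.use_database!         # move the payload from Redis into raw_data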
diff --git a/app/models/ci/group.rb b/app/models/ci/group.rb
index 87898b086c6..9c1046e8715 100644
--- a/app/models/ci/group.rb
+++ b/app/models/ci/group.rb
@@ -31,6 +31,14 @@ module Ci
end
end
+ def self.fabricate(stage)
+ stage.statuses.ordered.latest
+ .sort_by(&:sortable_name).group_by(&:group_name)
+ .map do |group_name, grouped_statuses|
+ self.new(stage, name: group_name, jobs: grouped_statuses)
+ end
+ end
+
private
def commit_statuses
diff --git a/app/models/ci/group_variable.rb b/app/models/ci/group_variable.rb
index 1dd0e050ba9..44cb583e1bd 100644
--- a/app/models/ci/group_variable.rb
+++ b/app/models/ci/group_variable.rb
@@ -4,7 +4,9 @@ module Ci
include HasVariable
include Presentable
- belongs_to :group
+ belongs_to :group, class_name: "::Group"
+
+ alias_attribute :secret_value, :value
validates :key, uniqueness: {
scope: :group_id,
diff --git a/app/models/ci/job_artifact.rb b/app/models/ci/job_artifact.rb
index 0a599f72bc7..3b952391b7e 100644
--- a/app/models/ci/job_artifact.rb
+++ b/app/models/ci/job_artifact.rb
@@ -1,15 +1,23 @@
module Ci
class JobArtifact < ActiveRecord::Base
+ include AfterCommitQueue
+ include ObjectStorage::BackgroundMove
extend Gitlab::Ci::Model
belongs_to :project
belongs_to :job, class_name: "Ci::Build", foreign_key: :job_id
+ mount_uploader :file, JobArtifactUploader
+
before_save :set_size, if: :file_changed?
+ after_save :update_project_statistics_after_save, if: :size_changed?
+ after_destroy :update_project_statistics_after_destroy, unless: :project_destroyed?
- mount_uploader :file, JobArtifactUploader
+ after_save :update_file_store, if: :file_changed?
- delegate :open, :exists?, to: :file
+ scope :with_files_stored_locally, -> { where(file_store: [nil, ::JobArtifactUploader::Store::LOCAL]) }
+
+ delegate :exists?, :open, to: :file
enum file_type: {
archive: 1,
@@ -17,12 +25,18 @@ module Ci
trace: 3
}
+ def update_file_store
+ # The file.object_store is set during `uploader.store!`
+ # which happens after object is inserted/updated
+ self.update_column(:file_store, file.object_store)
+ end
+
def self.artifacts_size_for(project)
self.where(project: project).sum(:size)
end
- def set_size
- self.size = file.size
+ def local_store?
+ [nil, ::JobArtifactUploader::Store::LOCAL].include?(self.file_store)
end
def expire_in
@@ -35,5 +49,28 @@ module Ci
ChronicDuration.parse(value)&.seconds&.from_now
end
end
+
+ private
+
+ def set_size
+ self.size = file.size
+ end
+
+ def update_project_statistics_after_save
+ update_project_statistics(size.to_i - size_was.to_i)
+ end
+
+ def update_project_statistics_after_destroy
+ update_project_statistics(-self.size)
+ end
+
+ def update_project_statistics(difference)
+ ProjectStatistics.increment_statistic(project_id, :build_artifacts_size, difference)
+ end
+
+ def project_destroyed?
+ # Use job.project to avoid extra DB query for project
+ job.project.pending_delete?
+ end
end
end
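
Both Ci::Build and Ci::JobArtifact now keep ProjectStatistics#build_artifacts_size current by applying a signed delta directly, instead of scheduling a full ProjectCacheWorker recount: on save the delta is the change in size, and on destroy it is the negated size (skipped when the project is pending deletion). A sketch of the resulting bookkeeping, with illustrative sizes:

    # An artifact grows from 100 bytes to 250 bytes on save:
    #   ProjectStatistics.increment_statistic(project_id, :build_artifacts_size, 250 - 100)
    #
    # The same artifact is destroyed later:
    #   ProjectStatistics.increment_statistic(project_id, :build_artifacts_size, -250)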
diff --git a/app/models/ci/legacy_stage.rb b/app/models/ci/legacy_stage.rb
index 9b536af672b..ce691875e42 100644
--- a/app/models/ci/legacy_stage.rb
+++ b/app/models/ci/legacy_stage.rb
@@ -16,11 +16,7 @@ module Ci
end
def groups
- @groups ||= statuses.ordered.latest
- .sort_by(&:sortable_name).group_by(&:group_name)
- .map do |group_name, grouped_statuses|
- Ci::Group.new(self, name: group_name, jobs: grouped_statuses)
- end
+ @groups ||= Ci::Group.fabricate(self)
end
def to_param
diff --git a/app/models/ci/pipeline.rb b/app/models/ci/pipeline.rb
index 44f9bdf111e..e5caa3ffa41 100644
--- a/app/models/ci/pipeline.rb
+++ b/app/models/ci/pipeline.rb
@@ -6,13 +6,20 @@ module Ci
include AfterCommitQueue
include Presentable
include Gitlab::OptimisticLocking
+ include Gitlab::Utils::StrongMemoize
+ include AtomicInternalId
+ include EnumWithNil
belongs_to :project, inverse_of: :pipelines
belongs_to :user
belongs_to :auto_canceled_by, class_name: 'Ci::Pipeline'
belongs_to :pipeline_schedule, class_name: 'Ci::PipelineSchedule'
- has_many :stages
+ has_internal_id :iid, scope: :project, presence: false, init: ->(s) do
+ s&.project&.pipelines&.maximum(:iid) || s&.project&.pipelines&.count
+ end
+
+ has_many :stages, -> { order(position: :asc) }, inverse_of: :pipeline
has_many :statuses, class_name: 'CommitStatus', foreign_key: :commit_id, inverse_of: :pipeline
has_many :builds, foreign_key: :commit_id, inverse_of: :pipeline
has_many :trigger_requests, dependent: :destroy, foreign_key: :commit_id # rubocop:disable Cop/ActiveRecordDependent
@@ -31,18 +38,24 @@ module Ci
has_many :auto_canceled_pipelines, class_name: 'Ci::Pipeline', foreign_key: 'auto_canceled_by_id'
has_many :auto_canceled_jobs, class_name: 'CommitStatus', foreign_key: 'auto_canceled_by_id'
+ accepts_nested_attributes_for :variables, reject_if: :persisted?
+
delegate :id, to: :project, prefix: true
delegate :full_path, to: :project, prefix: true
- validates :source, exclusion: { in: %w(unknown), unless: :importing? }, on: :create
validates :sha, presence: { unless: :importing? }
validates :ref, presence: { unless: :importing? }
validates :status, presence: { unless: :importing? }
validate :valid_commit_sha, unless: :importing?
+ # Replace validator below with
+ # `validates :source, presence: { unless: :importing? }, on: :create`
+ # when removing Gitlab.rails5? code.
+ validate :valid_source, unless: :importing?, on: :create
+
after_create :keep_around_commits, unless: :importing?
- enum source: {
+ enum_with_nil source: {
unknown: nil,
push: 1,
web: 2,
@@ -52,7 +65,7 @@ module Ci
external: 6
}
- enum config_source: {
+ enum_with_nil config_source: {
unknown_source: nil,
repository_source: 1,
auto_devops_source: 2
@@ -242,6 +255,20 @@ module Ci
stage unless stage.statuses_count.zero?
end
+ ##
+ # TODO We do not completely switch to persisted stages because of
+ # race conditions with setting statuses gitlab-ce#23257.
+ #
+ def ordered_stages
+ return legacy_stages unless complete?
+
+ if Feature.enabled?('ci_pipeline_persisted_stages')
+ stages
+ else
+ legacy_stages
+ end
+ end
+
def legacy_stages
# TODO, this needs refactoring, see gitlab-ce#26481.
@@ -268,19 +295,39 @@ module Ci
end
def git_author_name
- commit.try(:author_name)
+ strong_memoize(:git_author_name) do
+ commit.try(:author_name)
+ end
end
def git_author_email
- commit.try(:author_email)
+ strong_memoize(:git_author_email) do
+ commit.try(:author_email)
+ end
end
def git_commit_message
- commit.try(:message)
+ strong_memoize(:git_commit_message) do
+ commit.try(:message)
+ end
end
def git_commit_title
- commit.try(:title)
+ strong_memoize(:git_commit_title) do
+ commit.try(:title)
+ end
+ end
+
+ def git_commit_full_title
+ strong_memoize(:git_commit_full_title) do
+ commit.try(:full_title)
+ end
+ end
+
+ def git_commit_description
+ strong_memoize(:git_commit_description) do
+ commit.try(:description)
+ end
end
def short_sha
@@ -361,23 +408,36 @@ module Ci
def stage_seeds
return [] unless config_processor
- @stage_seeds ||= config_processor.stage_seeds(self)
+ strong_memoize(:stage_seeds) do
+ seeds = config_processor.stages_attributes.map do |attributes|
+ Gitlab::Ci::Pipeline::Seed::Stage.new(self, attributes)
+ end
+
+ seeds.select(&:included?)
+ end
end
def seeds_size
- @seeds_size ||= stage_seeds.sum(&:size)
+ stage_seeds.sum(&:size)
end
def has_kubernetes_active?
project.deployment_platform&.active?
end
- def has_stage_seeds?
- stage_seeds.any?
+ def has_warnings?
+ number_of_warnings.positive?
end
- def has_warnings?
- builds.latest.failed_but_allowed.any?
+ def number_of_warnings
+ BatchLoader.for(id).batch(default_value: 0) do |pipeline_ids, loader|
+ ::Ci::Build.where(commit_id: pipeline_ids)
+ .latest
+ .failed_but_allowed
+ .group(:commit_id)
+ .count
+ .each { |id, amount| loader.call(id, amount) }
+ end
end
def set_config_source
@@ -388,6 +448,9 @@ module Ci
end
end
+ ##
+ # TODO, setting yaml_errors should be moved to the pipeline creation chain.
+ #
def config_processor
return unless ci_yaml_file
return @config_processor if defined?(@config_processor)
@@ -460,7 +523,8 @@ module Ci
def update_status
retry_optimistic_lock(self) do
- case latest_builds_status
+ case latest_builds_status.to_s
+ when 'created' then nil
when 'pending' then enqueue
when 'running' then run
when 'success' then succeed
@@ -468,15 +532,38 @@ module Ci
when 'canceled' then cancel
when 'skipped' then skip
when 'manual' then block
+ else
+ raise HasStatus::UnknownStatusError,
+ "Unknown status `#{latest_builds_status}`"
end
end
end
+ def protected_ref?
+ strong_memoize(:protected_ref) { project.protected_for?(ref) }
+ end
+
+ def legacy_trigger
+ strong_memoize(:legacy_trigger) { trigger_requests.first }
+ end
+
+ def persisted_variables
+ Gitlab::Ci::Variables::Collection.new.tap do |variables|
+ break variables unless persisted?
+
+ variables.append(key: 'CI_PIPELINE_ID', value: id.to_s)
+ variables.append(key: 'CI_PIPELINE_URL', value: Gitlab::Routing.url_helpers.project_pipeline_url(project, self))
+ end
+ end
+
def predefined_variables
Gitlab::Ci::Variables::Collection.new
- .append(key: 'CI_PIPELINE_ID', value: id.to_s)
+ .append(key: 'CI_PIPELINE_IID', value: iid.to_s)
.append(key: 'CI_CONFIG_PATH', value: ci_yaml_file_path)
.append(key: 'CI_PIPELINE_SOURCE', value: source.to_s)
+ .append(key: 'CI_COMMIT_MESSAGE', value: git_commit_message.to_s)
+ .append(key: 'CI_COMMIT_TITLE', value: git_commit_full_title.to_s)
+ .append(key: 'CI_COMMIT_DESCRIPTION', value: git_commit_description.to_s)
end
def queued_duration
@@ -551,5 +638,11 @@ module Ci
project.repository.keep_around(self.sha)
project.repository.keep_around(self.before_sha)
end
+
+ def valid_source
+ if source.nil? || source == "unknown"
+ errors.add(:source, "invalid source")
+ end
+ end
end
end
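
has_warnings? on Ci::Pipeline (and, further down, on Ci::Stage) is now backed by number_of_warnings, which uses BatchLoader so that checking many pipelines issues a single grouped count over failed-but-allowed builds rather than one query per pipeline. A minimal sketch, assuming a GitLab Rails console:

    pipelines = project.pipelines.order(id: :desc).limit(20)

    # number_of_warnings returns a lazy BatchLoader value; the grouped COUNT
    # over failed-but-allowed builds runs once for all 20 pipelines.
    pipelines.each do |pipeline|
      puts "##{pipeline.id}: #{pipeline.number_of_warnings} warning(s)" if pipeline.has_warnings?
    end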
diff --git a/app/models/ci/pipeline_schedule_variable.rb b/app/models/ci/pipeline_schedule_variable.rb
index af989fb14b4..03df4e3e638 100644
--- a/app/models/ci/pipeline_schedule_variable.rb
+++ b/app/models/ci/pipeline_schedule_variable.rb
@@ -5,6 +5,8 @@ module Ci
belongs_to :pipeline_schedule
+ alias_attribute :secret_value, :value
+
validates :key, uniqueness: { scope: :pipeline_schedule_id }
end
end
diff --git a/app/models/ci/pipeline_variable.rb b/app/models/ci/pipeline_variable.rb
index de5aae17a15..38e14ffbc0c 100644
--- a/app/models/ci/pipeline_variable.rb
+++ b/app/models/ci/pipeline_variable.rb
@@ -5,6 +5,8 @@ module Ci
belongs_to :pipeline
+ alias_attribute :secret_value, :value
+
validates :key, uniqueness: { scope: :pipeline_id }
end
end
diff --git a/app/models/ci/runner.rb b/app/models/ci/runner.rb
index 7173f88f1c7..8c9aacca8de 100644
--- a/app/models/ci/runner.rb
+++ b/app/models/ci/runner.rb
@@ -3,42 +3,67 @@ module Ci
extend Gitlab::Ci::Model
include Gitlab::SQL::Pattern
include RedisCacheable
+ include ChronicDurationAttribute
RUNNER_QUEUE_EXPIRY_TIME = 60.minutes
ONLINE_CONTACT_TIMEOUT = 1.hour
UPDATE_DB_RUNNER_INFO_EVERY = 40.minutes
AVAILABLE_SCOPES = %w[specific shared active paused online].freeze
- FORM_EDITABLE = %i[description tag_list active run_untagged locked access_level].freeze
+ FORM_EDITABLE = %i[description tag_list active run_untagged locked access_level maximum_timeout_human_readable].freeze
has_many :builds
- has_many :runner_projects, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
+ has_many :runner_projects, inverse_of: :runner, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
has_many :projects, through: :runner_projects
+ has_many :runner_namespaces, inverse_of: :runner
+ has_many :groups, through: :runner_namespaces
has_one :last_build, ->() { order('id DESC') }, class_name: 'Ci::Build'
before_validation :set_default_values
- scope :specific, ->() { where(is_shared: false) }
- scope :shared, ->() { where(is_shared: true) }
- scope :active, ->() { where(active: true) }
- scope :paused, ->() { where(active: false) }
- scope :online, ->() { where('contacted_at > ?', contact_time_deadline) }
- scope :ordered, ->() { order(id: :desc) }
+ scope :specific, -> { where(is_shared: false) }
+ scope :shared, -> { where(is_shared: true) }
+ scope :active, -> { where(active: true) }
+ scope :paused, -> { where(active: false) }
+ scope :online, -> { where('contacted_at > ?', contact_time_deadline) }
+ scope :ordered, -> { order(id: :desc) }
- scope :owned_or_shared, ->(project_id) do
- joins('LEFT JOIN ci_runner_projects ON ci_runner_projects.runner_id = ci_runners.id')
- .where("ci_runner_projects.project_id = :project_id OR ci_runners.is_shared = true", project_id: project_id)
+ scope :belonging_to_project, -> (project_id) {
+ joins(:runner_projects).where(ci_runner_projects: { project_id: project_id })
+ }
+
+ scope :belonging_to_parent_group_of_project, -> (project_id) {
+ project_groups = ::Group.joins(:projects).where(projects: { id: project_id })
+ hierarchy_groups = Gitlab::GroupHierarchy.new(project_groups).base_and_ancestors
+
+ joins(:groups).where(namespaces: { id: hierarchy_groups })
+ }
+
+ scope :owned_or_shared, -> (project_id) do
+ union = Gitlab::SQL::Union.new(
+ [belonging_to_project(project_id), belonging_to_parent_group_of_project(project_id), shared],
+ remove_duplicates: false
+ )
+ from("(#{union.to_sql}) ci_runners")
end
scope :assignable_for, ->(project) do
# FIXME: That `to_sql` is needed to workaround a weird Rails bug.
# Without that, placeholders would miss one and couldn't match.
where(locked: false)
- .where.not("id IN (#{project.runners.select(:id).to_sql})").specific
+ .where.not("ci_runners.id IN (#{project.runners.select(:id).to_sql})")
+ .project_type
end
validate :tag_constraints
validates :access_level, presence: true
+ validates :runner_type, presence: true
+
+ validate :no_projects, unless: :project_type?
+ validate :no_groups, unless: :group_type?
+ validate :any_project, if: :project_type?
+ validate :exactly_one_group, if: :group_type?
+ validate :validate_is_shared
acts_as_taggable
@@ -49,7 +74,19 @@ module Ci
ref_protected: 1
}
- cached_attr_reader :version, :revision, :platform, :architecture, :contacted_at, :ip_address
+ enum runner_type: {
+ instance_type: 1,
+ group_type: 2,
+ project_type: 3
+ }
+
+ cached_attr_reader :version, :revision, :platform, :architecture, :ip_address, :contacted_at
+
+ chronic_duration_attr :maximum_timeout_human_readable, :maximum_timeout
+
+ validates :maximum_timeout, allow_nil: true,
+ numericality: { greater_than_or_equal_to: 600,
+ message: 'needs to be at least 10 minutes' }
# Searches for runners matching the given query.
#
@@ -76,9 +113,22 @@ module Ci
end
def assign_to(project, current_user = nil)
- self.is_shared = false if shared?
- self.save
- project.runner_projects.create(runner_id: self.id)
+ if shared?
+ self.is_shared = false if shared?
+ self.runner_type = :project_type
+ elsif group_type?
+ raise ArgumentError, 'Transitioning a group runner to a project runner is not supported'
+ end
+
+ begin
+ transaction do
+ self.projects << project
+ self.save!
+ end
+ rescue ActiveRecord::RecordInvalid => e
+ self.errors.add(:assign_to, e.message)
+ false
+ end
end
def display_name
@@ -113,6 +163,14 @@ module Ci
!shared?
end
+ def assigned_to_group?
+ runner_namespaces.any?
+ end
+
+ def assigned_to_project?
+ runner_projects.any?
+ end
+
def can_pick?(build)
return false if self.ref_protected? && !build.protected?
@@ -161,9 +219,13 @@ module Ci
cache_attributes(values)
- if persist_cached_data?
- self.assign_attributes(values)
- self.save if self.changed?
+ # We save data without validation, it will always change due to `contacted_at`
+ self.update_columns(values) if persist_cached_data?
+ end
+
+ def pick_build!(build)
+ if can_pick?(build)
+ tick_runner_queue
end
end
@@ -198,7 +260,37 @@ module Ci
end
def assignable_for?(project_id)
- is_shared? || projects.exists?(id: project_id)
+ self.class.owned_or_shared(project_id).where(id: self.id).any?
+ end
+
+ def no_projects
+ if projects.any?
+ errors.add(:runner, 'cannot have projects assigned')
+ end
+ end
+
+ def no_groups
+ if groups.any?
+ errors.add(:runner, 'cannot have groups assigned')
+ end
+ end
+
+ def any_project
+ unless projects.any?
+ errors.add(:runner, 'needs to be assigned to at least one project')
+ end
+ end
+
+ def exactly_one_group
+ unless groups.one?
+ errors.add(:runner, 'needs to be assigned to exactly one group')
+ end
+ end
+
+ def validate_is_shared
+ unless is_shared? == instance_type?
+ errors.add(:is_shared, 'is not equal to instance_type?')
+ end
end
def accepting_tags?(build)
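
Ci::Runner gains an explicit runner_type enum (instance_type, group_type, project_type) with validations tying each type to its owners, a configurable maximum_timeout of at least 10 minutes, and an owned_or_shared scope built from a SQL UNION of project, ancestor-group, and shared runners. A small sketch of the type rules, assuming a GitLab Rails console (variable names are illustrative):

    runner.project_type?   # must have at least one project and no groups
    runner.group_type?     # must belong to exactly one group
    runner.instance_type?  # shared runner; is_shared? must agree with this

    # Assigning a shared runner to a project demotes it to a project runner;
    # group runners cannot be converted and raise ArgumentError instead.
    runner.assign_to(project)

    # Human-readable timeout via ChronicDurationAttribute; values below 600
    # seconds fail validation.
    runner.maximum_timeout_human_readable = '1h 30m'
    runner.maximum_timeout                # => 5400, as parsed by ChronicDuration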
diff --git a/app/models/ci/runner_namespace.rb b/app/models/ci/runner_namespace.rb
new file mode 100644
index 00000000000..29508fdd326
--- /dev/null
+++ b/app/models/ci/runner_namespace.rb
@@ -0,0 +1,11 @@
+module Ci
+ class RunnerNamespace < ActiveRecord::Base
+ extend Gitlab::Ci::Model
+
+ belongs_to :runner, inverse_of: :runner_namespaces, validate: true
+ belongs_to :namespace, inverse_of: :runner_namespaces, class_name: '::Namespace'
+ belongs_to :group, class_name: '::Group', foreign_key: :namespace_id
+
+ validates :runner_id, uniqueness: { scope: :namespace_id }
+ end
+end
diff --git a/app/models/ci/runner_project.rb b/app/models/ci/runner_project.rb
index 505d178ba8e..52437047300 100644
--- a/app/models/ci/runner_project.rb
+++ b/app/models/ci/runner_project.rb
@@ -2,8 +2,8 @@ module Ci
class RunnerProject < ActiveRecord::Base
extend Gitlab::Ci::Model
- belongs_to :runner
- belongs_to :project
+ belongs_to :runner, inverse_of: :runner_projects
+ belongs_to :project, inverse_of: :runner_projects
validates :runner_id, uniqueness: { scope: :project_id }
end
diff --git a/app/models/ci/stage.rb b/app/models/ci/stage.rb
index 75b8ea2a371..ea07f37e6c1 100644
--- a/app/models/ci/stage.rb
+++ b/app/models/ci/stage.rb
@@ -13,14 +13,27 @@ module Ci
has_many :statuses, class_name: 'CommitStatus', foreign_key: :stage_id
has_many :builds, foreign_key: :stage_id
- validates :project, presence: true, unless: :importing?
- validates :pipeline, presence: true, unless: :importing?
- validates :name, presence: true, unless: :importing?
+ with_options unless: :importing? do
+ validates :project, presence: true
+ validates :pipeline, presence: true
+ validates :name, presence: true
+ validates :position, presence: true
+ end
- after_initialize do |stage|
+ after_initialize do
self.status = DEFAULT_STATUS if self.status.nil?
end
+ before_validation unless: :importing? do
+ next if position.present?
+
+ self.position = statuses.select(:stage_idx)
+ .where('stage_idx IS NOT NULL')
+ .group(:stage_idx)
+ .order('COUNT(*) DESC')
+ .first&.stage_idx.to_i
+ end
+
state_machine :status, initial: :created do
event :enqueue do
transition created: :pending
@@ -55,16 +68,44 @@ module Ci
def update_status
retry_optimistic_lock(self) do
case statuses.latest.status
+ when 'created' then nil
when 'pending' then enqueue
when 'running' then run
when 'success' then succeed
when 'failed' then drop
when 'canceled' then cancel
when 'manual' then block
- when 'skipped' then skip
- else skip
+ when 'skipped', nil then skip
+ else
+ raise HasStatus::UnknownStatusError,
+ "Unknown status `#{statuses.latest.status}`"
end
end
end
+
+ def groups
+ @groups ||= Ci::Group.fabricate(self)
+ end
+
+ def has_warnings?
+ number_of_warnings.positive?
+ end
+
+ def number_of_warnings
+ BatchLoader.for(id).batch(default_value: 0) do |stage_ids, loader|
+ ::Ci::Build.where(stage_id: stage_ids)
+ .latest
+ .failed_but_allowed
+ .group(:stage_id)
+ .count
+ .each { |id, amount| loader.call(id, amount) }
+ end
+ end
+
+ def detailed_status(current_user)
+ Gitlab::Ci::Status::Stage::Factory
+ .new(self, current_user)
+ .fabricate!
+ end
end
end
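A usage sketch for the warning helpers added to Ci::Stage: number_of_warnings uses BatchLoader, so checking several stages in one request collapses the counts into a single grouped query (the pipeline lookup below is illustrative):

pipeline = Ci::Pipeline.find(pipeline_id)  # pipeline_id assumed to be in scope
pipeline.stages.each do |stage|
  # Each call registers the stage id; the grouped COUNT query runs once,
  # when the first count is actually dereferenced.
  puts "#{stage.name}: warnings? #{stage.has_warnings?}"
end
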
diff --git a/app/models/ci/variable.rb b/app/models/ci/variable.rb
index 7c71291de84..452cb910bca 100644
--- a/app/models/ci/variable.rb
+++ b/app/models/ci/variable.rb
@@ -6,6 +6,8 @@ module Ci
belongs_to :project
+ alias_attribute :secret_value, :value
+
validates :key, uniqueness: {
scope: [:project_id, :environment_scope],
message: "(%{value}) has already been taken"
diff --git a/app/models/clusters/applications/jupyter.rb b/app/models/clusters/applications/jupyter.rb
new file mode 100644
index 00000000000..975d434e1a4
--- /dev/null
+++ b/app/models/clusters/applications/jupyter.rb
@@ -0,0 +1,92 @@
+module Clusters
+ module Applications
+ class Jupyter < ActiveRecord::Base
+ VERSION = '0.0.1'.freeze
+
+ self.table_name = 'clusters_applications_jupyter'
+
+ include ::Clusters::Concerns::ApplicationCore
+ include ::Clusters::Concerns::ApplicationStatus
+ include ::Clusters::Concerns::ApplicationData
+
+ belongs_to :oauth_application, class_name: 'Doorkeeper::Application'
+
+ default_value_for :version, VERSION
+
+ def set_initial_status
+ return unless not_installable?
+
+ if cluster&.application_ingress_installed? && cluster.application_ingress.external_ip
+ self.status = 'installable'
+ end
+ end
+
+ def chart
+ "#{name}/jupyterhub"
+ end
+
+ def repository
+ 'https://jupyterhub.github.io/helm-chart/'
+ end
+
+ def values
+ content_values.to_yaml
+ end
+
+ def install_command
+ Gitlab::Kubernetes::Helm::InstallCommand.new(
+ name,
+ chart: chart,
+ values: values,
+ repository: repository
+ )
+ end
+
+ def callback_url
+ "http://#{hostname}/hub/oauth_callback"
+ end
+
+ private
+
+ def specification
+ {
+ "ingress" => {
+ "hosts" => [hostname]
+ },
+ "hub" => {
+ "extraEnv" => {
+ "GITLAB_HOST" => gitlab_url
+ },
+ "cookieSecret" => cookie_secret
+ },
+ "proxy" => {
+ "secretToken" => secret_token
+ },
+ "auth" => {
+ "gitlab" => {
+ "clientId" => oauth_application.uid,
+ "clientSecret" => oauth_application.secret,
+ "callbackUrl" => callback_url
+ }
+ }
+ }
+ end
+
+ def gitlab_url
+ Gitlab.config.gitlab.url
+ end
+
+ def content_values
+ YAML.load_file(chart_values_file).deep_merge!(specification)
+ end
+
+ def secret_token
+ @secret_token ||= SecureRandom.hex(32)
+ end
+
+ def cookie_secret
+ @cookie_secret ||= SecureRandom.hex(32)
+ end
+ end
+ end
+end
diff --git a/app/models/clusters/applications/prometheus.rb b/app/models/clusters/applications/prometheus.rb
index 7b25d8c4089..48137c2ed68 100644
--- a/app/models/clusters/applications/prometheus.rb
+++ b/app/models/clusters/applications/prometheus.rb
@@ -3,7 +3,7 @@ module Clusters
class Prometheus < ActiveRecord::Base
include PrometheusAdapter
- VERSION = "2.0.0".freeze
+ VERSION = '6.7.3'.freeze
self.table_name = 'clusters_applications_prometheus'
@@ -37,6 +37,7 @@ module Clusters
Gitlab::Kubernetes::Helm::InstallCommand.new(
name,
chart: chart,
+ version: version,
values: values
)
end
@@ -49,6 +50,11 @@ module Clusters
# ensures headers containing auth data are appended to original k8s client options
options = kube_client.rest_client.options.merge(headers: kube_client.headers)
RestClient::Resource.new(proxy_url, options)
+ rescue Kubeclient::HttpError
+ # If users have mistakenly set parameters or removed the dependent clusters,
+ # `proxy_url` could raise an exception because GitLab cannot communicate with the cluster.
+ # Since `PrometheusAdapter#can_query?` is eagerly loaded on environment pages in GitLab,
+ # we need to silence the exceptions.
end
private
diff --git a/app/models/clusters/applications/runner.rb b/app/models/clusters/applications/runner.rb
index 16efe90fa27..e6f795f3e0b 100644
--- a/app/models/clusters/applications/runner.rb
+++ b/app/models/clusters/applications/runner.rb
@@ -43,12 +43,21 @@ module Clusters
def create_and_assign_runner
transaction do
- project.runners.create!(name: 'kubernetes-cluster', tag_list: %w(kubernetes cluster)).tap do |runner|
+ Ci::Runner.create!(runner_create_params).tap do |runner|
update!(runner_id: runner.id)
end
end
end
+ def runner_create_params
+ {
+ name: 'kubernetes-cluster',
+ runner_type: :project_type,
+ tag_list: %w(kubernetes cluster),
+ projects: [project]
+ }
+ end
+
def gitlab_url
Gitlab::Routing.url_helpers.root_url(only_path: false)
end
diff --git a/app/models/clusters/cluster.rb b/app/models/clusters/cluster.rb
index 49eb069016a..b426b1bf8a1 100644
--- a/app/models/clusters/cluster.rb
+++ b/app/models/clusters/cluster.rb
@@ -8,8 +8,10 @@ module Clusters
Applications::Helm.application_name => Applications::Helm,
Applications::Ingress.application_name => Applications::Ingress,
Applications::Prometheus.application_name => Applications::Prometheus,
- Applications::Runner.application_name => Applications::Runner
+ Applications::Runner.application_name => Applications::Runner,
+ Applications::Jupyter.application_name => Applications::Jupyter
}.freeze
+ DEFAULT_ENVIRONMENT = '*'.freeze
belongs_to :user
@@ -25,6 +27,7 @@ module Clusters
has_one :application_ingress, class_name: 'Clusters::Applications::Ingress'
has_one :application_prometheus, class_name: 'Clusters::Applications::Prometheus'
has_one :application_runner, class_name: 'Clusters::Applications::Runner'
+ has_one :application_jupyter, class_name: 'Clusters::Applications::Jupyter'
accepts_nested_attributes_for :provider_gcp, update_only: true
accepts_nested_attributes_for :platform_kubernetes, update_only: true
@@ -38,6 +41,7 @@ module Clusters
delegate :active?, to: :platform_kubernetes, prefix: true, allow_nil: true
delegate :installed?, to: :application_helm, prefix: true, allow_nil: true
+ delegate :installed?, to: :application_ingress, prefix: true, allow_nil: true
enum platform_type: {
kubernetes: 1
@@ -50,6 +54,11 @@ module Clusters
scope :enabled, -> { where(enabled: true) }
scope :disabled, -> { where(enabled: false) }
+ scope :user_provided, -> { where(provider_type: ::Clusters::Cluster.provider_types[:user]) }
+ scope :gcp_provided, -> { where(provider_type: ::Clusters::Cluster.provider_types[:gcp]) }
+ scope :gcp_installed, -> { gcp_provided.includes(:provider_gcp).where(cluster_providers_gcp: { status: ::Clusters::Providers::Gcp.state_machines[:status].states[:created].value }) }
+
+ scope :default_environment, -> { where(environment_scope: DEFAULT_ENVIRONMENT) }
def status_name
if provider
@@ -68,7 +77,8 @@ module Clusters
application_helm || build_application_helm,
application_ingress || build_application_ingress,
application_prometheus || build_application_prometheus,
- application_runner || build_application_runner
+ application_runner || build_application_runner,
+ application_jupyter || build_application_jupyter
]
end
diff --git a/app/models/clusters/concerns/application_status.rb b/app/models/clusters/concerns/application_status.rb
index 7b7c8eac773..8f3eb75bfa9 100644
--- a/app/models/clusters/concerns/application_status.rb
+++ b/app/models/clusters/concerns/application_status.rb
@@ -4,6 +4,8 @@ module Clusters
extend ActiveSupport::Concern
included do
+ scope :installed, -> { where(status: self.state_machines[:status].states[:installed].value) }
+
state_machine :status, initial: :not_installable do
state :not_installable, value: -2
state :errored, value: -1
diff --git a/app/models/clusters/platforms/kubernetes.rb b/app/models/clusters/platforms/kubernetes.rb
index ba6552f238f..36631d57ad1 100644
--- a/app/models/clusters/platforms/kubernetes.rb
+++ b/app/models/clusters/platforms/kubernetes.rb
@@ -11,12 +11,12 @@ module Clusters
attr_encrypted :password,
mode: :per_attribute_iv,
- key: Gitlab::Application.secrets.db_key_base,
+ key: Settings.attr_encrypted_db_key_base_truncated,
algorithm: 'aes-256-cbc'
attr_encrypted :token,
mode: :per_attribute_iv,
- key: Gitlab::Application.secrets.db_key_base,
+ key: Settings.attr_encrypted_db_key_base_truncated,
algorithm: 'aes-256-cbc'
before_validation :enforce_namespace_to_lower_case
diff --git a/app/models/clusters/providers/gcp.rb b/app/models/clusters/providers/gcp.rb
index 7fac32466ab..4db1bb35c12 100644
--- a/app/models/clusters/providers/gcp.rb
+++ b/app/models/clusters/providers/gcp.rb
@@ -11,7 +11,7 @@ module Clusters
attr_encrypted :access_token,
mode: :per_attribute_iv,
- key: Gitlab::Application.secrets.db_key_base,
+ key: Settings.attr_encrypted_db_key_base_truncated,
algorithm: 'aes-256-cbc'
validates :gcp_project_id,
diff --git a/app/models/commit.rb b/app/models/commit.rb
index cceae5efb72..56d4c86774e 100644
--- a/app/models/commit.rb
+++ b/app/models/commit.rb
@@ -30,9 +30,12 @@ class Commit
MIN_SHA_LENGTH = Gitlab::Git::Commit::MIN_SHA_LENGTH
COMMIT_SHA_PATTERN = /\h{#{MIN_SHA_LENGTH},40}/.freeze
+ # Used by GFM to match and present link extensions on node texts and hrefs.
+ LINK_EXTENSION_PATTERN = /(patch)/.freeze
def banzai_render_context(field)
- context = { pipeline: :single_line, project: self.project }
+ pipeline = field == :description ? :commit_description : :single_line
+ context = { pipeline: pipeline, project: self.project }
context[:author] = self.author if self.author
context
@@ -102,6 +105,10 @@ class Commit
end
end
end
+
+ def parent_class
+ ::Project
+ end
end
attr_accessor :raw
@@ -142,7 +149,8 @@ class Commit
end
def self.link_reference_pattern
- @link_reference_pattern ||= super("commit", /(?<commit>#{COMMIT_SHA_PATTERN})/)
+ @link_reference_pattern ||=
+ super("commit", /(?<commit>#{COMMIT_SHA_PATTERN})?(\.(?<extension>#{LINK_EXTENSION_PATTERN}))?/)
end
def to_reference(from = nil, full: false)
@@ -175,7 +183,7 @@ class Commit
if safe_message.blank?
no_commit_message
else
- safe_message.split("\n", 2).first
+ safe_message.split(/[\r\n]/, 2).first
end
end
@@ -216,8 +224,34 @@ class Commit
Gitlab::ClosingIssueExtractor.new(project, current_user).closed_by_message(safe_message)
end
+ def lazy_author
+ BatchLoader.for(author_email.downcase).batch do |emails, loader|
+ # A Hash that maps user Emails to the corresponding User objects. The
+ # Emails at this point are the _primary_ Emails of the Users.
+ users_for_emails = User
+ .by_any_email(emails)
+ .each_with_object({}) { |user, hash| hash[user.email] = user }
+
+ users_for_ids = users_for_emails
+ .values
+ .each_with_object({}) { |user, hash| hash[user.id] = user }
+
+ # Some commits may have used an alternative Email address. In this case we
+ # need to query the "emails" table to map those addresses to User objects.
+ Email
+ .where(email: emails - users_for_emails.keys)
+ .pluck(:email, :user_id)
+ .each { |(email, id)| users_for_emails[email] = users_for_ids[id] }
+
+ users_for_emails.each { |email, user| loader.call(email, user) }
+ end
+ end
+
def author
- User.find_by_any_email(author_email.downcase)
+ # We use __sync so that we get the actual objects back (including an actual
+ # nil), instead of a wrapper, as returning a wrapped nil breaks a lot of
+ # code.
+ lazy_author.__sync
end
request_cache(:author) { author_email.downcase }
@@ -244,7 +278,7 @@ class Commit
end
def notes_with_associations
- notes.includes(:author)
+ notes.includes(:author, :award_emoji)
end
def merge_requests
@@ -416,6 +450,12 @@ class Commit
# no-op but needs to be defined since #persisted? is defined
end
+ def touch_later
+ # No-op.
+ # This method is called by ActiveRecord.
+ # We don't want to do anything for `Commit` model, so this is empty.
+ end
+
WIP_REGEX = /\A\s*(((?i)(\[WIP\]|WIP:|WIP)\s|WIP$))|(fixup!|squash!)\s/.freeze
def work_in_progress?
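A usage sketch for the batched author lookup added to Commit: lazy_author only registers the email, and the User/Email queries run once when the first author is dereferenced (the repository call is an assumption about the surrounding API):

commits = project.repository.commits('master', limit: 20)  # project assumed
commits.each(&:lazy_author)      # queue every author email in one batch
authors = commits.map(&:author)  # the batch resolves here, in a single pass
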
diff --git a/app/models/commit_status.rb b/app/models/commit_status.rb
index 9fb5b7efec6..97516079b66 100644
--- a/app/models/commit_status.rb
+++ b/app/models/commit_status.rb
@@ -2,6 +2,8 @@ class CommitStatus < ActiveRecord::Base
include HasStatus
include Importable
include AfterCommitQueue
+ include Presentable
+ include EnumWithNil
self.table_name = 'ci_builds'
@@ -38,7 +40,7 @@ class CommitStatus < ActiveRecord::Base
scope :retried_ordered, -> { retried.ordered.includes(project: :namespace) }
scope :after_stage, -> (index) { where('stage_idx > ?', index) }
- enum failure_reason: {
+ enum_with_nil failure_reason: {
unknown_failure: nil,
script_failure: 1,
api_failure: 2,
@@ -87,7 +89,7 @@ class CommitStatus < ActiveRecord::Base
transition [:created, :pending, :running, :manual] => :canceled
end
- before_transition created: [:pending, :running] do |commit_status|
+ before_transition [:created, :skipped, :manual] => :pending do |commit_status|
commit_status.queued_at = Time.now
end
@@ -141,7 +143,7 @@ class CommitStatus < ActiveRecord::Base
end
def group_name
- name.to_s.gsub(%r{\d+[\.\s:/\\]+\d+\s*}, '').strip
+ name.to_s.gsub(%r{\d+[\s:/\\]+\d+\s*}, '').strip
end
def failed_but_allowed?
diff --git a/app/models/concerns/atomic_internal_id.rb b/app/models/concerns/atomic_internal_id.rb
index 4b66725a3e6..164c704260e 100644
--- a/app/models/concerns/atomic_internal_id.rb
+++ b/app/models/concerns/atomic_internal_id.rb
@@ -25,22 +25,23 @@ module AtomicInternalId
extend ActiveSupport::Concern
module ClassMethods
- def has_internal_id(column, scope:, init:) # rubocop:disable Naming/PredicateName
- before_validation(on: :create) do
- if read_attribute(column).blank?
- scope_attrs = { scope => association(scope).reader }
+ def has_internal_id(column, scope:, init:, presence: true) # rubocop:disable Naming/PredicateName
+ before_validation :"ensure_#{scope}_#{column}!", on: :create
+ validates column, presence: presence
+
+ define_method("ensure_#{scope}_#{column}!") do
+ scope_value = association(scope).reader
+
+ if read_attribute(column).blank? && scope_value
+ scope_attrs = { scope_value.class.table_name.singularize.to_sym => scope_value }
usage = self.class.table_name.to_sym
new_iid = InternalId.generate_next(self, scope_attrs, usage, init)
write_attribute(column, new_iid)
end
- end
- validates column, presence: true, numericality: true
+ read_attribute(column)
+ end
end
end
-
- def to_param
- iid.to_s
- end
end
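For reference, the Deployment change later in this diff shows the shape of a has_internal_id call site; condensed:

class Deployment < ActiveRecord::Base
  include AtomicInternalId
  include IidRoutes  # supplies to_param now that AtomicInternalId no longer does

  belongs_to :project, required: true

  has_internal_id :iid, scope: :project,
    init: ->(s) { s&.project&.deployments&.maximum(:iid) }
end
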
diff --git a/app/models/concerns/avatarable.rb b/app/models/concerns/avatarable.rb
index d35e37935fb..095897b08e3 100644
--- a/app/models/concerns/avatarable.rb
+++ b/app/models/concerns/avatarable.rb
@@ -3,11 +3,15 @@ module Avatarable
included do
prepend ShadowMethods
+ include ObjectStorage::BackgroundMove
+ include Gitlab::Utils::StrongMemoize
validate :avatar_type, if: ->(user) { user.avatar.present? && user.avatar_changed? }
validates :avatar, file_size: { maximum: 200.kilobytes.to_i }
mount_uploader :avatar, AvatarUploader
+
+ after_initialize :add_avatar_to_batch
end
module ShadowMethods
@@ -17,11 +21,22 @@ module Avatarable
avatar_path(only_path: args.fetch(:only_path, true)) || super
end
+
+ def retrieve_upload(identifier, paths)
+ upload = retrieve_upload_from_batch(identifier)
+
+ # This fallback is needed when deleting an upload, because the upload may
+ # have already been removed from the DB. We have to do an explicit `#nil?`
+ # check because the return value is a BatchLoader instance.
+ upload = super if upload.nil?
+
+ upload
+ end
end
def avatar_type
unless self.avatar.image?
- self.errors.add :avatar, "only images allowed"
+ errors.add :avatar, "file format is not supported. Please try one of the following supported formats: #{AvatarUploader::IMAGE_EXT.join(', ')}"
end
end
@@ -30,12 +45,13 @@ module Avatarable
asset_host = ActionController::Base.asset_host
use_asset_host = asset_host.present?
+ use_authentication = respond_to?(:public?) && !public?
# Avatars for private and internal groups and projects require authentication to be viewed,
# which means they can only be served by Rails, on the regular GitLab host.
# If an asset host is configured, we need to return the fully qualified URL
# instead of only the avatar path, so that Rails doesn't prefix it with the asset host.
- if use_asset_host && respond_to?(:public?) && !public?
+ if use_asset_host && use_authentication
use_asset_host = false
only_path = false
end
@@ -48,6 +64,39 @@ module Avatarable
url_base << gitlab_config.relative_url_root
end
- url_base + avatar.url
+ url_base + avatar.local_url
+ end
+
+ # Path that is persisted in the tracking Upload model. Used to fetch the
+ # upload from the model.
+ def upload_paths(identifier)
+ avatar_mounter.blank_uploader.store_dirs.map { |store, path| File.join(path, identifier) }
+ end
+
+ private
+
+ def retrieve_upload_from_batch(identifier)
+ BatchLoader.for(identifier: identifier, model: self).batch(key: self.class) do |upload_params, loader, args|
+ model_class = args[:key]
+ paths = upload_params.flat_map do |params|
+ params[:model].upload_paths(params[:identifier])
+ end
+
+ Upload.where(uploader: AvatarUploader, path: paths).find_each do |upload|
+ model = model_class.instantiate('id' => upload.model_id)
+
+ loader.call({ model: model, identifier: File.basename(upload.path) }, upload)
+ end
+ end
+ end
+
+ def add_avatar_to_batch
+ return unless avatar_mounter
+
+ avatar_mounter.read_identifiers.each { |identifier| retrieve_upload_from_batch(identifier) }
+ end
+
+ def avatar_mounter
+ strong_memoize(:avatar_mounter) { _mounter(:avatar) }
end
end
diff --git a/app/models/concerns/awardable.rb b/app/models/concerns/awardable.rb
index d8394415362..fce37e7f78e 100644
--- a/app/models/concerns/awardable.rb
+++ b/app/models/concerns/awardable.rb
@@ -79,11 +79,7 @@ module Awardable
end
def user_can_award?(current_user, name)
- if user_authored?(current_user)
- !awardable_votes?(normalize_name(name))
- else
- true
- end
+ awardable_by_user?(current_user, name) && Ability.allowed?(current_user, :award_emoji, self)
end
def user_authored?(current_user)
@@ -119,4 +115,12 @@ module Awardable
def normalize_name(name)
Gitlab::Emoji.normalize_emoji_name(name)
end
+
+ def awardable_by_user?(current_user, name)
+ if user_authored?(current_user)
+ !awardable_votes?(normalize_name(name))
+ else
+ true
+ end
+ end
end
diff --git a/app/models/concerns/batch_destroy_dependent_associations.rb b/app/models/concerns/batch_destroy_dependent_associations.rb
new file mode 100644
index 00000000000..353ee2e73d0
--- /dev/null
+++ b/app/models/concerns/batch_destroy_dependent_associations.rb
@@ -0,0 +1,28 @@
+# Provides a way to work around Rails issue where dependent objects are all
+# loaded into memory before destroyed: https://github.com/rails/rails/issues/22510.
+#
+# This concern allows an ActiveRecord model to destroy all its dependent
+# associations in batches. The idea is borrowed from https://github.com/thisismydesign/batch_dependent_associations.
+#
+# The differences from that gem are:
+#
+# 1. We allow excluding certain associations.
+# 2. We don't need to support delete_all since we can use the EachBatch concern.
+module BatchDestroyDependentAssociations
+ extend ActiveSupport::Concern
+
+ DEPENDENT_ASSOCIATIONS_BATCH_SIZE = 1000
+
+ def dependent_associations_to_destroy
+ self.class.reflect_on_all_associations(:has_many).select { |assoc| assoc.options[:dependent] == :destroy }
+ end
+
+ def destroy_dependent_associations_in_batches(exclude: [])
+ dependent_associations_to_destroy.each do |association|
+ next if exclude.include?(association.name)
+
+ # rubocop:disable GitlabSecurity/PublicSend
+ public_send(association.name).find_each(batch_size: DEPENDENT_ASSOCIATIONS_BATCH_SIZE, &:destroy)
+ end
+ end
+end
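A minimal usage sketch for the concern above (the including model and the excluded association are illustrative assumptions):

class Project < ActiveRecord::Base
  include BatchDestroyDependentAssociations

  has_many :builds, dependent: :destroy
  has_many :container_repositories, dependent: :destroy
end

project = Project.find(project_id)  # project_id assumed to be in scope
project.destroy_dependent_associations_in_batches(exclude: [:container_repositories])
project.destroy
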
diff --git a/app/models/concerns/cache_markdown_field.rb b/app/models/concerns/cache_markdown_field.rb
index 4ae5dd8c677..9f6358cecbe 100644
--- a/app/models/concerns/cache_markdown_field.rb
+++ b/app/models/concerns/cache_markdown_field.rb
@@ -11,7 +11,9 @@ module CacheMarkdownField
extend ActiveSupport::Concern
# Increment this number every time the renderer changes its output
- CACHE_VERSION = 3
+ CACHE_REDCARPET_VERSION = 3
+ CACHE_COMMONMARK_VERSION_START = 10
+ CACHE_COMMONMARK_VERSION = 11
# changes to these attributes cause the cache to be invalidated
INVALIDATED_BY = %w[author project].freeze
@@ -49,12 +51,14 @@ module CacheMarkdownField
# Always include a project key, or Banzai complains
project = self.project if self.respond_to?(:project)
- group = self.group if self.respond_to?(:group)
+ group = self.group if self.respond_to?(:group)
context = cached_markdown_fields[field].merge(project: project, group: group)
# Banzai is less strict about authors, so don't always have an author key
context[:author] = self.author if self.respond_to?(:author)
+ context[:markdown_engine] = markdown_engine
+
context
end
@@ -69,7 +73,7 @@ module CacheMarkdownField
Banzai::Renderer.cacheless_render_field(self, markdown_field, options)
]
end.to_h
- updates['cached_markdown_version'] = CacheMarkdownField::CACHE_VERSION
+ updates['cached_markdown_version'] = latest_cached_markdown_version
updates.each {|html_field, data| write_attribute(html_field, data) }
end
@@ -90,7 +94,7 @@ module CacheMarkdownField
markdown_changed = attribute_changed?(markdown_field) || false
html_changed = attribute_changed?(html_field) || false
- CacheMarkdownField::CACHE_VERSION == cached_markdown_version &&
+ latest_cached_markdown_version == cached_markdown_version &&
(html_changed || markdown_changed == html_changed)
end
@@ -109,6 +113,24 @@ module CacheMarkdownField
__send__(cached_markdown_fields.html_field(markdown_field)) # rubocop:disable GitlabSecurity/PublicSend
end
+ def latest_cached_markdown_version
+ return CacheMarkdownField::CACHE_COMMONMARK_VERSION unless cached_markdown_version
+
+ if cached_markdown_version < CacheMarkdownField::CACHE_COMMONMARK_VERSION_START
+ CacheMarkdownField::CACHE_REDCARPET_VERSION
+ else
+ CacheMarkdownField::CACHE_COMMONMARK_VERSION
+ end
+ end
+
+ def markdown_engine
+ if latest_cached_markdown_version < CacheMarkdownField::CACHE_COMMONMARK_VERSION_START
+ :redcarpet
+ else
+ :common_mark
+ end
+ end
+
included do
cattr_reader :cached_markdown_fields do
FieldData.new
diff --git a/app/models/concerns/cacheable_attributes.rb b/app/models/concerns/cacheable_attributes.rb
new file mode 100644
index 00000000000..d58d7165969
--- /dev/null
+++ b/app/models/concerns/cacheable_attributes.rb
@@ -0,0 +1,74 @@
+module CacheableAttributes
+ extend ActiveSupport::Concern
+
+ included do
+ after_commit { self.class.expire }
+ end
+
+ class_methods do
+ def cache_key
+ "#{name}:#{Gitlab::VERSION}:#{Gitlab.migrations_hash}:#{Rails.version}".freeze
+ end
+
+ # Can be overridden
+ def current_without_cache
+ last
+ end
+
+ # Can be overridden
+ def defaults
+ {}
+ end
+
+ def build_from_defaults(attributes = {})
+ new(defaults.merge(attributes))
+ end
+
+ def cached
+ if RequestStore.active?
+ RequestStore[:"#{name}_cached_attributes"] ||= retrieve_from_cache
+ else
+ retrieve_from_cache
+ end
+ end
+
+ def retrieve_from_cache
+ record = Rails.cache.read(cache_key)
+ ensure_cache_setup if record.present?
+
+ record
+ end
+
+ def current
+ cached_record = cached
+ return cached_record if cached_record.present?
+
+ current_without_cache.tap { |current_record| current_record&.cache! }
+ rescue => e
+ if Rails.env.production?
+ Rails.logger.warn("Cached record for #{name} couldn't be loaded, falling back to uncached record: #{e}")
+ else
+ raise e
+ end
+ # Fall back to an uncached value if there are any problems (e.g. Redis down)
+ current_without_cache
+ end
+
+ def expire
+ Rails.cache.delete(cache_key)
+ rescue
+ # Gracefully handle when Redis is not available. For example,
+ # omnibus may fail here during gitlab:assets:compile.
+ end
+
+ def ensure_cache_setup
+ # This is a workaround for a Rails bug that causes attribute methods not
+ # to be loaded when read from cache: https://github.com/rails/rails/issues/27348
+ define_attribute_methods
+ end
+ end
+
+ def cache!
+ Rails.cache.write(self.class.cache_key, self)
+ end
+end
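A usage sketch for CacheableAttributes, assuming ApplicationSetting is one intended consumer (the defaults override is illustrative):

class ApplicationSetting < ActiveRecord::Base
  include CacheableAttributes

  def self.defaults
    { signup_enabled: true, default_projects_limit: 100 }
  end
end

ApplicationSetting.current              # cached copy when present, DB fallback otherwise
ApplicationSetting.build_from_defaults  # unsaved record seeded with the defaults above
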
diff --git a/app/models/concerns/chronic_duration_attribute.rb b/app/models/concerns/chronic_duration_attribute.rb
new file mode 100644
index 00000000000..593a9b3d71d
--- /dev/null
+++ b/app/models/concerns/chronic_duration_attribute.rb
@@ -0,0 +1,39 @@
+module ChronicDurationAttribute
+ extend ActiveSupport::Concern
+
+ class_methods do
+ def chronic_duration_attr_reader(virtual_attribute, source_attribute)
+ define_method(virtual_attribute) do
+ chronic_duration_attributes[virtual_attribute] || output_chronic_duration_attribute(source_attribute)
+ end
+ end
+
+ def chronic_duration_attr_writer(virtual_attribute, source_attribute, parameters = {})
+ chronic_duration_attr_reader(virtual_attribute, source_attribute)
+
+ define_method("#{virtual_attribute}=") do |value|
+ chronic_duration_attributes[virtual_attribute] = value.presence || parameters[:default].presence.to_s
+
+ begin
+ new_value = value.present? ? ChronicDuration.parse(value).to_i : parameters[:default].presence
+ assign_attributes(source_attribute => new_value)
+ rescue ChronicDuration::DurationParseError
+ # ignore error as it will be caught by validation
+ end
+ end
+
+ validates virtual_attribute, allow_nil: true, duration: true
+ end
+
+ alias_method :chronic_duration_attr, :chronic_duration_attr_writer
+ end
+
+ def chronic_duration_attributes
+ @chronic_duration_attributes ||= {}
+ end
+
+ def output_chronic_duration_attribute(source_attribute)
+ value = attributes[source_attribute.to_s]
+ ChronicDuration.output(value, format: :short) if value
+ end
+end
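A usage sketch mirroring the Ci::Runner hookup at the top of this diff: chronic_duration_attr pairs a human-readable virtual attribute with an integer column holding seconds.

runner = Ci::Runner.new
runner.maximum_timeout_human_readable = '1h 30m'
runner.maximum_timeout                  # => 5400

runner = Ci::Runner.new(maximum_timeout: 3600)
runner.maximum_timeout_human_readable   # => "1h"
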
diff --git a/app/models/concerns/deployment_platform.rb b/app/models/concerns/deployment_platform.rb
index faa94204e33..52851b3d0b2 100644
--- a/app/models/concerns/deployment_platform.rb
+++ b/app/models/concerns/deployment_platform.rb
@@ -1,16 +1,24 @@
module DeploymentPlatform
- # EE would override this and utilize the extra argument
+ # EE would override this and utilize the environment argument
+ # rubocop:disable Gitlab/ModuleWithInstanceVariables
def deployment_platform(environment: nil)
- @deployment_platform ||=
- find_cluster_platform_kubernetes ||
- find_kubernetes_service_integration ||
- build_cluster_and_deployment_platform
+ @deployment_platform ||= {}
+
+ @deployment_platform[environment] ||= find_deployment_platform(environment)
end
private
- def find_cluster_platform_kubernetes
- clusters.find_by(enabled: true)&.platform_kubernetes
+ def find_deployment_platform(environment)
+ find_cluster_platform_kubernetes(environment: environment) ||
+ find_kubernetes_service_integration ||
+ build_cluster_and_deployment_platform
+ end
+
+ # EE would override this and utilize the environment argument
+ def find_cluster_platform_kubernetes(environment: nil)
+ clusters.enabled.default_environment
+ .last&.platform_kubernetes
end
def find_kubernetes_service_integration
diff --git a/app/models/concerns/diff_file.rb b/app/models/concerns/diff_file.rb
new file mode 100644
index 00000000000..72332072012
--- /dev/null
+++ b/app/models/concerns/diff_file.rb
@@ -0,0 +1,9 @@
+module DiffFile
+ extend ActiveSupport::Concern
+
+ def to_hash
+ keys = Gitlab::Git::Diff::SERIALIZE_KEYS - [:diff]
+
+ as_json(only: keys).merge(diff: diff).with_indifferent_access
+ end
+end
diff --git a/app/models/concerns/enum_with_nil.rb b/app/models/concerns/enum_with_nil.rb
new file mode 100644
index 00000000000..6b37903da20
--- /dev/null
+++ b/app/models/concerns/enum_with_nil.rb
@@ -0,0 +1,33 @@
+module EnumWithNil
+ extend ActiveSupport::Concern
+
+ included do
+ def self.enum_with_nil(definitions)
+ # use original `enum` to auto-define all methods
+ enum(definitions)
+
+ # override auto-defined methods only for the
+ # key which uses nil value
+ definitions.each do |name, values|
+ next unless key_with_nil = values.key(nil)
+
+ # E.g. for enum_with_nil failure_reason: { unknown_failure: nil }
+ # this overrides auto-generated method `unknown_failure?`
+ define_method("#{key_with_nil}?") do
+ Gitlab.rails5? ? self[name].nil? : super()
+ end
+
+ # E.g. for enum_with_nil failure_reason: { unknown_failure: nil }
+ # this overrides auto-generated method `failure_reason`
+ define_method(name) do
+ orig = super()
+
+ return orig unless Gitlab.rails5?
+ return orig unless orig.nil?
+
+ self.class.public_send(name.to_s.pluralize).key(nil) # rubocop:disable GitlabSecurity/PublicSend
+ end
+ end
+ end
+ end
+end
diff --git a/app/models/concerns/fast_destroy_all.rb b/app/models/concerns/fast_destroy_all.rb
new file mode 100644
index 00000000000..7ea042c6742
--- /dev/null
+++ b/app/models/concerns/fast_destroy_all.rb
@@ -0,0 +1,91 @@
+##
+# This module is for replacing `dependent: :destroy` and `before_destroy` hooks.
+#
+# In general, `destroy_all` is inefficient because it calls each callback with `DELETE` queries i.e. O(n), whereas,
+# `delete_all` is efficient as it deletes all rows with a single `DELETE` query.
+#
+# It's better to use `delete_all` as our best practice; however,
+# if external data (e.g. ObjectStorage, FileStorage or Redis) are associated with database records,
+# that is difficult to accomplish.
+#
+# This module defines a format to use `delete_all` and delete associated external data.
+# Here is an example:
+#
+# Situation
+# - `Project` has many `Ci::BuildTraceChunk` through `Ci::Build`
+# - `Ci::BuildTraceChunk` stores associated data in Redis, so it relies on `dependent: :destroy` and `before_destroy` for the deletion
+#
+# How to use
+# - Define `use_fast_destroy :build_trace_chunks` in `Project` model.
+# - Define `begin_fast_destroy` and `finalize_fast_destroy(params)` in `Ci::BuildTraceChunk` model.
+# - Use `fast_destroy_all` instead of `destroy` and `destroy_all`
+# - Remove `dependent: :destroy` and `before_destroy` as they are no longer needed
+#
+# Expectation
+# - When a project is `destroy`ed, the associated trace_chunks will be deleted by `delete_all`,
+# and the associated data will be removed, too.
+# - When `fast_destroy_all` is called, it behaves the same way.
+module FastDestroyAll
+ extend ActiveSupport::Concern
+
+ ForbiddenActionError = Class.new(StandardError)
+
+ included do
+ before_destroy do
+ raise ForbiddenActionError, '`destroy` and `destroy_all` are forbidden. Please use `fast_destroy_all`'
+ end
+ end
+
+ class_methods do
+ ##
+ # This method deletes rows and their associated external data efficiently
+ #
+ # This method can replace `destroy` and `destroy_all` without requiring an `after_destroy` hook
+ def fast_destroy_all
+ params = begin_fast_destroy
+
+ delete_all
+
+ finalize_fast_destroy(params)
+ end
+
+ ##
+ # This method returns identifiers to delete associated external data (e.g. file paths, redis keys)
+ #
+ # This method must be defined in a fast-destroyable model
+ def begin_fast_destroy
+ raise NotImplementedError
+ end
+
+ ##
+ # This method deletes associated external data with the identifiers returned by `begin_fast_destroy`
+ #
+ # This method must be defined in a fast-destroyable model
+ def finalize_fast_destroy(params)
+ raise NotImplementedError
+ end
+ end
+
+ module Helpers
+ extend ActiveSupport::Concern
+
+ class_methods do
+ ##
+ # This method is to be defined on models which have fast-destroyable models as children,
+ # and lets us avoid using the `dependent: :destroy` hook
+ def use_fast_destroy(relation)
+ before_destroy(prepend: true) do
+ perform_fast_destroy(public_send(relation)) # rubocop:disable GitlabSecurity/PublicSend
+ end
+ end
+ end
+
+ def perform_fast_destroy(subject)
+ params = subject.begin_fast_destroy
+
+ run_after_commit do
+ subject.finalize_fast_destroy(params)
+ end
+ end
+ end
+end
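Following the "How to use" comment above, a condensed sketch of both sides of the contract (the Redis cleanup body is illustrative; the real Ci::BuildTraceChunk implementation is not part of this diff):

class Ci::BuildTraceChunk < ActiveRecord::Base
  include FastDestroyAll

  def self.begin_fast_destroy
    pluck(:build_id, :chunk_index)  # identifiers of the external (Redis) data
  end

  def self.finalize_fast_destroy(keys)
    # delete the Redis entries addressed by `keys`
  end
end

class Project < ActiveRecord::Base
  include FastDestroyAll::Helpers
  use_fast_destroy :build_trace_chunks  # external data is cleaned up after commit
end
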
diff --git a/app/models/concerns/group_descendant.rb b/app/models/concerns/group_descendant.rb
index 01957da0bf3..261ace57a17 100644
--- a/app/models/concerns/group_descendant.rb
+++ b/app/models/concerns/group_descendant.rb
@@ -37,7 +37,20 @@ module GroupDescendant
parent ||= preloaded.detect { |possible_parent| possible_parent.is_a?(Group) && possible_parent.id == child.parent_id }
if parent.nil? && !child.parent_id.nil?
- raise ArgumentError.new('parent was not preloaded')
+ parent = child.parent
+
+ exception = ArgumentError.new <<~MSG
+ parent: [GroupDescendant: #{parent.inspect}] was not preloaded for [#{child.inspect}]")
+ This error is not user facing, but causes a +1 query.
+ MSG
+ extras = {
+ parent: parent,
+ child: child,
+ preloaded: preloaded.map(&:full_path)
+ }
+ issue_url = 'https://gitlab.com/gitlab-org/gitlab-ce/issues/40785'
+
+ Gitlab::Sentry.track_exception(exception, issue_url: issue_url, extra: extras)
end
if parent.nil? && hierarchy_top.present?
diff --git a/app/models/concerns/has_status.rb b/app/models/concerns/has_status.rb
index 7c3ed96bc28..72c236a0fc7 100644
--- a/app/models/concerns/has_status.rb
+++ b/app/models/concerns/has_status.rb
@@ -11,6 +11,8 @@ module HasStatus
STATUSES_ENUM = { created: 0, pending: 1, running: 2, success: 3,
failed: 4, canceled: 5, skipped: 6, manual: 7 }.freeze
+ UnknownStatusError = Class.new(StandardError)
+
class_methods do
def status_sql
scope_relevant = respond_to?(:exclude_ignored) ? exclude_ignored : all
diff --git a/app/models/concerns/has_variable.rb b/app/models/concerns/has_variable.rb
index 8a241e4374a..c8e20c0ab81 100644
--- a/app/models/concerns/has_variable.rb
+++ b/app/models/concerns/has_variable.rb
@@ -13,7 +13,7 @@ module HasVariable
attr_encrypted :value,
mode: :per_attribute_iv_and_salt,
insecure_mode: true,
- key: Gitlab::Application.secrets.db_key_base,
+ key: Settings.attr_encrypted_db_key_base,
algorithm: 'aes-256-cbc'
def key=(new_key)
diff --git a/app/models/concerns/iid_routes.rb b/app/models/concerns/iid_routes.rb
new file mode 100644
index 00000000000..246748cf52c
--- /dev/null
+++ b/app/models/concerns/iid_routes.rb
@@ -0,0 +1,9 @@
+module IidRoutes
+ ##
+ # This automagically enforces all related routes to use `iid` instead of `id`.
+ # If you want to use `iid` for some routes and `id` for others, this module should not be included;
+ # instead, define `iid` or `id` explicitly at each route generator, e.g. pipeline_path(project.id, pipeline.iid)
+ def to_param
+ iid.to_s
+ end
+end
diff --git a/app/models/concerns/issuable.rb b/app/models/concerns/issuable.rb
index 5a566f3ac02..b93c1145f82 100644
--- a/app/models/concerns/issuable.rb
+++ b/app/models/concerns/issuable.rb
@@ -97,8 +97,6 @@ module Issuable
strip_attributes :title
- after_save :ensure_metrics, unless: :imported?
-
# We want to use optimistic lock for cases when only title or description are involved
# http://api.rubyonrails.org/classes/ActiveRecord/Locking/Optimistic.html
def locking_enabled?
@@ -109,6 +107,10 @@ module Issuable
false
end
+ def etag_caching_enabled?
+ false
+ end
+
def has_multiple_assignees?
assignees.count > 1
end
@@ -137,7 +139,7 @@ module Issuable
fuzzy_search(query, [:title, :description])
end
- def sort(method, excluded_labels: [])
+ def sort_by_attribute(method, excluded_labels: [])
sorted =
case method.to_s
when 'downvotes_desc' then order_downvotes_desc
diff --git a/app/models/concerns/milestoneish.rb b/app/models/concerns/milestoneish.rb
index caf8afa97f9..967fd9c5eea 100644
--- a/app/models/concerns/milestoneish.rb
+++ b/app/models/concerns/milestoneish.rb
@@ -45,11 +45,11 @@ module Milestoneish
end
def sorted_issues(user)
- issues_visible_to_user(user).preload_associations.sort('label_priority')
+ issues_visible_to_user(user).preload_associations.sort_by_attribute('label_priority')
end
def sorted_merge_requests
- merge_requests.sort('label_priority')
+ merge_requests.sort_by_attribute('label_priority')
end
def upcoming?
@@ -102,14 +102,14 @@ module Milestoneish
Gitlab::TimeTrackingFormatter.output(total_issue_time_estimate)
end
- private
-
def count_issues_by_state(user)
memoize_per_user(user, :count_issues_by_state) do
issues_visible_to_user(user).reorder(nil).group(:state).count
end
end
+ private
+
def memoize_per_user(user, method_name)
memoized_users[method_name][user&.id] ||= yield
end
diff --git a/app/models/concerns/nonatomic_internal_id.rb b/app/models/concerns/nonatomic_internal_id.rb
deleted file mode 100644
index 9d0c9b8512f..00000000000
--- a/app/models/concerns/nonatomic_internal_id.rb
+++ /dev/null
@@ -1,22 +0,0 @@
-module NonatomicInternalId
- extend ActiveSupport::Concern
-
- included do
- validate :set_iid, on: :create
- validates :iid, presence: true, numericality: true
- end
-
- def set_iid
- if iid.blank?
- parent = project || group
- records = parent.public_send(self.class.name.tableize) # rubocop:disable GitlabSecurity/PublicSend
- max_iid = records.maximum(:iid)
-
- self.iid = max_iid.to_i + 1
- end
- end
-
- def to_param
- iid.to_s
- end
-end
diff --git a/app/models/concerns/participable.rb b/app/models/concerns/participable.rb
index e48bc0be410..01b1ef9f82c 100644
--- a/app/models/concerns/participable.rb
+++ b/app/models/concerns/participable.rb
@@ -98,6 +98,10 @@ module Participable
participants.merge(ext.users)
+ filter_by_ability(participants)
+ end
+
+ def filter_by_ability(participants)
case self
when PersonalSnippet
Ability.users_that_can_read_personal_snippet(participants.to_a, self)
diff --git a/app/models/concerns/presentable.rb b/app/models/concerns/presentable.rb
index 7b33b837004..bc4fbd19a02 100644
--- a/app/models/concerns/presentable.rb
+++ b/app/models/concerns/presentable.rb
@@ -1,4 +1,12 @@
module Presentable
+ extend ActiveSupport::Concern
+
+ class_methods do
+ def present(attributes)
+ all.map { |klass_object| klass_object.present(attributes) }
+ end
+ end
+
def present(**attributes)
Gitlab::View::Presenter::Factory
.new(self, attributes)
diff --git a/app/models/concerns/project_features_compatibility.rb b/app/models/concerns/project_features_compatibility.rb
index b3fec99c816..1f7d78a2efe 100644
--- a/app/models/concerns/project_features_compatibility.rb
+++ b/app/models/concerns/project_features_compatibility.rb
@@ -1,4 +1,4 @@
-# Makes api V3 compatible with old project features permissions methods
+# Makes api V4 compatible with old project features permissions methods
#
# After migrating issues_enabled merge_requests_enabled builds_enabled snippets_enabled and wiki_enabled
# fields to a new table "project_features", support for the old fields is still needed in the API.
diff --git a/app/models/concerns/protected_ref.rb b/app/models/concerns/protected_ref.rb
index 454374121f3..94eef4ff7cd 100644
--- a/app/models/concerns/protected_ref.rb
+++ b/app/models/concerns/protected_ref.rb
@@ -31,7 +31,7 @@ module ProtectedRef
end
end
- def protected_ref_accessible_to?(ref, user, action:, protected_refs: nil)
+ def protected_ref_accessible_to?(ref, user, project:, action:, protected_refs: nil)
access_levels_for_ref(ref, action: action, protected_refs: protected_refs).any? do |access_level|
access_level.check_access(user)
end
diff --git a/app/models/concerns/protected_ref_access.rb b/app/models/concerns/protected_ref_access.rb
index bfda5b1678b..e3a7f2d5498 100644
--- a/app/models/concerns/protected_ref_access.rb
+++ b/app/models/concerns/protected_ref_access.rb
@@ -8,8 +8,8 @@ module ProtectedRefAccess
].freeze
HUMAN_ACCESS_LEVELS = {
- Gitlab::Access::MASTER => "Masters".freeze,
- Gitlab::Access::DEVELOPER => "Developers + Masters".freeze,
+ Gitlab::Access::MASTER => "Maintainers".freeze,
+ Gitlab::Access::DEVELOPER => "Developers + Maintainers".freeze,
Gitlab::Access::NO_ACCESS => "No one".freeze
}.freeze
diff --git a/app/models/concerns/reactive_caching.rb b/app/models/concerns/reactive_caching.rb
index 2589215ad19..be0a5b49012 100644
--- a/app/models/concerns/reactive_caching.rb
+++ b/app/models/concerns/reactive_caching.rb
@@ -60,22 +60,26 @@ module ReactiveCaching
end
def with_reactive_cache(*args, &blk)
- within_reactive_cache_lifetime(*args) do
+ bootstrap = !within_reactive_cache_lifetime?(*args)
+ Rails.cache.write(alive_reactive_cache_key(*args), true, expires_in: self.class.reactive_cache_lifetime)
+
+ if bootstrap
+ ReactiveCachingWorker.perform_async(self.class, id, *args)
+ nil
+ else
data = Rails.cache.read(full_reactive_cache_key(*args))
yield data if data.present?
end
- ensure
- Rails.cache.write(alive_reactive_cache_key(*args), true, expires_in: self.class.reactive_cache_lifetime)
- ReactiveCachingWorker.perform_async(self.class, id, *args)
end
def clear_reactive_cache!(*args)
Rails.cache.delete(full_reactive_cache_key(*args))
+ Rails.cache.delete(alive_reactive_cache_key(*args))
end
def exclusively_update_reactive_cache!(*args)
locking_reactive_cache(*args) do
- within_reactive_cache_lifetime(*args) do
+ if within_reactive_cache_lifetime?(*args)
enqueuing_update(*args) do
value = calculate_reactive_cache(*args)
Rails.cache.write(full_reactive_cache_key(*args), value)
@@ -105,8 +109,8 @@ module ReactiveCaching
Gitlab::ExclusiveLease.cancel(full_reactive_cache_key(*args), uuid)
end
- def within_reactive_cache_lifetime(*args)
- yield if Rails.cache.read(alive_reactive_cache_key(*args))
+ def within_reactive_cache_lifetime?(*args)
+ !!Rails.cache.read(alive_reactive_cache_key(*args))
end
def enqueuing_update(*args)
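A usage sketch of the reworked with_reactive_cache flow: a cold call now refreshes the alive key, enqueues ReactiveCachingWorker and returns nil; calls within reactive_cache_lifetime read the computed value (class and method names below are illustrative):

class Environment < ActiveRecord::Base
  include ReactiveCaching

  def rollout_status
    with_reactive_cache do |data|  # nil until the worker has populated the cache
      data[:status]
    end
  end

  def calculate_reactive_cache
    { status: fetch_status_from_cluster }  # heavy work, performed by the worker
  end
end
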
diff --git a/app/models/concerns/redis_cacheable.rb b/app/models/concerns/redis_cacheable.rb
index b889f4202dc..3bdc1330d23 100644
--- a/app/models/concerns/redis_cacheable.rb
+++ b/app/models/concerns/redis_cacheable.rb
@@ -7,7 +7,11 @@ module RedisCacheable
class_methods do
def cached_attr_reader(*attributes)
attributes.each do |attribute|
- define_method("#{attribute}") do
+ define_method(attribute) do
+ unless self.has_attribute?(attribute)
+ raise ArgumentError, "`cached_attr_reader` requires the #{self.class.name}\##{attribute} attribute to have a database column"
+ end
+
cached_attribute(attribute) || read_attribute(attribute)
end
end
@@ -15,13 +19,16 @@ module RedisCacheable
end
def cached_attribute(attribute)
- (cached_attributes || {})[attribute]
+ cached_value = (cached_attributes || {})[attribute]
+ cast_value_from_cache(attribute, cached_value) if cached_value
end
def cache_attributes(values)
Gitlab::Redis::SharedState.with do |redis|
redis.set(cache_attribute_key, values.to_json, ex: CACHED_ATTRIBUTES_EXPIRY_TIME)
end
+
+ clear_memoization(:cached_attributes)
end
private
@@ -38,4 +45,12 @@ module RedisCacheable
end
end
end
+
+ def cast_value_from_cache(attribute, value)
+ if Gitlab.rails5?
+ self.class.type_for_attribute(attribute.to_s).cast(value)
+ else
+ self.class.column_for_attribute(attribute).type_cast_from_database(value)
+ end
+ end
end
diff --git a/app/models/concerns/resolvable_note.rb b/app/models/concerns/resolvable_note.rb
index 668c5a079e3..4a0f8b92b3a 100644
--- a/app/models/concerns/resolvable_note.rb
+++ b/app/models/concerns/resolvable_note.rb
@@ -32,7 +32,7 @@ module ResolvableNote
# Keep this method in sync with the `potentially_resolvable` scope
def potentially_resolvable?
- RESOLVABLE_TYPES.include?(self.class.name) && noteable.supports_resolvable_notes?
+ RESOLVABLE_TYPES.include?(self.class.name) && noteable&.supports_resolvable_notes?
end
# Keep this method in sync with the `resolvable` scope
diff --git a/app/models/concerns/routable.rb b/app/models/concerns/routable.rb
index dfd7d94450b..0176a12a131 100644
--- a/app/models/concerns/routable.rb
+++ b/app/models/concerns/routable.rb
@@ -4,7 +4,9 @@ module Routable
extend ActiveSupport::Concern
included do
- has_one :route, as: :source, autosave: true, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
+ # Remove `inverse_of: source` when upgraded to rails 5.2
+ # See https://github.com/rails/rails/pull/28808
+ has_one :route, as: :source, autosave: true, dependent: :destroy, inverse_of: :source # rubocop:disable Cop/ActiveRecordDependent
has_many :redirect_routes, as: :source, autosave: true, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
validates :route, presence: true
@@ -102,7 +104,7 @@ module Routable
# the route. Caching this per request ensures that even if we have multiple instances,
# we will not have to duplicate work, avoiding N+1 queries in some cases.
def full_path
- return uncached_full_path unless RequestStore.active?
+ return uncached_full_path unless RequestStore.active? && persisted?
RequestStore[full_path_key] ||= uncached_full_path
end
@@ -124,6 +126,11 @@ module Routable
end
end
+ # Group would override this to check ownership via its association
+ def owned_by?(user)
+ owner == user
+ end
+
private
def set_path_errors
diff --git a/app/models/concerns/sha_attribute.rb b/app/models/concerns/sha_attribute.rb
index 703a72c355c..3796737427a 100644
--- a/app/models/concerns/sha_attribute.rb
+++ b/app/models/concerns/sha_attribute.rb
@@ -4,18 +4,34 @@ module ShaAttribute
module ClassMethods
def sha_attribute(name)
return if ENV['STATIC_VERIFICATION']
- return unless table_exists?
+
+ validate_binary_column_exists!(name) unless Rails.env.production?
+
+ attribute(name, Gitlab::Database::ShaAttribute.new)
+ end
+
+ # This only gets executed in non-production environments as an additional check to ensure
+ # the column is the correct type. In production it should behave like any other attribute.
+ # See https://gitlab.com/gitlab-org/gitlab-ee/merge_requests/5502 for more discussion
+ def validate_binary_column_exists!(name)
+ unless table_exists?
+ warn "WARNING: sha_attribute #{name.inspect} is invalid since the table doesn't exist - you may need to run database migrations"
+ return
+ end
column = columns.find { |c| c.name == name.to_s }
- # In case the table doesn't exist we won't be able to find the column,
- # thus we will only check the type if the column is present.
- if column && column.type != :binary
- raise ArgumentError,
- "sha_attribute #{name.inspect} is invalid since the column type is not :binary"
+ unless column
+ warn "WARNING: sha_attribute #{name.inspect} is invalid since the column doesn't exist - you may need to run database migrations"
+ return
end
- attribute(name, Gitlab::Database::ShaAttribute.new)
+ unless column.type == :binary
+ raise ArgumentError.new("sha_attribute #{name.inspect} is invalid since the column type is not :binary")
+ end
+ rescue => error
+ Gitlab::AppLogger.error "ShaAttribute initialization: #{error.message}"
+ raise
end
end
end
diff --git a/app/models/concerns/sortable.rb b/app/models/concerns/sortable.rb
index cefa5c13c5f..cb76ae971d4 100644
--- a/app/models/concerns/sortable.rb
+++ b/app/models/concerns/sortable.rb
@@ -12,8 +12,8 @@ module Sortable
scope :order_created_asc, -> { reorder(created_at: :asc) }
scope :order_updated_desc, -> { reorder(updated_at: :desc) }
scope :order_updated_asc, -> { reorder(updated_at: :asc) }
- scope :order_name_asc, -> { reorder(name: :asc) }
- scope :order_name_desc, -> { reorder(name: :desc) }
+ scope :order_name_asc, -> { reorder(Arel::Nodes::Ascending.new(arel_table[:name].lower)) }
+ scope :order_name_desc, -> { reorder(Arel::Nodes::Descending.new(arel_table[:name].lower)) }
end
module ClassMethods
diff --git a/app/models/concerns/storage/legacy_namespace.rb b/app/models/concerns/storage/legacy_namespace.rb
index f05e606995d..f66bdd529f1 100644
--- a/app/models/concerns/storage/legacy_namespace.rb
+++ b/app/models/concerns/storage/legacy_namespace.rb
@@ -45,25 +45,25 @@ module Storage
# Hooks
- # Save the storage paths before the projects are destroyed to use them on after destroy
+ # Save the storages before the projects are destroyed to use them on after destroy
def prepare_for_destroy
- old_repository_storage_paths
+ old_repository_storages
end
private
def move_repositories
- # Move the namespace directory in all storage paths used by member projects
- repository_storage_paths.each do |repository_storage_path|
+ # Move the namespace directory in all storages used by member projects
+ repository_storages.each do |repository_storage|
# Ensure old directory exists before moving it
- gitlab_shell.add_namespace(repository_storage_path, full_path_was)
+ gitlab_shell.add_namespace(repository_storage, full_path_was)
# Ensure new directory exists before moving it (if there's a parent)
- gitlab_shell.add_namespace(repository_storage_path, parent.full_path) if parent
+ gitlab_shell.add_namespace(repository_storage, parent.full_path) if parent
- unless gitlab_shell.mv_namespace(repository_storage_path, full_path_was, full_path)
+ unless gitlab_shell.mv_namespace(repository_storage, full_path_was, full_path)
- Rails.logger.error "Exception moving path #{repository_storage_path} from #{full_path_was} to #{full_path}"
+ Rails.logger.error "Exception moving path #{repository_storage} from #{full_path_was} to #{full_path}"
# if we cannot move namespace directory we should rollback
# db changes in order to prevent out of sync between db and fs
@@ -72,33 +72,33 @@ module Storage
end
end
- def old_repository_storage_paths
- @old_repository_storage_paths ||= repository_storage_paths
+ def old_repository_storages
+ @old_repository_storage_paths ||= repository_storages
end
- def repository_storage_paths
+ def repository_storages
# We need to get the storage paths for all the projects, even the ones that are
# pending delete. Unscoping also gets rid of the default order, which causes
# problems with SELECT DISTINCT.
Project.unscoped do
- all_projects.select('distinct(repository_storage)').to_a.map(&:repository_storage_path)
+ all_projects.select('distinct(repository_storage)').to_a.map(&:repository_storage)
end
end
def rm_dir
# Remove the namespace directory in all storages used by member projects
- old_repository_storage_paths.each do |repository_storage_path|
+ old_repository_storages.each do |repository_storage|
# Move namespace directory into trash.
# We will remove it later async
new_path = "#{full_path}+#{id}+deleted"
- if gitlab_shell.mv_namespace(repository_storage_path, full_path, new_path)
+ if gitlab_shell.mv_namespace(repository_storage, full_path, new_path)
Gitlab::AppLogger.info %Q(Namespace directory "#{full_path}" moved to "#{new_path}")
# Remove namespace directory async with delay so
# GitLab has time to remove all projects first
run_after_commit do
- GitlabShellWorker.perform_in(5.minutes, :rm_namespace, repository_storage_path, new_path)
+ GitlabShellWorker.perform_in(5.minutes, :rm_namespace, repository_storage, new_path)
end
end
end
diff --git a/app/models/concerns/time_trackable.rb b/app/models/concerns/time_trackable.rb
index 5911b56c34c..0fc321c52bc 100644
--- a/app/models/concerns/time_trackable.rb
+++ b/app/models/concerns/time_trackable.rb
@@ -51,6 +51,10 @@ module TimeTrackable
Gitlab::TimeTrackingFormatter.output(time_estimate)
end
+ def time_estimate=(val)
+ val.is_a?(Integer) ? super([val, Gitlab::Database::MAX_INT_VALUE].min) : super(val)
+ end
+
private
def reset_spent_time
diff --git a/app/models/concerns/uniquify.rb b/app/models/concerns/uniquify.rb
index a7fe5951b6e..549a76da20e 100644
--- a/app/models/concerns/uniquify.rb
+++ b/app/models/concerns/uniquify.rb
@@ -1,13 +1,21 @@
+# Uniquify
+#
+# Return a version of the given 'base' string that is unique
+# by appending a counter to it. Uniqueness is determined by
+# repeated calls to the passed block.
+#
+# You can pass an initial value for the counter; if not given,
+# counting starts from 1.
+#
+# If `base` is a function/proc, we expect that calling it with a
+# candidate counter returns a string to test/return.
class Uniquify
- # Return a version of the given 'base' string that is unique
- # by appending a counter to it. Uniqueness is determined by
- # repeated calls to the passed block.
- #
- # If `base` is a function/proc, we expect that calling it with a
- # candidate counter returns a string to test/return.
+ def initialize(counter = nil)
+ @counter = counter
+ end
+
def string(base)
@base = base
- @counter = nil
increment_counter! while yield(base_string)
base_string
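A usage sketch for the updated Uniquify (base_string and increment_counter! are unchanged and outside this hunk; the behaviour below assumes the counter is appended to the base once set):

taken = %w[report report1]

Uniquify.new.string('report') { |name| taken.include?(name) }
# => "report2" (tries "report", then "report1", then stops)

Uniquify.new(5).string('report') { |name| taken.include?(name) }
# => "report5" (counting starts from the given initial value)
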
diff --git a/app/models/concerns/with_uploads.rb b/app/models/concerns/with_uploads.rb
new file mode 100644
index 00000000000..4245d083a49
--- /dev/null
+++ b/app/models/concerns/with_uploads.rb
@@ -0,0 +1,43 @@
+# Mounted uploaders are destroyed by carrierwave's after_commit
+# hook. This hook fetches the upload location (local vs remote) from the
+# Upload model, so it's necessary to make sure that the model's associated
+# uploads are not deleted yet when that after_commit hook runs.
+# In other words, we cannot use dependent: :destroy :
+# has_many :uploads, as: :model, dependent: :destroy
+#
+# And because not-mounted uploads require the presence of the upload's
+# object model when destroying them (FileUploader's `build_upload` method
+# references `model` on delete), we cannot use the after_commit hook for these
+# uploads.
+#
+# Instead, FileUploader uploads are destroyed in a before_destroy hook, and the
+# remaining uploads are destroyed by carrierwave's after_commit hook.
+
+module WithUploads
+ extend ActiveSupport::Concern
+
+ # Currently there is no simple way to select only not-mounted
+ # uploads; they should all be FileUploaders, so we select them by
+ # the `uploader` class
+ FILE_UPLOADERS = %w(PersonalFileUploader NamespaceFileUploader FileUploader).freeze
+
+ included do
+ has_many :uploads, as: :model
+
+ before_destroy :destroy_file_uploads
+ end
+
+ # Mounted uploads are deleted in carrierwave's after_commit hook,
+ # but FileUploaders which are not mounted must be deleted explicitly, and
+ # that cannot be done in after_commit because FileUploader requires loading
+ # the associated model on destroy (which is already deleted in after_commit)
+ def destroy_file_uploads
+ self.uploads.where(uploader: FILE_UPLOADERS).find_each do |upload|
+ upload.destroy
+ end
+ end
+
+ def retrieve_upload(_identifier, paths)
+ uploads.find_by(path: paths)
+ end
+end
diff --git a/app/models/deploy_key.rb b/app/models/deploy_key.rb
index c2e0a5fa126..89a74b7dcb1 100644
--- a/app/models/deploy_key.rb
+++ b/app/models/deploy_key.rb
@@ -27,6 +27,10 @@ class DeployKey < Key
self.private?
end
+ def user
+ super || User.ghost
+ end
+
def has_access_to?(project)
deploy_keys_project_for(project).present?
end
diff --git a/app/models/deploy_token.rb b/app/models/deploy_token.rb
new file mode 100644
index 00000000000..5082dc45368
--- /dev/null
+++ b/app/models/deploy_token.rb
@@ -0,0 +1,66 @@
+class DeployToken < ActiveRecord::Base
+ include Expirable
+ include TokenAuthenticatable
+ add_authentication_token_field :token
+
+ AVAILABLE_SCOPES = %i(read_repository read_registry).freeze
+ GITLAB_DEPLOY_TOKEN_NAME = 'gitlab-deploy-token'.freeze
+
+ default_value_for(:expires_at) { Forever.date }
+
+ has_many :project_deploy_tokens, inverse_of: :deploy_token
+ has_many :projects, through: :project_deploy_tokens
+
+ validate :ensure_at_least_one_scope
+ before_save :ensure_token
+
+ accepts_nested_attributes_for :project_deploy_tokens
+
+ scope :active, -> { where("revoked = false AND expires_at >= NOW()") }
+
+ def self.gitlab_deploy_token
+ active.find_by(name: GITLAB_DEPLOY_TOKEN_NAME)
+ end
+
+ def revoke!
+ update!(revoked: true)
+ end
+
+ def active?
+ !revoked
+ end
+
+ def scopes
+ AVAILABLE_SCOPES.select { |token_scope| read_attribute(token_scope) }
+ end
+
+ def username
+ "gitlab+deploy-token-#{id}"
+ end
+
+ def has_access_to?(requested_project)
+ active? && project == requested_project
+ end
+
+ # This is temporary. Currently we limit DeployToken
+ # to a single project; later we're going to extend
+ # it to multiple projects and namespaces.
+ def project
+ projects.first
+ end
+
+ def expires_at
+ expires_at = read_attribute(:expires_at)
+ expires_at != Forever.date ? expires_at : nil
+ end
+
+ def expires_at=(value)
+ write_attribute(:expires_at, value.presence || Forever.date)
+ end
+
+ private
+
+ def ensure_at_least_one_scope
+ errors.add(:base, "Scopes can't be blank") unless read_repository || read_registry
+ end
+end
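
A hedged usage sketch of the model above (the record and its attribute values are assumed purely for illustration):

token = DeployToken.new(name: 'ci-read-only', read_repository: true)
token.scopes      # => [:read_repository]
token.expires_at  # => nil while the column still holds Forever.date
token.username    # => "gitlab+deploy-token-<id>" once the record is persisted
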
diff --git a/app/models/deployment.rb b/app/models/deployment.rb
index e18ea8bfea4..ac86e9e8de0 100644
--- a/app/models/deployment.rb
+++ b/app/models/deployment.rb
@@ -1,11 +1,14 @@
class Deployment < ActiveRecord::Base
- include NonatomicInternalId
+ include AtomicInternalId
+ include IidRoutes
belongs_to :project, required: true
belongs_to :environment, required: true
belongs_to :user
belongs_to :deployable, polymorphic: true # rubocop:disable Cop/PolymorphicAssociations
+ has_internal_id :iid, scope: :project, init: ->(s) { s&.project&.deployments&.maximum(:iid) }
+
validates :sha, presence: true
validates :ref, presence: true
diff --git a/app/models/diff_note.rb b/app/models/diff_note.rb
index 15122cbc693..d752d5bcdee 100644
--- a/app/models/diff_note.rb
+++ b/app/models/diff_note.rb
@@ -3,6 +3,7 @@
# A note of this type can be resolvable.
class DiffNote < Note
include NoteOnDiff
+ include Gitlab::Utils::StrongMemoize
NOTEABLE_TYPES = %w(MergeRequest Commit).freeze
@@ -12,7 +13,6 @@ class DiffNote < Note
validates :original_position, presence: true
validates :position, presence: true
- validates :diff_line, presence: true, if: :on_text?
validates :line_code, presence: true, line_code: true, if: :on_text?
validates :noteable_type, inclusion: { in: NOTEABLE_TYPES }
validate :positions_complete
@@ -23,6 +23,7 @@ class DiffNote < Note
before_validation :update_position, on: :create, if: :on_text?
before_validation :set_line_code, if: :on_text?
after_save :keep_around_commits
+ after_commit :create_diff_file, on: :create
def discussion_class(*)
DiffDiscussion
@@ -53,8 +54,25 @@ class DiffNote < Note
position.position_type == "image"
end
+ def create_diff_file
+ return unless should_create_diff_file?
+
+ diff_file = fetch_diff_file
+ diff_line = diff_file.line_for_position(self.original_position)
+
+ creation_params = diff_file.diff.to_hash
+ .except(:too_large)
+ .merge(diff: diff_file.diff_hunk(diff_line))
+
+ create_note_diff_file(creation_params)
+ end
+
def diff_file
- @diff_file ||= self.original_position.diff_file(self.project.repository)
+ strong_memoize(:diff_file) do
+ enqueue_diff_file_creation_job if should_create_diff_file?
+
+ fetch_diff_file
+ end
end
def diff_line
@@ -85,6 +103,38 @@ class DiffNote < Note
private
+ def enqueue_diff_file_creation_job
+ # Avoid enqueuing multiple file creation jobs at once for a note (i.e.
+ # parallel calls to `DiffNote#diff_file`).
+ lease = Gitlab::ExclusiveLease.new("note_diff_file_creation:#{id}", timeout: 1.hour.to_i)
+ return unless lease.try_obtain
+
+ CreateNoteDiffFileWorker.perform_async(id)
+ end
+
+ def should_create_diff_file?
+ on_text? && note_diff_file.nil? && self == discussion.first_note
+ end
+
+ def fetch_diff_file
+ if note_diff_file
+ diff = Gitlab::Git::Diff.new(note_diff_file.to_hash)
+ Gitlab::Diff::File.new(diff,
+ repository: project.repository,
+ diff_refs: original_position.diff_refs)
+ elsif created_at_diff?(noteable.diff_refs)
+ # We're able to use the already persisted diffs (Postgres) if we're
+ # presenting a "current version" of the MR discussion diff.
+ # So no need to make an extra Gitaly diff request for it.
+ # As an extra benefit, the returned `diff_file` already
+ # has `highlighted_diff_lines` data set from Redis on
+ # `Diff::FileCollection::MergeRequestDiff`.
+ noteable.diffs(paths: original_position.paths, expanded: true).diff_files.first
+ else
+ original_position.diff_file(self.project.repository)
+ end
+ end
+
def supported?
for_commit? || self.noteable.has_complete_diff_refs?
end
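
The lease guard in `enqueue_diff_file_creation_job` is a general pattern; a minimal sketch of it, with `note` standing in for a persisted DiffNote:

lease = Gitlab::ExclusiveLease.new("note_diff_file_creation:#{note.id}", timeout: 1.hour.to_i)
# Only the first caller within the timeout window obtains the lease and enqueues the worker;
# concurrent callers get nil from try_obtain and skip the enqueue.
CreateNoteDiffFileWorker.perform_async(note.id) if lease.try_obtain
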
diff --git a/app/models/discussion.rb b/app/models/discussion.rb
index 92482a1a875..35a0ef00856 100644
--- a/app/models/discussion.rb
+++ b/app/models/discussion.rb
@@ -17,6 +17,10 @@ class Discussion
to: :first_note
+ def project_id
+ project&.id
+ end
+
def self.build(notes, context_noteable = nil)
notes.first.discussion_class(context_noteable).new(notes, context_noteable)
end
diff --git a/app/models/environment.rb b/app/models/environment.rb
index 9517723d9d9..8d523dae324 100644
--- a/app/models/environment.rb
+++ b/app/models/environment.rb
@@ -32,7 +32,7 @@ class Environment < ActiveRecord::Base
validates :external_url,
length: { maximum: 255 },
allow_nil: true,
- addressable_url: true
+ url: true
delegate :stop_action, :manual_actions, to: :last_deployment, allow_nil: true
@@ -224,7 +224,7 @@ class Environment < ActiveRecord::Base
end
def deployment_platform
- project.deployment_platform(environment: self)
+ project.deployment_platform(environment: self.name)
end
private
diff --git a/app/models/event.rb b/app/models/event.rb
index 17a198d52c7..ac0b1c7b27c 100644
--- a/app/models/event.rb
+++ b/app/models/event.rb
@@ -40,6 +40,7 @@ class Event < ActiveRecord::Base
).freeze
RESET_PROJECT_ACTIVITY_INTERVAL = 1.hour
+ REPOSITORY_UPDATED_AT_INTERVAL = 5.minutes
delegate :name, :email, :public_email, :username, to: :author, prefix: true, allow_nil: true
delegate :title, to: :issue, prefix: true, allow_nil: true
@@ -52,12 +53,12 @@ class Event < ActiveRecord::Base
belongs_to :target, -> {
# If the association for "target" defines an "author" association we want to
# eager-load this so Banzai & friends don't end up performing N+1 queries to
- # get the authors of notes, issues, etc.
- if reflections['events'].active_record.reflect_on_association(:author)
- includes(:author)
- else
- self
+ # get the authors of notes, issues, etc. (likewise for "noteable").
+ incs = %i(author noteable).select do |a|
+ reflections['events'].active_record.reflect_on_association(a)
end
+
+ incs.reduce(self) { |obj, a| obj.includes(a) }
}, polymorphic: true # rubocop:disable Cop/PolymorphicAssociations
has_one :push_event_payload
@@ -110,7 +111,10 @@ class Event < ActiveRecord::Base
end
end
+ # Remove this method when removing Gitlab.rails5? code.
def subclass_from_attributes(attrs)
+ return super if Gitlab.rails5?
+
# Without this Rails will keep calling this method on the returned class,
# resulting in an infinite loop.
return unless self == Event
@@ -388,6 +392,7 @@ class Event < ActiveRecord::Base
def set_last_repository_updated_at
Project.unscoped.where(id: project_id)
+ .where("last_repository_updated_at < ? OR last_repository_updated_at IS NULL", REPOSITORY_UPDATED_AT_INTERVAL.ago)
.update_all(last_repository_updated_at: created_at)
end
diff --git a/app/models/generic_commit_status.rb b/app/models/generic_commit_status.rb
index 532b8f4ad69..5ac8bde44cd 100644
--- a/app/models/generic_commit_status.rb
+++ b/app/models/generic_commit_status.rb
@@ -1,7 +1,7 @@
class GenericCommitStatus < CommitStatus
before_validation :set_default_values
- validates :target_url, addressable_url: true,
+ validates :target_url, url: true,
length: { maximum: 255 },
allow_nil: true
diff --git a/app/models/group.rb b/app/models/group.rb
index f669b1a7009..9c171de7fc3 100644
--- a/app/models/group.rb
+++ b/app/models/group.rb
@@ -9,6 +9,9 @@ class Group < Namespace
include SelectForProjectAuthorization
include LoadedInGroupList
include GroupDescendant
+ include TokenAuthenticatable
+ include WithUploads
+ include Gitlab::Utils::StrongMemoize
has_many :group_members, -> { where(requested_at: nil) }, dependent: :destroy, as: :source # rubocop:disable Cop/ActiveRecordDependent
alias_method :members, :group_members
@@ -24,13 +27,15 @@ class Group < Namespace
has_many :milestones
has_many :project_group_links, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
has_many :shared_projects, through: :project_group_links, source: :project
+
+ # Overridden by the notification_settings method below.
+ # Kept here just for the dependent: :destroy behaviour.
has_many :notification_settings, dependent: :destroy, as: :source # rubocop:disable Cop/ActiveRecordDependent
+
has_many :labels, class_name: 'GroupLabel'
has_many :variables, class_name: 'Ci::GroupVariable'
has_many :custom_attributes, class_name: 'GroupCustomAttribute'
- has_many :uploads, as: :model, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
-
has_many :boards
has_many :badges, class_name: 'GroupBadge'
@@ -43,6 +48,8 @@ class Group < Namespace
validates :two_factor_grace_period, presence: true, numericality: { greater_than_or_equal_to: 0 }
+ add_authentication_token_field :runners_token
+
after_create :post_create_hook
after_destroy :post_destroy_hook
after_save :update_two_factor_requirement
@@ -53,7 +60,7 @@ class Group < Namespace
Gitlab::Database.postgresql?
end
- def sort(method)
+ def sort_by_attribute(method)
if method == 'storage_size_desc'
# storage_size is a virtual column so we need to
# pass a string to avoid AR adding the table name
@@ -86,6 +93,15 @@ class Group < Namespace
end
end
+ # Overrides the notification_settings has_many association.
+ # This allows applying notification settings from parent groups
+ # to child groups and projects.
+ def notification_settings
+ source_type = self.class.base_class.name
+
+ NotificationSetting.where(source_type: source_type, source_id: self_and_ancestors_ids)
+ end
+
def to_reference(_from = nil, full: nil)
"#{self.class.reference_prefix}#{full_path}"
end
@@ -125,6 +141,10 @@ class Group < Namespace
self[:lfs_enabled]
end
+ def owned_by?(user)
+ owners.include?(user)
+ end
+
def add_users(users, access_level, current_user: nil, expires_at: nil)
GroupMember.add_users(
self,
@@ -135,13 +155,14 @@ class Group < Namespace
)
end
- def add_user(user, access_level, current_user: nil, expires_at: nil)
+ def add_user(user, access_level, current_user: nil, expires_at: nil, ldap: false)
GroupMember.add_user(
self,
user,
access_level,
current_user: current_user,
- expires_at: expires_at
+ expires_at: expires_at,
+ ldap: ldap
)
end
@@ -189,10 +210,8 @@ class Group < Namespace
owners.include?(user) && owners.size == 1
end
- def avatar_type
- unless self.avatar.image?
- self.errors.add :avatar, "only images allowed"
- end
+ def ldap_synced?
+ false
end
def post_create_hook
@@ -220,6 +239,12 @@ class Group < Namespace
members_with_parents.pluck(:user_id)
end
+ def self_and_ancestors_ids
+ strong_memoize(:self_and_ancestors_ids) do
+ self_and_ancestors.pluck(:id)
+ end
+ end
+
def members_with_parents
# Avoids an unnecessary SELECT when the group has no parents
source_ids =
@@ -240,6 +265,13 @@ class Group < Namespace
.where(source_id: self_and_descendants.reorder(nil).select(:id))
end
+ # Returns all members that are part of the group, its subgroups, and ancestor groups
+ def direct_and_indirect_members
+ GroupMember
+ .active_without_invites_and_requests
+ .where(source_id: self_and_hierarchy.reorder(nil).select(:id))
+ end
+
def users_with_parents
User
.where(id: members_with_parents.select(:user_id))
@@ -252,6 +284,30 @@ class Group < Namespace
.reorder(nil)
end
+ # Returns all users that are members of the group because:
+ # 1. They belong to the group
+ # 2. They belong to a project that belongs to the group
+ # 3. They belong to a sub-group or project in such sub-group
+ # 4. They belong to an ancestor group
+ def direct_and_indirect_users
+ union = Gitlab::SQL::Union.new([
+ User
+ .where(id: direct_and_indirect_members.select(:user_id))
+ .reorder(nil),
+ project_users_with_descendants
+ ])
+
+ User.from("(#{union.to_sql}) #{User.table_name}")
+ end
+
+ # Returns all users that are members of projects
+ # belonging to the current group or sub-groups
+ def project_users_with_descendants
+ User
+ .joins(projects: :group)
+ .where(namespaces: { id: self_and_descendants.select(:id) })
+ end
+
def max_member_access_for_user(user)
return GroupMember::OWNER if user.admin?
@@ -292,6 +348,17 @@ class Group < Namespace
false
end
+ def refresh_project_authorizations
+ refresh_members_authorized_projects(blocking: false)
+ end
+
+ # Each existing group needs to have a `runners_token`.
+ # We do this on read since migrating all existing groups is not a
+ # feasible solution.
+ def runners_token
+ ensure_runners_token!
+ end
+
private
def update_two_factor_requirement
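
Taken together, the overridden association and the on-read token generation behave roughly as follows (the subgroup lookup is illustrative only):

subgroup = Group.find_by_full_path('parent/child')
subgroup.notification_settings  # settings scoped to self_and_ancestors_ids, not just the group itself
subgroup.runners_token          # generated lazily via ensure_runners_token! if missing
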
diff --git a/app/models/hooks/project_hook.rb b/app/models/hooks/project_hook.rb
index b6dd39b860b..ec072882cc9 100644
--- a/app/models/hooks/project_hook.rb
+++ b/app/models/hooks/project_hook.rb
@@ -7,6 +7,7 @@ class ProjectHook < WebHook
:issue_hooks,
:confidential_issue_hooks,
:note_hooks,
+ :confidential_note_hooks,
:merge_request_hooks,
:job_hooks,
:pipeline_hooks,
diff --git a/app/models/hooks/system_hook.rb b/app/models/hooks/system_hook.rb
index 0528266e5b3..6bef00f26ea 100644
--- a/app/models/hooks/system_hook.rb
+++ b/app/models/hooks/system_hook.rb
@@ -11,4 +11,9 @@ class SystemHook < WebHook
default_value_for :push_events, false
default_value_for :repository_update_events, true
default_value_for :merge_requests_events, false
+
+ # Allow URLs pointing to localhost and the local network
+ def allow_local_requests?
+ true
+ end
end
diff --git a/app/models/hooks/web_hook.rb b/app/models/hooks/web_hook.rb
index 27729deeac9..e353abdda9c 100644
--- a/app/models/hooks/web_hook.rb
+++ b/app/models/hooks/web_hook.rb
@@ -3,7 +3,9 @@ class WebHook < ActiveRecord::Base
has_many :web_hook_logs, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
- validates :url, presence: true, url: true
+ validates :url, presence: true, public_url: { allow_localhost: lambda(&:allow_local_requests?),
+ allow_local_network: lambda(&:allow_local_requests?) }
+
validates :token, format: { without: /\n/ }
def execute(data, hook_name)
@@ -13,4 +15,9 @@ class WebHook < ActiveRecord::Base
def async_execute(data, hook_name)
WebHookService.new(self, data, hook_name).async_execute
end
+
+ # Allow URLs pointing to localhost and the local network
+ def allow_local_requests?
+ false
+ end
end
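
Since the new `public_url` validator asks each record whether local requests are allowed, the two overrides differ as below (a quick illustration, not part of the diff):

WebHook.new.allow_local_requests?    # => false, so localhost / local-network URLs are rejected
SystemHook.new.allow_local_requests? # => true, system hooks may target internal addresses
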
diff --git a/app/models/identity.rb b/app/models/identity.rb
index 1011b9f1109..3fd0c5e751d 100644
--- a/app/models/identity.rb
+++ b/app/models/identity.rb
@@ -1,12 +1,16 @@
class Identity < ActiveRecord::Base
+ def self.uniqueness_scope
+ :provider
+ end
+
include Sortable
include CaseSensitivity
belongs_to :user
validates :provider, presence: true
- validates :extern_uid, allow_blank: true, uniqueness: { scope: :provider, case_sensitive: false }
- validates :user_id, uniqueness: { scope: :provider }
+ validates :extern_uid, allow_blank: true, uniqueness: { scope: uniqueness_scope, case_sensitive: false }
+ validates :user_id, uniqueness: { scope: uniqueness_scope }
before_save :ensure_normalized_extern_uid, if: :extern_uid_changed?
after_destroy :clear_user_synced_attributes, if: :user_synced_attributes_metadata_from_provider?
diff --git a/app/models/internal_id.rb b/app/models/internal_id.rb
index cbec735c2dd..f50f28deffe 100644
--- a/app/models/internal_id.rb
+++ b/app/models/internal_id.rb
@@ -12,8 +12,9 @@
# * (Optionally) add columns to `internal_ids` if needed for scope.
class InternalId < ActiveRecord::Base
belongs_to :project
+ belongs_to :namespace
- enum usage: { issues: 0 }
+ enum usage: { issues: 0, merge_requests: 1, deployments: 2, milestones: 3, epics: 4, ci_pipelines: 5 }
validates :usage, presence: true
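
For reference, the Rails enum above exposes the usage mapping as a class method, so the newly added usages can be inspected like this:

InternalId.usages
# => { "issues" => 0, "merge_requests" => 1, "deployments" => 2,
#      "milestones" => 3, "epics" => 4, "ci_pipelines" => 5 }
InternalId.usages.fetch('deployments') # => 2
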
diff --git a/app/models/issue.rb b/app/models/issue.rb
index 7bfc45c1f43..d3df2da14e2 100644
--- a/app/models/issue.rb
+++ b/app/models/issue.rb
@@ -2,6 +2,7 @@ require 'carrierwave/orm/activerecord'
class Issue < ActiveRecord::Base
include AtomicInternalId
+ include IidRoutes
include Issuable
include Noteable
include Referable
@@ -14,15 +15,17 @@ class Issue < ActiveRecord::Base
ignore_column :assignee_id, :branch_name, :deleted_at
- DueDateStruct = Struct.new(:title, :name).freeze
- NoDueDate = DueDateStruct.new('No Due Date', '0').freeze
- AnyDueDate = DueDateStruct.new('Any Due Date', '').freeze
- Overdue = DueDateStruct.new('Overdue', 'overdue').freeze
- DueThisWeek = DueDateStruct.new('Due This Week', 'week').freeze
- DueThisMonth = DueDateStruct.new('Due This Month', 'month').freeze
+ DueDateStruct = Struct.new(:title, :name).freeze
+ NoDueDate = DueDateStruct.new('No Due Date', '0').freeze
+ AnyDueDate = DueDateStruct.new('Any Due Date', '').freeze
+ Overdue = DueDateStruct.new('Overdue', 'overdue').freeze
+ DueThisWeek = DueDateStruct.new('Due This Week', 'week').freeze
+ DueThisMonth = DueDateStruct.new('Due This Month', 'month').freeze
+ DueNextMonthAndPreviousTwoWeeks = DueDateStruct.new('Due Next Month And Previous Two Weeks', 'next_month_and_previous_two_weeks').freeze
belongs_to :project
belongs_to :moved_to, class_name: 'Issue'
+ belongs_to :closed_by, class_name: 'User'
has_internal_id :iid, scope: :project, init: ->(s) { s&.project&.issues&.maximum(:iid) }
@@ -45,18 +48,22 @@ class Issue < ActiveRecord::Base
scope :unassigned, -> { where('NOT EXISTS (SELECT TRUE FROM issue_assignees WHERE issue_id = issues.id)') }
scope :assigned_to, ->(u) { where('EXISTS (SELECT TRUE FROM issue_assignees WHERE user_id = ? AND issue_id = issues.id)', u.id)}
+ scope :with_due_date, -> { where('due_date IS NOT NULL') }
scope :without_due_date, -> { where(due_date: nil) }
scope :due_before, ->(date) { where('issues.due_date < ?', date) }
scope :due_between, ->(from_date, to_date) { where('issues.due_date >= ?', from_date).where('issues.due_date <= ?', to_date) }
+ scope :due_tomorrow, -> { where(due_date: Date.tomorrow) }
scope :order_due_date_asc, -> { reorder('issues.due_date IS NULL, issues.due_date ASC') }
scope :order_due_date_desc, -> { reorder('issues.due_date IS NULL, issues.due_date DESC') }
+ scope :order_closest_future_date, -> { reorder('CASE WHEN due_date >= CURRENT_DATE THEN 0 ELSE 1 END ASC, ABS(CURRENT_DATE - due_date) ASC') }
scope :preload_associations, -> { preload(:labels, project: :namespace) }
scope :public_only, -> { where(confidential: false) }
after_save :expire_etag_cache
+ after_save :ensure_metrics, unless: :imported?
attr_spammable :title, spam_title: true
attr_spammable :description, spam_description: true
@@ -78,6 +85,11 @@ class Issue < ActiveRecord::Base
before_transition any => :closed do |issue|
issue.closed_at = Time.zone.now
end
+
+ before_transition closed: :opened do |issue|
+ issue.closed_at = nil
+ issue.closed_by = nil
+ end
end
class << self
@@ -110,8 +122,9 @@ class Issue < ActiveRecord::Base
'project_id'
end
- def self.sort(method, excluded_labels: [])
+ def self.sort_by_attribute(method, excluded_labels: [])
case method.to_s
+ when 'closest_future_date' then order_closest_future_date
when 'due_date' then order_due_date_asc
when 'due_date_asc' then order_due_date_asc
when 'due_date_desc' then order_due_date_desc
@@ -187,6 +200,15 @@ class Issue < ActiveRecord::Base
branches_with_iid - branches_with_merge_request
end
+ def suggested_branch_name
+ return to_branch_name unless project.repository.branch_exists?(to_branch_name)
+
+ start_counting_from = 2
+ Uniquify.new(start_counting_from).string(-> (counter) { "#{to_branch_name}-#{counter}" }) do |suggested_branch_name|
+ project.repository.branch_exists?(suggested_branch_name)
+ end
+ end
+
# Returns true if a related branch exists for the current issue;
# ignores merge request branches
def has_related_branch?
@@ -241,11 +263,8 @@ class Issue < ActiveRecord::Base
end
end
- def can_be_worked_on?(current_user)
- !self.closed? &&
- !self.project.forked? &&
- self.related_branches(current_user).empty? &&
- self.closed_by_merge_requests(current_user).empty?
+ def can_be_worked_on?
+ !self.closed? && !self.project.forked?
end
# Returns `true` if the current issue can be viewed by either a logged in User
@@ -266,11 +285,17 @@ class Issue < ActiveRecord::Base
def as_json(options = {})
super(options).tap do |json|
- if options.key?(:sidebar_endpoints) && project
+ if options.key?(:issue_endpoints) && project
url_helper = Gitlab::Routing.url_helpers
- json.merge!(issue_sidebar_endpoint: url_helper.project_issue_path(project, self, format: :json, serializer: 'sidebar'),
- toggle_subscription_endpoint: url_helper.toggle_subscription_project_issue_path(project, self))
+ issue_reference = options[:include_full_project_path] ? to_reference(full: true) : to_reference
+
+ json.merge!(
+ reference_path: issue_reference,
+ real_path: url_helper.project_issue_path(project, self),
+ issue_sidebar_endpoint: url_helper.project_issue_path(project, self, format: :json, serializer: 'sidebar'),
+ toggle_subscription_endpoint: url_helper.toggle_subscription_project_issue_path(project, self)
+ )
end
if options.key?(:labels)
@@ -283,6 +308,10 @@ class Issue < ActiveRecord::Base
end
end
+ def etag_caching_enabled?
+ true
+ end
+
def discussions_rendered_on_frontend?
true
end
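
The `suggested_branch_name` method above leans on the Uniquify helper; a sketch of the same call with concrete (assumed) values, where `project` is taken to be in scope:

# Starting the counter at 2, Uniquify keeps appending "-<counter>" while the
# block reports the candidate as taken, then returns the first free name.
Uniquify.new(2).string(->(counter) { "123-fix-login-#{counter}" }) do |candidate|
  project.repository.branch_exists?(candidate)
end
# => "123-fix-login-2" if that branch is free, otherwise "123-fix-login-3", ...
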
diff --git a/app/models/label.rb b/app/models/label.rb
index de7f1d56c64..7bbcaa121ca 100644
--- a/app/models/label.rb
+++ b/app/models/label.rb
@@ -85,11 +85,16 @@ class Label < ActiveRecord::Base
(#{Project.reference_pattern})?
#{Regexp.escape(reference_prefix)}
(?:
- (?<label_id>\d+(?!\S\w)\b) | # Integer-based label ID, or
- (?<label_name>
- [A-Za-z0-9_\-\?\.&]+ | # String-based single-word label title, or
- ".+?" # String-based multi-word label surrounded in quotes
- )
+ (?<label_id>\d+(?!\S\w)\b)
+ | # Integer-based label ID, or
+ (?<label_name>
+ # String-based single-word label title, or
+ [A-Za-z0-9_\-\?\.&]+
+ (?<!\.|\?)
+ |
+ # String-based multi-word label surrounded in quotes
+ ".+?"
+ )
)
}x
end
@@ -137,6 +142,10 @@ class Label < ActiveRecord::Base
priority.try(:priority)
end
+ def priority?
+ priorities.present?
+ end
+
def template?
template
end
diff --git a/app/models/lfs_object.rb b/app/models/lfs_object.rb
index b444812a4cf..84487031ee5 100644
--- a/app/models/lfs_object.rb
+++ b/app/models/lfs_object.rb
@@ -1,15 +1,32 @@
class LfsObject < ActiveRecord::Base
+ include AfterCommitQueue
+ include ObjectStorage::BackgroundMove
+
has_many :lfs_objects_projects, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
has_many :projects, through: :lfs_objects_projects
+ scope :with_files_stored_locally, -> { where(file_store: [nil, LfsObjectUploader::Store::LOCAL]) }
+
validates :oid, presence: true, uniqueness: true
mount_uploader :file, LfsObjectUploader
+ after_save :update_file_store, if: :file_changed?
+
+ def update_file_store
+ # The file.object_store is set during `uploader.store!`,
+ # which happens after the object is inserted/updated
+ self.update_column(:file_store, file.object_store)
+ end
+
def project_allowed_access?(project)
projects.exists?(project.lfs_storage_project.id)
end
+ def local_store?
+ [nil, LfsObjectUploader::Store::LOCAL].include?(self.file_store)
+ end
+
def self.destroy_unreferenced
joins("LEFT JOIN lfs_objects_projects ON lfs_objects_projects.lfs_object_id = #{table_name}.id")
.where(lfs_objects_projects: { id: nil })
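
The new scope is what the background-move machinery would iterate over; an illustrative call:

LfsObject.with_files_stored_locally.find_each do |lfs_object|
  # file_store is nil or LfsObjectUploader::Store::LOCAL, i.e. a candidate
  # for migration to object storage.
end
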
diff --git a/app/models/list.rb b/app/models/list.rb
index 918275be142..4edcfa78835 100644
--- a/app/models/list.rb
+++ b/app/models/list.rb
@@ -2,17 +2,27 @@ class List < ActiveRecord::Base
belongs_to :board
belongs_to :label
- enum list_type: { backlog: 0, label: 1, closed: 2 }
+ enum list_type: { backlog: 0, label: 1, closed: 2, assignee: 3 }
validates :board, :list_type, presence: true
validates :label, :position, presence: true, if: :label?
validates :label_id, uniqueness: { scope: :board_id }, if: :label?
- validates :position, numericality: { only_integer: true, greater_than_or_equal_to: 0 }, if: :label?
+ validates :position, numericality: { only_integer: true, greater_than_or_equal_to: 0 }, if: :movable?
before_destroy :can_be_destroyed
- scope :destroyable, -> { where(list_type: list_types[:label]) }
- scope :movable, -> { where(list_type: list_types[:label]) }
+ scope :destroyable, -> { where(list_type: list_types.slice(*destroyable_types).values) }
+ scope :movable, -> { where(list_type: list_types.slice(*movable_types).values) }
+
+ class << self
+ def destroyable_types
+ [:label]
+ end
+
+ def movable_types
+ [:label]
+ end
+ end
def destroyable?
label?
@@ -31,7 +41,8 @@ class List < ActiveRecord::Base
if options.key?(:label)
json[:label] = label.as_json(
project: board.project,
- only: [:id, :title, :description, :color]
+ only: [:id, :title, :description, :color],
+ methods: [:text_color]
)
end
end
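
With the class-level type lists above, the scopes are derived from the enum rather than hard-coded; roughly:

List.destroyable_types  # => [:label]
List.movable_types      # => [:label]
# List.movable therefore resolves to: WHERE list_type IN (1), i.e. only label lists
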
diff --git a/app/models/member.rb b/app/models/member.rb
index e1a32148538..68572f2e33a 100644
--- a/app/models/member.rb
+++ b/app/models/member.rb
@@ -96,7 +96,18 @@ class Member < ActiveRecord::Base
joins(:user).merge(User.search(query))
end
- def sort(method)
+ def filter_by_2fa(value)
+ case value
+ when 'enabled'
+ left_join_users.merge(User.with_two_factor_indistinct)
+ when 'disabled'
+ left_join_users.merge(User.without_two_factor)
+ else
+ all
+ end
+ end
+
+ def sort_by_attribute(method)
case method.to_s
when 'access_level_asc' then reorder(access_level: :asc)
when 'access_level_desc' then reorder(access_level: :desc)
diff --git a/app/models/members/group_member.rb b/app/models/members/group_member.rb
index 661e668dbf9..5da739f9618 100644
--- a/app/models/members/group_member.rb
+++ b/app/models/members/group_member.rb
@@ -37,20 +37,20 @@ class GroupMember < Member
private
def send_invite
- notification_service.invite_group_member(self, @raw_invite_token)
+ run_after_commit_or_now { notification_service.invite_group_member(self, @raw_invite_token) }
super
end
def post_create_hook
- notification_service.new_group_member(self)
+ run_after_commit_or_now { notification_service.new_group_member(self) }
super
end
def post_update_hook
if access_level_changed?
- notification_service.update_group_member(self)
+ run_after_commit { notification_service.update_group_member(self) }
end
super
diff --git a/app/models/members/project_member.rb b/app/models/members/project_member.rb
index 1c7ed4a96df..024106056b4 100644
--- a/app/models/members/project_member.rb
+++ b/app/models/members/project_member.rb
@@ -92,7 +92,7 @@ class ProjectMember < Member
private
def send_invite
- notification_service.invite_project_member(self, @raw_invite_token)
+ run_after_commit_or_now { notification_service.invite_project_member(self, @raw_invite_token) }
super
end
@@ -100,7 +100,7 @@ class ProjectMember < Member
def post_create_hook
unless owner?
event_service.join_project(self.project, self.user)
- notification_service.new_project_member(self)
+ run_after_commit_or_now { notification_service.new_project_member(self) }
end
super
@@ -108,7 +108,7 @@ class ProjectMember < Member
def post_update_hook
if access_level_changed?
- notification_service.update_project_member(self)
+ run_after_commit { notification_service.update_project_member(self) }
end
super
diff --git a/app/models/merge_request.rb b/app/models/merge_request.rb
index 7e6d89ec9c7..6c96c8ca391 100644
--- a/app/models/merge_request.rb
+++ b/app/models/merge_request.rb
@@ -1,5 +1,6 @@
class MergeRequest < ActiveRecord::Base
- include NonatomicInternalId
+ include AtomicInternalId
+ include IidRoutes
include Issuable
include Noteable
include Referable
@@ -18,6 +19,8 @@ class MergeRequest < ActiveRecord::Base
belongs_to :source_project, class_name: "Project"
belongs_to :merge_user, class_name: "User"
+ has_internal_id :iid, scope: :target_project, init: ->(s) { s&.target_project&.merge_requests&.maximum(:iid) }
+
has_many :merge_request_diffs
has_one :merge_request_diff,
@@ -56,6 +59,7 @@ class MergeRequest < ActiveRecord::Base
after_create :ensure_merge_request_diff, unless: :importing?
after_update :clear_memoized_shas
after_update :reload_diff_if_branch_changed
+ after_save :ensure_metrics
# When this attribute is true some MR validation is ignored
# It allows us to close or modify broken merge requests
@@ -102,24 +106,42 @@ class MergeRequest < ActiveRecord::Base
state_machine :merge_status, initial: :unchecked do
event :mark_as_unchecked do
- transition [:can_be_merged, :cannot_be_merged] => :unchecked
+ transition [:can_be_merged, :unchecked] => :unchecked
+ transition [:cannot_be_merged, :cannot_be_merged_recheck] => :cannot_be_merged_recheck
end
event :mark_as_mergeable do
- transition [:unchecked, :cannot_be_merged] => :can_be_merged
+ transition [:unchecked, :cannot_be_merged_recheck] => :can_be_merged
end
event :mark_as_unmergeable do
- transition [:unchecked, :can_be_merged] => :cannot_be_merged
+ transition [:unchecked, :cannot_be_merged_recheck] => :cannot_be_merged
end
state :unchecked
+ state :cannot_be_merged_recheck
state :can_be_merged
state :cannot_be_merged
around_transition do |merge_request, transition, block|
Gitlab::Timeless.timeless(merge_request, &block)
end
+
+ after_transition unchecked: :cannot_be_merged do |merge_request, transition|
+ begin
+ if merge_request.notify_conflict?
+ NotificationService.new.merge_request_unmergeable(merge_request)
+ TodoService.new.merge_request_became_unmergeable(merge_request)
+ end
+ rescue Gitlab::Git::CommandError
+ # Checking mergeability can trigger an exception, e.g. for non-UTF-8 data.
+ # We ignore this type of error.
+ end
+ end
+
+ def check_state?(merge_status)
+ [:unchecked, :cannot_be_merged_recheck].include?(merge_status.to_sym)
+ end
end
validates :source_project, presence: true, unless: [:allow_broken, :importing?, :closed_without_fork?]
@@ -321,10 +343,20 @@ class MergeRequest < ActiveRecord::Base
# updates `merge_jid` with the MergeWorker#jid.
# This helps track enqueued and ongoing merge jobs.
def merge_async(user_id, params)
- jid = MergeWorker.perform_async(id, user_id, params)
+ jid = MergeWorker.perform_async(id, user_id, params.to_h)
update_column(:merge_jid, jid)
end
+ def merge_participants
+ participants = [author]
+
+ if merge_when_pipeline_succeeds? && !participants.include?(merge_user)
+ participants << merge_user
+ end
+
+ participants
+ end
+
def first_commit
merge_request_diff ? merge_request_diff.first_commit : compare_commits.first
end
@@ -344,6 +376,10 @@ class MergeRequest < ActiveRecord::Base
end
end
+ def non_latest_diffs
+ merge_request_diffs.where.not(id: merge_request_diff.id)
+ end
+
def diff_size
# Calling `merge_request_diff.diffs.real_size` will also perform
# highlighting, which we don't need here.
@@ -536,18 +572,25 @@ class MergeRequest < ActiveRecord::Base
merge_request_diff(true)
end
+ def viewable_diffs
+ @viewable_diffs ||= merge_request_diffs.viewable.to_a
+ end
+
def merge_request_diff_for(diff_refs_or_sha)
- @merge_request_diffs_by_diff_refs_or_sha ||= Hash.new do |h, diff_refs_or_sha|
- diffs = merge_request_diffs.viewable
- h[diff_refs_or_sha] =
- if diff_refs_or_sha.is_a?(Gitlab::Diff::DiffRefs)
- diffs.find_by_diff_refs(diff_refs_or_sha)
- else
- diffs.find_by(head_commit_sha: diff_refs_or_sha)
- end
- end
+ matcher =
+ if diff_refs_or_sha.is_a?(Gitlab::Diff::DiffRefs)
+ {
+ 'start_commit_sha' => diff_refs_or_sha.start_sha,
+ 'head_commit_sha' => diff_refs_or_sha.head_sha,
+ 'base_commit_sha' => diff_refs_or_sha.base_sha
+ }
+ else
+ { 'head_commit_sha' => diff_refs_or_sha }
+ end
- @merge_request_diffs_by_diff_refs_or_sha[diff_refs_or_sha]
+ viewable_diffs.find do |diff|
+ diff.attributes.slice(*matcher.keys) == matcher
+ end
end
def version_params_for(diff_refs)
@@ -578,22 +621,11 @@ class MergeRequest < ActiveRecord::Base
def reload_diff(current_user = nil)
return unless open?
- old_diff_refs = self.diff_refs
- new_diff = create_merge_request_diff
-
- MergeRequests::MergeRequestDiffCacheService.new.execute(self, new_diff)
-
- new_diff_refs = self.diff_refs
-
- update_diff_discussion_positions(
- old_diff_refs: old_diff_refs,
- new_diff_refs: new_diff_refs,
- current_user: current_user
- )
+ MergeRequests::ReloadDiffsService.new(self, current_user).execute
end
def check_if_can_be_merged
- return unless unchecked? && Gitlab::Database.read_write?
+ return unless self.class.state_machines[:merge_status].check_state?(merge_status) && Gitlab::Database.read_write?
can_be_merged =
!broken? && project.repository.can_be_merged?(diff_head_sha, target_branch)
@@ -674,6 +706,10 @@ class MergeRequest < ActiveRecord::Base
should_remove_source_branch? || force_remove_source_branch?
end
+ def notify_conflict?
+ (opened? || locked?) && !project.repository.can_be_merged?(diff_head_sha, target_branch)
+ end
+
def related_notes
# Fetch comments only from last 100 commits
commits_for_notes_limit = 100
@@ -998,6 +1034,10 @@ class MergeRequest < ActiveRecord::Base
@merge_commit ||= project.commit(merge_commit_sha) if merge_commit_sha
end
+ def short_merge_commit_sha
+ Commit.truncate_sha(merge_commit_sha) if merge_commit_sha
+ end
+
def can_be_reverted?(current_user)
return false unless merge_commit
@@ -1079,6 +1119,10 @@ class MergeRequest < ActiveRecord::Base
true
end
+ def discussions_rendered_on_frontend?
+ true
+ end
+
def update_project_counter_caches
Projects::OpenMergeRequestsCountService.new(target_project).refresh_cache
end
@@ -1089,21 +1133,31 @@ class MergeRequest < ActiveRecord::Base
project.merge_requests.merged.where(author_id: author_id).empty?
end
- def allow_maintainer_to_push
- maintainer_push_possible? && super
+ # TODO: remove once production database rename completes
+ alias_attribute :allow_collaboration, :allow_maintainer_to_push
+
+ def allow_collaboration
+ collaborative_push_possible? && allow_maintainer_to_push
end
- alias_method :allow_maintainer_to_push?, :allow_maintainer_to_push
+ alias_method :allow_collaboration?, :allow_collaboration
- def maintainer_push_possible?
+ def collaborative_push_possible?
source_project.present? && for_fork? &&
target_project.visibility_level > Gitlab::VisibilityLevel::PRIVATE &&
source_project.visibility_level > Gitlab::VisibilityLevel::PRIVATE &&
!ProtectedBranch.protected?(source_project, source_branch)
end
- def can_allow_maintainer_to_push?(user)
- maintainer_push_possible? &&
+ def can_allow_collaboration?(user)
+ collaborative_push_possible? &&
Ability.allowed?(user, :push_code, source_project)
end
+
+ def squash_in_progress?
+ # The source project may have been deleted
+ return false unless source_project
+
+ source_project.repository.squash_in_progress?(id)
+ end
end
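
The extra `cannot_be_merged_recheck` state lets a re-check run without notifying twice; a rough walkthrough with an assumed `merge_request` that starts out unchecked:

merge_request.mark_as_unmergeable # unchecked -> cannot_be_merged; fires the conflict notification
merge_request.mark_as_unchecked   # cannot_be_merged -> cannot_be_merged_recheck
merge_request.mark_as_unmergeable # recheck -> cannot_be_merged; the unchecked-only hook does not re-fire
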
diff --git a/app/models/merge_request_diff.rb b/app/models/merge_request_diff.rb
index c1c27ccf3e5..3d72c447b4b 100644
--- a/app/models/merge_request_diff.rb
+++ b/app/models/merge_request_diff.rb
@@ -3,6 +3,7 @@ class MergeRequestDiff < ActiveRecord::Base
include Importable
include ManualInverseAssociation
include IgnorableColumn
+ include EachBatch
# Don't display more than 100 commits at once
COMMITS_SAFE_SIZE = 100
@@ -17,8 +18,14 @@ class MergeRequestDiff < ActiveRecord::Base
has_many :merge_request_diff_commits, -> { order(:merge_request_diff_id, :relative_order) }
state_machine :state, initial: :empty do
+ event :clean do
+ transition any => :without_files
+ end
+
state :collected
state :overflow
+ # Diff files have been deleted by the system
+ state :without_files
# Deprecated states: these are no longer used but these values may still occur
# in the database.
state :timeout
@@ -27,6 +34,7 @@ class MergeRequestDiff < ActiveRecord::Base
state :overflow_diff_lines_limit
end
+ scope :with_files, -> { without_states(:without_files, :empty) }
scope :viewable, -> { without_state(:empty) }
scope :by_commit_sha, ->(sha) do
joins(:merge_request_diff_commits).where(merge_request_diff_commits: { sha: sha }).reorder(nil)
@@ -42,6 +50,10 @@ class MergeRequestDiff < ActiveRecord::Base
find_by(start_commit_sha: diff_refs.start_sha, head_commit_sha: diff_refs.head_sha, base_commit_sha: diff_refs.base_sha)
end
+ def viewable?
+ collected? || without_files? || overflow?
+ end
+
# Collect information about commits and diff from repository
# and save it to the database as serialized data
def save_git_content
@@ -170,6 +182,21 @@ class MergeRequestDiff < ActiveRecord::Base
end
def diffs(diff_options = nil)
+ if without_files? && comparison = diff_refs.compare_in(project)
+ # When the diff files have been cleaned up by the system we fetch the
+ # comparison from the repository instead; we don't keep them, to avoid
+ # overloading storage.
+ # See https://gitlab.com/gitlab-org/gitlab-ce/issues/37639
+ comparison.diffs(diff_options)
+ else
+ diffs_collection(diff_options)
+ end
+ end
+
+ # Should always return the DB-persisted diffs collection
+ # (e.g. Gitlab::Diff::FileCollection::MergeRequestDiff).
+ # It's useful when trying to invalidate old caches through
+ # FileCollection::MergeRequestDiff#clear_cache!
+ def diffs_collection(diff_options = nil)
Gitlab::Diff::FileCollection::MergeRequestDiff.new(self, diff_options: diff_options)
end
@@ -197,10 +224,6 @@ class MergeRequestDiff < ActiveRecord::Base
CompareService.new(project, head_commit_sha).execute(project, sha, straight: true)
end
- def commits_count
- super || merge_request_diff_commits.size
- end
-
private
def create_merge_request_diff_files(diffs)
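
A sketch of the intended lifecycle for a diff whose files get cleaned up (`diff` is an assumed MergeRequestDiff record):

diff.clean!     # moves any state to :without_files once the rows have been removed
diff.viewable?  # => true, so existing discussions can still resolve their diffs
diff.diffs      # falls back to comparing the stored refs in the repository
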
diff --git a/app/models/merge_request_diff_commit.rb b/app/models/merge_request_diff_commit.rb
index b75387e236e..1c2e57bb01f 100644
--- a/app/models/merge_request_diff_commit.rb
+++ b/app/models/merge_request_diff_commit.rb
@@ -17,7 +17,7 @@ class MergeRequestDiffCommit < ActiveRecord::Base
commit_hash.merge(
merge_request_diff_id: merge_request_diff_id,
relative_order: index,
- sha: sha_attribute.type_cast_for_database(sha),
+ sha: sha_attribute.serialize(sha), # rubocop:disable Cop/ActiveRecordSerialize
authored_date: Gitlab::Database.sanitize_timestamp(commit_hash[:authored_date]),
committed_date: Gitlab::Database.sanitize_timestamp(commit_hash[:committed_date])
)
diff --git a/app/models/merge_request_diff_file.rb b/app/models/merge_request_diff_file.rb
index 1199ff5af22..cd8ba6b904d 100644
--- a/app/models/merge_request_diff_file.rb
+++ b/app/models/merge_request_diff_file.rb
@@ -1,5 +1,6 @@
class MergeRequestDiffFile < ActiveRecord::Base
include Gitlab::EncodingHelper
+ include DiffFile
belongs_to :merge_request_diff
@@ -12,10 +13,4 @@ class MergeRequestDiffFile < ActiveRecord::Base
def diff
binary? ? super.unpack('m0').first : super
end
-
- def to_hash
- keys = Gitlab::Git::Diff::SERIALIZE_KEYS - [:diff]
-
- as_json(only: keys).merge(diff: diff).with_indifferent_access
- end
end
diff --git a/app/models/milestone.rb b/app/models/milestone.rb
index e7d397f40f5..d05dcfd083a 100644
--- a/app/models/milestone.rb
+++ b/app/models/milestone.rb
@@ -8,7 +8,8 @@ class Milestone < ActiveRecord::Base
Started = MilestoneStruct.new('Started', '#started', -3)
include CacheMarkdownField
- include NonatomicInternalId
+ include AtomicInternalId
+ include IidRoutes
include Sortable
include Referable
include StripAttribute
@@ -21,6 +22,9 @@ class Milestone < ActiveRecord::Base
belongs_to :project
belongs_to :group
+ has_internal_id :iid, scope: :project, init: ->(s) { s&.project&.milestones&.maximum(:iid) }
+ has_internal_id :iid, scope: :group, init: ->(s) { s&.group&.milestones&.maximum(:iid) }
+
has_many :issues
has_many :labels, -> { distinct.reorder('labels.title') }, through: :issues
has_many :merge_requests
@@ -34,8 +38,8 @@ class Milestone < ActiveRecord::Base
scope :for_projects_and_groups, -> (project_ids, group_ids) do
conditions = []
- conditions << arel_table[:project_id].in(project_ids) if project_ids.compact.any?
- conditions << arel_table[:group_id].in(group_ids) if group_ids.compact.any?
+ conditions << arel_table[:project_id].in(project_ids) if project_ids&.compact&.any?
+ conditions << arel_table[:group_id].in(group_ids) if group_ids&.compact&.any?
where(conditions.reduce(:or))
end
@@ -138,7 +142,7 @@ class Milestone < ActiveRecord::Base
User.joins(assigned_issues: :milestone).where("milestones.id = ?", id).uniq
end
- def self.sort(method)
+ def self.sort_by_attribute(method)
case method.to_s
when 'due_date_asc'
reorder(Gitlab::Database.nulls_last_order('due_date', 'ASC'))
diff --git a/app/models/namespace.rb b/app/models/namespace.rb
index e350b675639..7034c633268 100644
--- a/app/models/namespace.rb
+++ b/app/models/namespace.rb
@@ -21,6 +21,9 @@ class Namespace < ActiveRecord::Base
has_many :projects, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
has_many :project_statistics
+ has_many :runner_namespaces, inverse_of: :namespace, class_name: 'Ci::RunnerNamespace'
+ has_many :runners, through: :runner_namespaces, source: :runner, class_name: 'Ci::Runner'
+
# This should _not_ be `inverse_of: :namespace`, because that would also set
# `user.namespace` when this user creates a group with themselves as `owner`.
belongs_to :owner, class_name: "User"
@@ -163,6 +166,13 @@ class Namespace < ActiveRecord::Base
projects.with_shared_runners.any?
end
+ # Returns all ancestors, self, and descendants of the current namespace.
+ def self_and_hierarchy
+ Gitlab::GroupHierarchy
+ .new(self.class.where(id: id))
+ .all_groups
+ end
+
# Returns all the ancestors of the current namespace.
def ancestors
return self.class.none unless parent_id
@@ -218,6 +228,10 @@ class Namespace < ActiveRecord::Base
parent.present?
end
+ def root_ancestor
+ ancestors.reorder(nil).find_by(parent_id: nil)
+ end
+
def subgroup?
has_parent?
end
@@ -248,8 +262,8 @@ class Namespace < ActiveRecord::Base
all_projects.with_storage_feature(:repository).find_each(&:remove_exports)
end
- def features
- []
+ def refresh_project_authorizations
+ owner.refresh_authorized_projects
end
private
diff --git a/app/models/note.rb b/app/models/note.rb
index 787a80f0196..abc40d9016e 100644
--- a/app/models/note.rb
+++ b/app/models/note.rb
@@ -63,6 +63,7 @@ class Note < ActiveRecord::Base
has_many :todos
has_many :events, as: :target, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
has_one :system_note_metadata
+ has_one :note_diff_file, inverse_of: :diff_note, foreign_key: :diff_note_id
delegate :gfm_reference, :local_reference, to: :noteable
delegate :name, to: :project, prefix: true
@@ -100,7 +101,8 @@ class Note < ActiveRecord::Base
scope :inc_author_project, -> { includes(:project, :author) }
scope :inc_author, -> { includes(:author) }
scope :inc_relations_for_view, -> do
- includes(:project, :author, :updated_by, :resolved_by, :award_emoji, :system_note_metadata)
+ includes(:project, :author, :updated_by, :resolved_by, :award_emoji,
+ :system_note_metadata, :note_diff_file)
end
scope :diff_notes, -> { where(type: %w(LegacyDiffNote DiffNote)) }
@@ -268,6 +270,10 @@ class Note < ActiveRecord::Base
self.special_role = Note::SpecialRole::FIRST_TIME_CONTRIBUTOR
end
+ def confidential?
+ noteable.try(:confidential?)
+ end
+
def editable?
!system?
end
@@ -378,13 +384,17 @@ class Note < ActiveRecord::Base
def expire_etag_cache
return unless noteable&.discussions_rendered_on_frontend?
+ return unless noteable&.etag_caching_enabled?
+
+ Gitlab::EtagCaching::Store.new.touch(etag_key)
+ end
- key = Gitlab::Routing.url_helpers.project_noteable_notes_path(
+ def etag_key
+ Gitlab::Routing.url_helpers.project_noteable_notes_path(
project,
target_type: noteable_type.underscore,
target_id: noteable_id
)
- Gitlab::EtagCaching::Store.new.touch(key)
end
def touch(*args)
@@ -426,6 +436,10 @@ class Note < ActiveRecord::Base
super.merge(noteable: noteable)
end
+ def retrieve_upload(_identifier, paths)
+ Upload.find_by(model: self, path: paths)
+ end
+
private
def keep_around_commit
diff --git a/app/models/note_diff_file.rb b/app/models/note_diff_file.rb
new file mode 100644
index 00000000000..e688018a6d9
--- /dev/null
+++ b/app/models/note_diff_file.rb
@@ -0,0 +1,7 @@
+class NoteDiffFile < ActiveRecord::Base
+ include DiffFile
+
+ belongs_to :diff_note, inverse_of: :note_diff_file
+
+ validates :diff_note, presence: true
+end
diff --git a/app/models/notification_recipient.rb b/app/models/notification_recipient.rb
index e95655e19f8..1a03dd9df56 100644
--- a/app/models/notification_recipient.rb
+++ b/app/models/notification_recipient.rb
@@ -1,4 +1,6 @@
class NotificationRecipient
+ include Gitlab::Utils::StrongMemoize
+
attr_reader :user, :type, :reason
def initialize(user, type, **opts)
unless NotificationSetting.levels.key?(type) || type == :subscription
@@ -48,7 +50,7 @@ class NotificationRecipient
when :custom
custom_enabled? || %i[participating mention].include?(@type)
when :watch, :participating
- !excluded_watcher_action?
+ !action_excluded?
when :mention
@type == :mention
else
@@ -64,7 +66,7 @@ class NotificationRecipient
return false unless @target
return false unless @target.respond_to?(:subscriptions)
- subscription = @target.subscriptions.find_by_user_id(@user.id)
+ subscription = @target.subscriptions.find { |subscription| subscription.user_id == @user.id }
subscription && !subscription.subscribed
end
@@ -83,26 +85,35 @@ class NotificationRecipient
def has_access?
DeclarativePolicy.subject_scope do
- return false unless user.can?(:receive_notifications)
- return true if @skip_read_ability
+ break false unless user.can?(:receive_notifications)
+ break true if @skip_read_ability
- return false if @target && !user.can?(:read_cross_project)
- return false if @project && !user.can?(:read_project, @project)
+ break false if @target && !user.can?(:read_cross_project)
+ break false if @project && !user.can?(:read_project, @project)
- return true unless read_ability
- return true unless DeclarativePolicy.has_policy?(@target)
+ break true unless read_ability
+ break true unless DeclarativePolicy.has_policy?(@target)
user.can?(read_ability, @target)
end
end
+ def action_excluded?
+ excluded_watcher_action? || excluded_participating_action?
+ end
+
def excluded_watcher_action?
- return false unless @custom_action
- return false if notification_level == :custom
+ return false unless @custom_action && notification_level == :watch
NotificationSetting::EXCLUDED_WATCHER_EVENTS.include?(@custom_action)
end
+ def excluded_participating_action?
+ return false unless @custom_action && notification_level == :participating
+
+ NotificationSetting::EXCLUDED_PARTICIPATING_EVENTS.include?(@custom_action)
+ end
+
private
def read_ability
@@ -133,10 +144,33 @@ class NotificationRecipient
return project_setting unless project_setting.nil? || project_setting.global?
- group_setting = @group && user.notification_settings_for(@group)
+ group_setting = closest_non_global_group_notification_settting
- return group_setting unless group_setting.nil? || group_setting.global?
+ return group_setting unless group_setting.nil?
user.global_notification_setting
end
+
+ # Returns the notification setting of the lowest group in the hierarchy with a non-global level
+ def closest_non_global_group_notification_settting
+ return unless @group
+ return if indexed_group_notification_settings.empty?
+
+ notification_setting = nil
+
+ @group.self_and_ancestors_ids.each do |id|
+ notification_setting = indexed_group_notification_settings[id]
+ break if notification_setting
+ end
+
+ notification_setting
+ end
+
+ def indexed_group_notification_settings
+ strong_memoize(:indexed_group_notification_settings) do
+ @group.notification_settings.where(user_id: user.id)
+ .where.not(level: NotificationSetting.levels[:global])
+ .index_by(&:source_id)
+ end
+ end
end
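
The `return` to `break` swap inside the `DeclarativePolicy.subject_scope` block changes how the early exits unwind; as a plain-Ruby reminder, `break` hands its value back to the yielding method instead of returning from the enclosing method outright:

def subject_scope
  yield
end

def check(flag)
  subject_scope do
    break :denied unless flag
    :granted
  end
end

check(true)  # => :granted
check(false) # => :denied
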
diff --git a/app/models/notification_setting.rb b/app/models/notification_setting.rb
index 245f8dddcf9..9195408551f 100644
--- a/app/models/notification_setting.rb
+++ b/app/models/notification_setting.rb
@@ -33,6 +33,7 @@ class NotificationSetting < ActiveRecord::Base
:close_issue,
:reassign_issue,
:new_merge_request,
+ :push_to_merge_request,
:reopen_merge_request,
:close_merge_request,
:reassign_merge_request,
@@ -41,10 +42,15 @@ class NotificationSetting < ActiveRecord::Base
:success_pipeline
].freeze
- EXCLUDED_WATCHER_EVENTS = [
+ EXCLUDED_PARTICIPATING_EVENTS = [
:success_pipeline
].freeze
+ EXCLUDED_WATCHER_EVENTS = [
+ :push_to_merge_request,
+ :issue_due
+ ].push(*EXCLUDED_PARTICIPATING_EVENTS).freeze
+
def self.find_or_create_for(source)
setting = find_or_initialize_by(source: source)
diff --git a/app/models/pages_domain.rb b/app/models/pages_domain.rb
index 588bd50ed77..bfea64c3759 100644
--- a/app/models/pages_domain.rb
+++ b/app/models/pages_domain.rb
@@ -6,8 +6,10 @@ class PagesDomain < ActiveRecord::Base
validates :domain, hostname: { allow_numeric_hostname: true }
validates :domain, uniqueness: { case_sensitive: false }
- validates :certificate, certificate: true, allow_nil: true, allow_blank: true
- validates :key, certificate_key: true, allow_nil: true, allow_blank: true
+ validates :certificate, presence: { message: 'must be present if HTTPS-only is enabled' }, if: ->(domain) { domain.project&.pages_https_only? }
+ validates :certificate, certificate: true, if: ->(domain) { domain.certificate.present? }
+ validates :key, presence: { message: 'must be present if HTTPS-only is enabled' }, if: ->(domain) { domain.project&.pages_https_only? }
+ validates :key, certificate_key: true, if: ->(domain) { domain.key.present? }
validates :verification_code, presence: true, allow_blank: false
validate :validate_pages_domain
@@ -17,7 +19,7 @@ class PagesDomain < ActiveRecord::Base
attr_encrypted :key,
mode: :per_attribute_iv_and_salt,
insecure_mode: true,
- key: Gitlab::Application.secrets.db_key_base,
+ key: Settings.attr_encrypted_db_key_base,
algorithm: 'aes-256-cbc'
after_initialize :set_verification_code
@@ -46,6 +48,10 @@ class PagesDomain < ActiveRecord::Base
!Gitlab::CurrentSettings.pages_domain_verification_enabled? || enabled_until.present?
end
+ def https?
+ certificate.present?
+ end
+
def to_param
domain
end
diff --git a/app/models/personal_snippet.rb b/app/models/personal_snippet.rb
index 82c1c4de3a0..355624fd552 100644
--- a/app/models/personal_snippet.rb
+++ b/app/models/personal_snippet.rb
@@ -1,2 +1,3 @@
class PersonalSnippet < Snippet
+ include WithUploads
end
diff --git a/app/models/project.rb b/app/models/project.rb
index 250680e2a2c..d91d7dcfe9a 100644
--- a/app/models/project.rb
+++ b/app/models/project.rb
@@ -21,6 +21,11 @@ class Project < ActiveRecord::Base
include Gitlab::SQL::Pattern
include DeploymentPlatform
include ::Gitlab::Utils::StrongMemoize
+ include ChronicDurationAttribute
+ include FastDestroyAll::Helpers
+ include WithUploads
+ include BatchDestroyDependentAssociations
+ extend Gitlab::Cache::RequestCache
extend Gitlab::ConfigHelper
@@ -63,15 +68,29 @@ class Project < ActiveRecord::Base
default_value_for :only_allow_merge_if_all_discussions_are_resolved, false
add_authentication_token_field :runners_token
+
+ before_validation :mark_remote_mirrors_for_removal, if: -> { RemoteMirror.table_exists? }
+
before_save :ensure_runners_token
after_save :update_project_statistics, if: :namespace_id_changed?
+
+ after_save :create_import_state, if: ->(project) { project.import? && project.import_state.nil? }
+
after_create :create_project_feature, unless: :project_feature
+
+ after_create :create_ci_cd_settings,
+ unless: :ci_cd_settings,
+ if: proc { ProjectCiCdSetting.available? }
+
after_create :set_last_activity_at
after_create :set_last_repository_updated_at
after_update :update_forks_visibility_level
before_destroy :remove_private_deploy_keys
+
+ use_fast_destroy :build_trace_chunks
+
after_destroy -> { run_after_commit { remove_pages } }
after_destroy :remove_exports
@@ -151,6 +170,8 @@ class Project < ActiveRecord::Base
has_one :fork_network_member
has_one :fork_network, through: :fork_network_member
+ has_one :import_state, autosave: true, class_name: 'ProjectImportState', inverse_of: :project
+
# Merge Requests for target project should be removed with it
has_many :merge_requests, foreign_key: 'target_project_id'
has_many :source_of_merge_requests, foreign_key: 'source_project_id', class_name: 'MergeRequest'
@@ -199,6 +220,7 @@ class Project < ActiveRecord::Base
has_one :cluster_project, class_name: 'Clusters::Project'
has_many :clusters, through: :cluster_project, class_name: 'Clusters::Cluster'
+ has_many :cluster_ingresses, through: :clusters, source: :application_ingress, class_name: 'Clusters::Applications::Ingress'
# Container repositories need to remove data from the container registry,
# which is not managed by the DB. Hence we're still using dependent: :destroy
@@ -207,6 +229,7 @@ class Project < ActiveRecord::Base
has_many :commit_statuses
has_many :pipelines, class_name: 'Ci::Pipeline', inverse_of: :project
+ has_many :stages, class_name: 'Ci::Stage', inverse_of: :project
# Ci::Build objects store data on the file system such as artifact files and
# build traces. Currently there's no efficient way of removing this data in
@@ -214,30 +237,39 @@ class Project < ActiveRecord::Base
# still using `dependent: :destroy` here.
has_many :builds, class_name: 'Ci::Build', inverse_of: :project, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
has_many :build_trace_section_names, class_name: 'Ci::BuildTraceSectionName'
- has_many :runner_projects, class_name: 'Ci::RunnerProject'
+ has_many :build_trace_chunks, class_name: 'Ci::BuildTraceChunk', through: :builds, source: :trace_chunks
+ has_many :runner_projects, class_name: 'Ci::RunnerProject', inverse_of: :project
has_many :runners, through: :runner_projects, source: :runner, class_name: 'Ci::Runner'
has_many :variables, class_name: 'Ci::Variable'
has_many :triggers, class_name: 'Ci::Trigger'
has_many :environments
has_many :deployments
has_many :pipeline_schedules, class_name: 'Ci::PipelineSchedule'
-
- has_many :active_runners, -> { active }, through: :runner_projects, source: :runner, class_name: 'Ci::Runner'
+ has_many :project_deploy_tokens
+ has_many :deploy_tokens, through: :project_deploy_tokens
has_one :auto_devops, class_name: 'ProjectAutoDevops'
has_many :custom_attributes, class_name: 'ProjectCustomAttribute'
has_many :project_badges, class_name: 'ProjectBadge'
+ has_one :ci_cd_settings, class_name: 'ProjectCiCdSetting', inverse_of: :project, autosave: true, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
+
+ has_many :remote_mirrors, inverse_of: :project
accepts_nested_attributes_for :variables, allow_destroy: true
accepts_nested_attributes_for :project_feature, update_only: true
accepts_nested_attributes_for :import_data
accepts_nested_attributes_for :auto_devops, update_only: true
+ accepts_nested_attributes_for :remote_mirrors,
+ allow_destroy: true,
+ reject_if: ->(attrs) { attrs[:id].blank? && attrs[:url].blank? }
+
delegate :name, to: :owner, allow_nil: true, prefix: true
delegate :members, to: :team, prefix: true
delegate :add_user, :add_users, to: :team
delegate :add_guest, :add_reporter, :add_developer, :add_master, :add_role, to: :team
+ delegate :group_runners_enabled, :group_runners_enabled=, :group_runners_enabled?, to: :ci_cd_settings
# Validations
validates :creator, presence: true, on: :create
@@ -259,21 +291,22 @@ class Project < ActiveRecord::Base
validates :namespace, presence: true
validates :name, uniqueness: { scope: :namespace_id }
- validates :import_url, addressable_url: true, if: :external_import?
- validates :import_url, importable_url: true, if: [:external_import?, :import_url_changed?]
+ validates :import_url, url: { protocols: %w(http https ssh git),
+ allow_localhost: false,
+ enforce_user: true,
+ ports: VALID_IMPORT_PORTS }, if: [:external_import?, :import_url_changed?]
validates :star_count, numericality: { greater_than_or_equal_to: 0 }
validate :check_limit, on: :create
validate :check_repository_path_availability, on: :update, if: ->(project) { project.renamed? }
validate :visibility_level_allowed_by_group
validate :visibility_level_allowed_as_fork
validate :check_wiki_path_conflict
+ validate :validate_pages_https_only, if: -> { changes.has_key?(:pages_https_only) }
validates :repository_storage,
presence: true,
inclusion: { in: ->(_object) { Gitlab.config.repositories.storages.keys } }
validates :variables, variable_duplicates: { scope: :environment_scope }
- has_many :uploads, as: :model, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
-
# Scopes
scope :pending_delete, -> { where(pending_delete: true) }
scope :without_deleted, -> { where(pending_delete: false) }
@@ -321,9 +354,21 @@ class Project < ActiveRecord::Base
scope :with_issues_enabled, -> { with_feature_enabled(:issues) }
scope :with_issues_available_for_user, ->(current_user) { with_feature_available_for_user(:issues, current_user) }
scope :with_merge_requests_enabled, -> { with_feature_enabled(:merge_requests) }
+ scope :with_remote_mirrors, -> { joins(:remote_mirrors).where(remote_mirrors: { enabled: true }).distinct }
+
+ scope :with_group_runners_enabled, -> do
+ joins(:ci_cd_settings)
+ .where(project_ci_cd_settings: { group_runners_enabled: true })
+ end
enum auto_cancel_pending_pipelines: { disabled: 0, enabled: 1 }
+ chronic_duration_attr :build_timeout_human_readable, :build_timeout, default: 3600
+
+ validates :build_timeout, allow_nil: true,
+ numericality: { greater_than_or_equal_to: 600,
+ message: 'needs to be at least 10 minutes' }
+
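
Assuming `chronic_duration_attr` behaves like the ChronicDurationAttribute concern added elsewhere in this changeset (a human-readable virtual attribute backed by the seconds column), the new timeout would be used roughly like this (`project` assumed):

project.build_timeout_human_readable = '1h 30m'
project.build_timeout                # => 5400 (seconds, validated to be >= 600)
project.build_timeout_human_readable # => a human-readable rendering such as "1h 30m"
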
# Returns a collection of projects that is either public or visible to the
# logged in user.
def self.public_or_visible_to_user(user = nil)
@@ -365,55 +410,9 @@ class Project < ActiveRecord::Base
scope :abandoned, -> { where('projects.last_activity_at < ?', 6.months.ago) }
scope :excluding_project, ->(project) { where.not(id: project) }
- scope :import_started, -> { where(import_status: 'started') }
-
- state_machine :import_status, initial: :none do
- event :import_schedule do
- transition [:none, :finished, :failed] => :scheduled
- end
-
- event :force_import_start do
- transition [:none, :finished, :failed] => :started
- end
- event :import_start do
- transition scheduled: :started
- end
-
- event :import_finish do
- transition started: :finished
- end
-
- event :import_fail do
- transition [:scheduled, :started] => :failed
- end
-
- event :import_retry do
- transition failed: :started
- end
-
- state :scheduled
- state :started
- state :finished
- state :failed
-
- after_transition [:none, :finished, :failed] => :scheduled do |project, _|
- project.run_after_commit do
- job_id = add_import_job
- update(import_jid: job_id) if job_id
- end
- end
-
- after_transition started: :finished do |project, _|
- project.reset_cache_and_import_attrs
-
- if Gitlab::ImportSources.importer_names.include?(project.import_type) && project.repo_exists?
- project.run_after_commit do
- Projects::AfterImportService.new(project).execute
- end
- end
- end
- end
+ scope :joins_import_state, -> { joins("LEFT JOIN project_mirror_data import_state ON import_state.project_id = projects.id") }
+ scope :import_started, -> { joins_import_state.where("import_state.status = 'started' OR projects.import_status = 'started'") }
class << self
# Searches for a list of projects based on the query given in `query`.
@@ -435,7 +434,7 @@ class Project < ActiveRecord::Base
Gitlab::VisibilityLevel.options
end
- def sort(method)
+ def sort_by_attribute(method)
case method.to_s
when 'storage_size_desc'
# storage_size is a joined column so we need to
@@ -502,10 +501,6 @@ class Project < ActiveRecord::Base
repository.empty?
end
- def repository_storage_path
- Gitlab.config.repositories.storages[repository_storage].try(:[], 'path')
- end
-
def team
@team ||= ProjectTeam.new(self)
end
@@ -565,9 +560,7 @@ class Project < ActiveRecord::Base
def add_import_job
job_id =
if forked?
- RepositoryForkWorker.perform_async(id,
- forked_from_project.repository_storage_path,
- forked_from_project.disk_path)
+ RepositoryForkWorker.perform_async(id)
elsif gitlab_project_import?
# Do not retry on Import/Export until https://gitlab.com/gitlab-org/gitlab-ce/issues/26189 is solved.
RepositoryImportWorker.set(retry: false).perform_async(self.id)
@@ -631,7 +624,7 @@ class Project < ActiveRecord::Base
end
def create_or_update_import_data(data: nil, credentials: nil)
- return unless import_url.present? && valid_import_url?
+ return if data.nil? && credentials.nil?
project_import_data = import_data || build_import_data
if data
@@ -649,10 +642,6 @@ class Project < ActiveRecord::Base
external_import? || forked? || gitlab_project_import? || bare_repository_import?
end
- def no_import?
- import_status == 'none'
- end
-
def external_import?
import_url.present?
end
@@ -665,6 +654,105 @@ class Project < ActiveRecord::Base
import_started? || import_scheduled?
end
+ def import_state_args
+ {
+ status: self[:import_status],
+ jid: self[:import_jid],
+ last_error: self[:import_error]
+ }
+ end
+
+ def ensure_import_state(force: false)
+ return if !force && (self[:import_status] == 'none' || self[:import_status].nil?)
+ return unless import_state.nil?
+
+ if persisted?
+ create_import_state(import_state_args)
+
+ update_column(:import_status, 'none')
+ else
+ build_import_state(import_state_args)
+
+ self[:import_status] = 'none'
+ end
+ end
+
+ def human_import_status_name
+ ensure_import_state
+
+ import_state.human_status_name
+ end
+
+ def import_schedule
+ ensure_import_state(force: true)
+
+ import_state.schedule
+ end
+
+ def force_import_start
+ ensure_import_state(force: true)
+
+ import_state.force_start
+ end
+
+ def import_start
+ ensure_import_state(force: true)
+
+ import_state.start
+ end
+
+ def import_fail
+ ensure_import_state(force: true)
+
+ import_state.fail_op
+ end
+
+ def import_finish
+ ensure_import_state(force: true)
+
+ import_state.finish
+ end
+
+ def import_jid=(new_jid)
+ ensure_import_state(force: true)
+
+ import_state.jid = new_jid
+ end
+
+ def import_jid
+ ensure_import_state
+
+ import_state&.jid
+ end
+
+ def import_error=(new_error)
+ ensure_import_state(force: true)
+
+ import_state.last_error = new_error
+ end
+
+ def import_error
+ ensure_import_state
+
+ import_state&.last_error
+ end
+
+ def import_status=(new_status)
+ ensure_import_state(force: true)
+
+ import_state.status = new_status
+ end
+
+ def import_status
+ ensure_import_state
+
+ import_state&.status || 'none'
+ end
+
+ def no_import?
+ import_status == 'none'
+ end
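# Editorial sketch, not part of this diff: expected behaviour of the facade
# above, which keeps the legacy project.import_* API while the data moves to
# ProjectImportState. Writers force-create the row; readers tolerate its absence.
#
#   project.import_status              # => "none" (no import_state row yet)
#   project.import_status = 'started'  # ensure_import_state(force: true) builds the row
#   project.import_state.status        # => "started"
#   project.no_import?                 # => false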
+
def import_started?
# import? does SQL work so only run it if it looks like there's an import running
import_status == 'started' && import?
@@ -698,6 +786,37 @@ class Project < ActiveRecord::Base
import_type == 'gitea'
end
+ def has_remote_mirror?
+ remote_mirror_available? && remote_mirrors.enabled.exists?
+ end
+
+ def updating_remote_mirror?
+ remote_mirrors.enabled.started.exists?
+ end
+
+ def update_remote_mirrors
+ return unless remote_mirror_available?
+
+ remote_mirrors.enabled.each(&:sync)
+ end
+
+ def mark_stuck_remote_mirrors_as_failed!
+ remote_mirrors.stuck.update_all(
+ update_status: :failed,
+      last_error: 'The remote mirror took too long to complete.',
+ last_update_at: Time.now
+ )
+ end
+
+ def mark_remote_mirrors_for_removal
+ remote_mirrors.each(&:mark_for_delete_if_blank_url)
+ end
+
+ def remote_mirror_available?
+ remote_mirror_available_overridden ||
+ ::Gitlab::CurrentSettings.mirror_available
+ end
+
def check_limit
unless creator.can_create_project? || namespace.kind == 'group'
projects_limit = creator.projects_limit
@@ -737,6 +856,26 @@ class Project < ActiveRecord::Base
end
end
+ def pages_https_only
+ return false unless Gitlab.config.pages.external_https
+
+ super
+ end
+
+ def pages_https_only?
+ return false unless Gitlab.config.pages.external_https
+
+ super
+ end
+
+ def validate_pages_https_only
+ return unless pages_https_only?
+
+ unless pages_domains.all?(&:https?)
+ errors.add(:pages_https_only, "cannot be enabled unless all domains have TLS certificates")
+ end
+ end
+
def to_param
if persisted? && errors.include?(:path)
path_was
@@ -766,6 +905,13 @@ class Project < ActiveRecord::Base
Gitlab::Routing.url_helpers.project_url(self)
end
+ def readme_url
+ readme = repository.readme
+ if readme
+ Gitlab::Routing.url_helpers.project_blob_url(self, File.join(default_branch, readme.path))
+ end
+ end
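# Editorial usage sketch (hypothetical host and paths): the URL is only built
# when the repository has a README on the default branch.
#
#   project.readme_url
#   # => "https://gitlab.example.com/group/project/blob/master/README.md"
#   project_without_readme.readme_url  # => nil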
+
def new_issuable_address(author, address_type)
return unless Gitlab::IncomingEmail.supports_issue_creation? && author
@@ -869,7 +1015,7 @@ class Project < ActiveRecord::Base
available_services_names = Service.available_services_names - exceptions
- available_services_names.map do |service_name|
+ available_services = available_services_names.map do |service_name|
service = find_service(services, service_name)
if service
@@ -886,6 +1032,14 @@ class Project < ActiveRecord::Base
end
end
end
+
+ available_services.reject do |service|
+ disabled_services.include?(service.to_param)
+ end
+ end
+
+ def disabled_services
+ []
end
def find_or_initialize_service(name)
@@ -1013,13 +1167,6 @@ class Project < ActiveRecord::Base
"#{web_url}.git"
end
- def user_can_push_to_empty_repo?(user)
- return false unless empty_repo?
- return false unless Ability.allowed?(user, :push_code, self)
-
- !ProtectedBranch.default_branch_protected? || team.max_member_access(user.id) > Gitlab::Access::DEVELOPER
- end
-
def forked?
return true if fork_network && fork_network.root_project != self
@@ -1047,6 +1194,16 @@ class Project < ActiveRecord::Base
end
end
+ # This will return all `lfs_objects` that are accessible to the project.
+  # So this might be `self.lfs_objects` if the project is not part of a fork
+  # network, or if it is the base project of the fork network.
+ #
+ # TODO: refactor this to get the correct lfs objects when implementing
+ # https://gitlab.com/gitlab-org/gitlab-ce/issues/39769
+ def all_lfs_objects
+ lfs_storage_project.lfs_objects
+ end
+
def personal?
!group
end
@@ -1068,7 +1225,7 @@ class Project < ActiveRecord::Base
# Check if repository already exists on disk
def check_repository_path_availability
return true if skip_disk_validation
- return false unless repository_storage_path
+ return false unless repository_storage
expires_full_path_cache # we need to clear cache to validate renames correctly
@@ -1268,28 +1425,31 @@ class Project < ActiveRecord::Base
@shared_runners ||= shared_runners_available? ? Ci::Runner.shared : Ci::Runner.none
end
- def active_shared_runners
- @active_shared_runners ||= shared_runners.active
+ def group_runners
+ @group_runners ||= group_runners_enabled? ? Ci::Runner.belonging_to_parent_group_of_project(self.id) : Ci::Runner.none
end
- def any_runners?(&block)
- active_runners.any?(&block) || active_shared_runners.any?(&block)
+ def all_runners
+ union = Gitlab::SQL::Union.new([runners, group_runners, shared_runners])
+ Ci::Runner.from("(#{union.to_sql}) ci_runners")
end
- def valid_runners_token?(token)
- self.runners_token && ActiveSupport::SecurityUtils.variable_size_secure_compare(token, self.runners_token)
+ def active_runners
+ strong_memoize(:active_runners) do
+ all_runners.active
+ end
end
- def build_timeout_in_minutes
- build_timeout / 60
+ def any_runners?(&block)
+ active_runners.any?(&block)
end
- def build_timeout_in_minutes=(value)
- self.build_timeout = value.to_i * 60
+ def valid_runners_token?(token)
+ self.runners_token && ActiveSupport::SecurityUtils.variable_size_secure_compare(token, self.runners_token)
end
- def open_issues_count
- Projects::OpenIssuesCountService.new(self).count
+ def open_issues_count(current_user = nil)
+ Projects::OpenIssuesCountService.new(self, current_user).count
end
def open_merge_requests_count
@@ -1325,20 +1485,19 @@ class Project < ActiveRecord::Base
Dir.exist?(public_pages_path)
end
- def pages_url
- subdomain, _, url_path = full_path.partition('/')
-
- # The hostname always needs to be in downcased
- # All web servers convert hostname to lowercase
- host = "#{subdomain}.#{Settings.pages.host}".downcase
-
+ def pages_group_url
     # The host in the URL always needs to be downcased
- url = Gitlab.config.pages.url.sub(%r{^https?://}) do |prefix|
- "#{prefix}#{subdomain}."
+ Gitlab.config.pages.url.sub(%r{^https?://}) do |prefix|
+ "#{prefix}#{pages_subdomain}."
end.downcase
+ end
+
+ def pages_url
+ url = pages_group_url
+ url_path = full_path.partition('/').last
     # If the project path is the same as the host, we serve it as a group page
- return url if host == url_path
+ return url if url == "#{Settings.pages.protocol}://#{url_path}"
"#{url}/#{url_path}"
end
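# Standalone editorial sketch of the group-page rule above, assuming a Pages
# host of "example.io"; it mirrors the logic without touching GitLab settings.
def sketch_pages_url(full_path, pages_base_url = 'https://example.io')
  subdomain, _, url_path = full_path.partition('/')
  url = pages_base_url.sub(%r{^https?://}) { |prefix| "#{prefix}#{subdomain}." }.downcase
  # If the project path is the same as the host, serve it as the group page
  return url if url == "https://#{url_path}"

  "#{url}/#{url_path}"
end

# sketch_pages_url('group/project')          # => "https://group.example.io/project"
# sketch_pages_url('group/group.example.io') # => "https://group.example.io"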
@@ -1445,7 +1604,9 @@ class Project < ActiveRecord::Base
end
def rename_repo_notify!
- send_move_instructions(full_path_was)
+ # When we import a project overwriting the original project, there
+ # is a move operation. In that case we don't want to send the instructions.
+ send_move_instructions(full_path_was) unless import_started?
expires_full_path_cache
self.old_path_with_namespace = full_path_was
@@ -1456,10 +1617,12 @@ class Project < ActiveRecord::Base
def after_import
repository.after_import
+ wiki.repository.after_import
import_finish
remove_import_jid
update_project_counter_caches
after_create_default_branch
+ refresh_markdown_cache!
end
def update_project_counter_caches
@@ -1498,13 +1661,8 @@ class Project < ActiveRecord::Base
return unless import_jid
Gitlab::SidekiqStatus.unset(import_jid)
- update_column(:import_jid, nil)
- end
- def running_or_pending_build_count(force: false)
- Rails.cache.fetch(['projects', id, 'running_or_pending_build_count'], force: force) do
- builds.running_or_pending.count(:all)
- end
+ import_state.update_column(:jid, nil)
end
# Lazy loading of the `pipeline_status` attribute
@@ -1517,15 +1675,16 @@ class Project < ActiveRecord::Base
sanitized_message = Gitlab::UrlSanitizer.sanitize(error_message)
import_fail
- update_column(:import_error, sanitized_message)
+
+ import_state.update_column(:last_error, sanitized_message)
rescue ActiveRecord::ActiveRecordError => e
Rails.logger.error("Error setting import status to failed: #{e.message}. Original error: #{sanitized_message}")
ensure
@errors = original_errors
end
- def add_export_job(current_user:, params: {})
- job_id = ProjectExportWorker.perform_async(current_user.id, self.id, params)
+ def add_export_job(current_user:, after_export_strategy: nil, params: {})
+ job_id = ProjectExportWorker.perform_async(current_user.id, self.id, after_export_strategy, params)
if job_id
Rails.logger.info "Export job started for project ID #{self.id} with job ID #{job_id}"
@@ -1551,6 +1710,8 @@ class Project < ActiveRecord::Base
def export_status
if export_in_progress?
:started
+ elsif after_export_in_progress?
+ :after_export_action
elsif export_project_path
:finished
else
@@ -1562,12 +1723,22 @@ class Project < ActiveRecord::Base
import_export_shared.active_export_count > 0
end
+ def after_export_in_progress?
+ import_export_shared.after_export_in_progress?
+ end
+
def remove_exports
return nil unless export_path.present?
FileUtils.rm_rf(export_path)
end
+ def remove_exported_project_file
+ return unless export_project_path.present?
+
+ FileUtils.rm_f(export_project_path)
+ end
+
def full_path_slug
Gitlab::Utils.slugify(full_path.to_s)
end
@@ -1593,7 +1764,7 @@ class Project < ActiveRecord::Base
def container_registry_variables
Gitlab::Ci::Variables::Collection.new.tap do |variables|
- return variables unless Gitlab.config.registry.enabled
+ break variables unless Gitlab.config.registry.enabled
variables.append(key: 'CI_REGISTRY', value: Gitlab.config.registry.host_port)
@@ -1813,24 +1984,45 @@ class Project < ActiveRecord::Base
.limit(1)
.select(1)
source_of_merge_requests.opened
- .where(allow_maintainer_to_push: true)
+ .where(allow_collaboration: true)
.where('EXISTS (?)', developer_access_exists)
end
- def branch_allows_maintainer_push?(user, branch_name)
+ def branch_allows_collaboration?(user, branch_name)
return false unless user
cache_key = "user:#{user.id}:#{branch_name}:branch_allows_push"
- memoized_results = strong_memoize(:branch_allows_maintainer_push) do
+ memoized_results = strong_memoize(:branch_allows_collaboration) do
Hash.new do |result, cache_key|
- result[cache_key] = fetch_branch_allows_maintainer_push?(user, branch_name)
+ result[cache_key] = fetch_branch_allows_collaboration?(user, branch_name)
end
end
memoized_results[cache_key]
end
+ def licensed_features
+ []
+ end
+
+ def toggle_ci_cd_settings!(settings_attribute)
+ ci_cd_settings.toggle!(settings_attribute)
+ end
+
+ def gitlab_deploy_token
+ @gitlab_deploy_token ||= deploy_tokens.gitlab_deploy_token
+ end
+
+ def any_lfs_file_locks?
+ lfs_file_locks.any?
+ end
+ request_cache(:any_lfs_file_locks?) { self.id }
+
+ def auto_cancel_pending_pipelines?
+ auto_cancel_pending_pipelines == 'enabled'
+ end
+
private
def storage
@@ -1859,14 +2051,14 @@ class Project < ActiveRecord::Base
def check_repository_absence!
return if skip_disk_validation
- if repository_storage_path.blank? || repository_with_same_path_already_exists?
+ if repository_storage.blank? || repository_with_same_path_already_exists?
errors.add(:base, 'There is already a repository with that name on disk')
throw :abort
end
end
def repository_with_same_path_already_exists?
- gitlab_shell.exists?(repository_storage_path, "#{disk_path}.git")
+ gitlab_shell.exists?(repository_storage, "#{disk_path}.git")
end
# set last_activity_at to the same as created_at
@@ -1954,17 +2146,22 @@ class Project < ActiveRecord::Base
raise ex
end
- def fetch_branch_allows_maintainer_push?(user, branch_name)
+ def fetch_branch_allows_collaboration?(user, branch_name)
check_access = -> do
- merge_request = source_of_merge_requests.opened
- .where(allow_maintainer_to_push: true)
- .find_by(source_branch: branch_name)
+ next false if empty_repo?
+
+ merge_requests = source_of_merge_requests.opened
+ .where(allow_collaboration: true)
- merge_request&.can_be_merged_by?(user)
+ if branch_name
+ merge_requests.find_by(source_branch: branch_name)&.can_be_merged_by?(user)
+ else
+ merge_requests.any? { |merge_request| merge_request.can_be_merged_by?(user) }
+ end
end
if RequestStore.active?
- RequestStore.fetch("project-#{id}:branch-#{branch_name}:user-#{user.id}:branch_allows_maintainer_push") do
+ RequestStore.fetch("project-#{id}:branch-#{branch_name}:user-#{user.id}:branch_allows_collaboration") do
check_access.call
end
else
diff --git a/app/models/project_auto_devops.rb b/app/models/project_auto_devops.rb
index ed6c1eddbc1..faa831b1949 100644
--- a/app/models/project_auto_devops.rb
+++ b/app/models/project_auto_devops.rb
@@ -1,11 +1,18 @@
class ProjectAutoDevops < ActiveRecord::Base
belongs_to :project
+ enum deploy_strategy: {
+ continuous: 0,
+ manual: 1
+ }
+
scope :enabled, -> { where(enabled: true) }
scope :disabled, -> { where(enabled: false) }
validates :domain, allow_blank: true, hostname: { allow_numeric_hostname: true }
+ after_save :create_gitlab_deploy_token, if: :needs_to_create_deploy_token?
+
def instance_domain
Gitlab::CurrentSettings.auto_devops_domain
end
@@ -20,6 +27,30 @@ class ProjectAutoDevops < ActiveRecord::Base
variables.append(key: 'AUTO_DEVOPS_DOMAIN',
value: domain.presence || instance_domain)
end
+
+ if manual?
+ variables.append(key: 'STAGING_ENABLED', value: '1')
+ variables.append(key: 'INCREMENTAL_ROLLOUT_ENABLED', value: '1')
+ end
end
end
+
+ private
+
+ def create_gitlab_deploy_token
+ project.deploy_tokens.create!(
+ name: DeployToken::GITLAB_DEPLOY_TOKEN_NAME,
+ read_registry: true
+ )
+ end
+
+ def needs_to_create_deploy_token?
+ auto_devops_enabled? &&
+ !project.public? &&
+ !project.deploy_tokens.find_by(name: DeployToken::GITLAB_DEPLOY_TOKEN_NAME).present?
+ end
+
+ def auto_devops_enabled?
+ Gitlab::CurrentSettings.auto_devops_enabled? || enabled?
+ end
end
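# Editorial sketch of the after_save hook above: the GitLab deploy token is
# created at most once, and only for non-public projects that have Auto DevOps
# enabled (locally or instance-wide).
#
#   project_auto_devops.update!(enabled: true)
#   project.deploy_tokens.find_by(name: DeployToken::GITLAB_DEPLOY_TOKEN_NAME)
#   # => #<DeployToken ... read_registry: true>
#   project_auto_devops.save  # no-op: needs_to_create_deploy_token? is now false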
diff --git a/app/models/project_ci_cd_setting.rb b/app/models/project_ci_cd_setting.rb
new file mode 100644
index 00000000000..588cced5781
--- /dev/null
+++ b/app/models/project_ci_cd_setting.rb
@@ -0,0 +1,16 @@
+class ProjectCiCdSetting < ActiveRecord::Base
+ belongs_to :project, inverse_of: :ci_cd_settings
+
+ # The version of the schema that first introduced this model/table.
+ MINIMUM_SCHEMA_VERSION = 20180403035759
+
+ def self.available?
+ @available ||=
+ ActiveRecord::Migrator.current_version >= MINIMUM_SCHEMA_VERSION
+ end
+
+ def self.reset_column_information
+ @available = nil
+ super
+ end
+end
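# Editorial usage sketch: the class-level check above gates code paths behind
# the migration that created project_ci_cd_settings, so callers can degrade
# gracefully on not-yet-migrated installations.
#
#   if ProjectCiCdSetting.available?
#     project.ci_cd_settings.group_runners_enabled?
#   else
#     true # hypothetical pre-migration fallback chosen by the caller
#   end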
diff --git a/app/models/project_deploy_token.rb b/app/models/project_deploy_token.rb
new file mode 100644
index 00000000000..ab4482f0c0b
--- /dev/null
+++ b/app/models/project_deploy_token.rb
@@ -0,0 +1,8 @@
+class ProjectDeployToken < ActiveRecord::Base
+ belongs_to :project
+ belongs_to :deploy_token, inverse_of: :project_deploy_tokens
+
+ validates :deploy_token, presence: true
+ validates :project, presence: true
+ validates :deploy_token_id, uniqueness: { scope: [:project_id] }
+end
diff --git a/app/models/project_import_data.rb b/app/models/project_import_data.rb
index 6da6632f4f2..1d7089ccfc7 100644
--- a/app/models/project_import_data.rb
+++ b/app/models/project_import_data.rb
@@ -3,7 +3,7 @@ require 'carrierwave/orm/activerecord'
class ProjectImportData < ActiveRecord::Base
belongs_to :project, inverse_of: :import_data
attr_encrypted :credentials,
- key: Gitlab::Application.secrets.db_key_base,
+ key: Settings.attr_encrypted_db_key_base,
marshal: true,
encode: true,
mode: :per_attribute_iv_and_salt,
diff --git a/app/models/project_import_state.rb b/app/models/project_import_state.rb
new file mode 100644
index 00000000000..1605317ae14
--- /dev/null
+++ b/app/models/project_import_state.rb
@@ -0,0 +1,55 @@
+class ProjectImportState < ActiveRecord::Base
+ include AfterCommitQueue
+
+ self.table_name = "project_mirror_data"
+
+ belongs_to :project, inverse_of: :import_state
+
+ validates :project, presence: true
+
+ state_machine :status, initial: :none do
+ event :schedule do
+ transition [:none, :finished, :failed] => :scheduled
+ end
+
+ event :force_start do
+ transition [:none, :finished, :failed] => :started
+ end
+
+ event :start do
+ transition scheduled: :started
+ end
+
+ event :finish do
+ transition started: :finished
+ end
+
+ event :fail_op do
+ transition [:scheduled, :started] => :failed
+ end
+
+ state :scheduled
+ state :started
+ state :finished
+ state :failed
+
+ after_transition [:none, :finished, :failed] => :scheduled do |state, _|
+ state.run_after_commit do
+ job_id = project.add_import_job
+ update(jid: job_id) if job_id
+ end
+ end
+
+ after_transition started: :finished do |state, _|
+ project = state.project
+
+ project.reset_cache_and_import_attrs
+
+ if Gitlab::ImportSources.importer_names.include?(project.import_type) && project.repo_exists?
+ state.run_after_commit do
+ Projects::AfterImportService.new(project).execute
+ end
+ end
+ end
+ end
+end
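# Editorial sketch (hypothetical console session) of the lifecycle defined
# above; the event names come straight from the state machine.
#
#   state = project.create_import_state        # status: "none"
#   state.schedule   # none -> scheduled, enqueues project.add_import_job after commit
#   state.start      # scheduled -> started
#   state.finish     # started -> finished, runs Projects::AfterImportService for importers
#   state.fail_op    # scheduled/started -> failed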
diff --git a/app/models/project_services/bamboo_service.rb b/app/models/project_services/bamboo_service.rb
index 54e4b3278db..7f4c47a6d14 100644
--- a/app/models/project_services/bamboo_service.rb
+++ b/app/models/project_services/bamboo_service.rb
@@ -3,7 +3,7 @@ class BambooService < CiService
prop_accessor :bamboo_url, :build_key, :username, :password
- validates :bamboo_url, presence: true, url: true, if: :activated?
+ validates :bamboo_url, presence: true, public_url: true, if: :activated?
validates :build_key, presence: true, if: :activated?
validates :username,
presence: true,
diff --git a/app/models/project_services/bugzilla_service.rb b/app/models/project_services/bugzilla_service.rb
index 046e2809f45..e4e3a80976b 100644
--- a/app/models/project_services/bugzilla_service.rb
+++ b/app/models/project_services/bugzilla_service.rb
@@ -1,5 +1,5 @@
class BugzillaService < IssueTrackerService
- validates :project_url, :issues_url, :new_issue_url, presence: true, url: true, if: :activated?
+ validates :project_url, :issues_url, :new_issue_url, presence: true, public_url: true, if: :activated?
prop_accessor :title, :description, :project_url, :issues_url, :new_issue_url
diff --git a/app/models/project_services/buildkite_service.rb b/app/models/project_services/buildkite_service.rb
index d2aaff8817a..35884c4560c 100644
--- a/app/models/project_services/buildkite_service.rb
+++ b/app/models/project_services/buildkite_service.rb
@@ -8,7 +8,7 @@ class BuildkiteService < CiService
prop_accessor :project_url, :token
boolean_accessor :enable_ssl_verification
- validates :project_url, presence: true, url: true, if: :activated?
+ validates :project_url, presence: true, public_url: true, if: :activated?
validates :token, presence: true, if: :activated?
after_save :compose_service_hook, if: :activated?
diff --git a/app/models/project_services/chat_message/base_message.rb b/app/models/project_services/chat_message/base_message.rb
index 22a65b5145e..f710fa85b5d 100644
--- a/app/models/project_services/chat_message/base_message.rb
+++ b/app/models/project_services/chat_message/base_message.rb
@@ -26,13 +26,18 @@ module ChatMessage
end
end
- def pretext
+ def summary
return message if markdown
format(message)
end
+ def pretext
+ summary
+ end
+
def fallback
+ format(message)
end
def attachments
diff --git a/app/models/project_services/chat_message/pipeline_message.rb b/app/models/project_services/chat_message/pipeline_message.rb
index 2135122278a..96fd23aede3 100644
--- a/app/models/project_services/chat_message/pipeline_message.rb
+++ b/app/models/project_services/chat_message/pipeline_message.rb
@@ -23,10 +23,6 @@ module ChatMessage
''
end
- def fallback
- format(message)
- end
-
def attachments
return message if markdown
diff --git a/app/models/project_services/chat_notification_service.rb b/app/models/project_services/chat_notification_service.rb
index dab0ea1a681..a60b4c7fd0d 100644
--- a/app/models/project_services/chat_notification_service.rb
+++ b/app/models/project_services/chat_notification_service.rb
@@ -8,7 +8,7 @@ class ChatNotificationService < Service
prop_accessor :webhook, :username, :channel
boolean_accessor :notify_only_broken_pipelines, :notify_only_default_branch
- validates :webhook, presence: true, url: true, if: :activated?
+ validates :webhook, presence: true, public_url: true, if: :activated?
def initialize_properties
# Custom serialized properties initialization
@@ -21,8 +21,16 @@ class ChatNotificationService < Service
end
end
+ def confidential_issue_channel
+ properties['confidential_issue_channel'].presence || properties['issue_channel']
+ end
+
+ def confidential_note_channel
+ properties['confidential_note_channel'].presence || properties['note_channel']
+ end
+
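# Editorial usage sketch of the fallback readers above: confidential events
# reuse the non-confidential channel unless one is configured explicitly.
#
#   service.properties = { 'issue_channel' => '#dev' }
#   service.confidential_issue_channel                            # => "#dev"
#   service.properties['confidential_issue_channel'] = '#private'
#   service.confidential_issue_channel                            # => "#private"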
def self.supported_events
- %w[push issue confidential_issue merge_request note tag_push
+ %w[push issue confidential_issue merge_request note confidential_note tag_push
pipeline wiki_page]
end
@@ -55,7 +63,9 @@ class ChatNotificationService < Service
return false unless message
- channel_name = get_channel_field(object_kind).presence || channel
+ event_type = data[:event_type] || object_kind
+
+ channel_name = get_channel_field(event_type).presence || channel
opts = {}
opts[:channel] = channel_name if channel_name
@@ -145,6 +155,7 @@ class ChatNotificationService < Service
end
def notify_for_ref?(data)
+ return true if data[:object_kind] == 'tag_push'
return true if data.dig(:object_attributes, :tag)
return true unless notify_only_default_branch?
diff --git a/app/models/project_services/custom_issue_tracker_service.rb b/app/models/project_services/custom_issue_tracker_service.rb
index b9e3e982b64..456c7f5cee2 100644
--- a/app/models/project_services/custom_issue_tracker_service.rb
+++ b/app/models/project_services/custom_issue_tracker_service.rb
@@ -1,5 +1,5 @@
class CustomIssueTrackerService < IssueTrackerService
- validates :project_url, :issues_url, :new_issue_url, presence: true, url: true, if: :activated?
+ validates :project_url, :issues_url, :new_issue_url, presence: true, public_url: true, if: :activated?
prop_accessor :title, :description, :project_url, :issues_url, :new_issue_url
diff --git a/app/models/project_services/drone_ci_service.rb b/app/models/project_services/drone_ci_service.rb
index 71b10fc6bc1..ab4e46da89f 100644
--- a/app/models/project_services/drone_ci_service.rb
+++ b/app/models/project_services/drone_ci_service.rb
@@ -4,7 +4,7 @@ class DroneCiService < CiService
prop_accessor :drone_url, :token
boolean_accessor :enable_ssl_verification
- validates :drone_url, presence: true, url: true, if: :activated?
+ validates :drone_url, presence: true, public_url: true, if: :activated?
validates :token, presence: true, if: :activated?
after_save :compose_service_hook, if: :activated?
@@ -115,6 +115,6 @@ class DroneCiService < CiService
def merge_request_valid?(data)
data[:object_attributes][:state] == 'opened' &&
- data[:object_attributes][:merge_status] == 'unchecked'
+ MergeRequest.state_machines[:merge_status].check_state?(data[:object_attributes][:merge_status])
end
end
diff --git a/app/models/project_services/external_wiki_service.rb b/app/models/project_services/external_wiki_service.rb
index 1553f169827..a4b1ef09e93 100644
--- a/app/models/project_services/external_wiki_service.rb
+++ b/app/models/project_services/external_wiki_service.rb
@@ -1,7 +1,7 @@
class ExternalWikiService < Service
prop_accessor :external_wiki_url
- validates :external_wiki_url, presence: true, url: true, if: :activated?
+ validates :external_wiki_url, presence: true, public_url: true, if: :activated?
def title
'External Wiki'
diff --git a/app/models/project_services/flowdock_service.rb b/app/models/project_services/flowdock_service.rb
index 4d23a17a545..da01ac1b7cf 100644
--- a/app/models/project_services/flowdock_service.rb
+++ b/app/models/project_services/flowdock_service.rb
@@ -1,5 +1,51 @@
require "flowdock-git-hook"
+# Flowdock depends on Grit to compute the number of commits between two given
+# commits. To make this go through Gitaly instead, a monkey patch is applied.
+module Flowdock
+ class Git
+    # Pass a Repository all the way down
+ def repo
+ @options[:repo]
+ end
+
+ def config
+ {}
+ end
+
+ def messages
+ Git::Builder.new(repo: repo,
+ ref: @ref,
+ before: @from,
+ after: @to,
+ commit_url: @commit_url,
+ branch_url: @branch_url,
+ diff_url: @diff_url,
+ repo_url: @repo_url,
+ repo_name: @repo_name,
+ permanent_refs: @permanent_refs,
+ tags: tags
+ ).to_hashes
+ end
+
+ class Builder
+ def commits
+ @repo.commits_between(@before, @after).map do |commit|
+ {
+ url: @opts[:commit_url] ? @opts[:commit_url] % [commit.sha] : nil,
+ id: commit.sha,
+ message: commit.message,
+ author: {
+ name: commit.author_name,
+ email: commit.author_email
+ }
+ }
+ end
+ end
+ end
+ end
+end
+
class FlowdockService < Service
prop_accessor :token
validates :token, presence: true, if: :activated?
@@ -34,7 +80,7 @@ class FlowdockService < Service
data[:before],
data[:after],
token: token,
- repo: project.repository.path_to_repo,
+ repo: project.repository,
repo_url: "#{Gitlab.config.gitlab.url}/#{project.full_path}",
commit_url: "#{Gitlab.config.gitlab.url}/#{project.full_path}/commit/%s",
diff_url: "#{Gitlab.config.gitlab.url}/#{project.full_path}/compare/%s...%s"
diff --git a/app/models/project_services/gemnasium_service.rb b/app/models/project_services/gemnasium_service.rb
index 017a9b2df6e..8a6b0ed1a5f 100644
--- a/app/models/project_services/gemnasium_service.rb
+++ b/app/models/project_services/gemnasium_service.rb
@@ -3,6 +3,7 @@ require "gemnasium/gitlab_service"
class GemnasiumService < Service
prop_accessor :token, :api_key
validates :token, :api_key, presence: true, if: :activated?
+ validate :deprecation_validation
def title
'Gemnasium'
@@ -27,16 +28,33 @@ class GemnasiumService < Service
%w(push)
end
+ def deprecated?
+ true
+ end
+
+ def deprecation_message
+ "Gemnasium has been acquired by GitLab in January 2018. Since May 15, 2018, the service provided by Gemnasium is no longer available."
+ end
+
+ def deprecation_validation
+ errors[:base] << deprecation_message
+ end
+
def execute(data)
return unless supported_events.include?(data[:object_kind])
+ # Gitaly: this class will be removed https://gitlab.com/gitlab-org/gitlab-ee/issues/6010
+ repo_path = Gitlab::GitalyClient::StorageSettings.allow_disk_access do
+ project.repository.path_to_repo
+ end
+
Gemnasium::GitlabService.execute(
ref: data[:ref],
before: data[:before],
after: data[:after],
token: token,
api_key: api_key,
- repo: project.repository.path_to_repo
+ repo: repo_path
)
end
end
diff --git a/app/models/project_services/gitlab_issue_tracker_service.rb b/app/models/project_services/gitlab_issue_tracker_service.rb
index 88c428b4aae..16e32a4139e 100644
--- a/app/models/project_services/gitlab_issue_tracker_service.rb
+++ b/app/models/project_services/gitlab_issue_tracker_service.rb
@@ -1,7 +1,7 @@
class GitlabIssueTrackerService < IssueTrackerService
include Gitlab::Routing
- validates :project_url, :issues_url, :new_issue_url, presence: true, url: true, if: :activated?
+ validates :project_url, :issues_url, :new_issue_url, presence: true, public_url: true, if: :activated?
prop_accessor :title, :description, :project_url, :issues_url, :new_issue_url
diff --git a/app/models/project_services/hipchat_service.rb b/app/models/project_services/hipchat_service.rb
index f31c3f02af2..dce878e485f 100644
--- a/app/models/project_services/hipchat_service.rb
+++ b/app/models/project_services/hipchat_service.rb
@@ -46,7 +46,7 @@ class HipchatService < Service
end
def self.supported_events
- %w(push issue confidential_issue merge_request note tag_push pipeline)
+ %w(push issue confidential_issue merge_request note confidential_note tag_push pipeline)
end
def execute(data)
diff --git a/app/models/project_services/jira_service.rb b/app/models/project_services/jira_service.rb
index ed4bbfb6cfc..412d62388f0 100644
--- a/app/models/project_services/jira_service.rb
+++ b/app/models/project_services/jira_service.rb
@@ -3,8 +3,8 @@ class JiraService < IssueTrackerService
include ApplicationHelper
include ActionView::Helpers::AssetUrlHelper
- validates :url, url: true, presence: true, if: :activated?
- validates :api_url, url: true, allow_blank: true
+ validates :url, public_url: true, presence: true, if: :activated?
+ validates :api_url, public_url: true, allow_blank: true
validates :username, presence: true, if: :activated?
validates :password, presence: true, if: :activated?
@@ -265,7 +265,7 @@ class JiraService < IssueTrackerService
title: title,
status: status,
icon: {
- title: 'GitLab', url16x16: asset_url('favicon.ico', host: gitlab_config.url)
+ title: 'GitLab', url16x16: asset_url(Gitlab::Favicon.main, host: gitlab_config.url)
}
}
}
diff --git a/app/models/project_services/kubernetes_service.rb b/app/models/project_services/kubernetes_service.rb
index 20fed432e55..ddd4026019b 100644
--- a/app/models/project_services/kubernetes_service.rb
+++ b/app/models/project_services/kubernetes_service.rb
@@ -24,7 +24,7 @@ class KubernetesService < DeploymentService
prop_accessor :ca_pem
with_options presence: true, if: :activated? do
- validates :api_url, url: true
+ validates :api_url, public_url: true
validates :token
end
diff --git a/app/models/project_services/microsoft_teams_service.rb b/app/models/project_services/microsoft_teams_service.rb
index 2facff53e26..99500caec0e 100644
--- a/app/models/project_services/microsoft_teams_service.rb
+++ b/app/models/project_services/microsoft_teams_service.rb
@@ -44,7 +44,7 @@ class MicrosoftTeamsService < ChatNotificationService
def notify(message, opts)
MicrosoftTeams::Notifier.new(webhook).ping(
title: message.project_name,
- pretext: message.pretext,
+ summary: message.summary,
activity: message.activity,
attachments: message.attachments
)
diff --git a/app/models/project_services/mock_ci_service.rb b/app/models/project_services/mock_ci_service.rb
index 2221459c90b..b89dc07a73e 100644
--- a/app/models/project_services/mock_ci_service.rb
+++ b/app/models/project_services/mock_ci_service.rb
@@ -3,7 +3,7 @@ class MockCiService < CiService
ALLOWED_STATES = %w[failed canceled running pending success success_with_warnings skipped not_found].freeze
prop_accessor :mock_service_url
- validates :mock_service_url, presence: true, url: true, if: :activated?
+ validates :mock_service_url, presence: true, public_url: true, if: :activated?
def title
'MockCI'
diff --git a/app/models/project_services/prometheus_service.rb b/app/models/project_services/prometheus_service.rb
index dcaeb65dc32..df4254e0523 100644
--- a/app/models/project_services/prometheus_service.rb
+++ b/app/models/project_services/prometheus_service.rb
@@ -6,7 +6,7 @@ class PrometheusService < MonitoringService
boolean_accessor :manual_configuration
with_options presence: true, if: :manual_configuration? do
- validates :api_url, url: true
+ validates :api_url, public_url: true
end
before_save :synchronize_service_state
diff --git a/app/models/project_services/redmine_service.rb b/app/models/project_services/redmine_service.rb
index 6acf611eba5..3721093a6d1 100644
--- a/app/models/project_services/redmine_service.rb
+++ b/app/models/project_services/redmine_service.rb
@@ -1,5 +1,5 @@
class RedmineService < IssueTrackerService
- validates :project_url, :issues_url, :new_issue_url, presence: true, url: true, if: :activated?
+ validates :project_url, :issues_url, :new_issue_url, presence: true, public_url: true, if: :activated?
prop_accessor :title, :description, :project_url, :issues_url, :new_issue_url
diff --git a/app/models/project_services/teamcity_service.rb b/app/models/project_services/teamcity_service.rb
index 145313b8e71..802678147cf 100644
--- a/app/models/project_services/teamcity_service.rb
+++ b/app/models/project_services/teamcity_service.rb
@@ -3,7 +3,7 @@ class TeamcityService < CiService
prop_accessor :teamcity_url, :build_type, :username, :password
- validates :teamcity_url, presence: true, url: true, if: :activated?
+ validates :teamcity_url, presence: true, public_url: true, if: :activated?
validates :build_type, presence: true, if: :activated?
validates :username,
presence: true,
diff --git a/app/models/project_statistics.rb b/app/models/project_statistics.rb
index 87a4350f022..5d4e3c34b39 100644
--- a/app/models/project_statistics.rb
+++ b/app/models/project_statistics.rb
@@ -4,15 +4,15 @@ class ProjectStatistics < ActiveRecord::Base
before_save :update_storage_size
- STORAGE_COLUMNS = [:repository_size, :lfs_objects_size, :build_artifacts_size].freeze
- STATISTICS_COLUMNS = [:commit_count] + STORAGE_COLUMNS
+ COLUMNS_TO_REFRESH = [:repository_size, :lfs_objects_size, :commit_count].freeze
+ INCREMENTABLE_COLUMNS = [:build_artifacts_size].freeze
def total_repository_size
repository_size + lfs_objects_size
end
def refresh!(only: nil)
- STATISTICS_COLUMNS.each do |column, generator|
+ COLUMNS_TO_REFRESH.each do |column, generator|
if only.blank? || only.include?(column)
public_send("update_#{column}") # rubocop:disable GitlabSecurity/PublicSend
end
@@ -34,13 +34,15 @@ class ProjectStatistics < ActiveRecord::Base
self.lfs_objects_size = project.lfs_objects.sum(:size)
end
- def update_build_artifacts_size
- self.build_artifacts_size =
- project.builds.sum(:artifacts_size) +
- Ci::JobArtifact.artifacts_size_for(self.project)
+ def update_storage_size
+ self.storage_size = repository_size + lfs_objects_size + build_artifacts_size
end
- def update_storage_size
- self.storage_size = STORAGE_COLUMNS.sum(&method(:read_attribute))
+ def self.increment_statistic(project_id, key, amount)
+ raise ArgumentError, "Cannot increment attribute: #{key}" unless key.in?(INCREMENTABLE_COLUMNS)
+ return if amount == 0
+
+ where(project_id: project_id)
+ .update_all(["#{key} = COALESCE(#{key}, 0) + (?)", amount])
end
end
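# Editorial usage sketch for the guarded increment above: only
# build_artifacts_size may be bumped in place, and the COALESCE keeps a NULL
# value from swallowing the added bytes.
#
#   ProjectStatistics.increment_statistic(project.id, :build_artifacts_size, artifact.size)
#   ProjectStatistics.increment_statistic(project.id, :repository_size, 10)
#   # => ArgumentError: Cannot increment attribute: repository_size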
diff --git a/app/models/project_team.rb b/app/models/project_team.rb
index 33280eda0b9..9a38806baab 100644
--- a/app/models/project_team.rb
+++ b/app/models/project_team.rb
@@ -24,7 +24,7 @@ class ProjectTeam
end
def add_role(user, role, current_user: nil)
- send(:"add_#{role}", user, current_user: current_user) # rubocop:disable GitlabSecurity/PublicSend
+ public_send(:"add_#{role}", user, current_user: current_user) # rubocop:disable GitlabSecurity/PublicSend
end
def find_member(user_id)
diff --git a/app/models/project_wiki.rb b/app/models/project_wiki.rb
index e70b7dc2325..05b07804ea8 100644
--- a/app/models/project_wiki.rb
+++ b/app/models/project_wiki.rb
@@ -24,7 +24,7 @@ class ProjectWiki
end
delegate :empty?, to: :pages
- delegate :repository_storage_path, :hashed_storage?, to: :project
+ delegate :repository_storage, :hashed_storage?, to: :project
def path
@project.path + '.wiki'
@@ -147,10 +147,6 @@ class ProjectWiki
[title, title_array.join("/")]
end
- def search_files(query)
- repository.search_files_by_content(query, default_branch)
- end
-
def repository
@repository ||= Repository.new(full_path, @project, disk_path: disk_path, is_wiki: true)
end
@@ -186,7 +182,11 @@ class ProjectWiki
def commit_details(action, message = nil, title = nil)
commit_message = message || default_message(action, title)
- Gitlab::Git::Wiki::CommitDetails.new(@user.name, @user.email, commit_message)
+ Gitlab::Git::Wiki::CommitDetails.new(@user.id,
+ @user.username,
+ @user.name,
+ @user.email,
+ commit_message)
end
def default_message(action, title)
diff --git a/app/models/protected_branch.rb b/app/models/protected_branch.rb
index 609780c5587..dff99cfca35 100644
--- a/app/models/protected_branch.rb
+++ b/app/models/protected_branch.rb
@@ -4,6 +4,15 @@ class ProtectedBranch < ActiveRecord::Base
protected_ref_access_levels :merge, :push
+ def self.protected_ref_accessible_to?(ref, user, project:, action:, protected_refs: nil)
+ # Maintainers, owners and admins are allowed to create the default branch
+ if default_branch_protected? && project.empty_repo?
+ return true if user.admin? || project.team.max_member_access(user.id) > Gitlab::Access::DEVELOPER
+ end
+
+ super
+ end
+
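# Editorial sketch of the rule above, assuming the usual access-level ordering
# (DEVELOPER < MAINTAINER < OWNER): on an empty repository with a protected
# default branch, only admins and members above developer may push the first
# branch; everyone else falls through to the normal protected-ref check.
#
#   ProtectedBranch.protected_ref_accessible_to?('master', maintainer,
#                                                project: empty_project, action: :push) # => true
#   ProtectedBranch.protected_ref_accessible_to?('master', developer,
#                                                project: empty_project, action: :push) # delegated to super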
# Check if branch name is marked as protected in the system
def self.protected?(project, ref_name)
return true if project.empty_repo? && default_branch_protected?
diff --git a/app/models/redirect_route.rb b/app/models/redirect_route.rb
index 20532527346..31de204d824 100644
--- a/app/models/redirect_route.rb
+++ b/app/models/redirect_route.rb
@@ -17,32 +17,4 @@ class RedirectRoute < ActiveRecord::Base
where(wheres, path, "#{sanitize_sql_like(path)}/%")
end
-
- scope :permanent, -> do
- if column_permanent_exists?
- where(permanent: true)
- else
- none
- end
- end
-
- scope :temporary, -> do
- if column_permanent_exists?
- where(permanent: [false, nil])
- else
- all
- end
- end
-
- default_value_for :permanent, false
-
- def permanent=(value)
- if self.class.column_permanent_exists?
- super
- end
- end
-
- def self.column_permanent_exists?
- ActiveRecord::Base.connection.column_exists?(:redirect_routes, :permanent)
- end
end
diff --git a/app/models/remote_mirror.rb b/app/models/remote_mirror.rb
new file mode 100644
index 00000000000..c4b5dd2dc96
--- /dev/null
+++ b/app/models/remote_mirror.rb
@@ -0,0 +1,218 @@
+class RemoteMirror < ActiveRecord::Base
+ include AfterCommitQueue
+
+ PROTECTED_BACKOFF_DELAY = 1.minute
+ UNPROTECTED_BACKOFF_DELAY = 5.minutes
+
+ attr_encrypted :credentials,
+ key: Settings.attr_encrypted_db_key_base,
+ marshal: true,
+ encode: true,
+ mode: :per_attribute_iv_and_salt,
+ insecure_mode: true,
+ algorithm: 'aes-256-cbc'
+
+ default_value_for :only_protected_branches, true
+
+ belongs_to :project, inverse_of: :remote_mirrors
+
+ validates :url, presence: true, url: { protocols: %w(ssh git http https), allow_blank: true, enforce_user: true }
+
+ before_save :set_new_remote_name, if: :mirror_url_changed?
+
+ after_save :set_override_remote_mirror_available, unless: -> { Gitlab::CurrentSettings.current_application_settings.mirror_available }
+ after_save :refresh_remote, if: :mirror_url_changed?
+ after_update :reset_fields, if: :mirror_url_changed?
+
+ after_commit :remove_remote, on: :destroy
+
+ scope :enabled, -> { where(enabled: true) }
+ scope :started, -> { with_update_status(:started) }
+ scope :stuck, -> { started.where('last_update_at < ? OR (last_update_at IS NULL AND updated_at < ?)', 1.day.ago, 1.day.ago) }
+
+ state_machine :update_status, initial: :none do
+ event :update_start do
+ transition [:none, :finished, :failed] => :started
+ end
+
+ event :update_finish do
+ transition started: :finished
+ end
+
+ event :update_fail do
+ transition started: :failed
+ end
+
+ state :started
+ state :finished
+ state :failed
+
+ after_transition any => :started do |remote_mirror, _|
+ Gitlab::Metrics.add_event(:remote_mirrors_running, path: remote_mirror.project.full_path)
+
+ remote_mirror.update(last_update_started_at: Time.now)
+ end
+
+ after_transition started: :finished do |remote_mirror, _|
+ Gitlab::Metrics.add_event(:remote_mirrors_finished, path: remote_mirror.project.full_path)
+
+ timestamp = Time.now
+ remote_mirror.update_attributes!(
+ last_update_at: timestamp, last_successful_update_at: timestamp, last_error: nil
+ )
+ end
+
+ after_transition started: :failed do |remote_mirror, _|
+ Gitlab::Metrics.add_event(:remote_mirrors_failed, path: remote_mirror.project.full_path)
+
+ remote_mirror.update(last_update_at: Time.now)
+ end
+ end
+
+ def remote_name
+ super || fallback_remote_name
+ end
+
+ def update_failed?
+ update_status == 'failed'
+ end
+
+ def update_in_progress?
+ update_status == 'started'
+ end
+
+ def update_repository(options)
+ raw.update(options)
+ end
+
+ def sync?
+ enabled?
+ end
+
+ def sync
+ return unless sync?
+
+ if recently_scheduled?
+ RepositoryUpdateRemoteMirrorWorker.perform_in(backoff_delay, self.id, Time.now)
+ else
+ RepositoryUpdateRemoteMirrorWorker.perform_async(self.id, Time.now)
+ end
+ end
+
+ def enabled
+ return false unless project && super
+ return false unless project.remote_mirror_available?
+ return false unless project.repository_exists?
+ return false if project.pending_delete?
+
+ true
+ end
+ alias_method :enabled?, :enabled
+
+ def updated_since?(timestamp)
+ last_update_started_at && last_update_started_at > timestamp && !update_failed?
+ end
+
+ def mark_for_delete_if_blank_url
+ mark_for_destruction if url.blank?
+ end
+
+ def mark_as_failed(error_message)
+ update_fail
+ update_column(:last_error, Gitlab::UrlSanitizer.sanitize(error_message))
+ end
+
+ def url=(value)
+ super(value) && return unless Gitlab::UrlSanitizer.valid?(value)
+
+ mirror_url = Gitlab::UrlSanitizer.new(value)
+ self.credentials = mirror_url.credentials
+
+ super(mirror_url.sanitized_url)
+ end
+
+ def url
+ if super
+ Gitlab::UrlSanitizer.new(super, credentials: credentials).full_url
+ end
+ rescue
+ super
+ end
+
+ def safe_url
+ return if url.nil?
+
+ result = URI.parse(url)
+ result.password = '*****' if result.password
+ result.user = '*****' if result.user && result.user != "git" # tokens or other data may be saved as user
+ result.to_s
+ end
+
+ private
+
+ def raw
+ @raw ||= Gitlab::Git::RemoteMirror.new(project.repository.raw, remote_name)
+ end
+
+ def fallback_remote_name
+ return unless id
+
+ "remote_mirror_#{id}"
+ end
+
+ def recently_scheduled?
+ return false unless self.last_update_started_at
+
+ self.last_update_started_at >= Time.now - backoff_delay
+ end
+
+ def backoff_delay
+ if self.only_protected_branches
+ PROTECTED_BACKOFF_DELAY
+ else
+ UNPROTECTED_BACKOFF_DELAY
+ end
+ end
+
+ def reset_fields
+ update_columns(
+ last_error: nil,
+ last_update_at: nil,
+ last_successful_update_at: nil,
+ update_status: 'finished'
+ )
+ end
+
+ def set_override_remote_mirror_available
+ enabled = read_attribute(:enabled)
+
+ project.update(remote_mirror_available_overridden: enabled)
+ end
+
+ def set_new_remote_name
+ self.remote_name = "remote_mirror_#{SecureRandom.hex}"
+ end
+
+ def refresh_remote
+ return unless project
+
+ # Before adding a new remote we have to delete the data from
+ # the previous remote name
+ prev_remote_name = remote_name_was || fallback_remote_name
+ run_after_commit do
+ project.repository.async_remove_remote(prev_remote_name)
+ end
+
+ project.repository.add_remote(remote_name, url)
+ end
+
+ def remove_remote
+    return unless project # could be pending delete, so there is no need to touch the git repository
+
+ project.repository.async_remove_remote(remote_name)
+ end
+
+ def mirror_url_changed?
+ url_changed? || encrypted_credentials_changed?
+ end
+end
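# Standalone editorial sketch of the backoff rule used by RemoteMirror#sync:
# a mirror whose last update started inside the backoff window is re-enqueued
# with a delay instead of immediately. The delays mirror the constants above.
PROTECTED_BACKOFF_SECONDS   = 60
UNPROTECTED_BACKOFF_SECONDS = 5 * 60

def backoff_seconds(only_protected_branches)
  only_protected_branches ? PROTECTED_BACKOFF_SECONDS : UNPROTECTED_BACKOFF_SECONDS
end

def recently_scheduled?(last_update_started_at, only_protected_branches)
  return false unless last_update_started_at

  last_update_started_at >= Time.now - backoff_seconds(only_protected_branches)
end

# recently_scheduled?(Time.now - 30, true)  # => true  (schedule with perform_in)
# recently_scheduled?(nil, true)            # => false (schedule with perform_async)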
diff --git a/app/models/repository.rb b/app/models/repository.rb
index 42f1ac43e29..3056c20516a 100644
--- a/app/models/repository.rb
+++ b/app/models/repository.rb
@@ -21,7 +21,7 @@ class Repository
attr_accessor :full_path, :disk_path, :project, :is_wiki
delegate :ref_name_for_sha, to: :raw_repository
- delegate :bundle_to_disk, :create_from_bundle, to: :raw_repository
+ delegate :bundle_to_disk, to: :raw_repository
CreateTreeError = Class.new(StandardError)
@@ -37,7 +37,7 @@ class Repository
changelog license_blob license_key gitignore koding_yml
gitlab_ci_yml branch_names tag_names branch_count
tag_count avatar exists? root_ref has_visible_content?
- issue_template_names merge_request_template_names).freeze
+ issue_template_names merge_request_template_names xcode_project?).freeze
# Methods that use cache_method but only memoize the value
MEMOIZED_CACHED_METHODS = %i(license).freeze
@@ -55,7 +55,8 @@ class Repository
gitlab_ci: :gitlab_ci_yml,
avatar: :avatar,
issue_template: :issue_template_names,
- merge_request_template: :merge_request_template_names
+ merge_request_template: :merge_request_template_names,
+ xcode_config: :xcode_project?
}.freeze
def initialize(full_path, project, disk_path: nil, is_wiki: false)
@@ -84,19 +85,20 @@ class Repository
# Return absolute path to repository
def path_to_repo
- @path_to_repo ||= File.expand_path(
- File.join(repository_storage_path, disk_path + '.git')
- )
+ @path_to_repo ||=
+ begin
+ storage = Gitlab.config.repositories.storages[@project.repository_storage]
+
+ File.expand_path(
+ File.join(storage.legacy_disk_path, disk_path + '.git')
+ )
+ end
end
def inspect
"#<#{self.class.name}:#{@disk_path}>"
end
- def create_hooks
- Gitlab::Git::Repository.create_hooks(path_to_repo, Gitlab.config.gitlab_shell.hooks_path)
- end
-
def commit(ref = 'HEAD')
return nil unless exists?
return ref if ref.is_a?(::Commit)
@@ -152,7 +154,10 @@ class Repository
# Returns a list of commits that are not present in any reference
def new_commits(newrev)
- refs = ::Gitlab::Git::RevList.new(raw, newrev: newrev).new_refs
+ # Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/1233
+ refs = Gitlab::GitalyClient::StorageSettings.allow_disk_access do
+ ::Gitlab::Git::RevList.new(raw, newrev: newrev).new_refs
+ end
refs.map { |sha| commit(sha.strip) }
end
@@ -253,13 +258,13 @@ class Repository
end
def diverging_commit_counts(branch)
- root_ref_hash = raw_repository.commit(root_ref).id
+ @root_ref_hash ||= raw_repository.commit(root_ref).id
cache.fetch(:"diverging_commit_counts_#{branch.name}") do
# Rugged seems to throw a `ReferenceError` when given branch_names rather
# than SHA-1 hashes
number_commits_behind, number_commits_ahead =
raw_repository.count_commits_between(
- root_ref_hash,
+ @root_ref_hash,
branch.dereferenced_target.sha,
left_right: true,
max_count: MAX_DIVERGING_COUNT)
@@ -268,6 +273,16 @@ class Repository
end
end
+ def archive_metadata(ref, storage_path, format = "tar.gz", append_sha:)
+ raw_repository.archive_metadata(
+ ref,
+ storage_path,
+ project.path,
+ format,
+ append_sha: append_sha
+ )
+ end
+
def expire_tags_cache
expire_method_caches(%i(tag_names tag_count))
@tags = nil
@@ -335,6 +350,7 @@ class Repository
return unless empty?
expire_method_caches(%i(has_visible_content?))
+ raw_repository.expire_has_local_branches_cache
end
def lookup_cache
@@ -592,6 +608,11 @@ class Repository
end
cache_method :gitlab_ci_yml
+ def xcode_project?
+ file_on_head(:xcode_config, :tree).present?
+ end
+ cache_method :xcode_project?
+
def head_commit
@head_commit ||= commit(self.root_ref)
end
@@ -829,7 +850,7 @@ class Repository
@root_ref_sha ||= commit(root_ref).sha
end
- delegate :merged_branch_names, :can_be_merged?, to: :raw_repository
+ delegate :merged_branch_names, to: :raw_repository
def merge_base(first_commit_id, second_commit_id)
first_commit_id = commit(first_commit_id).try(:id) || first_commit_id
@@ -852,13 +873,27 @@ class Repository
add_remote(remote_name, url, mirror_refmap: refmap)
fetch_remote(remote_name, forced: forced, prune: prune)
ensure
- remove_remote(remote_name) if tmp_remote_name
+ async_remove_remote(remote_name) if tmp_remote_name
end
def fetch_remote(remote, forced: false, ssh_auth: nil, no_tags: false, prune: true)
gitlab_shell.fetch_remote(raw_repository, remote, ssh_auth: ssh_auth, forced: forced, no_tags: no_tags, prune: prune)
end
+ def async_remove_remote(remote_name)
+ return unless remote_name
+
+ job_id = RepositoryRemoveRemoteWorker.perform_async(project.id, remote_name)
+
+ if job_id
+ Rails.logger.info("Remove remote job scheduled for #{project.id} with remote name: #{remote_name} job ID #{job_id}.")
+ else
+ Rails.logger.info("Remove remote job failed to create for #{project.id} with remote name #{remote_name}.")
+ end
+
+ job_id
+ end
+
def fetch_source_branch!(source_repository, source_branch, local_ref)
raw_repository.fetch_source_branch!(source_repository.raw_repository, source_branch, local_ref)
end
@@ -898,11 +933,21 @@ class Repository
end
end
- def file_on_head(type)
- if head = tree(:head)
- head.blobs.find do |blob|
- Gitlab::FileDetector.type_of(blob.path) == type
+ def file_on_head(type, object_type = :blob)
+ return unless head = tree(:head)
+
+ objects =
+ case object_type
+ when :blob
+ head.blobs
+ when :tree
+ head.trees
+ else
+ raise ArgumentError, "Object type #{object_type} is not supported"
end
+
+ objects.find do |object|
+ Gitlab::FileDetector.type_of(object.path) == type
end
end
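# Editorial usage sketch: the extended signature lets callers look up a tree
# entry on HEAD as well as a blob, which is what the new xcode_project? check
# relies on (via the :xcode_config file-detector type).
#
#   repository.file_on_head(:gitlab_ci)            # => blob for .gitlab-ci.yml, or nil
#   repository.file_on_head(:xcode_config, :tree)  # => matching tree entry, or nil
#   repository.file_on_head(:gitlab_ci, :commit)   # => ArgumentError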
@@ -914,12 +959,12 @@ class Repository
blob_data_at(sha, path)
end
- def fetch_ref(source_repository, source_ref:, target_ref:)
- raw_repository.fetch_ref(source_repository.raw_repository, source_ref: source_ref, target_ref: target_ref)
+ def lfsconfig_for(sha)
+ blob_data_at(sha, '.lfsconfig')
end
- def repository_storage_path
- @project.repository_storage_path
+ def fetch_ref(source_repository, source_ref:, target_ref:)
+ raw_repository.fetch_ref(source_repository.raw_repository, source_ref: source_ref, target_ref: target_ref)
end
def rebase(user, merge_request)
@@ -929,6 +974,22 @@ class Repository
remote_branch: merge_request.target_branch)
end
+ def blob_data_at(sha, path)
+ blob = blob_at(sha, path)
+ return unless blob
+
+ blob.load_all_data!
+ blob.data
+ end
+
+ def squash(user, merge_request)
+ raw.squash(user, merge_request.id, branch: merge_request.target_branch,
+ start_sha: merge_request.diff_start_sha,
+ end_sha: merge_request.diff_head_sha,
+ author: merge_request.author,
+ message: merge_request.title)
+ end
+
private
  # TODO: Generic finder; later split this into finders by Ref or Oid
@@ -943,14 +1004,6 @@ class Repository
::Commit.new(commit, @project) if commit
end
- def blob_data_at(sha, path)
- blob = blob_at(sha, path)
- return unless blob
-
- blob.load_all_data!
- blob.data
- end
-
def cache
@cache ||= Gitlab::RepositoryCache.new(self)
end
diff --git a/app/models/route.rb b/app/models/route.rb
index 07d96c21cf1..2d609920051 100644
--- a/app/models/route.rb
+++ b/app/models/route.rb
@@ -10,8 +10,6 @@ class Route < ActiveRecord::Base
presence: true,
uniqueness: { case_sensitive: false }
- validate :ensure_permanent_paths, if: :path_changed?
-
before_validation :delete_conflicting_orphaned_routes
after_create :delete_conflicting_redirects
after_update :delete_conflicting_redirects, if: :path_changed?
@@ -45,7 +43,7 @@ class Route < ActiveRecord::Base
# We are not calling route.delete_conflicting_redirects here, in hopes
# of avoiding deadlocks. The parent (self, in this method) already
# called it, which deletes conflicts for all descendants.
- route.create_redirect(old_path, permanent: permanent_redirect?) if attributes[:path]
+ route.create_redirect(old_path) if attributes[:path]
end
end
end
@@ -55,31 +53,17 @@ class Route < ActiveRecord::Base
end
def conflicting_redirects
- RedirectRoute.temporary.matching_path_and_descendants(path)
+ RedirectRoute.matching_path_and_descendants(path)
end
- def create_redirect(path, permanent: false)
- RedirectRoute.create(source: source, path: path, permanent: permanent)
+ def create_redirect(path)
+ RedirectRoute.create(source: source, path: path)
end
private
def create_redirect_for_old_path
- create_redirect(path_was, permanent: permanent_redirect?) if path_changed?
- end
-
- def permanent_redirect?
- source_type != "Project"
- end
-
- def ensure_permanent_paths
- return if path.nil?
-
- errors.add(:path, "has been taken before") if conflicting_redirect_exists?
- end
-
- def conflicting_redirect_exists?
- RedirectRoute.permanent.matching_path_and_descendants(path).exists?
+ create_redirect(path_was) if path_changed?
end
def delete_conflicting_orphaned_routes
diff --git a/app/models/sent_notification.rb b/app/models/sent_notification.rb
index 6e311806be1..3da7c301d28 100644
--- a/app/models/sent_notification.rb
+++ b/app/models/sent_notification.rb
@@ -5,14 +5,14 @@ class SentNotification < ActiveRecord::Base
belongs_to :noteable, polymorphic: true # rubocop:disable Cop/PolymorphicAssociations
belongs_to :recipient, class_name: "User"
- validates :project, :recipient, presence: true
+ validates :recipient, presence: true
validates :reply_key, presence: true, uniqueness: true
validates :noteable_id, presence: true, unless: :for_commit?
validates :commit_id, presence: true, if: :for_commit?
validates :in_reply_to_discussion_id, format: { with: /\A\h{40}\z/, allow_nil: true }
validate :note_valid
- after_save :keep_around_commit
+ after_save :keep_around_commit, if: :for_commit?
class << self
def reply_key
diff --git a/app/models/service.rb b/app/models/service.rb
index 1dcb79157a2..1d259bcfec7 100644
--- a/app/models/service.rb
+++ b/app/models/service.rb
@@ -14,6 +14,7 @@ class Service < ActiveRecord::Base
default_value_for :merge_requests_events, true
default_value_for :tag_push_events, true
default_value_for :note_events, true
+ default_value_for :confidential_note_events, true
default_value_for :job_events, true
default_value_for :pipeline_events, true
default_value_for :wiki_page_events, true
@@ -42,6 +43,7 @@ class Service < ActiveRecord::Base
scope :confidential_issue_hooks, -> { where(confidential_issues_events: true, active: true) }
scope :merge_request_hooks, -> { where(merge_requests_events: true, active: true) }
scope :note_hooks, -> { where(note_events: true, active: true) }
+ scope :confidential_note_hooks, -> { where(confidential_note_events: true, active: true) }
scope :job_hooks, -> { where(job_events: true, active: true) }
scope :pipeline_hooks, -> { where(pipeline_events: true, active: true) }
scope :wiki_page_hooks, -> { where(wiki_page_events: true, active: true) }
@@ -168,8 +170,10 @@ class Service < ActiveRecord::Base
def self.prop_accessor(*args)
args.each do |arg|
class_eval %{
- def #{arg}
- properties['#{arg}']
+ unless method_defined?(arg)
+ def #{arg}
+ properties['#{arg}']
+ end
end
def #{arg}=(value)
@@ -202,7 +206,12 @@ class Service < ActiveRecord::Base
args.each do |arg|
class_eval %{
def #{arg}?
- ActiveRecord::ConnectionAdapters::Column::TRUE_VALUES.include?(#{arg})
+ # '!!' is used because nil or empty string is converted to nil
+ if Gitlab.rails5?
+ !!ActiveRecord::Type::Boolean.new.cast(#{arg})
+ else
+ !!ActiveRecord::Type::Boolean.new.type_cast_from_database(#{arg})
+ end
end
}
end
@@ -245,7 +254,6 @@ class Service < ActiveRecord::Base
emails_on_push
external_wiki
flowdock
- gemnasium
hipchat
irker
jira
@@ -273,6 +281,7 @@ class Service < ActiveRecord::Base
def self.build_from_template(project_id, template)
service = template.dup
+ service.active = false unless service.valid?
service.template = false
service.project_id = project_id
service
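Review note: two metaprogramming tweaks land here: prop_accessor only generates a reader when the class has not already defined one, and the boolean predicate casts through ActiveRecord::Type::Boolean on both Rails 4 and 5. A standalone sketch of the guarded-accessor idea (plain Ruby, not the GitLab class; define_method is used instead of class_eval for brevity):

    class PropStore
      def self.prop_accessor(*names)
        names.each do |name|
          define_method(name) { properties[name.to_s] } unless method_defined?(name)
          define_method("#{name}=") { |value| properties[name.to_s] = value }
        end
      end

      def properties
        @properties ||= {}
      end

      # hand-written reader declared first, so the generated one is skipped
      def url
        properties['url'] || 'https://default.example'
      end

      prop_accessor :url, :token
    end

    store = PropStore.new
    store.url              # => "https://default.example" (custom reader kept)
    store.token = 'secret'
    store.token            # => "secret" (generated accessor)

On the boolean side, ActiveRecord::Type::Boolean#cast returns nil for nil or an empty string in Rails 5, which is why the generated predicate wraps the result in !! before returning it.
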
diff --git a/app/models/storage/hashed_project.rb b/app/models/storage/hashed_project.rb
index fae1b64961a..26b4b78ac64 100644
--- a/app/models/storage/hashed_project.rb
+++ b/app/models/storage/hashed_project.rb
@@ -1,7 +1,7 @@
module Storage
class HashedProject
attr_accessor :project
- delegate :gitlab_shell, :repository_storage_path, to: :project
+ delegate :gitlab_shell, :repository_storage, to: :project
ROOT_PATH_PREFIX = '@hashed'.freeze
@@ -24,7 +24,7 @@ module Storage
end
def ensure_storage_path_exists
- gitlab_shell.add_namespace(repository_storage_path, base_dir)
+ gitlab_shell.add_namespace(repository_storage, base_dir)
end
def rename_repo
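Review note: the delegation switch means gitlab_shell.add_namespace now receives the storage name (a key into the configured storages) rather than a resolved filesystem path. For orientation, the hashed layout this class manages fans the project ID's digest out into fixed directories; a pure-Ruby sketch (the exact fan-out is inferred from base_dir/disk_path and should be treated as an assumption):

    require 'digest'

    project_id = 42
    disk_hash  = Digest::SHA2.hexdigest(project_id.to_s)
    base_dir   = File.join('@hashed', disk_hash[0..1], disk_hash[2..3])
    disk_path  = File.join(base_dir, disk_hash)
    # base_dir fans out on the first four hex chars of the digest, e.g.
    # "@hashed/aa/bb/", and disk_path appends the full digest -- so the
    # on-disk location never changes when the project is renamed
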
diff --git a/app/models/storage/legacy_project.rb b/app/models/storage/legacy_project.rb
index 9d9e5e1d352..27cb388c702 100644
--- a/app/models/storage/legacy_project.rb
+++ b/app/models/storage/legacy_project.rb
@@ -1,7 +1,7 @@
module Storage
class LegacyProject
attr_accessor :project
- delegate :namespace, :gitlab_shell, :repository_storage_path, to: :project
+ delegate :namespace, :gitlab_shell, :repository_storage, to: :project
def initialize(project)
@project = project
@@ -24,18 +24,18 @@ module Storage
def ensure_storage_path_exists
return unless namespace
- gitlab_shell.add_namespace(repository_storage_path, base_dir)
+ gitlab_shell.add_namespace(repository_storage, base_dir)
end
def rename_repo
new_full_path = project.build_full_path
- if gitlab_shell.mv_repository(repository_storage_path, project.full_path_was, new_full_path)
+ if gitlab_shell.mv_repository(repository_storage, project.full_path_was, new_full_path)
# If repository moved successfully we need to send update instructions to users.
# However we cannot allow rollback since we moved repository
# So we basically mute exceptions in next actions
begin
- gitlab_shell.mv_repository(repository_storage_path, "#{project.full_path_was}.wiki", "#{new_full_path}.wiki")
+ gitlab_shell.mv_repository(repository_storage, "#{project.full_path_was}.wiki", "#{new_full_path}.wiki")
return true
rescue => e
Rails.logger.error "Exception renaming #{project.full_path_was} -> #{new_full_path}: #{e}"
diff --git a/app/models/system_note_metadata.rb b/app/models/system_note_metadata.rb
index 29035480371..1c2161accc4 100644
--- a/app/models/system_note_metadata.rb
+++ b/app/models/system_note_metadata.rb
@@ -17,7 +17,11 @@ class SystemNoteMetadata < ActiveRecord::Base
].freeze
validates :note, presence: true
- validates :action, inclusion: ICON_TYPES, allow_nil: true
+ validates :action, inclusion: { in: :icon_types }, allow_nil: true
belongs_to :note
+
+ def icon_types
+ ICON_TYPES
+ end
end
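Review note: validating inclusion against a method instead of the frozen constant lets a subclass widen the allowed set without re-declaring the validation (Rails accepts a symbol for the :in option and calls it on the record). A standalone sketch with hypothetical class names:

    require 'active_model'

    class NoteMetadata
      include ActiveModel::Model

      ICON_TYPES = %w[commit merge].freeze

      attr_accessor :action
      validates :action, inclusion: { in: :icon_types }, allow_nil: true

      def icon_types
        ICON_TYPES
      end
    end

    class ExtendedNoteMetadata < NoteMetadata
      def icon_types
        super + %w[weight]
      end
    end

    NoteMetadata.new(action: 'weight').valid?          # => false
    ExtendedNoteMetadata.new(action: 'weight').valid?  # => true
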
diff --git a/app/models/term_agreement.rb b/app/models/term_agreement.rb
new file mode 100644
index 00000000000..c317bd0c90b
--- /dev/null
+++ b/app/models/term_agreement.rb
@@ -0,0 +1,8 @@
+class TermAgreement < ActiveRecord::Base
+ belongs_to :term, class_name: 'ApplicationSetting::Term'
+ belongs_to :user
+
+ scope :accepted, -> { where(accepted: true) }
+
+ validates :user, :term, presence: true
+end
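Review note: a minimal console sketch of how the new model and its accepted scope might be queried; the lookup and the .latest helper are assumptions, not code from this changeset:

    term = ApplicationSetting::Term.latest    # assumed helper; any Term record works
    TermAgreement.accepted.find_by(term: term, user: user).present?
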
diff --git a/app/models/timelog.rb b/app/models/timelog.rb
index e166cf69703..659146f43e4 100644
--- a/app/models/timelog.rb
+++ b/app/models/timelog.rb
@@ -2,8 +2,8 @@ class Timelog < ActiveRecord::Base
validates :time_spent, :user, presence: true
validate :issuable_id_is_present
- belongs_to :issue
- belongs_to :merge_request
+ belongs_to :issue, touch: true
+ belongs_to :merge_request, touch: true
belongs_to :user
def issuable
@@ -19,4 +19,9 @@ class Timelog < ActiveRecord::Base
errors.add(:base, 'Issue or Merge Request ID is required')
end
end
+
+ # Rails5 defaults to :touch_later, overwrite for normal touch
+ def belongs_to_touch_method
+ :touch
+ end
end
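Review note: touch: true means saving a Timelog now bumps updated_at on its issue or merge request, and the belongs_to_touch_method override forces the eager Rails 4 behaviour (:touch) instead of Rails 5's deferred :touch_later. The observable effect, as a console sketch with hypothetical records:

    timelog = Timelog.create!(issue: issue, user: user, time_spent: 1800)  # seconds
    issue.reload.updated_at   # bumped by the save above, because the
                              # belongs_to association touches its parent
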
diff --git a/app/models/todo.rb b/app/models/todo.rb
index 8afacd188e0..a2ab405fdbe 100644
--- a/app/models/todo.rb
+++ b/app/models/todo.rb
@@ -50,7 +50,7 @@ class Todo < ActiveRecord::Base
# Priority sorting isn't displayed in the dropdown, because we don't show
# milestones, but still show something if the user has a URL with that
# selected.
- def sort(method)
+ def sort_by_attribute(method)
sorted =
case method.to_s
when 'priority', 'label_priority' then order_by_labels_priority
diff --git a/app/models/upload.rb b/app/models/upload.rb
index 99ad37dc892..cf71a7b76fc 100644
--- a/app/models/upload.rb
+++ b/app/models/upload.rb
@@ -9,6 +9,8 @@ class Upload < ActiveRecord::Base
validates :model, presence: true
validates :uploader, presence: true
+ scope :with_files_stored_locally, -> { where(store: [nil, ObjectStorage::Store::LOCAL]) }
+
before_save :calculate_checksum!, if: :foreground_checksummable?
after_commit :schedule_checksum, if: :checksummable?
@@ -21,6 +23,7 @@ class Upload < ActiveRecord::Base
end
def absolute_path
+ raise ObjectStorage::RemoteStoreError, "Remote object has no absolute path." unless local?
return path unless relative_path?
uploader_class.absolute_path(self)
@@ -30,11 +33,11 @@ class Upload < ActiveRecord::Base
self.checksum = nil
return unless checksummable?
- self.checksum = self.class.hexdigest(absolute_path)
+ self.checksum = Digest::SHA256.file(absolute_path).hexdigest
end
- def build_uploader
- uploader_class.new(model, mount_point, **uploader_context).tap do |uploader|
+ def build_uploader(mounted_as = nil)
+ uploader_class.new(model, mounted_as || mount_point).tap do |uploader|
uploader.upload = self
uploader.retrieve_from_store!(identifier)
end
@@ -51,6 +54,12 @@ class Upload < ActiveRecord::Base
}.compact
end
+ def local?
+ return true if store.nil?
+
+ store == ObjectStorage::Store::LOCAL
+ end
+
private
def delete_file!
@@ -61,10 +70,6 @@ class Upload < ActiveRecord::Base
checksum.nil? && local? && exist?
end
- def local?
- true
- end
-
def foreground_checksummable?
checksummable? && size <= CHECKSUM_THRESHOLD
end
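Review note: uploads can now live in object storage, so local? consults the store column, absolute_path refuses to answer for remote objects, and the checksum goes through Digest::SHA256 directly. A pure-Ruby sketch of the local-store check and checksum step (the LOCAL constant value is an assumption standing in for ObjectStorage::Store::LOCAL):

    require 'digest'

    LOCAL_STORE = 1  # assumed value for ObjectStorage::Store::LOCAL

    def local_store?(store)
      store.nil? || store == LOCAL_STORE   # legacy rows have a NULL store
    end

    store = nil
    path  = __FILE__                       # any readable file for the demo
    puts Digest::SHA256.file(path).hexdigest if local_store?(store)
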
diff --git a/app/models/user.rb b/app/models/user.rb
index b8c55205ab8..8e0dc91b2a7 100644
--- a/app/models/user.rb
+++ b/app/models/user.rb
@@ -17,6 +17,7 @@ class User < ActiveRecord::Base
include IgnorableColumn
include BulkMemberAccessLoad
include BlocksJsonSerialization
+ include WithUploads
DEFAULT_NOTIFICATION_LEVEL = :participating
@@ -25,7 +26,7 @@ class User < ActiveRecord::Base
ignore_column :authentication_token
add_authentication_token_field :incoming_email_token
- add_authentication_token_field :rss_token
+ add_authentication_token_field :feed_token
default_value_for :admin, false
default_value_for(:external) { Gitlab::CurrentSettings.user_default_external }
@@ -82,11 +83,8 @@ class User < ActiveRecord::Base
has_one :namespace, -> { where(type: nil) }, dependent: :destroy, foreign_key: :owner_id, inverse_of: :owner, autosave: true # rubocop:disable Cop/ActiveRecordDependent
# Profile
- has_many :keys, -> do
- type = Key.arel_table[:type]
- where(type.not_eq('DeployKey').or(type.eq(nil)))
- end, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
- has_many :deploy_keys, -> { where(type: 'DeployKey') }, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
+ has_many :keys, -> { where(type: ['Key', nil]) }, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
+ has_many :deploy_keys, -> { where(type: 'DeployKey') }, dependent: :nullify # rubocop:disable Cop/ActiveRecordDependent
has_many :gpg_keys
has_many :emails, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
@@ -100,8 +98,8 @@ class User < ActiveRecord::Base
has_many :members
has_many :group_members, -> { where(requested_at: nil) }, source: 'GroupMember'
has_many :groups, through: :group_members
- has_many :owned_groups, -> { where members: { access_level: Gitlab::Access::OWNER } }, through: :group_members, source: :group
- has_many :masters_groups, -> { where members: { access_level: Gitlab::Access::MASTER } }, through: :group_members, source: :group
+ has_many :owned_groups, -> { where(members: { access_level: Gitlab::Access::OWNER }) }, through: :group_members, source: :group
+ has_many :masters_groups, -> { where(members: { access_level: Gitlab::Access::MASTER }) }, through: :group_members, source: :group
# Projects
has_many :groups_projects, through: :groups, source: :projects
@@ -111,7 +109,7 @@ class User < ActiveRecord::Base
has_many :created_projects, foreign_key: :creator_id, class_name: 'Project'
has_many :users_star_projects, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
has_many :starred_projects, through: :users_star_projects, source: :project
- has_many :project_authorizations
+ has_many :project_authorizations, dependent: :delete_all # rubocop:disable Cop/ActiveRecordDependent
has_many :authorized_projects, through: :project_authorizations, source: :project
has_many :user_interacted_projects
@@ -140,7 +138,8 @@ class User < ActiveRecord::Base
has_many :custom_attributes, class_name: 'UserCustomAttribute'
has_many :callouts, class_name: 'UserCallout'
- has_many :uploads, as: :model, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
+ has_many :term_agreements
+ belongs_to :accepted_term, class_name: 'ApplicationSetting::Term'
#
# Validations
@@ -166,19 +165,34 @@ class User < ActiveRecord::Base
validate :signup_domain_valid?, on: :create, if: ->(user) { !user.created_by_id }
before_validation :sanitize_attrs
- before_validation :set_notification_email, if: :email_changed?
+ before_validation :set_notification_email, if: :new_record?
before_validation :set_public_email, if: :public_email_changed?
+ before_save :set_public_email, if: :public_email_changed? # in case validation is skipped
before_save :ensure_incoming_email_token
before_save :ensure_user_rights_and_limits, if: ->(user) { user.new_record? || user.external_changed? }
before_save :skip_reconfirmation!, if: ->(user) { user.email_changed? && user.read_only_attribute?(:email) }
before_save :check_for_verified_email, if: ->(user) { user.email_changed? && !user.new_record? }
before_validation :ensure_namespace_correct
+ before_save :ensure_namespace_correct # in case validation is skipped
after_validation :set_username_errors
after_update :username_changed_hook, if: :username_changed?
after_destroy :post_destroy_hook
after_destroy :remove_key_cache
- after_commit :update_emails_with_primary_email, on: :update, if: -> { previous_changes.key?('email') }
- after_commit :update_invalid_gpg_signatures, on: :update, if: -> { previous_changes.key?('email') }
+ after_commit(on: :update) do
+ if previous_changes.key?('email')
+ # Grab previous_email here since previous_changes changes after
+ # #update_emails_with_primary_email and #update_notification_email are called
+ previous_email = previous_changes[:email][0]
+
+ update_emails_with_primary_email(previous_email)
+ update_invalid_gpg_signatures
+
+ if previous_email == notification_email
+ self.notification_email = email
+ save
+ end
+ end
+ end
after_initialize :set_projects_limit
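Review note: the inlined after_commit block grabs the previous email before calling the helpers, because those helpers save the record again and previous_changes is reset on every save. A console sketch of the underlying Rails behaviour, assuming the updates pass validation:

    user.update(email: 'new@example.com')
    user.previous_changes['email']        # => ["old@example.com", "new@example.com"]
    user.update(name: 'Other Name')
    user.previous_changes.key?('email')   # => false -- the old address is gone,
                                          # hence previous_email is captured first
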
@@ -187,7 +201,7 @@ class User < ActiveRecord::Base
# User's Dashboard preference
# Note: When adding an option, it MUST go on the end of the array.
- enum dashboard: [:projects, :stars, :project_activity, :starred_project_activity, :groups, :todos]
+ enum dashboard: [:projects, :stars, :project_activity, :starred_project_activity, :groups, :todos, :issues, :merge_requests]
# User's Project preference
# Note: When adding an option, it MUST go on the end of the array.
@@ -235,14 +249,18 @@ class User < ActiveRecord::Base
scope :order_recent_sign_in, -> { reorder(Gitlab::Database.nulls_last_order('current_sign_in_at', 'DESC')) }
scope :order_oldest_sign_in, -> { reorder(Gitlab::Database.nulls_last_order('current_sign_in_at', 'ASC')) }
- def self.with_two_factor
+ def self.with_two_factor_indistinct
joins("LEFT OUTER JOIN u2f_registrations AS u2f ON u2f.user_id = users.id")
- .where("u2f.id IS NOT NULL OR otp_required_for_login = ?", true).distinct(arel_table[:id])
+ .where("u2f.id IS NOT NULL OR users.otp_required_for_login = ?", true)
+ end
+
+ def self.with_two_factor
+ with_two_factor_indistinct.distinct(arel_table[:id])
end
def self.without_two_factor
joins("LEFT OUTER JOIN u2f_registrations AS u2f ON u2f.user_id = users.id")
- .where("u2f.id IS NULL AND otp_required_for_login = ?", false)
+ .where("u2f.id IS NULL AND users.otp_required_for_login = ?", false)
end
#
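Review note: splitting the scope into with_two_factor_indistinct plus a distinct wrapper keeps the LEFT OUTER JOIN reusable for callers that want to control deduplication themselves, for example when counting; that motivation is inferred from the shape of the change, not stated in the diff. A console sketch:

    User.with_two_factor.to_a                           # deduplicated users, as before
    User.with_two_factor_indistinct.count('users.id')   # COUNT over the joined rows
                                                        # without a wrapping DISTINCT
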
@@ -259,7 +277,7 @@ class User < ActiveRecord::Base
end
end
- def sort(method)
+ def sort_by_attribute(method)
order_method = method || 'id_desc'
case order_method.to_s
@@ -411,7 +429,6 @@ class User < ActiveRecord::Base
unique_internal(where(ghost: true), 'ghost', email) do |u|
u.bio = 'This is a "Ghost User", created to hold all issues authored by users that have since been deleted. This user cannot be removed.'
u.name = 'Ghost User'
- u.notification_email = email
end
end
end
@@ -541,8 +558,7 @@ class User < ActiveRecord::Base
# hash and `_was` variables getting munged.
# By using an `after_commit` instead of `after_update`, we avoid the recursive callback
# scenario, though it then requires us to use the `previous_changes` hash
- def update_emails_with_primary_email
- previous_email = previous_changes[:email][0] # grab this before the DestroyService is called
+ def update_emails_with_primary_email(previous_email)
primary_email_record = emails.find_by(email: email)
Emails::DestroyService.new(self, user: self).execute(primary_email_record) if primary_email_record
@@ -623,9 +639,7 @@ class User < ActiveRecord::Base
end
def owned_projects
- @owned_projects ||=
- Project.where('namespace_id IN (?) OR namespace_id = ?',
- owned_groups.select(:id), namespace.id).joins(:namespace)
+ @owned_projects ||= Project.from("(#{owned_projects_union.to_sql}) AS projects")
end
# Returns projects which user can admin issues on (for example to move an issue to that project).
@@ -703,10 +717,6 @@ class User < ActiveRecord::Base
projects_limit - personal_projects_count
end
- def personal_projects_count
- @personal_projects_count ||= personal_projects.count
- end
-
def recent_push(project = nil)
service = Users::LastPushEventService.new(self)
@@ -773,13 +783,13 @@ class User < ActiveRecord::Base
end
def set_notification_email
- if notification_email.blank? || !all_emails.include?(notification_email)
+ if notification_email.blank? || all_emails.exclude?(notification_email)
self.notification_email = email
end
end
def set_public_email
- if public_email.blank? || !all_emails.include?(public_email)
+ if public_email.blank? || all_emails.exclude?(public_email)
self.public_email = ''
end
end
@@ -861,6 +871,16 @@ class User < ActiveRecord::Base
confirmed? && !temp_oauth_email?
end
+ def accept_pending_invitations!
+ pending_invitations.select do |member|
+ member.accept_invite!(self)
+ end
+ end
+
+ def pending_invitations
+ Member.where(invite_email: verified_emails).invite
+ end
+
def all_emails
all_emails = []
all_emails << email unless temp_oauth_email?
@@ -917,7 +937,7 @@ class User < ActiveRecord::Base
def delete_async(deleted_by:, params: {})
block if params[:hard_delete]
- DeleteUserWorker.perform_async(deleted_by.id, id, params)
+ DeleteUserWorker.perform_async(deleted_by.id, id, params.to_h)
end
def notification_service
@@ -954,10 +974,13 @@ class User < ActiveRecord::Base
end
def manageable_groups
- union = Gitlab::SQL::Union.new([owned_groups.select(:id),
- masters_groups.select(:id)])
- arel_union = Arel::Nodes::SqlLiteral.new(union.to_sql)
- owned_and_master_groups = Group.where(Group.arel_table[:id].in(arel_union))
+ union_sql = Gitlab::SQL::Union.new([owned_groups.select(:id), masters_groups.select(:id)]).to_sql
+
+ # Update this line to not use raw SQL when migrated to Rails 5.2.
+ # Either ActiveRecord or Arel constructions are fine.
+ # This was replaced with the raw SQL construction because of bugs in the arel gem.
+ # Bugs were fixed in arel 9.0.0 (Rails 5.2).
+ owned_and_master_groups = Group.where("namespaces.id IN (#{union_sql})") # rubocop:disable GitlabSecurity/SqlInjection
Gitlab::GroupHierarchy.new(owned_and_master_groups).base_and_descendants
end
@@ -997,18 +1020,28 @@ class User < ActiveRecord::Base
!solo_owned_groups.present?
end
- def ci_authorized_runners
- @ci_authorized_runners ||= begin
- runner_ids = Ci::RunnerProject
- .where("ci_runner_projects.project_id IN (#{ci_projects_union.to_sql})") # rubocop:disable GitlabSecurity/SqlInjection
+ def ci_owned_runners
+ @ci_owned_runners ||= begin
+ project_runner_ids = Ci::RunnerProject
+ .where(project: authorized_projects(Gitlab::Access::MASTER))
.select(:runner_id)
- Ci::Runner.specific.where(id: runner_ids)
+
+ group_runner_ids = Ci::RunnerNamespace
+ .where(namespace_id: owned_or_masters_groups.select(:id))
+ .select(:runner_id)
+
+ union = Gitlab::SQL::Union.new([project_runner_ids, group_runner_ids])
+
+ Ci::Runner.specific.where("ci_runners.id IN (#{union.to_sql})") # rubocop:disable GitlabSecurity/SqlInjection
end
end
def notification_settings_for(source)
if notification_settings.loaded?
- notification_settings.find { |notification| notification.source == source }
+ notification_settings.find do |notification|
+ notification.source_type == source.class.base_class.name &&
+ notification.source_id == source.id
+ end
else
notification_settings.find_or_initialize_by(source: source)
end
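Review note: ci_owned_runners now unions project-level runner ids (projects where the user is at least master) with group-level runner ids from owned or masters groups. Roughly the SQL shape this composes to, written out for orientation only; exact column names and the specific scope's predicate may differ:

    # SELECT ci_runners.* FROM ci_runners
    # WHERE <specific-runner predicate>
    #   AND ci_runners.id IN (
    #     SELECT runner_id FROM ci_runner_projects
    #       WHERE project_id IN (<projects authorized at master or above>)
    #     UNION
    #     SELECT runner_id FROM ci_runner_namespaces
    #       WHERE namespace_id IN (<owned or masters groups>)
    #   )
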
@@ -1049,9 +1082,10 @@ class User < ActiveRecord::Base
end
end
- def update_cache_counts
- assigned_open_merge_requests_count(force: true)
- assigned_open_issues_count(force: true)
+ def personal_projects_count(force: false)
+ Rails.cache.fetch(['users', id, 'personal_projects_count'], force: force, expires_in: 24.hours, raw: true) do
+ personal_projects.count
+ end.to_i
end
def update_todos_count_cache
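Review note: personal_projects_count becomes a read-through cache. With raw: true the bare value is stored unmarshalled, so the read side converts back with to_i, and the matching invalidation helper (added below) deletes the same key. The pattern in isolation, as a console sketch:

    key = ['users', user.id, 'personal_projects_count']
    Rails.cache.fetch(key, expires_in: 24.hours, raw: true) do
      user.personal_projects.count
    end.to_i
    Rails.cache.delete(key)   # what invalidate_personal_projects_count does
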
@@ -1064,6 +1098,7 @@ class User < ActiveRecord::Base
invalidate_merge_request_cache_counts
invalidate_todos_done_count
invalidate_todos_pending_count
+ invalidate_personal_projects_count
end
def invalidate_issue_cache_counts
@@ -1082,6 +1117,10 @@ class User < ActiveRecord::Base
Rails.cache.delete(['users', id, 'todos_pending_count'])
end
+ def invalidate_personal_projects_count
+ Rails.cache.delete(['users', id, 'personal_projects_count'])
+ end
+
# This is copied from Devise::Models::Lockable#valid_for_authentication?, as our auth
# flow means we don't call that automatically (and can't conveniently do so).
#
@@ -1089,8 +1128,11 @@ class User < ActiveRecord::Base
# <https://github.com/plataformatec/devise/blob/v4.0.0/lib/devise/models/lockable.rb#L92>
#
def increment_failed_attempts!
+ return if ::Gitlab::Database.read_only?
+
self.failed_attempts ||= 0
self.failed_attempts += 1
+
if attempts_exceeded?
lock_access! unless access_locked?
else
@@ -1128,11 +1170,11 @@ class User < ActiveRecord::Base
save
end
- # each existing user needs to have an `rss_token`.
+ # each existing user needs to have a `feed_token`.
# we do this on read since migrating all existing users is not a feasible
# solution.
- def rss_token
- ensure_rss_token!
+ def feed_token
+ ensure_feed_token!
end
def sync_attribute?(attribute)
@@ -1185,6 +1227,20 @@ class User < ActiveRecord::Base
max_member_access_for_group_ids([group_id])[group_id]
end
+ def terms_accepted?
+ accepted_term_id.present?
+ end
+
+ def required_terms_not_accepted?
+ Gitlab::CurrentSettings.current_application_settings.enforce_terms? &&
+ !terms_accepted?
+ end
+
+ def owned_or_masters_groups
+ union = Gitlab::SQL::Union.new([owned_groups, masters_groups])
+ Group.from("(#{union.to_sql}) namespaces")
+ end
+
protected
# override, from Devise::Validatable
@@ -1196,13 +1252,13 @@ class User < ActiveRecord::Base
private
- def ci_projects_union
- scope = { access_level: [Gitlab::Access::MASTER, Gitlab::Access::OWNER] }
- groups = groups_projects.where(members: scope)
- other = projects.where(members: scope)
-
- Gitlab::SQL::Union.new([personal_projects.select(:id), groups.select(:id),
- other.select(:id)])
+ def owned_projects_union
+ Gitlab::SQL::Union.new([
+ Project.where(namespace: namespace),
+ Project.joins(:project_authorizations)
+ .where("projects.namespace_id <> ?", namespace.id)
+ .where(project_authorizations: { user_id: id, access_level: Gitlab::Access::OWNER })
+ ], remove_duplicates: false)
end
# Added according to https://github.com/plataformatec/devise/blob/7df57d5081f9884849ca15e4fde179ef164a575f/README.md#activejob-integration
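Review note: owned_projects is rebuilt from a union of the user's personal-namespace projects and projects they are authorized on as owner outside that namespace; remove_duplicates: false emits UNION ALL, which is safe here because the namespace_id <> ? filter keeps the two branches disjoint. Roughly the generated SQL, for orientation, with placeholders instead of literal ids:

    # SELECT projects.* FROM (
    #   SELECT projects.* FROM projects WHERE projects.namespace_id = <user namespace id>
    #   UNION ALL
    #   SELECT projects.* FROM projects
    #     INNER JOIN project_authorizations ON project_authorizations.project_id = projects.id
    #   WHERE projects.namespace_id <> <user namespace id>
    #     AND project_authorizations.user_id = <user id>
    #     AND project_authorizations.access_level = <owner access level>
    # ) AS projects
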
diff --git a/app/models/user_callout.rb b/app/models/user_callout.rb
index e4b69382626..9d461c6750a 100644
--- a/app/models/user_callout.rb
+++ b/app/models/user_callout.rb
@@ -2,7 +2,8 @@ class UserCallout < ActiveRecord::Base
belongs_to :user
enum feature_name: {
- gke_cluster_integration: 1
+ gke_cluster_integration: 1,
+ gcp_signup_offer: 2
}
validates :user, presence: true
diff --git a/app/models/wiki_page.rb b/app/models/wiki_page.rb
index 0f5536415f7..cde79b95062 100644
--- a/app/models/wiki_page.rb
+++ b/app/models/wiki_page.rb
@@ -265,6 +265,15 @@ class WikiPage
title.present? && self.class.unhyphenize(@page.url_path) != title
end
+ # Updates the current @attributes hash by merging a hash of params
+ def update_attributes(attrs)
+ attrs[:title] = process_title(attrs[:title]) if attrs[:title].present?
+
+ attrs.slice!(:content, :format, :message, :title)
+
+ @attributes.merge!(attrs)
+ end
+
private
# Process and format the title based on the user input.
@@ -290,15 +299,6 @@ class WikiPage
File.join(components)
end
- # Updates the current @attributes hash by merging a hash of params
- def update_attributes(attrs)
- attrs[:title] = process_title(attrs[:title]) if attrs[:title].present?
-
- attrs.slice!(:content, :format, :message, :title)
-
- @attributes.merge!(attrs)
- end
-
def set_attributes
attributes[:slug] = @page.url_path
attributes[:title] = @page.title