gitlab.com/gitlab-org/gitlab-foss.git
author    Lin Jen-Shin <godfat@godfat.org>  2017-12-15 12:14:26 +0300
committer Lin Jen-Shin <godfat@godfat.org>  2017-12-15 12:14:26 +0300
commit    59ac184fcf64f1812fbfd88a00ea029ca3c1f4e7 (patch)
tree      5db0594a6f568f02b4f54c6bf4eabe01229a9f95 /lib
parent    85be6d83be4632c76760e373da131a90afb093b9 (diff)
parent    1baea77438779e74657b49ca26810d6c8f041b41 (diff)
Merge remote-tracking branch 'upstream/master' into no-ivar-in-modules
* upstream/master: (671 commits)
  Make rubocop happy
  Use guard clause
  Improve language
  Prettify
  Use temp branch
  Pass info about who started the job and which job triggered it
  Docs: add indexes for monitoring and performance monitoring
  clearer-documentation-on-inline-diffs
  Add docs for commit diff discussion in merge requests
  sorting for tags api
  Clear BatchLoader after each spec to prevent holding onto records longer than necessary
  Include project in BatchLoader key to prevent returning blobs for the wrong project
  moved lfs_blob_ids method into ExtractsPath module
  Converted JS modules into exported modules
  spec fixes
  Bump gitlab-shell version to 5.10.3
  Clear caches before updating MR diffs
  Use new Ruby version 2.4 in GitLab QA images
  moved lfs blob fetch from extractspath file
  Update GitLab QA dependencies
  ...
Diffstat (limited to 'lib')
-rw-r--r--lib/after_commit_queue.rb26
-rw-r--r--lib/api/circuit_breakers.rb2
-rw-r--r--lib/api/entities.rb82
-rw-r--r--lib/api/groups.rb14
-rw-r--r--lib/api/helpers/pagination.rb10
-rw-r--r--lib/api/internal.rb13
-rw-r--r--lib/api/issues.rb2
-rw-r--r--lib/api/projects.rb29
-rw-r--r--lib/api/projects_relation_builder.rb34
-rw-r--r--lib/api/protected_branches.rb18
-rw-r--r--lib/api/repositories.rb4
-rw-r--r--lib/api/runner.rb10
-rw-r--r--lib/api/tags.rb7
-rw-r--r--lib/api/users.rb2
-rw-r--r--lib/backup/artifacts.rb2
-rw-r--r--lib/backup/repository.rb9
-rw-r--r--lib/banzai/cross_project_reference.rb2
-rw-r--r--lib/banzai/filter/abstract_reference_filter.rb98
-rw-r--r--lib/banzai/filter/commit_reference_filter.rb34
-rw-r--r--lib/banzai/filter/epic_reference_filter.rb12
-rw-r--r--lib/banzai/filter/issuable_reference_filter.rb31
-rw-r--r--lib/banzai/filter/issue_reference_filter.rb32
-rw-r--r--lib/banzai/filter/label_reference_filter.rb4
-rw-r--r--lib/banzai/filter/merge_request_reference_filter.rb37
-rw-r--r--lib/banzai/filter/milestone_reference_filter.rb2
-rw-r--r--lib/banzai/filter/table_of_contents_filter.rb1
-rw-r--r--lib/banzai/filter/upload_link_filter.rb18
-rw-r--r--lib/banzai/issuable_extractor.rb4
-rw-r--r--lib/banzai/object_renderer.rb12
-rw-r--r--lib/banzai/reference_parser/epic_parser.rb12
-rw-r--r--lib/banzai/reference_parser/issuable_parser.rb25
-rw-r--r--lib/banzai/reference_parser/issue_parser.rb12
-rw-r--r--lib/banzai/reference_parser/merge_request_parser.rb24
-rw-r--r--lib/extracts_path.rb6
-rw-r--r--lib/feature.rb9
-rw-r--r--lib/gitlab/background_migration/populate_untracked_uploads.rb259
-rw-r--r--lib/gitlab/background_migration/prepare_untracked_uploads.rb163
-rw-r--r--lib/gitlab/bare_repository_import/importer.rb1
-rw-r--r--lib/gitlab/bare_repository_import/repository.rb2
-rw-r--r--lib/gitlab/checks/project_moved.rb65
-rw-r--r--lib/gitlab/ci/pipeline/chain/base.rb7
-rw-r--r--lib/gitlab/ci/pipeline/chain/build.rb30
-rw-r--r--lib/gitlab/ci/pipeline/chain/command.rb61
-rw-r--r--lib/gitlab/ci/pipeline/chain/create.rb16
-rw-r--r--lib/gitlab/ci/pipeline/chain/helpers.rb14
-rw-r--r--lib/gitlab/ci/pipeline/chain/sequence.rb13
-rw-r--r--lib/gitlab/ci/pipeline/chain/validate/abilities.rb12
-rw-r--r--lib/gitlab/ci/pipeline/chain/validate/repository.rb7
-rw-r--r--lib/gitlab/conflict/file_collection.rb2
-rw-r--r--lib/gitlab/database.rb14
-rw-r--r--lib/gitlab/database/migration_helpers.rb4
-rw-r--r--lib/gitlab/diff/diff_refs.rb22
-rw-r--r--lib/gitlab/diff/inline_diff.rb2
-rw-r--r--lib/gitlab/ee_compat_check.rb6
-rw-r--r--lib/gitlab/email/handler.rb2
-rw-r--r--lib/gitlab/email/handler/create_merge_request_handler.rb69
-rw-r--r--lib/gitlab/email/receiver.rb6
-rw-r--r--lib/gitlab/git.rb12
-rw-r--r--lib/gitlab/git/blob.rb1
-rw-r--r--lib/gitlab/git/commit.rb39
-rw-r--r--lib/gitlab/git/conflict/file.rb2
-rw-r--r--lib/gitlab/git/conflict/resolver.rb2
-rw-r--r--lib/gitlab/git/operation_service.rb4
-rw-r--r--lib/gitlab/git/remote_repository.rb6
-rw-r--r--lib/gitlab/git/repository.rb398
-rw-r--r--lib/gitlab/git/repository_mirroring.rb41
-rw-r--r--lib/gitlab/git/storage.rb1
-rw-r--r--lib/gitlab/git/storage/checker.rb120
-rw-r--r--lib/gitlab/git/storage/circuit_breaker.rb102
-rw-r--r--lib/gitlab/git/storage/circuit_breaker_settings.rb12
-rw-r--r--lib/gitlab/git/storage/failure_info.rb39
-rw-r--r--lib/gitlab/git/storage/health.rb25
-rw-r--r--lib/gitlab/git/storage/null_circuit_breaker.rb22
-rw-r--r--lib/gitlab/git_access.rb23
-rw-r--r--lib/gitlab/git_access_wiki.rb6
-rw-r--r--lib/gitlab/gitaly_client.rb8
-rw-r--r--lib/gitlab/gitaly_client/commit_service.rb20
-rw-r--r--lib/gitlab/gitaly_client/operation_service.rb58
-rw-r--r--lib/gitlab/gitaly_client/repository_service.rb21
-rw-r--r--lib/gitlab/gitaly_client/wiki_service.rb10
-rw-r--r--lib/gitlab/identifier.rb5
-rw-r--r--lib/gitlab/kubernetes/helm.rb2
-rw-r--r--lib/gitlab/metrics/method_call.rb17
-rw-r--r--lib/gitlab/metrics/samplers/influx_sampler.rb24
-rw-r--r--lib/gitlab/metrics/samplers/ruby_sampler.rb33
-rw-r--r--lib/gitlab/project_search_results.rb7
-rw-r--r--lib/gitlab/prometheus/queries/query_additional_metrics.rb2
-rw-r--r--lib/gitlab/reference_extractor.rb2
-rw-r--r--lib/gitlab/seeder.rb3
-rw-r--r--lib/gitlab/shell.rb23
-rw-r--r--lib/gitlab/sidekiq_config.rb61
-rw-r--r--lib/gitlab/sidekiq_versioning.rb25
-rw-r--r--lib/gitlab/sidekiq_versioning/manager.rb12
-rw-r--r--lib/gitlab/storage_check.rb11
-rw-r--r--lib/gitlab/storage_check/cli.rb69
-rw-r--r--lib/gitlab/storage_check/gitlab_caller.rb39
-rw-r--r--lib/gitlab/storage_check/option_parser.rb39
-rw-r--r--lib/gitlab/storage_check/response.rb77
-rw-r--r--lib/gitlab/tcp_checker.rb45
-rw-r--r--lib/gitlab/utils.rb17
-rw-r--r--lib/gitlab/utils/strong_memoize.rb20
-rw-r--r--lib/gitlab/view/presenter/factory.rb2
-rw-r--r--lib/gitlab/workhorse.rb2
-rw-r--r--lib/google_api/cloud_platform/client.rb12
-rw-r--r--lib/tasks/gitlab/tcp_check.rake20
105 files changed, 2322 insertions(+), 635 deletions(-)
diff --git a/lib/after_commit_queue.rb b/lib/after_commit_queue.rb
index 4750a2c373a..db63c5038ae 100644
--- a/lib/after_commit_queue.rb
+++ b/lib/after_commit_queue.rb
@@ -6,12 +6,34 @@ module AfterCommitQueue
after_rollback :_clear_after_commit_queue
end
- def run_after_commit(method = nil, &block)
- _after_commit_queue << proc { self.send(method) } if method # rubocop:disable GitlabSecurity/PublicSend
+ def run_after_commit(&block)
_after_commit_queue << block if block
+
+ true
+ end
+
+ def run_after_commit_or_now(&block)
+ if AfterCommitQueue.inside_transaction?
+ run_after_commit(&block)
+ else
+ instance_eval(&block)
+ end
+
true
end
+ def self.open_transactions_baseline
+ if ::Rails.env.test?
+ return DatabaseCleaner.connections.count { |conn| conn.strategy.is_a?(DatabaseCleaner::ActiveRecord::Transaction) }
+ end
+
+ 0
+ end
+
+ def self.inside_transaction?
+ ActiveRecord::Base.connection.open_transactions > open_transactions_baseline
+ end
+
protected
def _run_after_commit_queue
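
A minimal usage sketch of the new run_after_commit_or_now helper, assuming a model that already includes AfterCommitQueue; the Notifier call is hypothetical:

    class Issue < ActiveRecord::Base
      include AfterCommitQueue
    end

    issue = Issue.first

    Issue.transaction do
      # Queued: runs from the after_commit hook once the transaction commits.
      issue.run_after_commit_or_now { Notifier.call(issue) }
    end

    # No open transaction beyond the test baseline, so the block runs immediately.
    issue.run_after_commit_or_now { Notifier.call(issue) }
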
diff --git a/lib/api/circuit_breakers.rb b/lib/api/circuit_breakers.rb
index 118883f5ea5..598c76f6168 100644
--- a/lib/api/circuit_breakers.rb
+++ b/lib/api/circuit_breakers.rb
@@ -41,7 +41,7 @@ module API
detail 'This feature was introduced in GitLab 9.5'
end
delete do
- Gitlab::Git::Storage::CircuitBreaker.reset_all!
+ Gitlab::Git::Storage::FailureInfo.reset_all!
end
end
end
diff --git a/lib/api/entities.rb b/lib/api/entities.rb
index ce332fe85d2..928706dfda7 100644
--- a/lib/api/entities.rb
+++ b/lib/api/entities.rb
@@ -16,10 +16,13 @@ module API
class UserBasic < UserSafe
expose :state
+
expose :avatar_url do |user, options|
user.avatar_url(only_path: false)
end
+ expose :avatar_path, if: ->(user, options) { options.fetch(:only_path, false) && user.avatar_path }
+
expose :web_url do |user, options|
Gitlab::Routing.url_helpers.user_url(user)
end
@@ -88,13 +91,29 @@ module API
end
class BasicProjectDetails < ProjectIdentity
- expose :default_branch, :tag_list
+ include ::API::ProjectsRelationBuilder
+
+ expose :default_branch
+ # Avoids an N+1 query: https://github.com/mbleigh/acts-as-taggable-on/issues/91#issuecomment-168273770
+ expose :tag_list do |project|
+ # project.tags.order(:name).pluck(:name) is the most suitable option
+ # to avoid loading all the ActiveRecord objects but, if we use it here,
+ # it overrides the preloaded associations and makes a query
+ # (fixed in https://github.com/rails/rails/pull/25976).
+ project.tags.map(&:name).sort
+ end
expose :ssh_url_to_repo, :http_url_to_repo, :web_url
expose :avatar_url do |project, options|
project.avatar_url(only_path: false)
end
expose :star_count, :forks_count
expose :last_activity_at
+
+ def self.preload_relation(projects_relation, options = {})
+ projects_relation.preload(:project_feature, :route)
+ .preload(namespace: [:route, :owner],
+ tags: :taggings)
+ end
end
class Project < BasicProjectDetails
@@ -146,7 +165,7 @@ module API
expose :shared_runners_enabled
expose :lfs_enabled?, as: :lfs_enabled
expose :creator_id
- expose :namespace, using: 'API::Entities::Namespace'
+ expose :namespace, using: 'API::Entities::NamespaceBasic'
expose :forked_from_project, using: Entities::BasicProjectDetails, if: lambda { |project, options| project.forked? }
expose :import_status
expose :import_error, if: lambda { |_project, options| options[:user_can_admin_project] }
@@ -156,7 +175,7 @@ module API
expose :public_builds, as: :public_jobs
expose :ci_config_path
expose :shared_with_groups do |project, options|
- SharedGroup.represent(project.project_group_links.all, options)
+ SharedGroup.represent(project.project_group_links, options)
end
expose :only_allow_merge_if_pipeline_succeeds
expose :request_access_enabled
@@ -164,6 +183,18 @@ module API
expose :printing_merge_request_link_enabled
expose :statistics, using: 'API::Entities::ProjectStatistics', if: :statistics
+
+ def self.preload_relation(projects_relation, options = {})
+ super(projects_relation).preload(:group)
+ .preload(project_group_links: :group,
+ fork_network: :root_project,
+ forked_project_link: :forked_from_project,
+ forked_from_project: [:route, :forks, namespace: :route, tags: :taggings])
+ end
+
+ def self.forks_counting_projects(projects_relation)
+ projects_relation + projects_relation.map(&:forked_from_project).compact
+ end
end
class ProjectStatistics < Grape::Entity
@@ -217,8 +248,21 @@ module API
end
class GroupDetail < Group
- expose :projects, using: Entities::Project
- expose :shared_projects, using: Entities::Project
+ expose :projects, using: Entities::Project do |group, options|
+ GroupProjectsFinder.new(
+ group: group,
+ current_user: options[:current_user],
+ options: { only_owned: true }
+ ).execute
+ end
+
+ expose :shared_projects, using: Entities::Project do |group, options|
+ GroupProjectsFinder.new(
+ group: group,
+ current_user: options[:current_user],
+ options: { only_shared: true }
+ ).execute
+ end
end
class Commit < Grape::Entity
@@ -618,9 +662,11 @@ module API
expose :created_at
end
- class Namespace < Grape::Entity
+ class NamespaceBasic < Grape::Entity
expose :id, :name, :path, :kind, :full_path, :parent_id
+ end
+ class Namespace < NamespaceBasic
expose :members_count_with_descendants, if: -> (namespace, opts) { expose_members_count_with_descendants?(namespace, opts) } do |namespace, _|
namespace.users_with_descendants.count
end
@@ -680,7 +726,7 @@ module API
if options.key?(:project_members)
(options[:project_members] || []).find { |member| member.source_id == project.id }
else
- project.project_members.find_by(user_id: options[:current_user].id)
+ project.project_member(options[:current_user])
end
end
@@ -689,11 +735,25 @@ module API
if options.key?(:group_members)
(options[:group_members] || []).find { |member| member.source_id == project.namespace_id }
else
- project.group.group_members.find_by(user_id: options[:current_user].id)
+ project.group.group_member(options[:current_user])
end
end
end
end
+
+ def self.preload_relation(projects_relation, options = {})
+ relation = super(projects_relation, options)
+
+ unless options.key?(:group_members)
+ relation = relation.preload(group: [group_members: [:source, user: [notification_settings: :source]]])
+ end
+
+ unless options.key?(:project_members)
+ relation = relation.preload(project_members: [:source, user: [notification_settings: :source]])
+ end
+
+ relation
+ end
end
class LabelBasic < Grape::Entity
@@ -1006,13 +1066,9 @@ module API
expose :type, :url, :username, :password
end
- class ArtifactFile < Grape::Entity
- expose :filename, :size
- end
-
class Dependency < Grape::Entity
expose :id, :name, :token
- expose :artifacts_file, using: ArtifactFile, if: ->(job, _) { job.artifacts? }
+ expose :artifacts_file, using: JobArtifactFile, if: ->(job, _) { job.artifacts? }
end
class Response < Grape::Entity
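
A short sketch of the N+1 trade-off described in the tag_list comment above; the query shape is an assumption based on that comment and on the preload_relation hook:

    projects = Project.preload(:project_feature, :route, tags: :taggings).limit(20)

    projects.map do |project|
      project.tags.map(&:name).sort   # served from the preloaded association
      # project.tags.order(:name).pluck(:name) would bypass the preload and
      # issue one extra query per project
    end
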
diff --git a/lib/api/groups.rb b/lib/api/groups.rb
index bcf2e6dae1d..b81f07a1770 100644
--- a/lib/api/groups.rb
+++ b/lib/api/groups.rb
@@ -52,6 +52,13 @@ module API
groups
end
+ def find_group_projects(params)
+ group = find_group!(params[:id])
+ projects = GroupProjectsFinder.new(group: group, current_user: current_user, params: project_finder_params).execute
+ projects = reorder_projects(projects)
+ paginate(projects)
+ end
+
def present_groups(params, groups)
options = {
with: Entities::Group,
@@ -170,11 +177,10 @@ module API
use :pagination
end
get ":id/projects" do
- group = find_group!(params[:id])
- projects = GroupProjectsFinder.new(group: group, current_user: current_user, params: project_finder_params).execute
- projects = reorder_projects(projects)
+ projects = find_group_projects(params)
entity = params[:simple] ? Entities::BasicProjectDetails : Entities::Project
- present paginate(projects), with: entity, current_user: current_user
+
+ present entity.prepare_relation(projects), with: entity, current_user: current_user
end
desc 'Get a list of subgroups in this group.' do
diff --git a/lib/api/helpers/pagination.rb b/lib/api/helpers/pagination.rb
index 95108292aac..bb70370ba77 100644
--- a/lib/api/helpers/pagination.rb
+++ b/lib/api/helpers/pagination.rb
@@ -2,6 +2,8 @@ module API
module Helpers
module Pagination
def paginate(relation)
+ relation = add_default_order(relation)
+
relation.page(params[:page]).per(params[:per_page]).tap do |data|
add_pagination_headers(data)
end
@@ -45,6 +47,14 @@ module API
# Ensure there is in total at least 1 page
[paginated_data.total_pages, 1].max
end
+
+ def add_default_order(relation)
+ if relation.is_a?(ActiveRecord::Relation) && relation.order_values.empty?
+ relation = relation.order(:id)
+ end
+
+ relation
+ end
end
end
end
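
The add_default_order change only touches ActiveRecord relations that have no explicit ordering; a rough sketch of why that matters for pagination (the model and filter are illustrative):

    relation = Project.where(archived: false)   # no ORDER BY: page contents can shift between requests
    relation = relation.order(:id) if relation.is_a?(ActiveRecord::Relation) &&
                                      relation.order_values.empty?

    relation.page(2).per(20)                    # stable, repeatable page boundaries
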
diff --git a/lib/api/internal.rb b/lib/api/internal.rb
index 451121a4cea..ccaaeca10d4 100644
--- a/lib/api/internal.rb
+++ b/lib/api/internal.rb
@@ -4,6 +4,7 @@ module API
before { authenticate_by_gitlab_shell_token! }
helpers ::API::Helpers::InternalHelpers
+ helpers ::Gitlab::Identifier
namespace 'internal' do
# Check if git command is allowed to project
@@ -176,17 +177,25 @@ module API
post '/post_receive' do
status 200
-
PostReceive.perform_async(params[:gl_repository], params[:identifier],
params[:changes])
broadcast_message = BroadcastMessage.current&.last&.message
reference_counter_decreased = Gitlab::ReferenceCounter.new(params[:gl_repository]).decrease
- {
+ output = {
merge_request_urls: merge_request_urls,
broadcast_message: broadcast_message,
reference_counter_decreased: reference_counter_decreased
}
+
+ project = Gitlab::GlRepository.parse(params[:gl_repository]).first
+ user = identify(params[:identifier])
+ redirect_message = Gitlab::Checks::ProjectMoved.fetch_redirect_message(user.id, project.id)
+ if redirect_message
+ output[:redirected_message] = redirect_message
+ end
+
+ output
end
end
end
diff --git a/lib/api/issues.rb b/lib/api/issues.rb
index e60e00d7956..5f943ba27d1 100644
--- a/lib/api/issues.rb
+++ b/lib/api/issues.rb
@@ -161,6 +161,8 @@ module API
use :issue_params
end
post ':id/issues' do
+ authorize! :create_issue, user_project
+
# Setting created_at time only allowed for admins and project owners
unless current_user.admin? || user_project.owner == current_user
params.delete(:created_at)
diff --git a/lib/api/projects.rb b/lib/api/projects.rb
index 4cd7e714aa2..fa222bf2b1c 100644
--- a/lib/api/projects.rb
+++ b/lib/api/projects.rb
@@ -79,11 +79,11 @@ module API
projects = projects.with_statistics if params[:statistics]
projects = projects.with_issues_enabled if params[:with_issues_enabled]
projects = projects.with_merge_requests_enabled if params[:with_merge_requests_enabled]
+ projects = paginate(projects)
if current_user
- projects = projects.includes(:route, :taggings, namespace: :route)
- project_members = current_user.project_members
- group_members = current_user.group_members
+ project_members = current_user.project_members.preload(:source, user: [notification_settings: :source])
+ group_members = current_user.group_members.preload(:source, user: [notification_settings: :source])
end
options = options.reverse_merge(
@@ -95,7 +95,7 @@ module API
)
options[:with] = Entities::BasicProjectDetails if params[:simple]
- present paginate(projects), options
+ present options[:with].prepare_relation(projects, options), options
end
end
@@ -367,15 +367,16 @@ module API
post ":id/fork/:forked_from_id" do
authenticated_as_admin!
- forked_from_project = find_project!(params[:forked_from_id])
- not_found!("Source Project") unless forked_from_project
+ fork_from_project = find_project!(params[:forked_from_id])
- if user_project.forked_from_project.nil?
- user_project.create_forked_project_link(forked_to_project_id: user_project.id, forked_from_project_id: forked_from_project.id)
+ not_found!("Source Project") unless fork_from_project
- ::Projects::ForksCountService.new(forked_from_project).refresh_cache
+ result = ::Projects::ForkService.new(fork_from_project, current_user).execute(user_project)
+
+ if result
+ present user_project.reload, with: Entities::Project
else
- render_api_error!("Project already forked", 409)
+ render_api_error!("Project already forked", 409) if user_project.forked?
end
end
@@ -383,11 +384,11 @@ module API
delete ":id/fork" do
authorize! :remove_fork_project, user_project
- if user_project.forked?
- destroy_conditionally!(user_project.forked_project_link)
- else
- not_modified!
+ result = destroy_conditionally!(user_project) do
+ ::Projects::UnlinkForkService.new(user_project, current_user).execute
end
+
+ result ? status(204) : not_modified!
end
desc 'Share the project with a group' do
diff --git a/lib/api/projects_relation_builder.rb b/lib/api/projects_relation_builder.rb
new file mode 100644
index 00000000000..6482fd94ab8
--- /dev/null
+++ b/lib/api/projects_relation_builder.rb
@@ -0,0 +1,34 @@
+module API
+ module ProjectsRelationBuilder
+ extend ActiveSupport::Concern
+
+ module ClassMethods
+ def prepare_relation(projects_relation, options = {})
+ projects_relation = preload_relation(projects_relation, options)
+ execute_batch_counting(projects_relation)
+ projects_relation
+ end
+
+ def preload_relation(projects_relation, options = {})
+ projects_relation
+ end
+
+ def forks_counting_projects(projects_relation)
+ projects_relation
+ end
+
+ def batch_forks_counting(projects_relation)
+ ::Projects::BatchForksCountService.new(forks_counting_projects(projects_relation)).refresh_cache
+ end
+
+ def batch_open_issues_counting(projects_relation)
+ ::Projects::BatchOpenIssuesCountService.new(projects_relation).refresh_cache
+ end
+
+ def execute_batch_counting(projects_relation)
+ batch_forks_counting(projects_relation)
+ batch_open_issues_counting(projects_relation)
+ end
+ end
+ end
+end
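
A minimal sketch of how an entity is expected to hook into this concern; Entities::ProjectSummary and its exposures are hypothetical, while prepare_relation and preload_relation come from the module above:

    module API
      module Entities
        class ProjectSummary < Grape::Entity
          include ::API::ProjectsRelationBuilder

          expose :id, :name

          # Preload whatever this entity actually exposes.
          def self.preload_relation(projects_relation, options = {})
            projects_relation.preload(:route, namespace: :route)
          end
        end
      end
    end

    # In an endpoint: paginate first, then preload and warm the fork/issue counters.
    # projects = paginate(Project.all)
    # present Entities::ProjectSummary.prepare_relation(projects), with: Entities::ProjectSummary
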
diff --git a/lib/api/protected_branches.rb b/lib/api/protected_branches.rb
index b5021e8a712..614822509f0 100644
--- a/lib/api/protected_branches.rb
+++ b/lib/api/protected_branches.rb
@@ -39,10 +39,10 @@ module API
end
params do
requires :name, type: String, desc: 'The name of the protected branch'
- optional :push_access_level, type: Integer, default: Gitlab::Access::MASTER,
+ optional :push_access_level, type: Integer,
values: ProtectedRefAccess::ALLOWED_ACCESS_LEVELS,
desc: 'Access levels allowed to push (defaults: `40`, master access level)'
- optional :merge_access_level, type: Integer, default: Gitlab::Access::MASTER,
+ optional :merge_access_level, type: Integer,
values: ProtectedRefAccess::ALLOWED_ACCESS_LEVELS,
desc: 'Access levels allowed to merge (defaults: `40`, master access level)'
end
@@ -52,15 +52,13 @@ module API
conflict!("Protected branch '#{params[:name]}' already exists")
end
- protected_branch_params = {
- name: params[:name],
- push_access_levels_attributes: [{ access_level: params[:push_access_level] }],
- merge_access_levels_attributes: [{ access_level: params[:merge_access_level] }]
- }
+ # Replace with `declared(params)` after updating to grape v1.0.2
+ # See https://github.com/ruby-grape/grape/pull/1710
+ # and https://gitlab.com/gitlab-org/gitlab-ce/issues/40843
+ declared_params = params.slice("name", "push_access_level", "merge_access_level", "allowed_to_push", "allowed_to_merge")
- service_args = [user_project, current_user, protected_branch_params]
-
- protected_branch = ::ProtectedBranches::CreateService.new(*service_args).execute
+ api_service = ::ProtectedBranches::ApiService.new(user_project, current_user, declared_params)
+ protected_branch = api_service.create
if protected_branch.persisted?
present protected_branch, with: Entities::ProtectedBranch, project: user_project
diff --git a/lib/api/repositories.rb b/lib/api/repositories.rb
index 7887b886c03..4f36bbd760f 100644
--- a/lib/api/repositories.rb
+++ b/lib/api/repositories.rb
@@ -110,10 +110,12 @@ module API
end
params do
use :pagination
+ optional :order_by, type: String, values: %w[email name commits], default: nil, desc: 'Return contributors ordered by `name` or `email` or `commits`'
+ optional :sort, type: String, values: %w[asc desc], default: nil, desc: 'Sort by asc (ascending) or desc (descending)'
end
get ':id/repository/contributors' do
begin
- contributors = ::Kaminari.paginate_array(user_project.repository.contributors)
+ contributors = ::Kaminari.paginate_array(user_project.repository.contributors(order_by: params[:order_by], sort: params[:sort]))
present paginate(contributors), with: Entities::Contributor
rescue
not_found!
diff --git a/lib/api/runner.rb b/lib/api/runner.rb
index a3987c560dd..80feb629d54 100644
--- a/lib/api/runner.rb
+++ b/lib/api/runner.rb
@@ -215,18 +215,20 @@ module API
job = authenticate_job!
forbidden!('Job is not running!') unless job.running?
- artifacts_upload_path = ArtifactUploader.artifacts_upload_path
+ artifacts_upload_path = JobArtifactUploader.artifacts_upload_path
artifacts = uploaded_file(:file, artifacts_upload_path)
metadata = uploaded_file(:metadata, artifacts_upload_path)
bad_request!('Missing artifacts file!') unless artifacts
file_to_large! unless artifacts.size < max_artifacts_size
- job.artifacts_file = artifacts
- job.artifacts_metadata = metadata
- job.artifacts_expire_in = params['expire_in'] ||
+ expire_in = params['expire_in'] ||
Gitlab::CurrentSettings.current_application_settings.default_artifacts_expire_in
+ job.build_job_artifacts_archive(project: job.project, file_type: :archive, file: artifacts, expire_in: expire_in)
+ job.build_job_artifacts_metadata(project: job.project, file_type: :metadata, file: metadata, expire_in: expire_in) if metadata
+ job.artifacts_expire_in = expire_in
+
if job.save
present job, with: Entities::JobRequest::Response
else
diff --git a/lib/api/tags.rb b/lib/api/tags.rb
index 0d394a7b441..5e0afc6a7e4 100644
--- a/lib/api/tags.rb
+++ b/lib/api/tags.rb
@@ -14,10 +14,15 @@ module API
success Entities::Tag
end
params do
+ optional :sort, type: String, values: %w[asc desc], default: 'desc',
desc: 'Return tags sorted in `asc` or `desc` order.'
+ optional :order_by, type: String, values: %w[name updated], default: 'updated',
+ desc: 'Return tags ordered by `name` or `updated` fields.'
use :pagination
end
get ':id/repository/tags' do
- tags = ::Kaminari.paginate_array(user_project.repository.tags.sort_by(&:name).reverse)
+ tags = ::Kaminari.paginate_array(::TagsFinder.new(user_project.repository, sort: "#{params[:order_by]}_#{params[:sort]}").execute)
+
present paginate(tags), with: Entities::Tag, project: user_project
end
diff --git a/lib/api/users.rb b/lib/api/users.rb
index 0cd89b1bcf8..e5de31ad51b 100644
--- a/lib/api/users.rb
+++ b/lib/api/users.rb
@@ -76,6 +76,8 @@ module API
forbidden!("Not authorized to access /api/v4/users") unless authorized
entity = current_user&.admin? ? Entities::UserWithAdmin : Entities::UserBasic
+ users = users.preload(:identities, :u2f_registrations) if entity == Entities::UserWithAdmin
+
present paginate(users), with: entity
end
diff --git a/lib/backup/artifacts.rb b/lib/backup/artifacts.rb
index 1f4bda6f588..7a582a20056 100644
--- a/lib/backup/artifacts.rb
+++ b/lib/backup/artifacts.rb
@@ -3,7 +3,7 @@ require 'backup/files'
module Backup
class Artifacts < Files
def initialize
- super('artifacts', ArtifactUploader.local_artifacts_store)
+ super('artifacts', LegacyArtifactUploader.local_store_path)
end
def create_files_dir
diff --git a/lib/backup/repository.rb b/lib/backup/repository.rb
index b6d273b98c2..2a04c03919d 100644
--- a/lib/backup/repository.rb
+++ b/lib/backup/repository.rb
@@ -193,12 +193,9 @@ module Backup
end
def empty_repo?(project_or_wiki)
- project_or_wiki.repository.expire_exists_cache # protect backups from stale cache
- project_or_wiki.repository.empty_repo?
- rescue => e
- progress.puts "Ignoring repository error and continuing backing up project: #{display_repo_path(project_or_wiki)} - #{e.message}".color(:orange)
-
- false
+ # Protect against stale caches
+ project_or_wiki.repository.expire_emptiness_caches
+ project_or_wiki.repository.empty?
end
def repository_storage_paths_args
diff --git a/lib/banzai/cross_project_reference.rb b/lib/banzai/cross_project_reference.rb
index e2b57adf611..d8fb7705b2a 100644
--- a/lib/banzai/cross_project_reference.rb
+++ b/lib/banzai/cross_project_reference.rb
@@ -11,7 +11,7 @@ module Banzai
# ref - String reference.
#
# Returns a Project, or nil if the reference can't be found
- def project_from_ref(ref)
+ def parent_from_ref(ref)
return context[:project] unless ref
Project.find_by_full_path(ref)
diff --git a/lib/banzai/filter/abstract_reference_filter.rb b/lib/banzai/filter/abstract_reference_filter.rb
index 8975395aff1..e7e6a90b5fd 100644
--- a/lib/banzai/filter/abstract_reference_filter.rb
+++ b/lib/banzai/filter/abstract_reference_filter.rb
@@ -82,9 +82,9 @@ module Banzai
end
end
- def project_from_ref_cached(ref)
- cached_call(:banzai_project_refs, ref) do
- project_from_ref(ref)
+ def from_ref_cached(ref)
+ cached_call("banzai_#{parent_type}_refs".to_sym, ref) do
+ parent_from_ref(ref)
end
end
@@ -153,15 +153,20 @@ module Banzai
# have `gfm` and `gfm-OBJECT_NAME` class names attached for styling.
def object_link_filter(text, pattern, link_content: nil, link_reference: false)
references_in(text, pattern) do |match, id, project_ref, namespace_ref, matches|
- project_path = full_project_path(namespace_ref, project_ref)
- project = project_from_ref_cached(project_path)
+ parent_path = if parent_type == :group
+ full_group_path(namespace_ref)
+ else
+ full_project_path(namespace_ref, project_ref)
+ end
- if project
+ parent = from_ref_cached(parent_path)
+
+ if parent
object =
if link_reference
- find_object_from_link_cached(project, id)
+ find_object_from_link_cached(parent, id)
else
- find_object_cached(project, id)
+ find_object_cached(parent, id)
end
end
@@ -169,13 +174,13 @@ module Banzai
title = object_link_title(object)
klass = reference_class(object_sym)
- data = data_attributes_for(link_content || match, project, object, link: !!link_content)
+ data = data_attributes_for(link_content || match, parent, object, link: !!link_content)
url =
if matches.names.include?("url") && matches[:url]
matches[:url]
else
- url_for_object_cached(object, project)
+ url_for_object_cached(object, parent)
end
content = link_content || object_link_text(object, matches)
@@ -224,17 +229,24 @@ module Banzai
# Returns a Hash containing all object references (e.g. issue IDs) per the
# project they belong to.
- def references_per_project
- @references_per_project ||= begin
+ def references_per_parent
+ @references_per ||= {}
+
+ @references_per[parent_type] ||= begin
refs = Hash.new { |hash, key| hash[key] = Set.new }
regex = Regexp.union(object_class.reference_pattern, object_class.link_reference_pattern)
nodes.each do |node|
node.to_html.scan(regex) do
- project_path = full_project_path($~[:namespace], $~[:project])
+ path = if parent_type == :project
+ full_project_path($~[:namespace], $~[:project])
+ else
+ full_group_path($~[:group])
+ end
+
symbol = $~[object_sym]
- refs[project_path] << symbol if object_class.reference_valid?(symbol)
+ refs[path] << symbol if object_class.reference_valid?(symbol)
end
end
@@ -244,35 +256,41 @@ module Banzai
# Returns a Hash containing referenced projects grouped per their full
# path.
- def projects_per_reference
- @projects_per_reference ||= begin
+ def parent_per_reference
+ @per_reference ||= {}
+
+ @per_reference[parent_type] ||= begin
refs = Set.new
- references_per_project.each do |project_ref, _|
- refs << project_ref
+ references_per_parent.each do |ref, _|
+ refs << ref
end
- find_projects_for_paths(refs.to_a).index_by(&:full_path)
+ find_for_paths(refs.to_a).index_by(&:full_path)
end
end
- def projects_relation_for_paths(paths)
- Project.where_full_path_in(paths).includes(:namespace)
+ def relation_for_paths(paths)
+ klass = parent_type.to_s.camelize.constantize
+ result = klass.where_full_path_in(paths)
+ return result if parent_type == :group
+
+ result.includes(:namespace) if parent_type == :project
end
# Returns projects for the given paths.
- def find_projects_for_paths(paths)
+ def find_for_paths(paths)
if RequestStore.active?
- cache = project_refs_cache
+ cache = refs_cache
to_query = paths - cache.keys
unless to_query.empty?
- projects = projects_relation_for_paths(to_query)
+ records = relation_for_paths(to_query)
found = []
- projects.each do |project|
- ref = project.full_path
- get_or_set_cache(cache, ref) { project }
+ records.each do |record|
+ ref = record.full_path
+ get_or_set_cache(cache, ref) { record }
found << ref
end
@@ -284,33 +302,37 @@ module Banzai
cache.slice(*paths).values.compact
else
- projects_relation_for_paths(paths)
+ relation_for_paths(paths)
end
end
- def current_project_path
- return unless project
-
- @current_project_path ||= project.full_path
+ def current_parent_path
+ @current_parent_path ||= parent&.full_path
end
def current_project_namespace_path
- return unless project
-
- @current_project_namespace_path ||= project.namespace.full_path
+ @current_project_namespace_path ||= project&.namespace&.full_path
end
private
def full_project_path(namespace, project_ref)
- return current_project_path unless project_ref
+ return current_parent_path unless project_ref
namespace_ref = namespace || current_project_namespace_path
"#{namespace_ref}/#{project_ref}"
end
- def project_refs_cache
- RequestStore[:banzai_project_refs] ||= {}
+ def refs_cache
+ RequestStore["banzai_#{parent_type}_refs".to_sym] ||= {}
+ end
+
+ def parent_type
+ :project
+ end
+
+ def parent
+ parent_type == :project ? project : group
end
end
end
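
The parent_type/parent pair above is what lets a subclass switch the shared reference-resolution machinery from projects to groups; a hypothetical group-scoped filter would only need something like the following (per the comment later in this diff, the EE epic filter is the real consumer):

    module Banzai
      module Filter
        class ExampleGroupItemReferenceFilter < AbstractReferenceFilter
          private

          # With :group, from_ref_cached, parent_per_reference and the refs
          # cache all key off group full paths instead of project paths.
          def parent_type
            :group
          end
        end
      end
    end
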
diff --git a/lib/banzai/filter/commit_reference_filter.rb b/lib/banzai/filter/commit_reference_filter.rb
index 714e0319025..eedb95197aa 100644
--- a/lib/banzai/filter/commit_reference_filter.rb
+++ b/lib/banzai/filter/commit_reference_filter.rb
@@ -22,10 +22,30 @@ module Banzai
end
end
+ def referenced_merge_request_commit_shas
+ return [] unless noteable.is_a?(MergeRequest)
+
+ @referenced_merge_request_commit_shas ||= begin
+ referenced_shas = references_per_parent.values.reduce(:|).to_a
+ noteable.all_commit_shas.select do |sha|
+ referenced_shas.any? { |ref| Gitlab::Git.shas_eql?(sha, ref) }
+ end
+ end
+ end
+
def url_for_object(commit, project)
h = Gitlab::Routing.url_helpers
- h.project_commit_url(project, commit,
- only_path: context[:only_path])
+
+ if referenced_merge_request_commit_shas.include?(commit.id)
+ h.diffs_project_merge_request_url(project,
+ noteable,
+ commit_id: commit.id,
+ only_path: only_path?)
+ else
+ h.project_commit_url(project,
+ commit,
+ only_path: only_path?)
+ end
end
def object_link_text_extras(object, matches)
@@ -38,6 +58,16 @@ module Banzai
extras
end
+
+ private
+
+ def noteable
+ context[:noteable]
+ end
+
+ def only_path?
+ context[:only_path]
+ end
end
end
end
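
referenced_merge_request_commit_shas compares full commit SHAs against possibly abbreviated textual references via Gitlab::Git.shas_eql?; its implementation is not shown in this excerpt, but the assumed semantics are a prefix comparison along these lines:

    # Assumed behaviour only; the real method lives in lib/gitlab/git.rb.
    def shas_eql?(sha1, sha2)
      return false if sha1.nil? || sha2.nil?

      length = [sha1.length, sha2.length].min
      sha1[0, length] == sha2[0, length]
    end

    shas_eql?('59ac184fcf64f1812fbfd88a00ea029ca3c1f4e7', '59ac184f') # => true
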
diff --git a/lib/banzai/filter/epic_reference_filter.rb b/lib/banzai/filter/epic_reference_filter.rb
new file mode 100644
index 00000000000..265924abe24
--- /dev/null
+++ b/lib/banzai/filter/epic_reference_filter.rb
@@ -0,0 +1,12 @@
+module Banzai
+ module Filter
+ # The actual filter is implemented in the EE mixin
+ class EpicReferenceFilter < IssuableReferenceFilter
+ self.reference_type = :epic
+
+ def self.object_class
+ Epic
+ end
+ end
+ end
+end
diff --git a/lib/banzai/filter/issuable_reference_filter.rb b/lib/banzai/filter/issuable_reference_filter.rb
new file mode 100644
index 00000000000..7addf09be73
--- /dev/null
+++ b/lib/banzai/filter/issuable_reference_filter.rb
@@ -0,0 +1,31 @@
+module Banzai
+ module Filter
+ class IssuableReferenceFilter < AbstractReferenceFilter
+ def records_per_parent
+ @records_per_project ||= {}
+
+ @records_per_project[object_class.to_s.underscore] ||= begin
+ hash = Hash.new { |h, k| h[k] = {} }
+
+ parent_per_reference.each do |path, parent|
+ record_ids = references_per_parent[path]
+
+ parent_records(parent, record_ids).each do |record|
+ hash[parent][record.iid.to_i] = record
+ end
+ end
+
+ hash
+ end
+ end
+
+ def find_object(parent, iid)
+ records_per_parent[parent][iid]
+ end
+
+ def parent_from_ref(ref)
+ parent_per_reference[ref || current_parent_path]
+ end
+ end
+ end
+end
diff --git a/lib/banzai/filter/issue_reference_filter.rb b/lib/banzai/filter/issue_reference_filter.rb
index ce1ab977d3b..6877cae8c55 100644
--- a/lib/banzai/filter/issue_reference_filter.rb
+++ b/lib/banzai/filter/issue_reference_filter.rb
@@ -8,46 +8,24 @@ module Banzai
# When external issues tracker like Jira is activated we should not
# use issue reference pattern, but we should still be able
# to reference issues from other GitLab projects.
- class IssueReferenceFilter < AbstractReferenceFilter
+ class IssueReferenceFilter < IssuableReferenceFilter
self.reference_type = :issue
def self.object_class
Issue
end
- def find_object(project, iid)
- issues_per_project[project][iid]
- end
-
def url_for_object(issue, project)
IssuesHelper.url_for_issue(issue.iid, project, only_path: context[:only_path], internal: true)
end
- def project_from_ref(ref)
- projects_per_reference[ref || current_project_path]
- end
-
- # Returns a Hash containing the issues per Project instance.
- def issues_per_project
- @issues_per_project ||= begin
- hash = Hash.new { |h, k| h[k] = {} }
-
- projects_per_reference.each do |path, project|
- issue_ids = references_per_project[path]
- issues = project.issues.where(iid: issue_ids.to_a)
-
- issues.each do |issue|
- hash[project][issue.iid.to_i] = issue
- end
- end
-
- hash
- end
- end
-
def projects_relation_for_paths(paths)
super(paths).includes(:gitlab_issue_tracker_service)
end
+
+ def parent_records(parent, ids)
+ parent.issues.where(iid: ids.to_a)
+ end
end
end
end
diff --git a/lib/banzai/filter/label_reference_filter.rb b/lib/banzai/filter/label_reference_filter.rb
index 5364984c9d3..d5360ad8f68 100644
--- a/lib/banzai/filter/label_reference_filter.rb
+++ b/lib/banzai/filter/label_reference_filter.rb
@@ -33,7 +33,7 @@ module Banzai
end
def find_label(project_ref, label_id, label_name)
- project = project_from_ref(project_ref)
+ project = parent_from_ref(project_ref)
return unless project
label_params = label_params(label_id, label_name)
@@ -66,7 +66,7 @@ module Banzai
def object_link_text(object, matches)
project_path = full_project_path(matches[:namespace], matches[:project])
- project_from_ref = project_from_ref_cached(project_path)
+ project_from_ref = from_ref_cached(project_path)
reference = project_from_ref.to_human_reference(project)
label_suffix = " <i>in #{reference}</i>" if reference.present?
diff --git a/lib/banzai/filter/merge_request_reference_filter.rb b/lib/banzai/filter/merge_request_reference_filter.rb
index 0eab865ac04..b3cfa97d0e0 100644
--- a/lib/banzai/filter/merge_request_reference_filter.rb
+++ b/lib/banzai/filter/merge_request_reference_filter.rb
@@ -4,48 +4,19 @@ module Banzai
# to merge requests that do not exist are ignored.
#
# This filter supports cross-project references.
- class MergeRequestReferenceFilter < AbstractReferenceFilter
+ class MergeRequestReferenceFilter < IssuableReferenceFilter
self.reference_type = :merge_request
def self.object_class
MergeRequest
end
- def find_object(project, iid)
- merge_requests_per_project[project][iid]
- end
-
def url_for_object(mr, project)
h = Gitlab::Routing.url_helpers
h.project_merge_request_url(project, mr,
only_path: context[:only_path])
end
- def project_from_ref(ref)
- projects_per_reference[ref || current_project_path]
- end
-
- # Returns a Hash containing the merge_requests per Project instance.
- def merge_requests_per_project
- @merge_requests_per_project ||= begin
- hash = Hash.new { |h, k| h[k] = {} }
-
- projects_per_reference.each do |path, project|
- merge_request_ids = references_per_project[path]
-
- merge_requests = project.merge_requests
- .where(iid: merge_request_ids.to_a)
- .includes(target_project: :namespace)
-
- merge_requests.each do |merge_request|
- hash[project][merge_request.iid.to_i] = merge_request
- end
- end
-
- hash
- end
- end
-
def object_link_text_extras(object, matches)
extras = super
@@ -61,6 +32,12 @@ module Banzai
extras
end
+
+ def parent_records(parent, ids)
+ parent.merge_requests
+ .where(iid: ids.to_a)
+ .includes(target_project: :namespace)
+ end
end
end
end
diff --git a/lib/banzai/filter/milestone_reference_filter.rb b/lib/banzai/filter/milestone_reference_filter.rb
index bb5da310e09..2a6b0964ac5 100644
--- a/lib/banzai/filter/milestone_reference_filter.rb
+++ b/lib/banzai/filter/milestone_reference_filter.rb
@@ -38,7 +38,7 @@ module Banzai
def find_milestone(project_ref, namespace_ref, milestone_id, milestone_name)
project_path = full_project_path(namespace_ref, project_ref)
- project = project_from_ref(project_path)
+ project = parent_from_ref(project_path)
return unless project
diff --git a/lib/banzai/filter/table_of_contents_filter.rb b/lib/banzai/filter/table_of_contents_filter.rb
index 47151626208..97244159985 100644
--- a/lib/banzai/filter/table_of_contents_filter.rb
+++ b/lib/banzai/filter/table_of_contents_filter.rb
@@ -32,6 +32,7 @@ module Banzai
.gsub(PUNCTUATION_REGEXP, '') # remove punctuation
.tr(' ', '-') # replace spaces with dash
.squeeze('-') # replace multiple dashes with one
+ .gsub(/\A(\d+)\z/, 'anchor-\1') # digits-only hrefs conflict with issue refs
uniq = headers[id] > 0 ? "-#{headers[id]}" : ''
headers[id] += 1
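
A quick worked example of the new gsub above: a heading whose slug is digits-only, say "## 2017", would otherwise yield an href of #2017, which collides with GitLab's issue reference syntax.

    '2017'.gsub(/\A(\d+)\z/, 'anchor-\1')              # => "anchor-2017"
    'getting-started'.gsub(/\A(\d+)\z/, 'anchor-\1')   # mixed slugs are left unchanged
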
diff --git a/lib/banzai/filter/upload_link_filter.rb b/lib/banzai/filter/upload_link_filter.rb
index 09844931be5..d64f9ac4eb6 100644
--- a/lib/banzai/filter/upload_link_filter.rb
+++ b/lib/banzai/filter/upload_link_filter.rb
@@ -8,7 +8,7 @@ module Banzai
#
class UploadLinkFilter < HTML::Pipeline::Filter
def call
- return doc unless project
+ return doc unless project || group
doc.xpath('descendant-or-self::a[starts-with(@href, "/uploads/")]').each do |el|
process_link_attr el.attribute('href')
@@ -28,13 +28,27 @@ module Banzai
end
def build_url(uri)
- File.join(Gitlab.config.gitlab.url, project.full_path, uri)
+ base_path = Gitlab.config.gitlab.url
+
+ if group
+ urls = Gitlab::Routing.url_helpers
+ # we need to get the last 2 parts of the uri, which are the secret and the filename
+ uri_parts = uri.split(File::SEPARATOR)
+ file_path = urls.show_group_uploads_path(group, uri_parts[-2], uri_parts[-1])
+ File.join(base_path, file_path)
+ else
+ File.join(base_path, project.full_path, uri)
+ end
end
def project
context[:project]
end
+ def group
+ context[:group]
+ end
+
# Ensure that a :project key exists in context
#
# Note that while the key might exist, its value could be nil!
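
A sketch of the URI split the group branch relies on, using a made-up secret and filename; show_group_uploads_path is the route helper named above:

    uri       = '/uploads/1234567890abcdef1234567890abcdef/screenshot.png'
    uri_parts = uri.split(File::SEPARATOR)
    # => ["", "uploads", "1234567890abcdef1234567890abcdef", "screenshot.png"]

    secret, filename = uri_parts[-2], uri_parts[-1]
    # show_group_uploads_path(group, secret, filename) then builds the group upload route
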
diff --git a/lib/banzai/issuable_extractor.rb b/lib/banzai/issuable_extractor.rb
index cbabf9156de..49603d0b363 100644
--- a/lib/banzai/issuable_extractor.rb
+++ b/lib/banzai/issuable_extractor.rb
@@ -28,8 +28,8 @@ module Banzai
issue_parser = Banzai::ReferenceParser::IssueParser.new(project, user)
merge_request_parser = Banzai::ReferenceParser::MergeRequestParser.new(project, user)
- issuables_for_nodes = issue_parser.issues_for_nodes(nodes).merge(
- merge_request_parser.merge_requests_for_nodes(nodes)
+ issuables_for_nodes = issue_parser.records_for_nodes(nodes).merge(
+ merge_request_parser.records_for_nodes(nodes)
)
# The project for the issue/MR might be pending for deletion!
diff --git a/lib/banzai/object_renderer.rb b/lib/banzai/object_renderer.rb
index ecb3affbba5..2691be81623 100644
--- a/lib/banzai/object_renderer.rb
+++ b/lib/banzai/object_renderer.rb
@@ -17,11 +17,11 @@ module Banzai
# project - A Project to use for redacting Markdown.
# user - The user viewing the Markdown/HTML documents, if any.
- # context - A Hash containing extra attributes to use during redaction
+ # redaction_context - A Hash containing extra attributes to use during redaction
def initialize(project, user = nil, redaction_context = {})
@project = project
@user = user
- @redaction_context = redaction_context
+ @redaction_context = base_context.merge(redaction_context)
end
# Renders and redacts an Array of objects.
@@ -73,19 +73,19 @@ module Banzai
# Returns a Banzai context for the given object and attribute.
def context_for(object, attribute)
- base_context.merge(object.banzai_render_context(attribute))
+ @redaction_context.merge(object.banzai_render_context(attribute))
end
def base_context
- @base_context ||= @redaction_context.merge(
+ {
current_user: user,
project: project,
skip_redaction: true
- )
+ }
end
def save_options
- return {} unless base_context[:xhtml]
+ return {} unless @redaction_context[:xhtml]
{ save_with: Nokogiri::XML::Node::SaveOptions::AS_XHTML }
end
diff --git a/lib/banzai/reference_parser/epic_parser.rb b/lib/banzai/reference_parser/epic_parser.rb
new file mode 100644
index 00000000000..08b8a4c9a0f
--- /dev/null
+++ b/lib/banzai/reference_parser/epic_parser.rb
@@ -0,0 +1,12 @@
+module Banzai
+ module ReferenceParser
+ # The actual parser is implemented in the EE mixin
+ class EpicParser < IssuableParser
+ self.reference_type = :epic
+
+ def records_for_nodes(_nodes)
+ {}
+ end
+ end
+ end
+end
diff --git a/lib/banzai/reference_parser/issuable_parser.rb b/lib/banzai/reference_parser/issuable_parser.rb
new file mode 100644
index 00000000000..3953867eb83
--- /dev/null
+++ b/lib/banzai/reference_parser/issuable_parser.rb
@@ -0,0 +1,25 @@
+module Banzai
+ module ReferenceParser
+ class IssuableParser < BaseParser
+ def nodes_visible_to_user(user, nodes)
+ records = records_for_nodes(nodes)
+
+ nodes.select do |node|
+ issuable = records[node]
+
+ issuable && can_read_reference?(user, issuable)
+ end
+ end
+
+ def referenced_by(nodes)
+ records = records_for_nodes(nodes)
+
+ nodes.map { |node| records[node] }.compact.uniq
+ end
+
+ def can_read_reference?(user, issuable)
+ can?(user, "read_#{issuable.class.to_s.underscore}".to_sym, issuable)
+ end
+ end
+ end
+end
diff --git a/lib/banzai/reference_parser/issue_parser.rb b/lib/banzai/reference_parser/issue_parser.rb
index e0a8ca653cb..38d4e3f3e44 100644
--- a/lib/banzai/reference_parser/issue_parser.rb
+++ b/lib/banzai/reference_parser/issue_parser.rb
@@ -1,10 +1,10 @@
module Banzai
module ReferenceParser
- class IssueParser < BaseParser
+ class IssueParser < IssuableParser
self.reference_type = :issue
def nodes_visible_to_user(user, nodes)
- issues = issues_for_nodes(nodes)
+ issues = records_for_nodes(nodes)
readable_issues = Ability
.issues_readable_by_user(issues.values, user).to_set
@@ -14,13 +14,7 @@ module Banzai
end
end
- def referenced_by(nodes)
- issues = issues_for_nodes(nodes)
-
- nodes.map { |node| issues[node] }.compact.uniq
- end
-
- def issues_for_nodes(nodes)
+ def records_for_nodes(nodes)
@issues_for_nodes ||= grouped_objects_for_nodes(
nodes,
Issue.all.includes(
diff --git a/lib/banzai/reference_parser/merge_request_parser.rb b/lib/banzai/reference_parser/merge_request_parser.rb
index 75cbc7fdac4..a370ff5b5b3 100644
--- a/lib/banzai/reference_parser/merge_request_parser.rb
+++ b/lib/banzai/reference_parser/merge_request_parser.rb
@@ -1,25 +1,9 @@
module Banzai
module ReferenceParser
- class MergeRequestParser < BaseParser
+ class MergeRequestParser < IssuableParser
self.reference_type = :merge_request
- def nodes_visible_to_user(user, nodes)
- merge_requests = merge_requests_for_nodes(nodes)
-
- nodes.select do |node|
- merge_request = merge_requests[node]
-
- merge_request && can?(user, :read_merge_request, merge_request.project)
- end
- end
-
- def referenced_by(nodes)
- merge_requests = merge_requests_for_nodes(nodes)
-
- nodes.map { |node| merge_requests[node] }.compact.uniq
- end
-
- def merge_requests_for_nodes(nodes)
+ def records_for_nodes(nodes)
@merge_requests_for_nodes ||= grouped_objects_for_nodes(
nodes,
MergeRequest.includes(
@@ -40,10 +24,6 @@ module Banzai
self.class.data_attribute
)
end
-
- def can_read_reference?(user, ref_project, node)
- can?(user, :read_merge_request, ref_project)
- end
end
end
end
diff --git a/lib/extracts_path.rb b/lib/extracts_path.rb
index 26f699f4c9d..aa9996c7685 100644
--- a/lib/extracts_path.rb
+++ b/lib/extracts_path.rb
@@ -128,7 +128,6 @@ module ExtractsPath
@hex_path = Digest::SHA1.hexdigest(@path)
@logs_path = logs_file_project_ref_path(@project, @ref, @path)
-
rescue RuntimeError, NoMethodError, InvalidPathError
render_404
end
@@ -138,6 +137,11 @@ module ExtractsPath
@tree ||= @repo.tree(@commit.id, @path) # rubocop:disable Gitlab/ModuleWithInstanceVariables
end
+ def lfs_blob_ids
+ blob_ids = tree.blobs.map(&:id)
+ @lfs_blob_ids = Gitlab::Git::Blob.batch_lfs_pointers(@project.repository, blob_ids).map(&:id)
+ end
+
private
# overridden in subclasses, do not remove
diff --git a/lib/feature.rb b/lib/feature.rb
index ac3bc65c0d5..8e9ba5c530a 100644
--- a/lib/feature.rb
+++ b/lib/feature.rb
@@ -1,5 +1,3 @@
-require 'flipper/adapters/active_record'
-
class Feature
# Classes to override flipper table names
class FlipperFeature < Flipper::Adapters::ActiveRecord::Feature
@@ -62,12 +60,7 @@ class Feature
end
def flipper
- @flipper ||= begin
- adapter = Flipper::Adapters::ActiveRecord.new(
- feature_class: FlipperFeature, gate_class: FlipperGate)
-
- Flipper.new(adapter)
- end
+ @flipper ||= Flipper.instance
end
# This method is called from config/initializers/flipper.rb and can be used
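
Feature#flipper now delegates to Flipper.instance, which implies the ActiveRecord adapter is configured once at boot; a hypothetical shape of that initializer (config/initializers/flipper.rb itself is not part of this diff):

    require 'flipper'
    require 'flipper/adapters/active_record'

    Flipper.configure do |config|
      config.default do
        adapter = Flipper::Adapters::ActiveRecord.new(
          feature_class: Feature::FlipperFeature, gate_class: Feature::FlipperGate)

        Flipper.new(adapter)
      end
    end
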
diff --git a/lib/gitlab/background_migration/populate_untracked_uploads.rb b/lib/gitlab/background_migration/populate_untracked_uploads.rb
new file mode 100644
index 00000000000..81e95e5832d
--- /dev/null
+++ b/lib/gitlab/background_migration/populate_untracked_uploads.rb
@@ -0,0 +1,259 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BackgroundMigration
+ # This class processes a batch of rows in `untracked_files_for_uploads` by
+ # adding each file to the `uploads` table if it does not exist.
+ class PopulateUntrackedUploads # rubocop:disable Metrics/ClassLength
+ # This class is responsible for producing the attributes necessary to
+ # track an uploaded file in the `uploads` table.
+ class UntrackedFile < ActiveRecord::Base # rubocop:disable Metrics/ClassLength, Metrics/LineLength
+ self.table_name = 'untracked_files_for_uploads'
+
+ # Ends with /:random_hex/:filename
+ FILE_UPLOADER_PATH = %r{/\h+/[^/]+\z}
+ FULL_PATH_CAPTURE = %r{\A(.+)#{FILE_UPLOADER_PATH}}
+
+ # These regex patterns are tested against a relative path, relative to
+ # the upload directory.
+ # For convenience, if there exists a capture group in the pattern, then
+ # it indicates the model_id.
+ PATH_PATTERNS = [
+ {
+ pattern: %r{\A-/system/appearance/logo/(\d+)/},
+ uploader: 'AttachmentUploader',
+ model_type: 'Appearance'
+ },
+ {
+ pattern: %r{\A-/system/appearance/header_logo/(\d+)/},
+ uploader: 'AttachmentUploader',
+ model_type: 'Appearance'
+ },
+ {
+ pattern: %r{\A-/system/note/attachment/(\d+)/},
+ uploader: 'AttachmentUploader',
+ model_type: 'Note'
+ },
+ {
+ pattern: %r{\A-/system/user/avatar/(\d+)/},
+ uploader: 'AvatarUploader',
+ model_type: 'User'
+ },
+ {
+ pattern: %r{\A-/system/group/avatar/(\d+)/},
+ uploader: 'AvatarUploader',
+ model_type: 'Namespace'
+ },
+ {
+ pattern: %r{\A-/system/project/avatar/(\d+)/},
+ uploader: 'AvatarUploader',
+ model_type: 'Project'
+ },
+ {
+ pattern: FILE_UPLOADER_PATH,
+ uploader: 'FileUploader',
+ model_type: 'Project'
+ }
+ ].freeze
+
+ def to_h
+ @upload_hash ||= {
+ path: upload_path,
+ uploader: uploader,
+ model_type: model_type,
+ model_id: model_id,
+ size: file_size,
+ checksum: checksum
+ }
+ end
+
+ def upload_path
+ # UntrackedFile#path is absolute, but Upload#path depends on uploader
+ @upload_path ||=
+ if uploader == 'FileUploader'
+ # Path relative to project directory in uploads
+ matchd = path_relative_to_upload_dir.match(FILE_UPLOADER_PATH)
+ matchd[0].sub(%r{\A/}, '') # remove leading slash
+ else
+ path
+ end
+ end
+
+ def uploader
+ matching_pattern_map[:uploader]
+ end
+
+ def model_type
+ matching_pattern_map[:model_type]
+ end
+
+ def model_id
+ return @model_id if defined?(@model_id)
+
+ pattern = matching_pattern_map[:pattern]
+ matchd = path_relative_to_upload_dir.match(pattern)
+
+ # If something is captured (matchd[1] is not nil), it is a model_id
+ # Only the FileUploader pattern will not match an ID
+ @model_id = matchd[1] ? matchd[1].to_i : file_uploader_model_id
+ end
+
+ def file_size
+ File.size(absolute_path)
+ end
+
+ def checksum
+ Digest::SHA256.file(absolute_path).hexdigest
+ end
+
+ private
+
+ def matching_pattern_map
+ @matching_pattern_map ||= PATH_PATTERNS.find do |path_pattern_map|
+ path_relative_to_upload_dir.match(path_pattern_map[:pattern])
+ end
+
+ unless @matching_pattern_map
+ raise "Unknown upload path pattern \"#{path}\""
+ end
+
+ @matching_pattern_map
+ end
+
+ def file_uploader_model_id
+ matchd = path_relative_to_upload_dir.match(FULL_PATH_CAPTURE)
+ not_found_msg = <<~MSG
+ Could not capture project full_path from a FileUploader path:
+ "#{path_relative_to_upload_dir}"
+ MSG
+ raise not_found_msg unless matchd
+
+ full_path = matchd[1]
+ project = Project.find_by_full_path(full_path)
+ return nil unless project
+
+ project.id
+ end
+
+ # Not including a leading slash
+ def path_relative_to_upload_dir
+ upload_dir = Gitlab::BackgroundMigration::PrepareUntrackedUploads::RELATIVE_UPLOAD_DIR # rubocop:disable Metrics/LineLength
+ base = %r{\A#{Regexp.escape(upload_dir)}/}
+ @path_relative_to_upload_dir ||= path.sub(base, '')
+ end
+
+ def absolute_path
+ File.join(CarrierWave.root, path)
+ end
+ end
+
+ # This class is used to query the `uploads` table.
+ class Upload < ActiveRecord::Base
+ self.table_name = 'uploads'
+ end
+
+ def perform(start_id, end_id)
+ return unless migrate?
+
+ files = UntrackedFile.where(id: start_id..end_id)
+ processed_files = insert_uploads_if_needed(files)
+ processed_files.delete_all
+
+ drop_temp_table_if_finished
+ end
+
+ private
+
+ def migrate?
+ UntrackedFile.table_exists? && Upload.table_exists?
+ end
+
+ def insert_uploads_if_needed(files)
+ filtered_files, error_files = filter_error_files(files)
+ filtered_files = filter_existing_uploads(filtered_files)
+ filtered_files = filter_deleted_models(filtered_files)
+ insert(filtered_files)
+
+ processed_files = files.where.not(id: error_files.map(&:id))
+ processed_files
+ end
+
+ def filter_error_files(files)
+ files.partition do |file|
+ begin
+ file.to_h
+ true
+ rescue => e
+ msg = <<~MSG
+ Error parsing path "#{file.path}":
+ #{e.message}
+ #{e.backtrace.join("\n ")}
+ MSG
+ Rails.logger.error(msg)
+ false
+ end
+ end
+ end
+
+ def filter_existing_uploads(files)
+ paths = files.map(&:upload_path)
+ existing_paths = Upload.where(path: paths).pluck(:path).to_set
+
+ files.reject do |file|
+ existing_paths.include?(file.upload_path)
+ end
+ end
+
+ # There are files on disk that are not in the uploads table because their
+ # model was deleted, and we don't delete the files on disk.
+ def filter_deleted_models(files)
+ ids = deleted_model_ids(files)
+
+ files.reject do |file|
+ ids[file.model_type].include?(file.model_id)
+ end
+ end
+
+ def deleted_model_ids(files)
+ ids = {
+ 'Appearance' => [],
+ 'Namespace' => [],
+ 'Note' => [],
+ 'Project' => [],
+ 'User' => []
+ }
+
+ # group model IDs by model type
+ files.each do |file|
+ ids[file.model_type] << file.model_id
+ end
+
+ ids.each do |model_type, model_ids|
+ model_class = Object.const_get(model_type)
+ found_ids = model_class.where(id: model_ids.uniq).pluck(:id)
+ deleted_ids = ids[model_type] - found_ids
+ ids[model_type] = deleted_ids
+ end
+
+ ids
+ end
+
+ def insert(files)
+ rows = files.map do |file|
+ file.to_h.merge(created_at: 'NOW()')
+ end
+
+ Gitlab::Database.bulk_insert('uploads',
+ rows,
+ disable_quote: :created_at)
+ end
+
+ def drop_temp_table_if_finished
+ if UntrackedFile.all.empty?
+ UntrackedFile.connection.drop_table(:untracked_files_for_uploads,
+ if_exists: true)
+ end
+ end
+ end
+ end
+end
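
A worked example of the PATH_PATTERNS table above, with a made-up path relative to the upload directory:

    path = '-/system/user/avatar/42/avatar.png'

    match = %r{\A-/system/user/avatar/(\d+)/}.match(path)
    match[1].to_i   # => 42; recorded as model_id with uploader 'AvatarUploader' and
                    #    model_type 'User', whereas the FileUploader pattern has no
                    #    capture group and falls back to file_uploader_model_id
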
diff --git a/lib/gitlab/background_migration/prepare_untracked_uploads.rb b/lib/gitlab/background_migration/prepare_untracked_uploads.rb
new file mode 100644
index 00000000000..476c46341ae
--- /dev/null
+++ b/lib/gitlab/background_migration/prepare_untracked_uploads.rb
@@ -0,0 +1,163 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BackgroundMigration
+ # This class finds all non-hashed uploaded file paths and saves them to a
+ # `untracked_files_for_uploads` table.
+ class PrepareUntrackedUploads # rubocop:disable Metrics/ClassLength
+ # For bulk_queue_background_migration_jobs_by_range
+ include Database::MigrationHelpers
+
+ FIND_BATCH_SIZE = 500
+ RELATIVE_UPLOAD_DIR = "uploads".freeze
+ ABSOLUTE_UPLOAD_DIR = "#{CarrierWave.root}/#{RELATIVE_UPLOAD_DIR}".freeze
+ FOLLOW_UP_MIGRATION = 'PopulateUntrackedUploads'.freeze
+ START_WITH_CARRIERWAVE_ROOT_REGEX = %r{\A#{CarrierWave.root}/}
+ EXCLUDED_HASHED_UPLOADS_PATH = "#{ABSOLUTE_UPLOAD_DIR}/@hashed/*".freeze
+ EXCLUDED_TMP_UPLOADS_PATH = "#{ABSOLUTE_UPLOAD_DIR}/tmp/*".freeze
+
+ # This class is used to iterate over batches of
+ # `untracked_files_for_uploads` rows.
+ class UntrackedFile < ActiveRecord::Base
+ include EachBatch
+
+ self.table_name = 'untracked_files_for_uploads'
+ end
+
+ def perform
+ ensure_temporary_tracking_table_exists
+
+ # Since Postgres < 9.5 does not have ON CONFLICT DO NOTHING, and since
+ # doing inserts-if-not-exists without ON CONFLICT DO NOTHING would be
+ # slow, start with an empty table for Postgres < 9.5.
+ # That way we can do bulk inserts at ~30x the speed of individual
+ # inserts (~20 minutes worth of inserts at GitLab.com scale instead of
+ # ~10 hours).
+ # In all other cases, installations will get both bulk inserts and the
+ # ability for these jobs to retry without having to clear and reinsert.
+ clear_untracked_file_paths unless can_bulk_insert_and_ignore_duplicates?
+
+ store_untracked_file_paths
+
+ schedule_populate_untracked_uploads_jobs
+ end
+
+ private
+
+ def ensure_temporary_tracking_table_exists
+ table_name = :untracked_files_for_uploads
+ unless UntrackedFile.connection.table_exists?(table_name)
+ UntrackedFile.connection.create_table table_name do |t|
+ t.string :path, limit: 600, null: false
+ t.index :path, unique: true
+ end
+ end
+ end
+
+ def clear_untracked_file_paths
+ UntrackedFile.delete_all
+ end
+
+ def store_untracked_file_paths
+ return unless Dir.exist?(ABSOLUTE_UPLOAD_DIR)
+
+ each_file_batch(ABSOLUTE_UPLOAD_DIR, FIND_BATCH_SIZE) do |file_paths|
+ insert_file_paths(file_paths)
+ end
+ end
+
+ def each_file_batch(search_dir, batch_size, &block)
+ cmd = build_find_command(search_dir)
+
+ Open3.popen2(*cmd) do |stdin, stdout, status_thread|
+ yield_paths_in_batches(stdout, batch_size, &block)
+
+ raise "Find command failed" unless status_thread.value.success?
+ end
+ end
+
+ def yield_paths_in_batches(stdout, batch_size, &block)
+ paths = []
+
+ stdout.each_line("\0") do |line|
+ paths << line.chomp("\0").sub(START_WITH_CARRIERWAVE_ROOT_REGEX, '')
+
+ if paths.size >= batch_size
+ yield(paths)
+ paths = []
+ end
+ end
+
+ yield(paths)
+ end
+
+ def build_find_command(search_dir)
+ cmd = %W[find -L #{search_dir}
+ -type f
+ ! ( -path #{EXCLUDED_HASHED_UPLOADS_PATH} -prune )
+ ! ( -path #{EXCLUDED_TMP_UPLOADS_PATH} -prune )
+ -print0]
+
+ ionice = which_ionice
+ cmd = %W[#{ionice} -c Idle] + cmd if ionice
+
+ log_msg = "PrepareUntrackedUploads find command: \"#{cmd.join(' ')}\""
+ Rails.logger.info log_msg
+
+ cmd
+ end
+
+ def which_ionice
+ Gitlab::Utils.which('ionice')
+ rescue StandardError
+ # In this case, returning false is relatively safe,
+ # even though it isn't very nice
+ false
+ end
+
+ def insert_file_paths(file_paths)
+ sql = insert_sql(file_paths)
+
+ ActiveRecord::Base.connection.execute(sql)
+ end
+
+ def insert_sql(file_paths)
+ if postgresql_pre_9_5?
+ "INSERT INTO #{table_columns_and_values_for_insert(file_paths)};"
+ elsif postgresql?
+ "INSERT INTO #{table_columns_and_values_for_insert(file_paths)}"\
+ " ON CONFLICT DO NOTHING;"
+ else # MySQL
+ "INSERT IGNORE INTO"\
+ " #{table_columns_and_values_for_insert(file_paths)};"
+ end
+ end
+
+ def table_columns_and_values_for_insert(file_paths)
+ values = file_paths.map do |file_path|
+ ActiveRecord::Base.send(:sanitize_sql_array, ['(?)', file_path]) # rubocop:disable GitlabSecurity/PublicSend, Metrics/LineLength
+ end.join(', ')
+
+ "#{UntrackedFile.table_name} (path) VALUES #{values}"
+ end
+
+ def postgresql?
+ @postgresql ||= Gitlab::Database.postgresql?
+ end
+
+ def can_bulk_insert_and_ignore_duplicates?
+ !postgresql_pre_9_5?
+ end
+
+ def postgresql_pre_9_5?
+ @postgresql_pre_9_5 ||= postgresql? &&
+ Gitlab::Database.version.to_f < 9.5
+ end
+
+ def schedule_populate_untracked_uploads_jobs
+ bulk_queue_background_migration_jobs_by_range(
+ UntrackedFile, FOLLOW_UP_MIGRATION)
+ end
+ end
+ end
+end
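
The interesting part of PrepareUntrackedUploads is how it streams NUL-delimited `find` output in fixed-size batches instead of loading every path into memory. A standalone sketch of that pattern, with an arbitrary directory and batch size (the path exclusions and ionice wrapping from the migration are omitted):

    require 'open3'

    def each_path_batch(search_dir, batch_size: 500)
      cmd = %W[find -L #{search_dir} -type f -print0]

      Open3.popen2(*cmd) do |_stdin, stdout, wait_thr|
        batch = []

        # `find -print0` separates paths with NUL bytes, so newlines in
        # file names cannot split a path in two.
        stdout.each_line("\0") do |line|
          batch << line.chomp("\0")

          if batch.size >= batch_size
            yield batch
            batch = []
          end
        end

        yield batch unless batch.empty?

        raise 'find command failed' unless wait_thr.value.success?
      end
    end

    each_path_batch('/tmp', batch_size: 100) { |paths| puts paths.size }
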
diff --git a/lib/gitlab/bare_repository_import/importer.rb b/lib/gitlab/bare_repository_import/importer.rb
index 196de667805..298409d8b5a 100644
--- a/lib/gitlab/bare_repository_import/importer.rb
+++ b/lib/gitlab/bare_repository_import/importer.rb
@@ -55,6 +55,7 @@ module Gitlab
name: project_name,
path: project_name,
skip_disk_validation: true,
+ import_type: 'gitlab_project',
namespace_id: group&.id).execute
if project.persisted? && mv_repo(project)
diff --git a/lib/gitlab/bare_repository_import/repository.rb b/lib/gitlab/bare_repository_import/repository.rb
index 8574ac6eb30..fa7891c8dcc 100644
--- a/lib/gitlab/bare_repository_import/repository.rb
+++ b/lib/gitlab/bare_repository_import/repository.rb
@@ -7,6 +7,8 @@ module Gitlab
@root_path = root_path
@repo_path = repo_path
+ @root_path << '/' unless root_path.ends_with?('/')
+
# Split path into 'all/the/namespaces' and 'project_name'
@group_path, _, @project_name = repo_relative_path.rpartition('/')
end
diff --git a/lib/gitlab/checks/project_moved.rb b/lib/gitlab/checks/project_moved.rb
new file mode 100644
index 00000000000..3a1c0a3455e
--- /dev/null
+++ b/lib/gitlab/checks/project_moved.rb
@@ -0,0 +1,65 @@
+module Gitlab
+ module Checks
+ class ProjectMoved
+ REDIRECT_NAMESPACE = "redirect_namespace".freeze
+
+ def initialize(project, user, redirected_path, protocol)
+ @project = project
+ @user = user
+ @redirected_path = redirected_path
+ @protocol = protocol
+ end
+
+ def self.fetch_redirect_message(user_id, project_id)
+ redirect_key = redirect_message_key(user_id, project_id)
+
+ Gitlab::Redis::SharedState.with do |redis|
+ message = redis.get(redirect_key)
+ redis.del(redirect_key)
+ message
+ end
+ end
+
+ def add_redirect_message
+ Gitlab::Redis::SharedState.with do |redis|
+ key = self.class.redirect_message_key(user.id, project.id)
+ redis.setex(key, 5.minutes, redirect_message)
+ end
+ end
+
+ def redirect_message(rejected: false)
+ <<~MESSAGE.strip_heredoc
+ Project '#{redirected_path}' was moved to '#{project.full_path}'.
+
+ Please update your Git remote:
+
+ #{remote_url_message(rejected)}
+ MESSAGE
+ end
+
+ def permanent_redirect?
+ RedirectRoute.permanent.exists?(path: redirected_path)
+ end
+
+ private
+
+ attr_reader :project, :redirected_path, :protocol, :user
+
+ def self.redirect_message_key(user_id, project_id)
+ "#{REDIRECT_NAMESPACE}:#{user_id}:#{project_id}"
+ end
+
+ def remote_url_message(rejected)
+ if rejected
+ "git remote set-url origin #{url} and try again."
+ else
+ "git remote set-url origin #{url}"
+ end
+ end
+
+ def url
+ protocol == 'ssh' ? project.ssh_url_to_repo : project.http_url_to_repo
+ end
+ end
+ end
+end
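
ProjectMoved stores the redirect notice as a one-shot message in Redis: written with a five-minute TTL, then read and deleted on the next fetch. A sketch of that pattern using redis-rb directly (Gitlab::Redis::SharedState is swapped out so the example stands alone; the IDs and message are made up):

    require 'redis'

    REDIRECT_NAMESPACE = 'redirect_namespace'.freeze

    def redirect_key(user_id, project_id)
      "#{REDIRECT_NAMESPACE}:#{user_id}:#{project_id}"
    end

    redis = Redis.new

    # Store the message with a 5 minute TTL, as add_redirect_message does.
    redis.setex(redirect_key(42, 7), 5 * 60,
                "Project 'old/path' was moved to 'new/path'.")

    # Fetch-and-delete, as fetch_redirect_message does: shown at most once.
    key = redirect_key(42, 7)
    message = redis.get(key)
    redis.del(key)
    puts message
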
diff --git a/lib/gitlab/ci/pipeline/chain/base.rb b/lib/gitlab/ci/pipeline/chain/base.rb
index 8d82e1b288d..efed19da21c 100644
--- a/lib/gitlab/ci/pipeline/chain/base.rb
+++ b/lib/gitlab/ci/pipeline/chain/base.rb
@@ -3,14 +3,13 @@ module Gitlab
module Pipeline
module Chain
class Base
- attr_reader :pipeline, :project, :current_user
+ attr_reader :pipeline, :command
+
+ delegate :project, :current_user, to: :command
def initialize(pipeline, command)
@pipeline = pipeline
@command = command
-
- @project = command.project
- @current_user = command.current_user
end
def perform!
diff --git a/lib/gitlab/ci/pipeline/chain/build.rb b/lib/gitlab/ci/pipeline/chain/build.rb
new file mode 100644
index 00000000000..70732d26bbd
--- /dev/null
+++ b/lib/gitlab/ci/pipeline/chain/build.rb
@@ -0,0 +1,30 @@
+module Gitlab
+ module Ci
+ module Pipeline
+ module Chain
+ class Build < Chain::Base
+ def perform!
+ @pipeline.assign_attributes(
+ source: @command.source,
+ project: @command.project,
+ ref: @command.ref,
+ sha: @command.sha,
+ before_sha: @command.before_sha,
+ tag: @command.tag_exists?,
+ trigger_requests: Array(@command.trigger_request),
+ user: @command.current_user,
+ pipeline_schedule: @command.schedule,
+ protected: @command.protected_ref?
+ )
+
+ @pipeline.set_config_source
+ end
+
+ def break?
+ false
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/ci/pipeline/chain/command.rb b/lib/gitlab/ci/pipeline/chain/command.rb
new file mode 100644
index 00000000000..7b19b10e05b
--- /dev/null
+++ b/lib/gitlab/ci/pipeline/chain/command.rb
@@ -0,0 +1,61 @@
+module Gitlab
+ module Ci
+ module Pipeline
+ module Chain
+ Command = Struct.new(
+ :source, :project, :current_user,
+ :origin_ref, :checkout_sha, :after_sha, :before_sha,
+ :trigger_request, :schedule,
+ :ignore_skip_ci, :save_incompleted,
+ :seeds_block
+ ) do
+ include Gitlab::Utils::StrongMemoize
+
+ def initialize(**params)
+ params.each do |key, value|
+ self[key] = value
+ end
+ end
+
+ def branch_exists?
+ strong_memoize(:is_branch) do
+ project.repository.branch_exists?(ref)
+ end
+ end
+
+ def tag_exists?
+ strong_memoize(:is_tag) do
+ project.repository.tag_exists?(ref)
+ end
+ end
+
+ def ref
+ strong_memoize(:ref) do
+ Gitlab::Git.ref_name(origin_ref)
+ end
+ end
+
+ def sha
+ strong_memoize(:sha) do
+ project.commit(origin_sha || origin_ref).try(:id)
+ end
+ end
+
+ def origin_sha
+ checkout_sha || after_sha
+ end
+
+ def before_sha
+ self[:before_sha] || checkout_sha || Gitlab::Git::BLANK_SHA
+ end
+
+ def protected_ref?
+ strong_memoize(:protected_ref) do
+ project.protected_for?(ref)
+ end
+ end
+ end
+ end
+ end
+ end
+end
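
Command is a Struct that accepts keyword arguments and memoizes values derived from them. A plain-Ruby sketch of the same shape, with `||=` memoization standing in for strong_memoize and a simplified ref-name helper standing in for Gitlab::Git.ref_name:

    Command = Struct.new(:origin_ref, :checkout_sha, :after_sha, :before_sha) do
      def initialize(**params)
        params.each { |key, value| self[key] = value }
      end

      def ref
        # Simplified stand-in for Gitlab::Git.ref_name.
        @ref ||= origin_ref.to_s.sub(%r{\Arefs/(heads|tags)/}, '')
      end

      def origin_sha
        checkout_sha || after_sha
      end

      def before_sha
        self[:before_sha] || checkout_sha || '0' * 40 # blank SHA stand-in
      end
    end

    cmd = Command.new(origin_ref: 'refs/heads/feature', checkout_sha: 'abc123')
    puts cmd.ref        # => "feature"
    puts cmd.origin_sha # => "abc123"
    puts cmd.before_sha # => "abc123"
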
diff --git a/lib/gitlab/ci/pipeline/chain/create.rb b/lib/gitlab/ci/pipeline/chain/create.rb
index d5e17a123df..d19a2519803 100644
--- a/lib/gitlab/ci/pipeline/chain/create.rb
+++ b/lib/gitlab/ci/pipeline/chain/create.rb
@@ -17,11 +17,27 @@ module Gitlab
end
rescue ActiveRecord::RecordInvalid => e
error("Failed to persist the pipeline: #{e}")
+ ensure
+ if pipeline.builds.where(stage_id: nil).any?
+ invalid_builds_counter.increment(node: hostname)
+ end
end
def break?
!pipeline.persisted?
end
+
+ private
+
+ def invalid_builds_counter
+ @counter ||= Gitlab::Metrics
+ .counter(:gitlab_ci_invalid_builds_total,
+ 'Invalid builds without stage assigned counter')
+ end
+
+ def hostname
+ @hostname ||= Socket.gethostname
+ end
end
end
end
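
The `ensure` added to Chain::Create runs after both the success path and the rescue path, so builds without a stage are counted even when persisting the pipeline fails. A self-contained sketch of that flow, with a plain Hash standing in for the Prometheus counter behind Gitlab::Metrics and made-up build hashes:

    require 'socket'

    INVALID_BUILDS = Hash.new(0)

    def persist_pipeline(builds)
      # Hypothetical failure in place of ActiveRecord::RecordInvalid.
      raise 'record invalid' if builds.any? { |b| b[:name].nil? }

      puts 'pipeline persisted'
    rescue => e
      puts "Failed to persist the pipeline: #{e}"
    ensure
      # Runs on both paths, like the ensure block above.
      invalid = builds.count { |b| b[:stage_id].nil? }
      INVALID_BUILDS[Socket.gethostname] += invalid if invalid > 0
    end

    persist_pipeline([{ name: 'rspec', stage_id: nil },
                      { name: nil, stage_id: 3 }])
    p INVALID_BUILDS
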
diff --git a/lib/gitlab/ci/pipeline/chain/helpers.rb b/lib/gitlab/ci/pipeline/chain/helpers.rb
index 7ab7a64c7e3..bf1380a1da9 100644
--- a/lib/gitlab/ci/pipeline/chain/helpers.rb
+++ b/lib/gitlab/ci/pipeline/chain/helpers.rb
@@ -3,20 +3,6 @@ module Gitlab
module Pipeline
module Chain
module Helpers
- include Gitlab::Utils::StrongMemoize
-
- def branch_exists?
- strong_memoize(:is_branch) do
- project.repository.branch_exists?(pipeline.ref)
- end
- end
-
- def tag_exists?
- strong_memoize(:is_tag) do
- project.repository.tag_exists?(pipeline.ref)
- end
- end
-
def error(message)
pipeline.errors.add(:base, message)
end
diff --git a/lib/gitlab/ci/pipeline/chain/sequence.rb b/lib/gitlab/ci/pipeline/chain/sequence.rb
index 015f2988327..e24630656d3 100644
--- a/lib/gitlab/ci/pipeline/chain/sequence.rb
+++ b/lib/gitlab/ci/pipeline/chain/sequence.rb
@@ -5,20 +5,19 @@ module Gitlab
class Sequence
def initialize(pipeline, command, sequence)
@pipeline = pipeline
+ @command = command
+ @sequence = sequence
@completed = []
-
- @sequence = sequence.map do |chain|
- chain.new(pipeline, command)
- end
end
def build!
- @sequence.each do |step|
- step.perform!
+ @sequence.each do |chain|
+ step = chain.new(@pipeline, @command)
+ step.perform!
break if step.break?
- @completed << step
+ @completed.push(step)
end
@pipeline.tap do
diff --git a/lib/gitlab/ci/pipeline/chain/validate/abilities.rb b/lib/gitlab/ci/pipeline/chain/validate/abilities.rb
index 4913a604079..13c6fedd831 100644
--- a/lib/gitlab/ci/pipeline/chain/validate/abilities.rb
+++ b/lib/gitlab/ci/pipeline/chain/validate/abilities.rb
@@ -14,7 +14,7 @@ module Gitlab
unless allowed_to_trigger_pipeline?
if can?(current_user, :create_pipeline, project)
- return error("Insufficient permissions for protected ref '#{pipeline.ref}'")
+ return error("Insufficient permissions for protected ref '#{command.ref}'")
else
return error('Insufficient permissions to create a new pipeline')
end
@@ -29,7 +29,7 @@ module Gitlab
if current_user
allowed_to_create?
else # legacy triggers don't have a corresponding user
- !project.protected_for?(@pipeline.ref)
+ !@command.protected_ref?
end
end
@@ -38,10 +38,10 @@ module Gitlab
access = Gitlab::UserAccess.new(current_user, project: project)
- if branch_exists?
- access.can_update_branch?(@pipeline.ref)
- elsif tag_exists?
- access.can_create_tag?(@pipeline.ref)
+ if @command.branch_exists?
+ access.can_update_branch?(@command.ref)
+ elsif @command.tag_exists?
+ access.can_create_tag?(@command.ref)
else
true # Allow it for now and we'll reject when we check ref existence
end
diff --git a/lib/gitlab/ci/pipeline/chain/validate/repository.rb b/lib/gitlab/ci/pipeline/chain/validate/repository.rb
index 70a4cfdbdea..9699c24e5b6 100644
--- a/lib/gitlab/ci/pipeline/chain/validate/repository.rb
+++ b/lib/gitlab/ci/pipeline/chain/validate/repository.rb
@@ -7,14 +7,11 @@ module Gitlab
include Chain::Helpers
def perform!
- unless branch_exists? || tag_exists?
+ unless @command.branch_exists? || @command.tag_exists?
return error('Reference not found')
end
- ## TODO, we check commit in the service, that is why
- # there is no repository access here.
- #
- unless pipeline.sha
+ unless @command.sha
return error('Commit not found')
end
end
diff --git a/lib/gitlab/conflict/file_collection.rb b/lib/gitlab/conflict/file_collection.rb
index fb28e80ff73..b9099ce256a 100644
--- a/lib/gitlab/conflict/file_collection.rb
+++ b/lib/gitlab/conflict/file_collection.rb
@@ -19,6 +19,8 @@ module Gitlab
commit_message: commit_message || default_commit_message
}
resolver.resolve_conflicts(user, files, args)
+ ensure
+ @merge_request.clear_memoized_shas
end
def files
diff --git a/lib/gitlab/database.rb b/lib/gitlab/database.rb
index cd7b4c043da..e51794fef99 100644
--- a/lib/gitlab/database.rb
+++ b/lib/gitlab/database.rb
@@ -50,6 +50,10 @@ module Gitlab
postgresql? && version.to_f >= 9.3
end
+ def self.replication_slots_supported?
+ postgresql? && version.to_f >= 9.4
+ end
+
def self.nulls_last_order(field, direction = 'ASC')
order = "#{field} #{direction}"
@@ -116,15 +120,21 @@ module Gitlab
# values.
# return_ids - When set to true the return value will be an Array of IDs of
# the inserted rows, this only works on PostgreSQL.
- def self.bulk_insert(table, rows, return_ids: false)
+    # disable_quote - A key or an Array of keys to exclude from quoting (you
+    #                 become responsible for protecting these keys from SQL
+    #                 injection!)
+ def self.bulk_insert(table, rows, return_ids: false, disable_quote: [])
return if rows.empty?
keys = rows.first.keys
columns = keys.map { |key| connection.quote_column_name(key) }
return_ids = false if mysql?
+ disable_quote = Array(disable_quote).to_set
tuples = rows.map do |row|
- row.values_at(*keys).map { |value| connection.quote(value) }
+ keys.map do |k|
+ disable_quote.include?(k) ? row[k] : connection.quote(row[k])
+ end
end
sql = <<-EOF
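
The new disable_quote option leaves the listed keys unquoted, which is what lets the uploads migration above pass the SQL expression NOW() for created_at. A small sketch of the quoting decision itself, with a trivial quote method standing in for connection.quote:

    require 'set'

    def quote(value)
      value.is_a?(String) ? "'#{value.gsub("'", "''")}'" : value.to_s
    end

    def tuples_for(rows, disable_quote: [])
      disable = Array(disable_quote).to_set
      keys = rows.first.keys

      rows.map do |row|
        # Unquoted keys are interpolated verbatim; the caller takes on the
        # SQL-injection responsibility for exactly those keys.
        keys.map { |k| disable.include?(k) ? row[k] : quote(row[k]) }
      end
    end

    rows = [{ path: 'uploads/avatar.png', created_at: 'NOW()' }]
    p tuples_for(rows, disable_quote: :created_at)
    # => [["'uploads/avatar.png'", "NOW()"]]
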
diff --git a/lib/gitlab/database/migration_helpers.rb b/lib/gitlab/database/migration_helpers.rb
index c276c3566b4..3f65bc912de 100644
--- a/lib/gitlab/database/migration_helpers.rb
+++ b/lib/gitlab/database/migration_helpers.rb
@@ -703,14 +703,14 @@ into similar problems in the future (e.g. when new tables are created).
# We push multiple jobs at a time to reduce the time spent in
# Sidekiq/Redis operations. We're using this buffer based approach so we
# don't need to run additional queries for every range.
- BackgroundMigrationWorker.perform_bulk(jobs)
+ BackgroundMigrationWorker.bulk_perform_async(jobs)
jobs.clear
end
jobs << [job_class_name, [start_id, end_id]]
end
- BackgroundMigrationWorker.perform_bulk(jobs) unless jobs.empty?
+ BackgroundMigrationWorker.bulk_perform_async(jobs) unless jobs.empty?
end
# Queues background migration jobs for an entire table, batched by ID range.
diff --git a/lib/gitlab/diff/diff_refs.rb b/lib/gitlab/diff/diff_refs.rb
index c98eefbce25..88e0db830f6 100644
--- a/lib/gitlab/diff/diff_refs.rb
+++ b/lib/gitlab/diff/diff_refs.rb
@@ -13,9 +13,9 @@ module Gitlab
def ==(other)
other.is_a?(self.class) &&
- shas_equal?(base_sha, other.base_sha) &&
- shas_equal?(start_sha, other.start_sha) &&
- shas_equal?(head_sha, other.head_sha)
+ Git.shas_eql?(base_sha, other.base_sha) &&
+ Git.shas_eql?(start_sha, other.start_sha) &&
+ Git.shas_eql?(head_sha, other.head_sha)
end
alias_method :eql?, :==
@@ -47,22 +47,6 @@ module Gitlab
CompareService.new(project, head_sha).execute(project, start_sha, straight: straight)
end
end
-
- private
-
- def shas_equal?(sha1, sha2)
- return true if sha1 == sha2
- return false if sha1.nil? || sha2.nil?
- return false unless sha1.class == sha2.class
-
- length = [sha1.length, sha2.length].min
-
- # If either of the shas is below the minimum length, we cannot be sure
- # that they actually refer to the same commit because of hash collision.
- return false if length < Commit::MIN_SHA_LENGTH
-
- sha1[0, length] == sha2[0, length]
- end
end
end
end
diff --git a/lib/gitlab/diff/inline_diff.rb b/lib/gitlab/diff/inline_diff.rb
index 2d7b57120a6..54783a07919 100644
--- a/lib/gitlab/diff/inline_diff.rb
+++ b/lib/gitlab/diff/inline_diff.rb
@@ -70,7 +70,7 @@ module Gitlab
def find_changed_line_pairs(lines)
# Prefixes of all diff lines, indicating their types
# For example: `" - + -+ ---+++ --+ -++"`
- line_prefixes = lines.each_with_object("") { |line, s| s << line[0] }.gsub(/[^ +-]/, ' ')
+ line_prefixes = lines.each_with_object("") { |line, s| s << (line[0] || ' ') }.gsub(/[^ +-]/, ' ')
changed_line_pairs = []
line_prefixes.scan(LINE_PAIRS_PATTERN) do
diff --git a/lib/gitlab/ee_compat_check.rb b/lib/gitlab/ee_compat_check.rb
index 4a9d3e52fae..37face8e7d0 100644
--- a/lib/gitlab/ee_compat_check.rb
+++ b/lib/gitlab/ee_compat_check.rb
@@ -280,7 +280,7 @@ module Gitlab
The `#{branch}` branch applies cleanly to EE/master!
Much ❤️! For more information, see
- https://docs.gitlab.com/ce/development/limit_ee_conflicts.html#check-the-rake-ee_compat_check-in-your-merge-requests
+ https://docs.gitlab.com/ce/development/automatic_ce_ee_merge.html
#{THANKS_FOR_READING_BANNER}
}
end
@@ -357,7 +357,7 @@ module Gitlab
Once this is done, you can retry this failed build, and it should pass.
Stay 💪 ! For more information, see
- https://docs.gitlab.com/ce/development/limit_ee_conflicts.html#check-the-rake-ee_compat_check-in-your-merge-requests
+ https://docs.gitlab.com/ce/development/automatic_ce_ee_merge.html
#{THANKS_FOR_READING_BANNER}
}
end
@@ -378,7 +378,7 @@ module Gitlab
retry this build.
Stay 💪 ! For more information, see
- https://docs.gitlab.com/ce/development/limit_ee_conflicts.html#check-the-rake-ee_compat_check-in-your-merge-requests
+ https://docs.gitlab.com/ce/development/automatic_ce_ee_merge.html
#{THANKS_FOR_READING_BANNER}
}
end
diff --git a/lib/gitlab/email/handler.rb b/lib/gitlab/email/handler.rb
index b07c68d1498..e08b5be8984 100644
--- a/lib/gitlab/email/handler.rb
+++ b/lib/gitlab/email/handler.rb
@@ -1,3 +1,4 @@
+require 'gitlab/email/handler/create_merge_request_handler'
require 'gitlab/email/handler/create_note_handler'
require 'gitlab/email/handler/create_issue_handler'
require 'gitlab/email/handler/unsubscribe_handler'
@@ -8,6 +9,7 @@ module Gitlab
HANDLERS = [
UnsubscribeHandler,
CreateNoteHandler,
+ CreateMergeRequestHandler,
CreateIssueHandler
].freeze
diff --git a/lib/gitlab/email/handler/create_merge_request_handler.rb b/lib/gitlab/email/handler/create_merge_request_handler.rb
new file mode 100644
index 00000000000..e2f7c1d0257
--- /dev/null
+++ b/lib/gitlab/email/handler/create_merge_request_handler.rb
@@ -0,0 +1,69 @@
+require 'gitlab/email/handler/base_handler'
+require 'gitlab/email/handler/reply_processing'
+
+module Gitlab
+ module Email
+ module Handler
+ class CreateMergeRequestHandler < BaseHandler
+ include ReplyProcessing
+ attr_reader :project_path, :incoming_email_token
+
+ def initialize(mail, mail_key)
+ super(mail, mail_key)
+ if m = /\A([^\+]*)\+merge-request\+(.*)/.match(mail_key.to_s)
+ @project_path, @incoming_email_token = m.captures
+ end
+ end
+
+ def can_handle?
+ @project_path && @incoming_email_token
+ end
+
+ def execute
+ raise ProjectNotFound unless project
+
+ validate_permission!(:create_merge_request)
+
+ verify_record!(
+ record: create_merge_request,
+ invalid_exception: InvalidMergeRequestError,
+ record_name: 'merge_request')
+ end
+
+ def author
+ @author ||= User.find_by(incoming_email_token: incoming_email_token)
+ end
+
+ def project
+ @project ||= Project.find_by_full_path(project_path)
+ end
+
+ def metrics_params
+ super.merge(project: project&.full_path)
+ end
+
+ private
+
+ def create_merge_request
+ merge_request = MergeRequests::BuildService.new(project, author, merge_request_params).execute
+
+ if merge_request.errors.any?
+ merge_request
+ else
+ MergeRequests::CreateService.new(project, author).create(merge_request)
+ end
+ end
+
+ def merge_request_params
+ params = {
+ source_project_id: project.id,
+ source_branch: mail.subject,
+ target_project_id: project.id
+ }
+ params[:description] = message if message.present?
+ params
+ end
+ end
+ end
+ end
+end
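
CreateMergeRequestHandler routes mail by the address key: the part before `+merge-request+` is the project path and the part after it is the author's incoming email token. A standalone sketch of that parsing, with made-up example addresses:

    MERGE_REQUEST_KEY = /\A([^\+]*)\+merge-request\+(.*)/

    def parse_mail_key(mail_key)
      match = MERGE_REQUEST_KEY.match(mail_key.to_s)
      return unless match

      project_path, incoming_email_token = match.captures
      { project_path: project_path, incoming_email_token: incoming_email_token }
    end

    p parse_mail_key('gitlab-org/gitlab-ce+merge-request+secrettoken')
    # => {:project_path=>"gitlab-org/gitlab-ce", :incoming_email_token=>"secrettoken"}
    p parse_mail_key('some-other-address') # => nil
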
diff --git a/lib/gitlab/email/receiver.rb b/lib/gitlab/email/receiver.rb
index c8f4591d060..d8c594ad0e7 100644
--- a/lib/gitlab/email/receiver.rb
+++ b/lib/gitlab/email/receiver.rb
@@ -13,8 +13,10 @@ module Gitlab
UserBlockedError = Class.new(ProcessingError)
UserNotAuthorizedError = Class.new(ProcessingError)
NoteableNotFoundError = Class.new(ProcessingError)
- InvalidNoteError = Class.new(ProcessingError)
- InvalidIssueError = Class.new(ProcessingError)
+ InvalidRecordError = Class.new(ProcessingError)
+ InvalidNoteError = Class.new(InvalidRecordError)
+ InvalidIssueError = Class.new(InvalidRecordError)
+ InvalidMergeRequestError = Class.new(InvalidRecordError)
UnknownIncomingEmail = Class.new(ProcessingError)
class Receiver
diff --git a/lib/gitlab/git.rb b/lib/gitlab/git.rb
index 1f31cdbc96d..1f7c35cafaa 100644
--- a/lib/gitlab/git.rb
+++ b/lib/gitlab/git.rb
@@ -70,6 +70,18 @@ module Gitlab
def diff_line_code(file_path, new_line_position, old_line_position)
"#{Digest::SHA1.hexdigest(file_path)}_#{old_line_position}_#{new_line_position}"
end
+
+ def shas_eql?(sha1, sha2)
+ return false if sha1.nil? || sha2.nil?
+ return false unless sha1.class == sha2.class
+
+      # If either of the shas is below the minimum length, we cannot be sure
+      # that they actually refer to the same commit, because of possible hash collisions.
+ length = [sha1.length, sha2.length].min
+ return false if length < Gitlab::Git::Commit::MIN_SHA_LENGTH
+
+ sha1[0, length] == sha2[0, length]
+ end
end
end
end
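
Gitlab::Git.shas_eql? treats two SHAs as equal only when the shorter one is a prefix of the other and both are at least MIN_SHA_LENGTH characters, so a short prefix can never match by accident. A plain-Ruby sketch of that comparison with example values:

    MIN_SHA_LENGTH = 7

    def shas_eql?(sha1, sha2)
      return false if sha1.nil? || sha2.nil?
      return false unless sha1.class == sha2.class

      # Refuse to compare prefixes shorter than the minimum SHA length.
      length = [sha1.length, sha2.length].min
      return false if length < MIN_SHA_LENGTH

      sha1[0, length] == sha2[0, length]
    end

    puts shas_eql?('1a2b3c4d5e6f', '1a2b3c4') # => true  (7-char prefix match)
    puts shas_eql?('1a2b3c', '1a2b3c')        # => false (below the minimum length)
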
diff --git a/lib/gitlab/git/blob.rb b/lib/gitlab/git/blob.rb
index ddd52136bc4..228d97a87ab 100644
--- a/lib/gitlab/git/blob.rb
+++ b/lib/gitlab/git/blob.rb
@@ -49,6 +49,7 @@ module Gitlab
# Keep in mind that this method may allocate a lot of memory. It is up
# to the caller to limit the number of blobs and blob_size_limit.
#
+ # Gitaly migration issue: https://gitlab.com/gitlab-org/gitaly/issues/798
def batch(repository, blob_references, blob_size_limit: nil)
blob_size_limit ||= MAX_DATA_DISPLAY_SIZE
blob_references.map do |sha, path|
diff --git a/lib/gitlab/git/commit.rb b/lib/gitlab/git/commit.rb
index d5518814483..e90b158fb34 100644
--- a/lib/gitlab/git/commit.rb
+++ b/lib/gitlab/git/commit.rb
@@ -6,6 +6,7 @@ module Gitlab
attr_accessor :raw_commit, :head
+ MIN_SHA_LENGTH = 7
SERIALIZE_KEYS = [
:id, :message, :parent_ids,
:authored_date, :author_name, :author_email,
@@ -213,11 +214,17 @@ module Gitlab
end
def shas_with_signatures(repository, shas)
- shas.select do |sha|
- begin
- Rugged::Commit.extract_signature(repository.rugged, sha)
- rescue Rugged::OdbError
- false
+ GitalyClient.migrate(:filter_shas_with_signatures) do |is_enabled|
+ if is_enabled
+ Gitlab::GitalyClient::CommitService.new(repository).filter_shas_with_signatures(shas)
+ else
+ shas.select do |sha|
+ begin
+ Rugged::Commit.extract_signature(repository.rugged, sha)
+ rescue Rugged::OdbError
+ false
+ end
+ end
end
end
end
@@ -418,6 +425,20 @@ module Gitlab
parent_ids.size > 1
end
+ def to_gitaly_commit
+ return raw_commit if raw_commit.is_a?(Gitaly::GitCommit)
+
+ message_split = raw_commit.message.split("\n", 2)
+ Gitaly::GitCommit.new(
+ id: raw_commit.oid,
+ subject: message_split[0] ? message_split[0].chomp.b : "",
+ body: raw_commit.message.b,
+ parent_ids: raw_commit.parent_ids,
+ author: gitaly_commit_author_from_rugged(raw_commit.author),
+ committer: gitaly_commit_author_from_rugged(raw_commit.committer)
+ )
+ end
+
private
def init_from_hash(hash)
@@ -463,6 +484,14 @@ module Gitlab
def serialize_keys
SERIALIZE_KEYS
end
+
+ def gitaly_commit_author_from_rugged(author_or_committer)
+ Gitaly::CommitAuthor.new(
+ name: author_or_committer[:name].b,
+ email: author_or_committer[:email].b,
+ date: Google::Protobuf::Timestamp.new(seconds: author_or_committer[:time].to_i)
+ )
+ end
end
end
end
diff --git a/lib/gitlab/git/conflict/file.rb b/lib/gitlab/git/conflict/file.rb
index fc1595f1faf..b2a625e08fa 100644
--- a/lib/gitlab/git/conflict/file.rb
+++ b/lib/gitlab/git/conflict/file.rb
@@ -2,7 +2,7 @@ module Gitlab
module Git
module Conflict
class File
- attr_reader :content, :their_path, :our_path, :our_mode, :repository
+ attr_reader :content, :their_path, :our_path, :our_mode, :repository, :commit_oid
def initialize(repository, commit_oid, conflict, content)
@repository = repository
diff --git a/lib/gitlab/git/conflict/resolver.rb b/lib/gitlab/git/conflict/resolver.rb
index df509c5f4ce..de8cce41b6d 100644
--- a/lib/gitlab/git/conflict/resolver.rb
+++ b/lib/gitlab/git/conflict/resolver.rb
@@ -75,7 +75,7 @@ module Gitlab
resolved_lines = file.resolve_lines(params[:sections])
new_file = resolved_lines.map { |line| line[:full_line] }.join("\n")
- new_file << "\n" if file.our_blob.data.ends_with?("\n")
+ new_file << "\n" if file.our_blob.data.end_with?("\n")
elsif params[:content]
new_file = file.resolve_content(params[:content])
end
diff --git a/lib/gitlab/git/operation_service.rb b/lib/gitlab/git/operation_service.rb
index e36d5410431..ef5bdbaf819 100644
--- a/lib/gitlab/git/operation_service.rb
+++ b/lib/gitlab/git/operation_service.rb
@@ -83,7 +83,7 @@ module Gitlab
Gitlab::Git.check_namespace!(start_repository)
start_repository = RemoteRepository.new(start_repository) unless start_repository.is_a?(RemoteRepository)
- start_branch_name = nil if start_repository.empty_repo?
+ start_branch_name = nil if start_repository.empty?
if start_branch_name && !start_repository.branch_exists?(start_branch_name)
raise ArgumentError, "Cannot find branch #{start_branch_name} in #{start_repository.relative_path}"
@@ -126,7 +126,7 @@ module Gitlab
oldrev = branch.target
- if oldrev == repository.rugged.merge_base(newrev, branch.target)
+ if oldrev == repository.merge_base(newrev, branch.target)
oldrev
else
raise Gitlab::Git::CommitError.new('Branch diverged')
diff --git a/lib/gitlab/git/remote_repository.rb b/lib/gitlab/git/remote_repository.rb
index 3685aa20669..6bd6e58feeb 100644
--- a/lib/gitlab/git/remote_repository.rb
+++ b/lib/gitlab/git/remote_repository.rb
@@ -24,10 +24,12 @@ module Gitlab
@path = repository.path
end
- def empty_repo?
+ def empty?
# We will override this implementation in gitaly-ruby because we cannot
# use '@repository' there.
- @repository.empty_repo?
+ #
+          # Caches and memoization are used on the Rails side.
+ !@repository.exists? || @repository.empty?
end
def commit_id(revision)
diff --git a/lib/gitlab/git/repository.rb b/lib/gitlab/git/repository.rb
index d399636bb28..369bb16f719 100644
--- a/lib/gitlab/git/repository.rb
+++ b/lib/gitlab/git/repository.rb
@@ -18,6 +18,9 @@ module Gitlab
GIT_ALTERNATE_OBJECT_DIRECTORIES_RELATIVE
].freeze
SEARCH_CONTEXT_LINES = 3
+ REBASE_WORKTREE_PREFIX = 'rebase'.freeze
+ SQUASH_WORKTREE_PREFIX = 'squash'.freeze
+ GITALY_INTERNAL_URL = 'ssh://gitaly/internal.git'.freeze
NoRepository = Class.new(StandardError)
InvalidBlobName = Class.new(StandardError)
@@ -73,9 +76,6 @@ module Gitlab
@attributes = Gitlab::Git::Attributes.new(path)
end
- delegate :empty?,
- to: :rugged
-
def ==(other)
path == other.path
end
@@ -204,6 +204,13 @@ module Gitlab
end
end
+      # A Git repository can contain some hidden refs, like:
+      #   /refs/notes/*
+      #   /refs/git-as-svn/*
+      #   /refs/pulls/*
+      # These refs are not visible on the project page by default and are not cloned to the client side.
+ alias_method :has_visible_content?, :has_local_branches?
+
def has_local_branches_rugged?
rugged.branches.each(:local).any? do |ref|
begin
@@ -505,13 +512,20 @@ module Gitlab
# Counts the amount of commits between `from` and `to`.
def count_commits_between(from, to)
- Commit.between(self, from, to).size
+ count_commits(ref: "#{from}..#{to}")
end
# Returns the SHA of the most recent common ancestor of +from+ and +to+
def merge_base_commit(from, to)
- rugged.merge_base(from, to)
+ gitaly_migrate(:merge_base) do |is_enabled|
+ if is_enabled
+ gitaly_repository_client.find_merge_base(from, to)
+ else
+ rugged.merge_base(from, to)
+ end
+ end
end
+ alias_method :merge_base, :merge_base_commit
# Gitaly note: JV: check gitlab-ee before removing this method.
def rugged_is_ancestor?(ancestor_id, descendant_id)
@@ -774,24 +788,21 @@ module Gitlab
end
def revert(user:, commit:, branch_name:, message:, start_branch_name:, start_repository:)
- OperationService.new(user, self).with_branch(
- branch_name,
- start_branch_name: start_branch_name,
- start_repository: start_repository
- ) do |start_commit|
-
- Gitlab::Git.check_namespace!(commit, start_repository)
-
- revert_tree_id = check_revert_content(commit, start_commit.sha)
- raise CreateTreeError unless revert_tree_id
-
- committer = user_to_committer(user)
+ gitaly_migrate(:revert) do |is_enabled|
+ args = {
+ user: user,
+ commit: commit,
+ branch_name: branch_name,
+ message: message,
+ start_branch_name: start_branch_name,
+ start_repository: start_repository
+ }
- create_commit(message: message,
- author: committer,
- committer: committer,
- tree: revert_tree_id,
- parents: [start_commit.sha])
+ if is_enabled
+ gitaly_operations_client.user_revert(args)
+ else
+ rugged_revert(args)
+ end
end
end
@@ -809,44 +820,24 @@ module Gitlab
end
def cherry_pick(user:, commit:, branch_name:, message:, start_branch_name:, start_repository:)
- OperationService.new(user, self).with_branch(
- branch_name,
- start_branch_name: start_branch_name,
- start_repository: start_repository
- ) do |start_commit|
-
- Gitlab::Git.check_namespace!(commit, start_repository)
-
- cherry_pick_tree_id = check_cherry_pick_content(commit, start_commit.sha)
- raise CreateTreeError unless cherry_pick_tree_id
-
- committer = user_to_committer(user)
+ gitaly_migrate(:cherry_pick) do |is_enabled|
+ args = {
+ user: user,
+ commit: commit,
+ branch_name: branch_name,
+ message: message,
+ start_branch_name: start_branch_name,
+ start_repository: start_repository
+ }
- create_commit(message: message,
- author: {
- email: commit.author_email,
- name: commit.author_name,
- time: commit.authored_date
- },
- committer: committer,
- tree: cherry_pick_tree_id,
- parents: [start_commit.sha])
+ if is_enabled
+ gitaly_operations_client.user_cherry_pick(args)
+ else
+ rugged_cherry_pick(args)
+ end
end
end
- def check_cherry_pick_content(target_commit, source_sha)
- args = [target_commit.sha, source_sha]
- args << 1 if target_commit.merge_commit?
-
- cherry_pick_index = rugged.cherrypick_commit(*args)
- return false if cherry_pick_index.conflicts?
-
- tree_id = cherry_pick_index.write_tree(rugged)
- return false unless diff_exists?(source_sha, tree_id)
-
- tree_id
- end
-
def diff_exists?(sha1, sha2)
rugged.diff(sha1, sha2).size > 0
end
@@ -904,8 +895,11 @@ module Gitlab
end
end
- def add_remote(remote_name, url)
+ # If `mirror_refmap` is present the remote is set as mirror with that mapping
+ def add_remote(remote_name, url, mirror_refmap: nil)
rugged.remotes.create(remote_name, url)
+
+ set_remote_as_mirror(remote_name, refmap: mirror_refmap) if mirror_refmap
rescue Rugged::ConfigError
remote_update(remote_name, url: url)
end
@@ -1025,7 +1019,7 @@ module Gitlab
Gitlab::Git.check_namespace!(start_repository)
start_repository = RemoteRepository.new(start_repository) unless start_repository.is_a?(RemoteRepository)
- return yield nil if start_repository.empty_repo?
+ return yield nil if start_repository.empty?
if start_repository.same_repository?(self)
yield commit(start_branch_name)
@@ -1086,17 +1080,17 @@ module Gitlab
end
end
- def write_ref(ref_path, ref)
+ def write_ref(ref_path, ref, force: false)
raise ArgumentError, "invalid ref_path #{ref_path.inspect}" if ref_path.include?(' ')
raise ArgumentError, "invalid ref #{ref.inspect}" if ref.include?("\x00")
- command = [Gitlab.config.git.bin_path] + %w[update-ref --stdin -z]
- input = "update #{ref_path}\x00#{ref}\x00\x00"
- output, status = circuit_breaker.perform do
- popen(command, path) { |stdin| stdin.write(input) }
- end
+ ref = "refs/heads/#{ref}" unless ref.start_with?("refs") || ref =~ /\A[a-f0-9]+\z/i
- raise GitError, output unless status.zero?
+ rugged.references.create(ref_path, ref, force: force)
+ rescue Rugged::ReferenceError => ex
+ raise GitError, "could not create ref #{ref_path}: #{ex}"
+ rescue Rugged::OSError => ex
+ raise GitError, "could not create ref #{ref_path}: #{ex}"
end
def fetch_ref(source_repository, source_ref:, target_ref:)
@@ -1118,12 +1112,22 @@ module Gitlab
end
# Refactoring aid; allows us to copy code from app/models/repository.rb
- def run_git(args, env: {})
+ def run_git(args, chdir: path, env: {}, nice: false, &block)
+ cmd = [Gitlab.config.git.bin_path, *args]
+ cmd.unshift("nice") if nice
circuit_breaker.perform do
- popen([Gitlab.config.git.bin_path, *args], path, env)
+ popen(cmd, chdir, env, &block)
end
end
+ def run_git!(args, chdir: path, env: {}, nice: false, &block)
+ output, status = run_git(args, chdir: chdir, env: env, nice: nice, &block)
+
+ raise GitError, output unless status.zero?
+
+ output
+ end
+
# Refactoring aid; allows us to copy code from app/models/repository.rb
def run_git_with_timeout(args, timeout, env: {})
circuit_breaker.perform do
@@ -1136,32 +1140,28 @@ module Gitlab
Gitlab::Git::Commit.find(self, ref)
end
- # Refactoring aid; allows us to copy code from app/models/repository.rb
- def empty_repo?
- !exists? || !has_visible_content?
+ def empty?
+ !has_visible_content?
end
- #
- # Git repository can contains some hidden refs like:
- # /refs/notes/*
- # /refs/git-as-svn/*
- # /refs/pulls/*
- # This refs by default not visible in project page and not cloned to client side.
- #
- # This method return true if repository contains some content visible in project page.
- #
- def has_visible_content?
- return @has_visible_content if defined?(@has_visible_content)
+ def fetch_repository_as_mirror(repository)
+ remote_name = "tmp-#{SecureRandom.hex}"
- @has_visible_content = has_local_branches?
- end
+ # Notice that this feature flag is not for `fetch_repository_as_mirror`
+ # as a whole but for the fetching mechanism (file path or gitaly-ssh).
+ url, env = gitaly_migrate(:fetch_internal) do |is_enabled|
+ if is_enabled
+ repository = RemoteRepository.new(repository) unless repository.is_a?(RemoteRepository)
+ [GITALY_INTERNAL_URL, repository.fetch_env]
+ else
+ [repository.path, nil]
+ end
+ end
- # Like all public `Gitlab::Git::Repository` methods, this method is part
- # of `Repository`'s interface through `method_missing`.
- # `Repository` has its own `fetch_remote` which uses `gitlab-shell` and
- # takes some extra attributes, so we qualify this method name to prevent confusion.
- def fetch_remote_without_shell(remote = 'origin')
- run_git(['fetch', remote]).last.zero?
+ add_remote(remote_name, url, mirror_refmap: :all_refs)
+ fetch_remote(remote_name, env: env)
+ ensure
+ remove_remote(remote_name)
end
def blob_at(sha, path)
@@ -1187,6 +1187,76 @@ module Gitlab
end
end
+ def fsck
+ gitaly_migrate(:git_fsck) do |is_enabled|
+ msg, status = if is_enabled
+ gitaly_fsck
+ else
+ shell_fsck
+ end
+
+ raise GitError.new("Could not fsck repository: #{msg}") unless status.zero?
+ end
+ end
+
+ def rebase(user, rebase_id, branch:, branch_sha:, remote_repository:, remote_branch:)
+ rebase_path = worktree_path(REBASE_WORKTREE_PREFIX, rebase_id)
+ env = git_env_for_user(user)
+
+ with_worktree(rebase_path, branch, env: env) do
+ run_git!(
+ %W(pull --rebase #{remote_repository.path} #{remote_branch}),
+ chdir: rebase_path, env: env
+ )
+
+ rebase_sha = run_git!(%w(rev-parse HEAD), chdir: rebase_path, env: env).strip
+
+ Gitlab::Git::OperationService.new(user, self)
+ .update_branch(branch, rebase_sha, branch_sha)
+
+ rebase_sha
+ end
+ end
+
+ def rebase_in_progress?(rebase_id)
+ fresh_worktree?(worktree_path(REBASE_WORKTREE_PREFIX, rebase_id))
+ end
+
+ def squash(user, squash_id, branch:, start_sha:, end_sha:, author:, message:)
+ squash_path = worktree_path(SQUASH_WORKTREE_PREFIX, squash_id)
+ env = git_env_for_user(user).merge(
+ 'GIT_AUTHOR_NAME' => author.name,
+ 'GIT_AUTHOR_EMAIL' => author.email
+ )
+ diff_range = "#{start_sha}...#{end_sha}"
+ diff_files = run_git!(
+ %W(diff --name-only --diff-filter=a --binary #{diff_range})
+ ).chomp
+
+ with_worktree(squash_path, branch, sparse_checkout_files: diff_files, env: env) do
+ # Apply diff of the `diff_range` to the worktree
+ diff = run_git!(%W(diff --binary #{diff_range}))
+ run_git!(%w(apply --index), chdir: squash_path, env: env) do |stdin|
+ stdin.write(diff)
+ end
+
+ # Commit the `diff_range` diff
+ run_git!(%W(commit --no-verify --message #{message}), chdir: squash_path, env: env)
+
+ # Return the squash sha. May print a warning for ambiguous refs, but
+ # we can ignore that with `--quiet` and just take the SHA, if present.
+ # HEAD here always refers to the current HEAD commit, even if there is
+ # another ref called HEAD.
+ run_git!(
+ %w(rev-parse --quiet --verify HEAD), chdir: squash_path, env: env
+ ).chomp
+ end
+ end
+
+ def squash_in_progress?(squash_id)
+ fresh_worktree?(worktree_path(SQUASH_WORKTREE_PREFIX, squash_id))
+ end
+
def gitaly_repository
Gitlab::GitalyClient::Util.repository(@storage, @relative_path, @gl_repository)
end
@@ -1223,6 +1293,65 @@ module Gitlab
private
+ def fresh_worktree?(path)
+ File.exist?(path) && !clean_stuck_worktree(path)
+ end
+
+ def with_worktree(worktree_path, branch, sparse_checkout_files: nil, env:)
+ base_args = %w(worktree add --detach)
+
+        # Note that we _don't_ want to test for `.present?` here: if the caller
+        # passes a non-nil empty value, it still wants sparse checkout but just
+        # isn't interested in any file, perhaps because it wants to check out
+        # files changed by a changeset that only adds files.
+ if sparse_checkout_files
+ # Create worktree without checking out
+ run_git!(base_args + ['--no-checkout', worktree_path], env: env)
+ worktree_git_path = run_git!(%w(rev-parse --git-dir), chdir: worktree_path)
+
+ configure_sparse_checkout(worktree_git_path, sparse_checkout_files)
+
+ # After sparse checkout configuration, checkout `branch` in worktree
+ run_git!(%W(checkout --detach #{branch}), chdir: worktree_path, env: env)
+ else
+ # Create worktree and checkout `branch` in it
+ run_git!(base_args + [worktree_path, branch], env: env)
+ end
+
+ yield
+ ensure
+ FileUtils.rm_rf(worktree_path) if File.exist?(worktree_path)
+ FileUtils.rm_rf(worktree_git_path) if worktree_git_path && File.exist?(worktree_git_path)
+ end
+
+ def clean_stuck_worktree(path)
+ return false unless File.mtime(path) < 15.minutes.ago
+
+ FileUtils.rm_rf(path)
+ true
+ end
+
+ # Adding a worktree means checking out the repository. For large repos,
+ # this can be very expensive, so set up sparse checkout for the worktree
+ # to only check out the files we're interested in.
+ def configure_sparse_checkout(worktree_git_path, files)
+ run_git!(%w(config core.sparseCheckout true))
+
+ return if files.empty?
+
+ worktree_info_path = File.join(worktree_git_path, 'info')
+ FileUtils.mkdir_p(worktree_info_path)
+ File.write(File.join(worktree_info_path, 'sparse-checkout'), files)
+ end
+
+ def gitaly_fsck
+ gitaly_repository_client.fsck
+ end
+
+ def shell_fsck
+ run_git(%W[--git-dir=#{path} fsck], nice: true)
+ end
+
def rugged_fetch_source_branch(source_repository, source_branch, local_ref)
with_repo_branch_commit(source_repository, source_branch) do |commit|
if commit
@@ -1234,6 +1363,24 @@ module Gitlab
end
end
+ def worktree_path(prefix, id)
+ id = id.to_s
+ raise ArgumentError, "worktree id can't be empty" unless id.present?
+ raise ArgumentError, "worktree id can't contain slashes " if id.include?("/")
+
+ File.join(path, 'gitlab-worktree', "#{prefix}-#{id}")
+ end
+
+ def git_env_for_user(user)
+ {
+ 'GIT_COMMITTER_NAME' => user.name,
+ 'GIT_COMMITTER_EMAIL' => user.email,
+ 'GL_ID' => Gitlab::GlId.gl_id(user),
+ 'GL_PROTOCOL' => Gitlab::Git::Hook::GL_PROTOCOL,
+ 'GL_REPOSITORY' => gl_repository
+ }
+ end
+
# Gitaly note: JV: Trying to get rid of the 'filter' option so we can implement this with 'git'.
def branches_filter(filter: nil, sort_by: nil)
# n+1: https://gitlab.com/gitlab-org/gitlab-ce/issues/37464
@@ -1253,7 +1400,11 @@ module Gitlab
# Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/695
def git_merged_branch_names(branch_names = [])
- root_sha = find_branch(root_ref).target
+ return [] unless root_ref
+
+ root_sha = find_branch(root_ref)&.target
+
+ return [] unless root_sha
git_arguments =
%W[branch --merged #{root_sha}
@@ -1645,6 +1796,28 @@ module Gitlab
end
end
+ def rugged_revert(user:, commit:, branch_name:, message:, start_branch_name:, start_repository:)
+ OperationService.new(user, self).with_branch(
+ branch_name,
+ start_branch_name: start_branch_name,
+ start_repository: start_repository
+ ) do |start_commit|
+
+ Gitlab::Git.check_namespace!(commit, start_repository)
+
+ revert_tree_id = check_revert_content(commit, start_commit.sha)
+ raise CreateTreeError unless revert_tree_id
+
+ committer = user_to_committer(user)
+
+ create_commit(message: message,
+ author: committer,
+ committer: committer,
+ tree: revert_tree_id,
+ parents: [start_commit.sha])
+ end
+ end
+
def gitaly_add_branch(branch_name, user, target)
gitaly_operation_client.user_create_branch(branch_name, user, target)
rescue GRPC::FailedPrecondition => ex
@@ -1661,13 +1834,52 @@ module Gitlab
raise InvalidRef, ex
end
+ def rugged_cherry_pick(user:, commit:, branch_name:, message:, start_branch_name:, start_repository:)
+ OperationService.new(user, self).with_branch(
+ branch_name,
+ start_branch_name: start_branch_name,
+ start_repository: start_repository
+ ) do |start_commit|
+
+ Gitlab::Git.check_namespace!(commit, start_repository)
+
+ cherry_pick_tree_id = check_cherry_pick_content(commit, start_commit.sha)
+ raise CreateTreeError unless cherry_pick_tree_id
+
+ committer = user_to_committer(user)
+
+ create_commit(message: message,
+ author: {
+ email: commit.author_email,
+ name: commit.author_name,
+ time: commit.authored_date
+ },
+ committer: committer,
+ tree: cherry_pick_tree_id,
+ parents: [start_commit.sha])
+ end
+ end
+
+ def check_cherry_pick_content(target_commit, source_sha)
+ args = [target_commit.sha, source_sha]
+ args << 1 if target_commit.merge_commit?
+
+ cherry_pick_index = rugged.cherrypick_commit(*args)
+ return false if cherry_pick_index.conflicts?
+
+ tree_id = cherry_pick_index.write_tree(rugged)
+ return false unless diff_exists?(source_sha, tree_id)
+
+ tree_id
+ end
+
def local_fetch_ref(source_path, source_ref:, target_ref:)
args = %W(fetch --no-tags -f #{source_path} #{source_ref}:#{target_ref})
run_git(args)
end
def gitaly_fetch_ref(source_repository, source_ref:, target_ref:)
- args = %W(fetch --no-tags -f ssh://gitaly/internal.git #{source_ref}:#{target_ref})
+ args = %W(fetch --no-tags -f #{GITALY_INTERNAL_URL} #{source_ref}:#{target_ref})
run_git(args, env: source_repository.fetch_env)
end
@@ -1687,6 +1899,10 @@ module Gitlab
rescue Rugged::ReferenceError
raise ArgumentError, 'Invalid merge source'
end
+
+ def fetch_remote(remote_name = 'origin', env: nil)
+ run_git(['fetch', remote_name], env: env).last.zero?
+ end
end
end
end
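
worktree_path keeps every temporary worktree under a gitlab-worktree/ directory inside the repository and validates the id so it cannot escape that directory. A small standalone sketch with an example repository path:

    def worktree_path(repo_path, prefix, id)
      id = id.to_s
      raise ArgumentError, "worktree id can't be empty" if id.empty?
      raise ArgumentError, "worktree id can't contain slashes" if id.include?('/')

      File.join(repo_path, 'gitlab-worktree', "#{prefix}-#{id}")
    end

    puts worktree_path('/repos/foo.git', 'rebase', 42)
    # => "/repos/foo.git/gitlab-worktree/rebase-42"
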
diff --git a/lib/gitlab/git/repository_mirroring.rb b/lib/gitlab/git/repository_mirroring.rb
index 392bef69e80..effb1f0ca19 100644
--- a/lib/gitlab/git/repository_mirroring.rb
+++ b/lib/gitlab/git/repository_mirroring.rb
@@ -17,33 +17,6 @@ module Gitlab
rugged.config["remote.#{remote_name}.prune"] = true
end
- def set_remote_refmap(remote_name, refmap)
- Array(refmap).each_with_index do |refspec, i|
- refspec = REFMAPS[refspec] || refspec
-
- # We need multiple `fetch` entries, but Rugged only allows replacing a config, not adding to it.
- # To make sure we start from scratch, we set the first using rugged, and use `git` for any others
- if i == 0
- rugged.config["remote.#{remote_name}.fetch"] = refspec
- else
- run_git(%W[config --add remote.#{remote_name}.fetch #{refspec}])
- end
- end
- end
-
- # Like all_refs public `Gitlab::Git::Repository` methods, this method is part
- # of `Repository`'s interface through `method_missing`.
- # `Repository` has its own `fetch_as_mirror` which uses `gitlab-shell` and
- # takes some extra attributes, so we qualify this method name to prevent confusion.
- def fetch_as_mirror_without_shell(url)
- remote_name = "tmp-#{SecureRandom.hex}"
- add_remote(remote_name, url)
- set_remote_as_mirror(remote_name)
- fetch_remote_without_shell(remote_name)
- ensure
- remove_remote(remote_name) if remote_name
- end
-
def remote_tags(remote)
# Each line has this format: "dc872e9fa6963f8f03da6c8f6f264d0845d6b092\trefs/tags/v1.10.0\n"
# We want to convert it to: [{ 'v1.10.0' => 'dc872e9fa6963f8f03da6c8f6f264d0845d6b092' }, ...]
@@ -85,6 +58,20 @@ module Gitlab
private
+ def set_remote_refmap(remote_name, refmap)
+ Array(refmap).each_with_index do |refspec, i|
+ refspec = REFMAPS[refspec] || refspec
+
+ # We need multiple `fetch` entries, but Rugged only allows replacing a config, not adding to it.
+ # To make sure we start from scratch, we set the first using rugged, and use `git` for any others
+ if i == 0
+ rugged.config["remote.#{remote_name}.fetch"] = refspec
+ else
+ run_git(%W[config --add remote.#{remote_name}.fetch #{refspec}])
+ end
+ end
+ end
+
def list_remote_tags(remote)
tag_list, exit_code, error = nil
cmd = %W(#{Gitlab.config.git.bin_path} --git-dir=#{path} ls-remote --tags #{remote})
diff --git a/lib/gitlab/git/storage.rb b/lib/gitlab/git/storage.rb
index 99518c9b1e4..5933312b0b5 100644
--- a/lib/gitlab/git/storage.rb
+++ b/lib/gitlab/git/storage.rb
@@ -15,6 +15,7 @@ module Gitlab
Failing = Class.new(Inaccessible)
REDIS_KEY_PREFIX = 'storage_accessible:'.freeze
+ REDIS_KNOWN_KEYS = "#{REDIS_KEY_PREFIX}known_keys_set".freeze
def self.redis
Gitlab::Redis::SharedState
diff --git a/lib/gitlab/git/storage/checker.rb b/lib/gitlab/git/storage/checker.rb
new file mode 100644
index 00000000000..d3c37f82101
--- /dev/null
+++ b/lib/gitlab/git/storage/checker.rb
@@ -0,0 +1,120 @@
+module Gitlab
+ module Git
+ module Storage
+ class Checker
+ include CircuitBreakerSettings
+
+ attr_reader :storage_path, :storage, :hostname, :logger
+ METRICS_MUTEX = Mutex.new
+ STORAGE_TIMING_BUCKETS = [0.1, 0.15, 0.25, 0.33, 0.5, 1, 1.5, 2.5, 5, 10, 15].freeze
+
+ def self.check_all(logger = Rails.logger)
+ threads = Gitlab.config.repositories.storages.keys.map do |storage_name|
+ Thread.new do
+ Thread.current[:result] = new(storage_name, logger).check_with_lease
+ end
+ end
+
+ threads.map do |thread|
+ thread.join
+ thread[:result]
+ end
+ end
+
+ def self.check_histogram
+ @check_histogram ||=
+ METRICS_MUTEX.synchronize do
+ @check_histogram || Gitlab::Metrics.histogram(:circuitbreaker_storage_check_duration_seconds,
+ 'Storage check time in seconds',
+ {},
+ STORAGE_TIMING_BUCKETS
+ )
+ end
+ end
+
+ def initialize(storage, logger = Rails.logger)
+ @storage = storage
+ config = Gitlab.config.repositories.storages[@storage]
+ @storage_path = config['path']
+ @logger = logger
+
+ @hostname = Gitlab::Environment.hostname
+ end
+
+ def check_with_lease
+ lease_key = "storage_check:#{cache_key}"
+ lease = Gitlab::ExclusiveLease.new(lease_key, timeout: storage_timeout)
+ result = { storage: storage, success: nil }
+
+ if uuid = lease.try_obtain
+ result[:success] = check
+
+ Gitlab::ExclusiveLease.cancel(lease_key, uuid)
+ else
+ logger.warn("#{hostname}: #{storage}: Skipping check, previous check still running")
+ end
+
+ result
+ end
+
+ def check
+ if perform_access_check
+ track_storage_accessible
+ true
+ else
+ track_storage_inaccessible
+ logger.error("#{hostname}: #{storage}: Not accessible.")
+ false
+ end
+ end
+
+ private
+
+ def perform_access_check
+ start_time = Gitlab::Metrics::System.monotonic_time
+
+ Gitlab::Git::Storage::ForkedStorageCheck.storage_available?(storage_path, storage_timeout, access_retries)
+ ensure
+ execution_time = Gitlab::Metrics::System.monotonic_time - start_time
+ self.class.check_histogram.observe({ storage: storage }, execution_time)
+ end
+
+ def track_storage_inaccessible
+ first_failure = current_failure_info.first_failure || Time.now
+ last_failure = Time.now
+
+ Gitlab::Git::Storage.redis.with do |redis|
+ redis.pipelined do
+ redis.hset(cache_key, :first_failure, first_failure.to_i)
+ redis.hset(cache_key, :last_failure, last_failure.to_i)
+ redis.hincrby(cache_key, :failure_count, 1)
+ redis.expire(cache_key, failure_reset_time)
+ maintain_known_keys(redis)
+ end
+ end
+ end
+
+ def track_storage_accessible
+ Gitlab::Git::Storage.redis.with do |redis|
+ redis.pipelined do
+ redis.hset(cache_key, :first_failure, nil)
+ redis.hset(cache_key, :last_failure, nil)
+ redis.hset(cache_key, :failure_count, 0)
+ maintain_known_keys(redis)
+ end
+ end
+ end
+
+ def maintain_known_keys(redis)
+ expire_time = Time.now.to_i + failure_reset_time
+ redis.zadd(Gitlab::Git::Storage::REDIS_KNOWN_KEYS, expire_time, cache_key)
+ redis.zremrangebyscore(Gitlab::Git::Storage::REDIS_KNOWN_KEYS, '-inf', Time.now.to_i)
+ end
+
+ def current_failure_info
+ FailureInfo.load(cache_key)
+ end
+ end
+ end
+ end
+end
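
Checker.check_all fans out one thread per storage and collects each thread's result from a thread-local slot after join. A sketch of that fan-out with the actual storage check replaced by a stub:

    storages = %w[default nfs-1 nfs-2]

    threads = storages.map do |storage_name|
      Thread.new do
        # A real check would probe the storage path under a lease and timeout.
        Thread.current[:result] = { storage: storage_name, success: true }
      end
    end

    results = threads.map do |thread|
      thread.join
      thread[:result]
    end

    p results
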
diff --git a/lib/gitlab/git/storage/circuit_breaker.rb b/lib/gitlab/git/storage/circuit_breaker.rb
index be7598ef011..898bb1b65be 100644
--- a/lib/gitlab/git/storage/circuit_breaker.rb
+++ b/lib/gitlab/git/storage/circuit_breaker.rb
@@ -4,24 +4,11 @@ module Gitlab
class CircuitBreaker
include CircuitBreakerSettings
- FailureInfo = Struct.new(:last_failure, :failure_count)
-
attr_reader :storage,
- :hostname,
- :storage_path
-
- delegate :last_failure, :failure_count, to: :failure_info
-
- def self.reset_all!
- pattern = "#{Gitlab::Git::Storage::REDIS_KEY_PREFIX}*"
+ :hostname
- Gitlab::Git::Storage.redis.with do |redis|
- all_storage_keys = redis.scan_each(match: pattern).to_a
- redis.del(*all_storage_keys) unless all_storage_keys.empty?
- end
-
- RequestStore.delete(:circuitbreaker_cache)
- end
+ delegate :last_failure, :failure_count, :no_failures?,
+ to: :failure_info
def self.for_storage(storage)
cached_circuitbreakers = RequestStore.fetch(:circuitbreaker_cache) do
@@ -48,9 +35,6 @@ module Gitlab
def initialize(storage, hostname)
@storage = storage
@hostname = hostname
-
- config = Gitlab.config.repositories.storages[@storage]
- @storage_path = config['path']
end
def perform
@@ -67,15 +51,6 @@ module Gitlab
failure_count > failure_count_threshold
end
- def backing_off?
- return false if no_failures?
-
- recent_failure = last_failure > failure_wait_time.seconds.ago
- too_many_failures = failure_count > backoff_threshold
-
- recent_failure && too_many_failures
- end
-
private
# The circuitbreaker can be enabled for the entire fleet using a Feature
@@ -88,82 +63,13 @@ module Gitlab
end
def failure_info
- @failure_info ||= get_failure_info
- end
-
- # Memoizing the `storage_available` call means we only do it once per
- # request when the storage is available.
- #
- # When the storage appears not available, and the memoized value is `false`
- # we might want to try again.
- def storage_available?
- return @storage_available if @storage_available
-
- if @storage_available = Gitlab::Git::Storage::ForkedStorageCheck
- .storage_available?(storage_path, storage_timeout, access_retries)
- track_storage_accessible
- else
- track_storage_inaccessible
- end
-
- @storage_available
+ @failure_info ||= FailureInfo.load(cache_key)
end
def check_storage_accessible!
if circuit_broken?
raise Gitlab::Git::Storage::CircuitOpen.new("Circuit for #{storage} is broken", failure_reset_time)
end
-
- if backing_off?
- raise Gitlab::Git::Storage::Failing.new("Backing off access to #{storage}", failure_wait_time)
- end
-
- unless storage_available?
- raise Gitlab::Git::Storage::Inaccessible.new("#{storage} not accessible", failure_wait_time)
- end
- end
-
- def no_failures?
- last_failure.blank? && failure_count == 0
- end
-
- def track_storage_inaccessible
- @failure_info = FailureInfo.new(Time.now, failure_count + 1)
-
- Gitlab::Git::Storage.redis.with do |redis|
- redis.pipelined do
- redis.hset(cache_key, :last_failure, last_failure.to_i)
- redis.hincrby(cache_key, :failure_count, 1)
- redis.expire(cache_key, failure_reset_time)
- end
- end
- end
-
- def track_storage_accessible
- return if no_failures?
-
- @failure_info = FailureInfo.new(nil, 0)
-
- Gitlab::Git::Storage.redis.with do |redis|
- redis.pipelined do
- redis.hset(cache_key, :last_failure, nil)
- redis.hset(cache_key, :failure_count, 0)
- end
- end
- end
-
- def get_failure_info
- last_failure, failure_count = Gitlab::Git::Storage.redis.with do |redis|
- redis.hmget(cache_key, :last_failure, :failure_count)
- end
-
- last_failure = Time.at(last_failure.to_i) if last_failure.present?
-
- FailureInfo.new(last_failure, failure_count.to_i)
- end
-
- def cache_key
- @cache_key ||= "#{Gitlab::Git::Storage::REDIS_KEY_PREFIX}#{storage}:#{hostname}"
end
end
end
diff --git a/lib/gitlab/git/storage/circuit_breaker_settings.rb b/lib/gitlab/git/storage/circuit_breaker_settings.rb
index 257fe8cd8f0..c9e225f187d 100644
--- a/lib/gitlab/git/storage/circuit_breaker_settings.rb
+++ b/lib/gitlab/git/storage/circuit_breaker_settings.rb
@@ -6,10 +6,6 @@ module Gitlab
application_settings.circuitbreaker_failure_count_threshold
end
- def failure_wait_time
- application_settings.circuitbreaker_failure_wait_time
- end
-
def failure_reset_time
application_settings.circuitbreaker_failure_reset_time
end
@@ -22,8 +18,12 @@ module Gitlab
application_settings.circuitbreaker_access_retries
end
- def backoff_threshold
- application_settings.circuitbreaker_backoff_threshold
+ def check_interval
+ application_settings.circuitbreaker_check_interval
+ end
+
+ def cache_key
+ @cache_key ||= "#{Gitlab::Git::Storage::REDIS_KEY_PREFIX}#{storage}:#{hostname}"
end
private
diff --git a/lib/gitlab/git/storage/failure_info.rb b/lib/gitlab/git/storage/failure_info.rb
new file mode 100644
index 00000000000..387279c110d
--- /dev/null
+++ b/lib/gitlab/git/storage/failure_info.rb
@@ -0,0 +1,39 @@
+module Gitlab
+ module Git
+ module Storage
+ class FailureInfo
+ attr_accessor :first_failure, :last_failure, :failure_count
+
+ def self.reset_all!
+ Gitlab::Git::Storage.redis.with do |redis|
+ all_storage_keys = redis.zrange(Gitlab::Git::Storage::REDIS_KNOWN_KEYS, 0, -1)
+ redis.del(*all_storage_keys) unless all_storage_keys.empty?
+ end
+
+ RequestStore.delete(:circuitbreaker_cache)
+ end
+
+ def self.load(cache_key)
+ first_failure, last_failure, failure_count = Gitlab::Git::Storage.redis.with do |redis|
+ redis.hmget(cache_key, :first_failure, :last_failure, :failure_count)
+ end
+
+ last_failure = Time.at(last_failure.to_i) if last_failure.present?
+ first_failure = Time.at(first_failure.to_i) if first_failure.present?
+
+ new(first_failure, last_failure, failure_count.to_i)
+ end
+
+ def initialize(first_failure, last_failure, failure_count)
+ @first_failure = first_failure
+ @last_failure = last_failure
+ @failure_count = failure_count
+ end
+
+ def no_failures?
+ first_failure.blank? && last_failure.blank? && failure_count == 0
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/git/storage/health.rb b/lib/gitlab/git/storage/health.rb
index 7049772fe3b..90bbe85fd37 100644
--- a/lib/gitlab/git/storage/health.rb
+++ b/lib/gitlab/git/storage/health.rb
@@ -4,8 +4,8 @@ module Gitlab
class Health
attr_reader :storage_name, :info
- def self.pattern_for_storage(storage_name)
- "#{Gitlab::Git::Storage::REDIS_KEY_PREFIX}#{storage_name}:*"
+ def self.prefix_for_storage(storage_name)
+ "#{Gitlab::Git::Storage::REDIS_KEY_PREFIX}#{storage_name}:"
end
def self.for_all_storages
@@ -25,26 +25,15 @@ module Gitlab
private_class_method def self.all_keys_for_storages(storage_names, redis)
keys_per_storage = {}
+ all_keys = redis.zrange(Gitlab::Git::Storage::REDIS_KNOWN_KEYS, 0, -1)
- redis.pipelined do
- storage_names.each do |storage_name|
- pattern = pattern_for_storage(storage_name)
- matched_keys = redis.scan_each(match: pattern)
+ storage_names.each do |storage_name|
+ prefix = prefix_for_storage(storage_name)
- keys_per_storage[storage_name] = matched_keys
- end
+ keys_per_storage[storage_name] = all_keys.select { |key| key.starts_with?(prefix) }
end
- # We need to make sure each lazy-loaded `Enumerator` for matched keys
- # is loaded into an array.
- #
- # Otherwise it would be loaded in the second `Redis#pipelined` block
- # within `.load_for_keys`. In this pipelined call, the active
- # Redis-client changes again, so the values would not be available
- # until the end of that pipelined-block.
- keys_per_storage.each do |storage_name, key_future|
- keys_per_storage[storage_name] = key_future.to_a
- end
+ keys_per_storage
end
private_class_method def self.load_for_keys(keys_per_storage, redis)
diff --git a/lib/gitlab/git/storage/null_circuit_breaker.rb b/lib/gitlab/git/storage/null_circuit_breaker.rb
index a12d52d295f..261c936c689 100644
--- a/lib/gitlab/git/storage/null_circuit_breaker.rb
+++ b/lib/gitlab/git/storage/null_circuit_breaker.rb
@@ -11,6 +11,9 @@ module Gitlab
# These will always have nil values
attr_reader :storage_path
+ delegate :last_failure, :failure_count, :no_failures?,
+ to: :failure_info
+
def initialize(storage, hostname, error: nil)
@storage = storage
@hostname = hostname
@@ -29,16 +32,17 @@ module Gitlab
false
end
- def last_failure
- circuit_broken? ? Time.now : nil
- end
-
- def failure_count
- circuit_broken? ? failure_count_threshold : 0
- end
-
def failure_info
- Gitlab::Git::Storage::CircuitBreaker::FailureInfo.new(last_failure, failure_count)
+ @failure_info ||=
+ if circuit_broken?
+ Gitlab::Git::Storage::FailureInfo.new(Time.now,
+ Time.now,
+ failure_count_threshold)
+ else
+ Gitlab::Git::Storage::FailureInfo.new(nil,
+ nil,
+ 0)
+ end
end
end
end
diff --git a/lib/gitlab/git_access.rb b/lib/gitlab/git_access.rb
index 8998c4b1a83..56f6febe86d 100644
--- a/lib/gitlab/git_access.rb
+++ b/lib/gitlab/git_access.rb
@@ -102,18 +102,15 @@ module Gitlab
end
def check_project_moved!
- return unless redirected_path
+ return if redirected_path.nil?
- url = protocol == 'ssh' ? project.ssh_url_to_repo : project.http_url_to_repo
- message = <<-MESSAGE.strip_heredoc
- Project '#{redirected_path}' was moved to '#{project.full_path}'.
+ project_moved = Checks::ProjectMoved.new(project, user, redirected_path, protocol)
- Please update your Git remote and try again:
-
- git remote set-url origin #{url}
- MESSAGE
-
- raise ProjectMovedError, message
+ if project_moved.permanent_redirect?
+ project_moved.add_redirect_message
+ else
+ raise ProjectMovedError, project_moved.redirect_message(rejected: true)
+ end
end
def check_command_disabled!(cmd)
@@ -166,7 +163,7 @@ module Gitlab
end
if Gitlab::Database.read_only?
- raise UnauthorizedError, ERROR_MESSAGES[:cannot_push_to_read_only]
+ raise UnauthorizedError, push_to_read_only_message
end
if deploy_key
@@ -280,5 +277,9 @@ module Gitlab
UserAccess.new(user, project: project)
end
end
+
+ def push_to_read_only_message
+ ERROR_MESSAGES[:cannot_push_to_read_only]
+ end
end
end
diff --git a/lib/gitlab/git_access_wiki.rb b/lib/gitlab/git_access_wiki.rb
index 98f1f45b338..1c9477e84b2 100644
--- a/lib/gitlab/git_access_wiki.rb
+++ b/lib/gitlab/git_access_wiki.rb
@@ -19,10 +19,14 @@ module Gitlab
end
if Gitlab::Database.read_only?
- raise UnauthorizedError, ERROR_MESSAGES[:read_only]
+ raise UnauthorizedError, push_to_read_only_message
end
true
end
+
+ def push_to_read_only_message
+ ERROR_MESSAGES[:read_only]
+ end
end
end
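Both access classes now share the read-only check and override only `push_to_read_only_message`. A minimal sketch of that template-method shape, with made-up class names and messages:

    class Access
      MESSAGES = { cannot_push_to_read_only: 'This instance is read-only.' }.freeze

      def check_push!(read_only)
        raise push_to_read_only_message if read_only

        true
      end

      def push_to_read_only_message
        MESSAGES[:cannot_push_to_read_only]
      end
    end

    class WikiAccess < Access
      WIKI_MESSAGES = { read_only: 'This wiki is read-only.' }.freeze

      # Only the message changes; the check itself stays in the base class.
      def push_to_read_only_message
        WIKI_MESSAGES[:read_only]
      end
    end

    begin
      WikiAccess.new.check_push!(true)
    rescue RuntimeError => e
      puts e.message   # => "This wiki is read-only."
    end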
diff --git a/lib/gitlab/gitaly_client.rb b/lib/gitlab/gitaly_client.rb
index f27cd800bdd..b753ac46291 100644
--- a/lib/gitlab/gitaly_client.rb
+++ b/lib/gitlab/gitaly_client.rb
@@ -27,7 +27,7 @@ module Gitlab
end
SERVER_VERSION_FILE = 'GITALY_SERVER_VERSION'.freeze
- MAXIMUM_GITALY_CALLS = 30
+ MAXIMUM_GITALY_CALLS = 35
CLIENT_NAME = (Sidekiq.server? ? 'gitlab-sidekiq' : 'gitlab-web').freeze
MUTEX = Mutex.new
@@ -336,6 +336,12 @@ module Gitlab
s.dup.force_encoding(Encoding::ASCII_8BIT)
end
+ def self.binary_stringio(s)
+ io = StringIO.new(s || '')
+ io.set_encoding(Encoding::ASCII_8BIT)
+ io
+ end
+
def self.encode_repeated(a)
Google::Protobuf::RepeatedField.new(:bytes, a.map { |s| self.encode(s) } )
end
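`binary_stringio` wraps a (possibly nil) string in a `StringIO` forced to ASCII-8BIT, so chunks read from it are raw bytes rather than re-encoded text. A standalone sketch:

    require 'stringio'

    def binary_stringio(s)
      io = StringIO.new(s || '')
      io.set_encoding(Encoding::ASCII_8BIT)
      io
    end

    io = binary_stringio('héllo')            # UTF-8 input
    puts io.read(3).encoding                 # => ASCII-8BIT
    puts binary_stringio(nil).eof?           # => true, nil becomes an empty buffer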
diff --git a/lib/gitlab/gitaly_client/commit_service.rb b/lib/gitlab/gitaly_client/commit_service.rb
index 34807d280e5..7985f5b5457 100644
--- a/lib/gitlab/gitaly_client/commit_service.rb
+++ b/lib/gitlab/gitaly_client/commit_service.rb
@@ -250,6 +250,26 @@ module Gitlab
consume_commits_response(response)
end
+ def filter_shas_with_signatures(shas)
+ request = Gitaly::FilterShasWithSignaturesRequest.new(repository: @gitaly_repo)
+
+ enum = Enumerator.new do |y|
+ shas.each_slice(20) do |revs|
+ request.shas = GitalyClient.encode_repeated(revs)
+
+ y.yield request
+
+ request = Gitaly::FilterShasWithSignaturesRequest.new
+ end
+ end
+
+ response = GitalyClient.call(@repository.storage, :commit_service, :filter_shas_with_signatures, enum)
+
+ response.flat_map do |msg|
+ msg.shas.map { |sha| EncodingHelper.encode!(sha) }
+ end
+ end
+
private
def call_commit_diff(request_params, options = {})
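`filter_shas_with_signatures` streams its request lazily, 20 SHAs at a time, and attaches the repository only to the first message. A gRPC-free sketch of the same chunking pattern, with a plain Struct standing in for the request class:

    # Hypothetical request type; only the first message carries the repository.
    Request = Struct.new(:repository, :shas)

    def requests_for(shas, repository)
      request = Request.new(repository, nil)

      Enumerator.new do |y|
        shas.each_slice(20) do |revs|
          request.shas = revs
          y.yield request

          request = Request.new(nil, nil)   # later messages omit the repository
        end
      end
    end

    shas = (1..45).map { |i| format('%040x', i) }
    requests_for(shas, 'project.git').each_with_index do |req, i|
      puts "message #{i}: repository=#{req.repository.inspect}, #{req.shas.size} shas"
    end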
diff --git a/lib/gitlab/gitaly_client/operation_service.rb b/lib/gitlab/gitaly_client/operation_service.rb
index 526d44a8b77..400a4af363b 100644
--- a/lib/gitlab/gitaly_client/operation_service.rb
+++ b/lib/gitlab/gitaly_client/operation_service.rb
@@ -122,6 +122,64 @@ module Gitlab
).branch_update
Gitlab::Git::OperationService::BranchUpdate.from_gitaly(branch_update)
end
+
+ def user_cherry_pick(user:, commit:, branch_name:, message:, start_branch_name:, start_repository:)
+ call_cherry_pick_or_revert(:cherry_pick,
+ user: user,
+ commit: commit,
+ branch_name: branch_name,
+ message: message,
+ start_branch_name: start_branch_name,
+ start_repository: start_repository)
+ end
+
+ def user_revert(user:, commit:, branch_name:, message:, start_branch_name:, start_repository:)
+ call_cherry_pick_or_revert(:revert,
+ user: user,
+ commit: commit,
+ branch_name: branch_name,
+ message: message,
+ start_branch_name: start_branch_name,
+ start_repository: start_repository)
+ end
+
+ private
+
+ def call_cherry_pick_or_revert(rpc, user:, commit:, branch_name:, message:, start_branch_name:, start_repository:)
+ request_class = "Gitaly::User#{rpc.to_s.camelcase}Request".constantize
+
+ request = request_class.new(
+ repository: @gitaly_repo,
+ user: Gitlab::Git::User.from_gitlab(user).to_gitaly,
+ commit: commit.to_gitaly_commit,
+ branch_name: GitalyClient.encode(branch_name),
+ message: GitalyClient.encode(message),
+ start_branch_name: GitalyClient.encode(start_branch_name.to_s),
+ start_repository: start_repository.gitaly_repository
+ )
+
+ response = GitalyClient.call(
+ @repository.storage,
+ :operation_service,
+ :"user_#{rpc}",
+ request,
+ remote_storage: start_repository.storage
+ )
+
+ handle_cherry_pick_or_revert_response(response)
+ end
+
+ def handle_cherry_pick_or_revert_response(response)
+ if response.pre_receive_error.presence
+ raise Gitlab::Git::HooksService::PreReceiveError, response.pre_receive_error
+ elsif response.commit_error.presence
+ raise Gitlab::Git::CommitError, response.commit_error
+ elsif response.create_tree_error.presence
+ raise Gitlab::Git::Repository::CreateTreeError, response.create_tree_error
+ else
+ Gitlab::Git::OperationService::BranchUpdate.from_gitaly(response.branch_update)
+ end
+ end
end
end
end
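`call_cherry_pick_or_revert` derives the request class (`Gitaly::UserCherryPickRequest` or `Gitaly::UserRevertRequest`) from the RPC symbol using ActiveSupport's `camelcase`/`constantize`. A plain-Ruby sketch of the same dispatch idea with `const_get` and stand-in classes:

    # Stand-in request classes, for illustration only.
    module Fake
      UserCherryPickRequest = Struct.new(:branch_name)
      UserRevertRequest     = Struct.new(:branch_name)
    end

    def request_class_for(rpc)
      # :cherry_pick => "UserCherryPickRequest", :revert => "UserRevertRequest"
      name = "User#{rpc.to_s.split('_').map(&:capitalize).join}Request"
      Fake.const_get(name)
    end

    puts request_class_for(:cherry_pick)   # => Fake::UserCherryPickRequest
    puts request_class_for(:revert)        # => Fake::UserRevertRequest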
diff --git a/lib/gitlab/gitaly_client/repository_service.rb b/lib/gitlab/gitaly_client/repository_service.rb
index b9e606592d7..c1f95396878 100644
--- a/lib/gitlab/gitaly_client/repository_service.rb
+++ b/lib/gitlab/gitaly_client/repository_service.rb
@@ -69,6 +69,16 @@ module Gitlab
response.value
end
+ def find_merge_base(*revisions)
+ request = Gitaly::FindMergeBaseRequest.new(
+ repository: @gitaly_repo,
+ revisions: revisions.map { |r| GitalyClient.encode(r) }
+ )
+
+ response = GitalyClient.call(@storage, :repository_service, :find_merge_base, request)
+ response.base.presence
+ end
+
def fetch_source_branch(source_repository, source_branch, local_ref)
request = Gitaly::FetchSourceBranchRequest.new(
repository: @gitaly_repo,
@@ -87,6 +97,17 @@ module Gitlab
response.result
end
+
+ def fsck
+ request = Gitaly::FsckRequest.new(repository: @gitaly_repo)
+ response = GitalyClient.call(@storage, :repository_service, :fsck, request)
+
+ if response.error.empty?
+ return "", 0
+ else
+ return response.error.b, 1
+ end
+ end
end
end
end
diff --git a/lib/gitlab/gitaly_client/wiki_service.rb b/lib/gitlab/gitaly_client/wiki_service.rb
index c8f065f5881..337d225d081 100644
--- a/lib/gitlab/gitaly_client/wiki_service.rb
+++ b/lib/gitlab/gitaly_client/wiki_service.rb
@@ -18,12 +18,11 @@ module Gitlab
commit_details: gitaly_commit_details(commit_details)
)
- strio = StringIO.new(content)
+ strio = GitalyClient.binary_stringio(content)
enum = Enumerator.new do |y|
until strio.eof?
- chunk = strio.read(MAX_MSG_SIZE)
- request.content = GitalyClient.encode(chunk)
+ request.content = strio.read(MAX_MSG_SIZE)
y.yield request
@@ -46,12 +45,11 @@ module Gitlab
commit_details: gitaly_commit_details(commit_details)
)
- strio = StringIO.new(content)
+ strio = GitalyClient.binary_stringio(content)
enum = Enumerator.new do |y|
until strio.eof?
- chunk = strio.read(MAX_MSG_SIZE)
- request.content = GitalyClient.encode(chunk)
+ request.content = strio.read(MAX_MSG_SIZE)
y.yield request
diff --git a/lib/gitlab/identifier.rb b/lib/gitlab/identifier.rb
index 94678b6ec40..3f3f10596c5 100644
--- a/lib/gitlab/identifier.rb
+++ b/lib/gitlab/identifier.rb
@@ -2,9 +2,8 @@
# key-13 or user-36 or last commit
module Gitlab
module Identifier
- def identify(identifier, project, newrev)
+ def identify(identifier, project = nil, newrev = nil)
if identifier.blank?
- # Local push from gitlab
identify_using_commit(project, newrev)
elsif identifier =~ /\Auser-\d+\Z/
# git push over http
@@ -17,6 +16,8 @@ module Gitlab
# Tries to identify a user based on a commit SHA.
def identify_using_commit(project, ref)
+ return if project.nil? && ref.nil?
+
commit = project.commit(ref)
return if !commit || !commit.author_email
diff --git a/lib/gitlab/kubernetes/helm.rb b/lib/gitlab/kubernetes/helm.rb
index 7a50f07f3c5..407cdefc04d 100644
--- a/lib/gitlab/kubernetes/helm.rb
+++ b/lib/gitlab/kubernetes/helm.rb
@@ -18,7 +18,7 @@ module Gitlab
def initialize(kubeclient)
@kubeclient = kubeclient
- @namespace = Namespace.new(NAMESPACE, kubeclient)
+ @namespace = Gitlab::Kubernetes::Namespace.new(NAMESPACE, kubeclient)
end
def install(command)
diff --git a/lib/gitlab/metrics/method_call.rb b/lib/gitlab/metrics/method_call.rb
index 65d55576ac2..9112164f22e 100644
--- a/lib/gitlab/metrics/method_call.rb
+++ b/lib/gitlab/metrics/method_call.rb
@@ -1,7 +1,11 @@
+# rubocop:disable Style/ClassVars
+
module Gitlab
module Metrics
# Class for tracking timing information about method calls
class MethodCall
+ @@measurement_enabled_cache = Concurrent::AtomicBoolean.new(false)
+ @@measurement_enabled_cache_expires_at = Concurrent::AtomicFixnum.new(Time.now.to_i)
MUTEX = Mutex.new
BASE_LABELS = { module: nil, method: nil }.freeze
attr_reader :real_time, :cpu_time, :call_count, :labels
@@ -18,6 +22,10 @@ module Gitlab
end
end
+ def self.measurement_enabled_cache_expires_at
+ @@measurement_enabled_cache_expires_at
+ end
+
# name - The full name of the method (including namespace) such as
# `User#sign_in`.
#
@@ -72,7 +80,14 @@ module Gitlab
end
def call_measurement_enabled?
- Feature.get(:prometheus_metrics_method_instrumentation).enabled?
+ expires_at = @@measurement_enabled_cache_expires_at.value
+ if expires_at < Time.now.to_i
+ if @@measurement_enabled_cache_expires_at.compare_and_set(expires_at, 1.minute.from_now.to_i)
+ @@measurement_enabled_cache.value = Feature.get(:prometheus_metrics_method_instrumentation).enabled?
+ end
+ end
+
+ @@measurement_enabled_cache.value
end
end
end
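The class variables cache the feature-flag lookup for a minute, and the compare-and-set ensures only one thread per interval pays for the refresh. A trimmed-down sketch, assuming the `concurrent-ruby` gem and a stand-in for the expensive `Feature` lookup:

    require 'concurrent'

    class CachedFlag
      @@enabled    = Concurrent::AtomicBoolean.new(false)
      @@expires_at = Concurrent::AtomicFixnum.new(0)   # start expired so the first call refreshes

      # Stand-in for Feature.get(...).enabled?; pretend it is expensive.
      def self.expensive_lookup
        sleep 0.01
        true
      end

      def self.enabled?
        expires_at = @@expires_at.value

        if expires_at < Time.now.to_i
          # Only the thread that wins the compare-and-set performs the lookup.
          if @@expires_at.compare_and_set(expires_at, Time.now.to_i + 60)
            @@enabled.value = expensive_lookup
          end
        end

        @@enabled.value
      end
    end

    puts CachedFlag.enabled?   # => true (cached for the next 60 seconds)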
diff --git a/lib/gitlab/metrics/samplers/influx_sampler.rb b/lib/gitlab/metrics/samplers/influx_sampler.rb
index f4f9b5ca792..5a0f7f28fc8 100644
--- a/lib/gitlab/metrics/samplers/influx_sampler.rb
+++ b/lib/gitlab/metrics/samplers/influx_sampler.rb
@@ -27,7 +27,6 @@ module Gitlab
def sample
sample_memory_usage
sample_file_descriptors
- sample_objects
sample_gc
flush
@@ -48,29 +47,6 @@ module Gitlab
add_metric('file_descriptors', value: System.file_descriptor_count)
end
- if Metrics.mri?
- def sample_objects
- sample = Allocations.to_hash
- counts = sample.each_with_object({}) do |(klass, count), hash|
- name = klass.name
-
- next unless name
-
- hash[name] = count
- end
-
- # Symbols aren't allocated so we'll need to add those manually.
- counts['Symbol'] = Symbol.all_symbols.length
-
- counts.each do |name, count|
- add_metric('object_counts', { count: count }, type: name)
- end
- end
- else
- def sample_objects
- end
- end
-
def sample_gc
time = GC::Profiler.total_time * 1000.0
stats = GC.stat.merge(total_time: time)
diff --git a/lib/gitlab/metrics/samplers/ruby_sampler.rb b/lib/gitlab/metrics/samplers/ruby_sampler.rb
index 436a9e9550d..b68800417a2 100644
--- a/lib/gitlab/metrics/samplers/ruby_sampler.rb
+++ b/lib/gitlab/metrics/samplers/ruby_sampler.rb
@@ -32,7 +32,7 @@ module Gitlab
def init_metrics
metrics = {}
- metrics[:sampler_duration] = Metrics.histogram(with_prefix(:sampler_duration, :seconds), 'Sampler time', {})
+ metrics[:sampler_duration] = Metrics.histogram(with_prefix(:sampler_duration, :seconds), 'Sampler time', { worker: nil })
metrics[:total_time] = Metrics.gauge(with_prefix(:gc, :time_total), 'Total GC time', labels, :livesum)
GC.stat.keys.each do |key|
metrics[key] = Metrics.gauge(with_prefix(:gc, key), to_doc_string(key), labels, :livesum)
@@ -48,7 +48,6 @@ module Gitlab
def sample
start_time = System.monotonic_time
sample_gc
- sample_objects
metrics[:memory_usage].set(labels, System.memory_usage)
metrics[:file_descriptors].set(labels, System.file_descriptor_count)
@@ -68,41 +67,15 @@ module Gitlab
end
end
- def sample_objects
- list_objects.each do |name, count|
- metrics[:objects_total].set(labels.merge(class: name), count)
- end
- end
-
- if Metrics.mri?
- def list_objects
- sample = Allocations.to_hash
- counts = sample.each_with_object({}) do |(klass, count), hash|
- name = klass.name
-
- next unless name
-
- hash[name] = count
- end
-
- # Symbols aren't allocated so we'll need to add those manually.
- counts['Symbol'] = Symbol.all_symbols.length
- counts
- end
- else
- def list_objects
- end
- end
-
def worker_label
return {} unless defined?(Unicorn::Worker)
worker_no = ::Prometheus::Client::Support::Unicorn.worker_id
if worker_no
- { unicorn: worker_no }
+ { worker: worker_no }
else
- { unicorn: 'master' }
+ { worker: 'master' }
end
end
end
diff --git a/lib/gitlab/project_search_results.rb b/lib/gitlab/project_search_results.rb
index 561aa9e162c..e2662fc362b 100644
--- a/lib/gitlab/project_search_results.rb
+++ b/lib/gitlab/project_search_results.rb
@@ -47,8 +47,11 @@ module Gitlab
startline = 0
result.each_line.each_with_index do |line, index|
- if line =~ /^.*:.*:\d+:/
- ref, filename, startline = line.split(':')
+ matches = line.match(/^(?<ref>[^:]*):(?<filename>.*):(?<startline>\d+):/)
+ if matches
+ ref = matches[:ref]
+ filename = matches[:filename]
+ startline = matches[:startline]
startline = startline.to_i - index
extname = Regexp.escape(File.extname(filename))
basename = filename.sub(/#{extname}$/, '')
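The named captures pull the ref, filename and starting line out of a grep-style result line in a single match. A small sketch with a sample line:

    # ref:filename:line_number:matched text
    line = "master:app/models/project.rb:12:  def initialize\n"

    matches = line.match(/^(?<ref>[^:]*):(?<filename>.*):(?<startline>\d+):/)

    if matches
      puts matches[:ref]         # => "master"
      puts matches[:filename]    # => "app/models/project.rb"
      puts matches[:startline]   # => "12"
    end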
diff --git a/lib/gitlab/prometheus/queries/query_additional_metrics.rb b/lib/gitlab/prometheus/queries/query_additional_metrics.rb
index 7ac6162b54d..5cddc96a643 100644
--- a/lib/gitlab/prometheus/queries/query_additional_metrics.rb
+++ b/lib/gitlab/prometheus/queries/query_additional_metrics.rb
@@ -76,7 +76,7 @@ module Gitlab
timeframe_start: timeframe_start,
timeframe_end: timeframe_end,
ci_environment_slug: environment.slug,
- kube_namespace: environment.project.kubernetes_service&.actual_namespace || '',
+ kube_namespace: environment.project.deployment_platform&.actual_namespace || '',
environment_filter: %{container_name!="POD",environment="#{environment.slug}"}
}
end
diff --git a/lib/gitlab/reference_extractor.rb b/lib/gitlab/reference_extractor.rb
index bc836dcc08d..9ff82d628c0 100644
--- a/lib/gitlab/reference_extractor.rb
+++ b/lib/gitlab/reference_extractor.rb
@@ -1,7 +1,7 @@
module Gitlab
# Extract possible GFM references from an arbitrary String for further processing.
class ReferenceExtractor < Banzai::ReferenceExtractor
- REFERABLES = %i(user issue label milestone merge_request snippet commit commit_range directly_addressed_user).freeze
+ REFERABLES = %i(user issue label milestone merge_request snippet commit commit_range directly_addressed_user epic).freeze
attr_accessor :project, :current_user, :author
def initialize(project, current_user = nil)
diff --git a/lib/gitlab/seeder.rb b/lib/gitlab/seeder.rb
index f9ab9bd466f..30df7e4a831 100644
--- a/lib/gitlab/seeder.rb
+++ b/lib/gitlab/seeder.rb
@@ -8,7 +8,8 @@ end
module Gitlab
class Seeder
def self.quiet
- mute_mailer
+ mute_mailer unless Rails.env.test?
+
SeedFu.quiet = true
yield
diff --git a/lib/gitlab/shell.rb b/lib/gitlab/shell.rb
index 996d213fdb4..a22a63665be 100644
--- a/lib/gitlab/shell.rb
+++ b/lib/gitlab/shell.rb
@@ -143,20 +143,27 @@ module Gitlab
storage, "#{path}.git", "#{new_path}.git"])
end
- # Fork repository to new namespace
+ # Fork repository to new path
# forked_from_storage - forked-from project's storage path
- # path - project path with namespace
+ # forked_from_disk_path - project disk path
# forked_to_storage - forked-to project's storage path
- # fork_namespace - namespace for forked project
+ # forked_to_disk_path - forked project disk path
#
# Ex.
- # fork_repository("/path/to/forked_from/storage", "gitlab/gitlab-ci", "/path/to/forked_to/storage", "randx")
+ # fork_repository("/path/to/forked_from/storage", "gitlab/gitlab-ci", "/path/to/forked_to/storage", "new-namespace/gitlab-ci")
#
# Gitaly note: JV: not easy to migrate because this involves two Gitaly servers, not one.
- def fork_repository(forked_from_storage, path, forked_to_storage, fork_namespace)
- gitlab_shell_fast_execute([gitlab_shell_projects_path, 'fork-project',
- forked_from_storage, "#{path}.git", forked_to_storage,
- fork_namespace])
+ def fork_repository(forked_from_storage, forked_from_disk_path, forked_to_storage, forked_to_disk_path)
+ gitlab_shell_fast_execute(
+ [
+ gitlab_shell_projects_path,
+ 'fork-repository',
+ forked_from_storage,
+ "#{forked_from_disk_path}.git",
+ forked_to_storage,
+ "#{forked_to_disk_path}.git"
+ ]
+ )
end
# Remove repository from file system
diff --git a/lib/gitlab/sidekiq_config.rb b/lib/gitlab/sidekiq_config.rb
new file mode 100644
index 00000000000..c3d7814551c
--- /dev/null
+++ b/lib/gitlab/sidekiq_config.rb
@@ -0,0 +1,61 @@
+require 'yaml'
+require 'set'
+
+module Gitlab
+ module SidekiqConfig
+ # This method is called by `bin/sidekiq-cluster` in EE, which runs outside
+ # of bundler/Rails context, so we cannot use any gem or Rails methods.
+ def self.worker_queues(rails_path = Rails.root.to_s)
+ @worker_queues ||= {}
+ @worker_queues[rails_path] ||= YAML.load_file(File.join(rails_path, 'app/workers/all_queues.yml'))
+ end
+
+ # This method is called by `bin/sidekiq-cluster` in EE, which runs outside
+ # of bundler/Rails context, so we cannot use any gem or Rails methods.
+ def self.expand_queues(queues, all_queues = self.worker_queues)
+ return [] if queues.empty?
+
+ queues_set = all_queues.to_set
+
+ queues.flat_map do |queue|
+ [queue, *queues_set.grep(/\A#{queue}:/)]
+ end
+ end
+
+ def self.redis_queues
+ # Not memoized, because this can change during the life of the application
+ Sidekiq::Queue.all.map(&:name)
+ end
+
+ def self.config_queues
+ @config_queues ||= begin
+ config = YAML.load_file(Rails.root.join('config/sidekiq_queues.yml'))
+ config[:queues].map(&:first)
+ end
+ end
+
+ def self.cron_workers
+ @cron_workers ||= Settings.cron_jobs.map { |job_name, options| options['job_class'].constantize }
+ end
+
+ def self.workers
+ @workers ||= find_workers(Rails.root.join('app', 'workers'))
+ end
+
+ def self.find_workers(root)
+ concerns = root.join('concerns').to_s
+
+ workers = Dir[root.join('**', '*.rb')]
+ .reject { |path| path.start_with?(concerns) }
+
+ workers.map! do |path|
+ ns = Pathname.new(path).relative_path_from(root).to_s.gsub('.rb', '')
+
+ ns.camelize.constantize
+ end
+
+ # Skip things that aren't workers
+ workers.select { |w| w < Sidekiq::Worker }
+ end
+ end
+end
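`expand_queues` expands a queue namespace into the namespace itself plus every known queue beneath it. A sketch with an inline queue list standing in for `app/workers/all_queues.yml`; the queue names are illustrative:

    require 'set'

    def expand_queues(queues, all_queues)
      return [] if queues.empty?

      queues_set = all_queues.to_set

      queues.flat_map do |queue|
        [queue, *queues_set.grep(/\A#{queue}:/)]
      end
    end

    all_queues = %w[
      post_receive
      pipeline_processing:build_finished
      pipeline_processing:pipeline_process
    ]

    puts expand_queues(%w[pipeline_processing], all_queues).inspect
    # => ["pipeline_processing", "pipeline_processing:build_finished", "pipeline_processing:pipeline_process"]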
diff --git a/lib/gitlab/sidekiq_versioning.rb b/lib/gitlab/sidekiq_versioning.rb
new file mode 100644
index 00000000000..9683214ec18
--- /dev/null
+++ b/lib/gitlab/sidekiq_versioning.rb
@@ -0,0 +1,25 @@
+module Gitlab
+ module SidekiqVersioning
+ def self.install!
+ Sidekiq::Manager.prepend SidekiqVersioning::Manager
+
+ # The Sidekiq client API always adds the queue to the Sidekiq queue
+ # list, but mail_room and gitlab-shell do not. This is only necessary
+ # for monitoring.
+ begin
+ queues = SidekiqConfig.worker_queues
+
+ if queues.any?
+ Sidekiq.redis do |conn|
+ conn.pipelined do
+ queues.each do |queue|
+ conn.sadd('queues', queue)
+ end
+ end
+ end
+ end
+ rescue ::Redis::BaseError, SocketError, Errno::ENOENT, Errno::EADDRNOTAVAIL, Errno::EAFNOSUPPORT, Errno::ECONNRESET, Errno::ECONNREFUSED
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/sidekiq_versioning/manager.rb b/lib/gitlab/sidekiq_versioning/manager.rb
new file mode 100644
index 00000000000..308be0fdf76
--- /dev/null
+++ b/lib/gitlab/sidekiq_versioning/manager.rb
@@ -0,0 +1,12 @@
+module Gitlab
+ module SidekiqVersioning
+ module Manager
+ def initialize(options = {})
+ options[:strict] = false
+ options[:queues] = SidekiqConfig.expand_queues(options[:queues])
+ Sidekiq.logger.info "Listening on queues #{options[:queues].uniq.sort}"
+ super
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/storage_check.rb b/lib/gitlab/storage_check.rb
new file mode 100644
index 00000000000..fe81513c9ec
--- /dev/null
+++ b/lib/gitlab/storage_check.rb
@@ -0,0 +1,11 @@
+require_relative 'storage_check/cli'
+require_relative 'storage_check/gitlab_caller'
+require_relative 'storage_check/option_parser'
+require_relative 'storage_check/response'
+
+module Gitlab
+ module StorageCheck
+ ENDPOINT = '/-/storage_check'.freeze
+ Options = Struct.new(:target, :token, :interval, :dryrun)
+ end
+end
diff --git a/lib/gitlab/storage_check/cli.rb b/lib/gitlab/storage_check/cli.rb
new file mode 100644
index 00000000000..04bf1bf1d26
--- /dev/null
+++ b/lib/gitlab/storage_check/cli.rb
@@ -0,0 +1,69 @@
+module Gitlab
+ module StorageCheck
+ class CLI
+ def self.start!(args)
+ runner = new(Gitlab::StorageCheck::OptionParser.parse!(args))
+ runner.start_loop
+ end
+
+ attr_reader :logger, :options
+
+ def initialize(options)
+ @options = options
+ @logger = Logger.new(STDOUT)
+ end
+
+ def start_loop
+ logger.info "Checking #{options.target} every #{options.interval} seconds"
+
+ if options.dryrun
+ logger.info "Dryrun, exiting..."
+ return
+ end
+
+ begin
+ loop do
+ response = GitlabCaller.new(options).call!
+ log_response(response)
+ update_settings(response)
+
+ sleep options.interval
+ end
+ rescue Interrupt
+ logger.info "Ending storage-check"
+ end
+ end
+
+ def update_settings(response)
+ previous_interval = options.interval
+
+ if response.valid?
+ options.interval = response.check_interval || previous_interval
+ end
+
+ if previous_interval != options.interval
+ logger.info "Interval changed: #{options.interval} seconds"
+ end
+ end
+
+ def log_response(response)
+ unless response.valid?
+        return logger.error("Invalid response checking NFS storage: #{response.http_response.inspect}")
+ end
+
+ if response.responsive_shards.any?
+ logger.debug("Responsive shards: #{response.responsive_shards.join(', ')}")
+ end
+
+ warnings = []
+ if response.skipped_shards.any?
+ warnings << "Skipped shards: #{response.skipped_shards.join(', ')}"
+ end
+ if response.failing_shards.any?
+ warnings << "Failing shards: #{response.failing_shards.join(', ')}"
+ end
+ logger.warn(warnings.join(' - ')) if warnings.any?
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/storage_check/gitlab_caller.rb b/lib/gitlab/storage_check/gitlab_caller.rb
new file mode 100644
index 00000000000..44952b68844
--- /dev/null
+++ b/lib/gitlab/storage_check/gitlab_caller.rb
@@ -0,0 +1,39 @@
+require 'excon'
+
+module Gitlab
+ module StorageCheck
+ class GitlabCaller
+ def initialize(options)
+ @options = options
+ end
+
+ def call!
+ Gitlab::StorageCheck::Response.new(get_response)
+ rescue Errno::ECONNREFUSED, Excon::Error
+ # Server not ready, treated as invalid response.
+ Gitlab::StorageCheck::Response.new(nil)
+ end
+
+ def get_response
+ scheme, *other_parts = URI.split(@options.target)
+ socket_path = if scheme == 'unix'
+ other_parts.compact.join
+ end
+
+ connection = Excon.new(@options.target, socket: socket_path)
+ connection.post(path: Gitlab::StorageCheck::ENDPOINT,
+ headers: headers)
+ end
+
+ def headers
+ @headers ||= begin
+ headers = {}
+ headers['Content-Type'] = headers['Accept'] = 'application/json'
+ headers['TOKEN'] = @options.token if @options.token
+
+ headers
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/storage_check/option_parser.rb b/lib/gitlab/storage_check/option_parser.rb
new file mode 100644
index 00000000000..66ed7906f97
--- /dev/null
+++ b/lib/gitlab/storage_check/option_parser.rb
@@ -0,0 +1,39 @@
+module Gitlab
+ module StorageCheck
+ class OptionParser
+ def self.parse!(args)
+ # Start out with some defaults
+ options = Gitlab::StorageCheck::Options.new(nil, nil, 1, false)
+
+ parser = ::OptionParser.new do |opts|
+ opts.banner = "Usage: bin/storage_check [options]"
+
+ opts.on('-t=string', '--target string', 'URL or socket to trigger storage check') do |value|
+ options.target = value
+ end
+
+ opts.on('-T=string', '--token string', 'Health token to use') { |value| options.token = value }
+
+ opts.on('-i=n', '--interval n', ::OptionParser::DecimalInteger, 'Seconds between checks') do |value|
+ options.interval = value
+ end
+
+ opts.on('-d', '--dryrun', "Output what will be performed, but don't start the process") do |value|
+ options.dryrun = value
+ end
+ end
+ parser.parse!(args)
+
+ unless options.target
+        raise ::OptionParser::InvalidArgument.new('Provide a URI to run the checks against')
+ end
+
+ if URI.parse(options.target).scheme.nil?
+        raise ::OptionParser::InvalidArgument.new('Add a scheme to the target; `unix://`, `https://` and `http://` are supported')
+ end
+
+ options
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/storage_check/response.rb b/lib/gitlab/storage_check/response.rb
new file mode 100644
index 00000000000..326ab236e3e
--- /dev/null
+++ b/lib/gitlab/storage_check/response.rb
@@ -0,0 +1,77 @@
+require 'json'
+
+module Gitlab
+ module StorageCheck
+ class Response
+ attr_reader :http_response
+
+ def initialize(http_response)
+ @http_response = http_response
+ end
+
+ def valid?
+ @http_response && (200...299).cover?(@http_response.status) &&
+ @http_response.headers['Content-Type'].include?('application/json') &&
+ parsed_response
+ end
+
+ def check_interval
+ return nil unless parsed_response
+
+ parsed_response['check_interval']
+ end
+
+ def responsive_shards
+ divided_results[:responsive_shards]
+ end
+
+ def skipped_shards
+ divided_results[:skipped_shards]
+ end
+
+ def failing_shards
+ divided_results[:failing_shards]
+ end
+
+ private
+
+ def results
+ return [] unless parsed_response
+
+ parsed_response['results']
+ end
+
+ def divided_results
+ return @divided_results if @divided_results
+
+ @divided_results = {}
+ @divided_results[:responsive_shards] = []
+ @divided_results[:skipped_shards] = []
+ @divided_results[:failing_shards] = []
+
+ results.each do |info|
+ name = info['storage']
+
+ case info['success']
+ when true
+ @divided_results[:responsive_shards] << name
+ when false
+ @divided_results[:failing_shards] << name
+ else
+ @divided_results[:skipped_shards] << name
+ end
+ end
+
+ @divided_results
+ end
+
+ def parsed_response
+ return @parsed_response if defined?(@parsed_response)
+
+ @parsed_response = JSON.parse(@http_response.body)
+ rescue JSON::JSONError
+ @parsed_response = nil
+ end
+ end
+ end
+end
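`Response` only needs an object that responds to `status`, `headers` and `body`, so it can be exercised without Excon. A usage sketch assuming `lib/` is on the load path; the JSON shape follows the fields the class reads and is otherwise an assumption:

    require 'json'
    require 'gitlab/storage_check/response'   # assumes lib/ is on $LOAD_PATH

    FakeHttpResponse = Struct.new(:status, :headers, :body)

    body = JSON.generate(
      'check_interval' => 30,
      'results' => [
        { 'storage' => 'nfs1', 'success' => true },
        { 'storage' => 'nfs2', 'success' => false },
        { 'storage' => 'nfs3', 'success' => nil }
      ]
    )

    response = Gitlab::StorageCheck::Response.new(
      FakeHttpResponse.new(200, { 'Content-Type' => 'application/json' }, body)
    )

    puts response.check_interval              # => 30
    puts response.responsive_shards.inspect   # => ["nfs1"]
    puts response.failing_shards.inspect      # => ["nfs2"]
    puts response.skipped_shards.inspect      # => ["nfs3"]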
diff --git a/lib/gitlab/tcp_checker.rb b/lib/gitlab/tcp_checker.rb
new file mode 100644
index 00000000000..6e24e46d0ea
--- /dev/null
+++ b/lib/gitlab/tcp_checker.rb
@@ -0,0 +1,45 @@
+module Gitlab
+ class TcpChecker
+ attr_reader :remote_host, :remote_port, :local_host, :local_port, :error
+
+ def initialize(remote_host, remote_port, local_host = nil, local_port = nil)
+ @remote_host = remote_host
+ @remote_port = remote_port
+ @local_host = local_host
+ @local_port = local_port
+ end
+
+ def local
+ join_host_port(local_host, local_port)
+ end
+
+ def remote
+ join_host_port(remote_host, remote_port)
+ end
+
+ def check(timeout: 10)
+ Socket.tcp(
+ remote_host, remote_port,
+ local_host, local_port,
+ connect_timeout: timeout
+ ) do |sock|
+ @local_port, @local_host = Socket.unpack_sockaddr_in(sock.local_address)
+ @remote_port, @remote_host = Socket.unpack_sockaddr_in(sock.remote_address)
+ end
+
+ true
+ rescue => err
+ @error = err
+
+ false
+ end
+
+ private
+
+ def join_host_port(host, port)
+ host = "[#{host}]" if host.include?(':')
+
+ "#{host}:#{port}"
+ end
+ end
+end
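`check` leans on `Socket.tcp` with a `connect_timeout:` and unpacks the resolved addresses from the open socket. A minimal standalone sketch of the same connect-and-report pattern:

    require 'socket'

    def tcp_reachable?(host, port, timeout: 5)
      Socket.tcp(host, port, connect_timeout: timeout) do |sock|
        remote_port, remote_host = Socket.unpack_sockaddr_in(sock.remote_address)
        puts "connected to #{remote_host}:#{remote_port}"
      end

      true
    rescue SystemCallError, SocketError => e
      puts "failed: #{e.message}"
      false
    end

    tcp_reachable?('localhost', 80)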
diff --git a/lib/gitlab/utils.rb b/lib/gitlab/utils.rb
index abb3d3a02c3..b3baaf036d8 100644
--- a/lib/gitlab/utils.rb
+++ b/lib/gitlab/utils.rb
@@ -46,5 +46,22 @@ module Gitlab
def random_string
Random.rand(Float::MAX.to_i).to_s(36)
end
+
+ # See: http://stackoverflow.com/questions/2108727/which-in-ruby-checking-if-program-exists-in-path-from-ruby
+ # Cross-platform way of finding an executable in the $PATH.
+ #
+ # which('ruby') #=> /usr/bin/ruby
+ def which(cmd, env = ENV)
+ exts = env['PATHEXT'] ? env['PATHEXT'].split(';') : ['']
+
+ env['PATH'].split(File::PATH_SEPARATOR).each do |path|
+ exts.each do |ext|
+ exe = File.join(path, "#{cmd}#{ext}")
+ return exe if File.executable?(exe) && !File.directory?(exe)
+ end
+ end
+
+ nil
+ end
end
end
diff --git a/lib/gitlab/utils/strong_memoize.rb b/lib/gitlab/utils/strong_memoize.rb
index a2ac9285b56..fe091f4611b 100644
--- a/lib/gitlab/utils/strong_memoize.rb
+++ b/lib/gitlab/utils/strong_memoize.rb
@@ -11,6 +11,8 @@ module Gitlab
#
# We could write it like:
#
+ # include Gitlab::Utils::StrongMemoize
+ #
# def trigger_from_token
# strong_memoize(:trigger) do
# Ci::Trigger.find_by_token(params[:token].to_s)
@@ -18,14 +20,22 @@ module Gitlab
# end
#
def strong_memoize(name)
- ivar_name = "@#{name}"
-
- if instance_variable_defined?(ivar_name)
- instance_variable_get(ivar_name)
+ if instance_variable_defined?(ivar(name))
+ instance_variable_get(ivar(name))
else
- instance_variable_set(ivar_name, yield)
+ instance_variable_set(ivar(name), yield)
end
end
+
+ def clear_memoization(name)
+ remove_instance_variable(ivar(name)) if instance_variable_defined?(ivar(name))
+ end
+
+ private
+
+ def ivar(name)
+ "@#{name}"
+ end
end
end
end
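Because `strong_memoize` checks `instance_variable_defined?`, it also caches `nil` and `false` results, which `||=` cannot do; `clear_memoization` drops the cached value again. A small usage sketch, assuming `lib/` is on the load path:

    require 'gitlab/utils/strong_memoize'   # assumes lib/ is on $LOAD_PATH

    class TokenLookup
      include Gitlab::Utils::StrongMemoize

      attr_reader :lookups

      def initialize
        @lookups = 0
      end

      def token
        strong_memoize(:token) do
          @lookups += 1
          nil   # even a nil result is cached
        end
      end
    end

    lookup = TokenLookup.new
    lookup.token
    lookup.token
    puts lookup.lookups   # => 1, the block ran only once

    lookup.clear_memoization(:token)
    lookup.token
    puts lookup.lookups   # => 2, the cache was cleared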
diff --git a/lib/gitlab/view/presenter/factory.rb b/lib/gitlab/view/presenter/factory.rb
index d172d61e2c9..570f0723e39 100644
--- a/lib/gitlab/view/presenter/factory.rb
+++ b/lib/gitlab/view/presenter/factory.rb
@@ -16,7 +16,7 @@ module Gitlab
attr_reader :subject, :attributes
def presenter_class
- "#{subject.class.name}Presenter".constantize
+ attributes.delete(:presenter_class) { "#{subject.class.name}Presenter".constantize }
end
end
end
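The factory change relies on `Hash#delete` with a block, which only runs the block (here, the naming convention) when the key is absent. A tiny sketch using strings instead of `constantize` to stay dependency-free:

    def presenter_name_for(subject, attributes)
      # The block supplies the fallback only when :presenter_class is missing.
      attributes.delete(:presenter_class) { "#{subject.class.name}Presenter" }
    end

    puts presenter_name_for('a string', {})                                   # => "StringPresenter"
    puts presenter_name_for('a string', presenter_class: 'CustomPresenter')   # => "CustomPresenter"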
diff --git a/lib/gitlab/workhorse.rb b/lib/gitlab/workhorse.rb
index 864a9e04888..5ab6cd5a4ef 100644
--- a/lib/gitlab/workhorse.rb
+++ b/lib/gitlab/workhorse.rb
@@ -58,7 +58,7 @@ module Gitlab
end
def artifact_upload_ok
- { TempPath: ArtifactUploader.artifacts_upload_path }
+ { TempPath: JobArtifactUploader.artifacts_upload_path }
end
def send_git_blob(repository, blob)
diff --git a/lib/google_api/cloud_platform/client.rb b/lib/google_api/cloud_platform/client.rb
index 9242cbe840c..b0563fb2d69 100644
--- a/lib/google_api/cloud_platform/client.rb
+++ b/lib/google_api/cloud_platform/client.rb
@@ -44,7 +44,7 @@ module GoogleApi
service = Google::Apis::ContainerV1::ContainerService.new
service.authorization = access_token
- service.get_zone_cluster(project_id, zone, cluster_id)
+ service.get_zone_cluster(project_id, zone, cluster_id, options: user_agent_header)
end
def projects_zones_clusters_create(project_id, zone, cluster_name, cluster_size, machine_type:)
@@ -62,14 +62,14 @@ module GoogleApi
}
} )
- service.create_cluster(project_id, zone, request_body)
+ service.create_cluster(project_id, zone, request_body, options: user_agent_header)
end
def projects_zones_operations(project_id, zone, operation_id)
service = Google::Apis::ContainerV1::ContainerService.new
service.authorization = access_token
- service.get_zone_operation(project_id, zone, operation_id)
+ service.get_zone_operation(project_id, zone, operation_id, options: user_agent_header)
end
def parse_operation_id(self_link)
@@ -82,6 +82,12 @@ module GoogleApi
def token_life_time(expires_at)
DateTime.strptime(expires_at, '%s').to_time.utc - Time.now.utc
end
+
+ def user_agent_header
+ Google::Apis::RequestOptions.new.tap do |options|
+ options.header = { 'User-Agent': "GitLab/#{Gitlab::VERSION.match('(\d+\.\d+)').captures.first} (GPN:GitLab;)" }
+ end
+ end
end
end
end
diff --git a/lib/tasks/gitlab/tcp_check.rake b/lib/tasks/gitlab/tcp_check.rake
new file mode 100644
index 00000000000..1400f57d6b9
--- /dev/null
+++ b/lib/tasks/gitlab/tcp_check.rake
@@ -0,0 +1,20 @@
+namespace :gitlab do
+ desc "GitLab | Check TCP connectivity to a specific host and port"
+ task :tcp_check, [:host, :port] => :environment do |_t, args|
+ unless args.host && args.port
+ puts "Please specify a host and port: `rake gitlab:tcp_check[example.com,80]`".color(:red)
+ exit 1
+ end
+
+ checker = Gitlab::TcpChecker.new(args.host, args.port)
+
+ if checker.check
+ puts "TCP connection from #{checker.local} to #{checker.remote} succeeded".color(:green)
+ else
+ puts "TCP connection to #{checker.remote} failed: #{checker.error}".color(:red)
+ puts
+ puts 'Check that host and port are correct, and that the traffic is permitted through any firewalls.'
+ exit 1
+ end
+ end
+end