gitlab.com/gitlab-org/gitlab-foss.git
-rw-r--r--  .gitlab-ci.yml | 20
-rw-r--r--  GITLAB_WORKHORSE_VERSION | 2
-rw-r--r--  Gemfile | 2
-rw-r--r--  Gemfile.lock | 12
-rw-r--r--  app/assets/javascripts/cycle_analytics/components/total_time_component.vue | 2
-rw-r--r--  app/assets/javascripts/locale/index.js | 1
-rw-r--r--  app/assets/javascripts/monitoring/components/graph.vue | 18
-rw-r--r--  app/assets/javascripts/monitoring/components/graph/path.vue (renamed from app/assets/javascripts/monitoring/components/graph_path.vue) | 0
-rw-r--r--  app/assets/javascripts/pipelines/components/pipeline_url.vue | 7
-rw-r--r--  app/assets/stylesheets/pages/environments.scss | 10
-rw-r--r--  app/assets/stylesheets/pages/profile.scss | 9
-rw-r--r--  app/controllers/profiles/gpg_keys_controller.rb | 2
-rw-r--r--  app/models/ci/pipeline.rb | 20
-rw-r--r--  app/models/concerns/has_status.rb | 1
-rw-r--r--  app/models/gpg_key.rb | 20
-rw-r--r--  app/models/gpg_key_subkey.rb | 22
-rw-r--r--  app/models/gpg_signature.rb | 31
-rw-r--r--  app/presenters/ci/pipeline_presenter.rb | 11
-rw-r--r--  app/serializers/pipeline_entity.rb | 8
-rw-r--r--  app/views/layouts/_head.html.haml | 2
-rw-r--r--  app/views/profiles/gpg_keys/_key.html.haml | 7
-rw-r--r--  app/views/projects/snippets/show.html.haml | 2
-rw-r--r--  changelogs/unreleased/36829-add-ability-to-verify-gpg-subkeys.yml | 5
-rw-r--r--  changelogs/unreleased/37105-monitoring-graph-axes-labels-are-inaccurate-and-inconsistent.yml | 5
-rw-r--r--  changelogs/unreleased/38696-fix-project-snippets-breadcrumb-link.yml | 5
-rw-r--r--  changelogs/unreleased/fix-update-doorkeeper-openid-connect.yml | 5
-rw-r--r--  changelogs/unreleased/fl-fix-ca-time-component.yml | 5
-rw-r--r--  changelogs/unreleased/valid-branch-name-dash-bug.yml | 5
-rw-r--r--  config/initializers/doorkeeper_openid_connect.rb | 2
-rw-r--r--  config/initializers/secret_token.rb | 2
-rw-r--r--  config/initializers/sentry.rb | 4
-rw-r--r--  db/migrate/20170927161718_create_gpg_key_subkeys.rb | 23
-rw-r--r--  db/migrate/20170929080234_add_failure_reason_to_pipelines.rb | 9
-rw-r--r--  db/post_migrate/20171005130944_schedule_create_gpg_key_subkeys_from_gpg_keys.rb | 28
-rw-r--r--  db/schema.rb | 17
-rw-r--r--  doc/install/installation.md | 6
-rw-r--r--  doc/update/10.0-to-10.1.md | 356
-rw-r--r--  doc/update/mysql_to_postgresql.md | 143
-rw-r--r--  lib/api/helpers.rb | 2
-rw-r--r--  lib/gitlab/background_migration/create_gpg_key_subkeys_from_gpg_keys.rb | 53
-rw-r--r--  lib/gitlab/ci/pipeline/chain/validate/config.rb | 2
-rw-r--r--  lib/gitlab/ci/stage/seed.rb | 2
-rw-r--r--  lib/gitlab/ee_compat_check.rb | 6
-rw-r--r--  lib/gitlab/git_ref_validator.rb | 2
-rw-r--r--  lib/gitlab/gpg.rb | 15
-rw-r--r--  lib/gitlab/gpg/commit.rb | 10
-rw-r--r--  lib/gitlab/gpg/invalid_gpg_signature_updater.rb | 2
-rw-r--r--  lib/rspec_flaky/config.rb | 21
-rw-r--r--  lib/rspec_flaky/flaky_example.rb | 21
-rw-r--r--  lib/rspec_flaky/flaky_examples_collection.rb | 37
-rw-r--r--  lib/rspec_flaky/listener.rb | 63
-rw-r--r--  lib/tasks/gitlab/dev.rake | 5
-rw-r--r--  spec/factories/ci/pipelines.rb | 1
-rw-r--r--  spec/factories/gpg_key_subkeys.rb | 10
-rw-r--r--  spec/factories/gpg_keys.rb | 4
-rw-r--r--  spec/factories/gpg_signature.rb | 2
-rw-r--r--  spec/features/profiles/gpg_keys_spec.rb | 12
-rw-r--r--  spec/features/projects/pipelines/pipelines_spec.rb | 10
-rw-r--r--  spec/initializers/secret_token_spec.rb | 18
-rw-r--r--  spec/javascripts/cycle_analytics/total_time_component_spec.js | 58
-rw-r--r--  spec/javascripts/monitoring/graph_path_spec.js | 2
-rw-r--r--  spec/javascripts/monitoring/graph_spec.js | 6
-rw-r--r--  spec/javascripts/pipelines/pipeline_url_spec.js | 19
-rw-r--r--  spec/lib/gitlab/background_migration/create_gpg_key_subkeys_from_gpg_keys_spec.rb | 32
-rw-r--r--  spec/lib/gitlab/ci/pipeline/chain/validate/config_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/ci/stage/seed_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/git_ref_validator_spec.rb | 5
-rw-r--r--  spec/lib/gitlab/gpg/commit_spec.rb | 39
-rw-r--r--  spec/lib/gitlab/gpg/invalid_gpg_signature_updater_spec.rb | 49
-rw-r--r--  spec/lib/gitlab/gpg_spec.rb | 17
-rw-r--r--  spec/lib/gitlab/import_export/safe_model_attributes.yml | 1
-rw-r--r--  spec/lib/rspec_flaky/config_spec.rb | 102
-rw-r--r--  spec/lib/rspec_flaky/flaky_example_spec.rb | 129
-rw-r--r--  spec/lib/rspec_flaky/flaky_examples_collection_spec.rb | 79
-rw-r--r--  spec/lib/rspec_flaky/listener_spec.rb | 172
-rw-r--r--  spec/migrations/schedule_create_gpg_key_subkeys_from_gpg_keys_spec.rb | 43
-rw-r--r--  spec/models/ci/pipeline_spec.rb | 10
-rw-r--r--  spec/models/concerns/has_status_spec.rb | 12
-rw-r--r--  spec/models/gpg_key_spec.rb | 33
-rw-r--r--  spec/models/gpg_key_subkey_spec.rb | 15
-rw-r--r--  spec/models/gpg_signature_spec.rb | 26
-rw-r--r--  spec/presenters/ci/pipeline_presenter_spec.rb | 17
-rw-r--r--  spec/requests/api/helpers_spec.rb | 28
-rw-r--r--  spec/serializers/pipeline_entity_spec.rb | 13
-rw-r--r--  spec/services/gpg_keys/create_service_spec.rb | 10
-rw-r--r--  spec/spec_helper.rb | 5
-rw-r--r--  spec/support/gpg_helpers.rb | 313
87 files changed, 2102 insertions, 270 deletions
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 8501911fde4..fb170515c1d 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -27,7 +27,7 @@ variables:
GET_SOURCES_ATTEMPTS: "3"
KNAPSACK_RSPEC_SUITE_REPORT_PATH: knapsack/${CI_PROJECT_NAME}/rspec_report-master.json
KNAPSACK_SPINACH_SUITE_REPORT_PATH: knapsack/${CI_PROJECT_NAME}/spinach_report-master.json
- FLAKY_RSPEC_SUITE_REPORT_PATH: rspec_flaky/${CI_PROJECT_NAME}/report-master.json
+ FLAKY_RSPEC_SUITE_REPORT_PATH: rspec_flaky/report-suite.json
before_script:
- bundle --version
@@ -87,12 +87,13 @@ stages:
- export CI_NODE_TOTAL=${JOB_NAME[-1]}
- export KNAPSACK_REPORT_PATH=knapsack/${CI_PROJECT_NAME}/${JOB_NAME[0]}_node_${CI_NODE_INDEX}_${CI_NODE_TOTAL}_report.json
- export KNAPSACK_GENERATE_REPORT=true
- - export ALL_FLAKY_RSPEC_REPORT_PATH=rspec_flaky/${CI_PROJECT_NAME}/all_node_${CI_NODE_INDEX}_${CI_NODE_TOTAL}_report.json
- - export NEW_FLAKY_RSPEC_REPORT_PATH=rspec_flaky/${CI_PROJECT_NAME}/new_node_${CI_NODE_INDEX}_${CI_NODE_TOTAL}_report.json
+ - export SUITE_FLAKY_RSPEC_REPORT_PATH=${FLAKY_RSPEC_SUITE_REPORT_PATH}
+ - export FLAKY_RSPEC_REPORT_PATH=rspec_flaky/all_${JOB_NAME[0]}_${CI_NODE_INDEX}_${CI_NODE_TOTAL}_report.json
+ - export NEW_FLAKY_RSPEC_REPORT_PATH=rspec_flaky/new_${JOB_NAME[0]}_${CI_NODE_INDEX}_${CI_NODE_TOTAL}_report.json
- export FLAKY_RSPEC_GENERATE_REPORT=true
- export CACHE_CLASSES=true
- cp ${KNAPSACK_RSPEC_SUITE_REPORT_PATH} ${KNAPSACK_REPORT_PATH}
- - cp ${FLAKY_RSPEC_SUITE_REPORT_PATH} ${ALL_FLAKY_RSPEC_REPORT_PATH}
+ - '[[ -f $FLAKY_RSPEC_REPORT_PATH ]] || echo "{}" > ${FLAKY_RSPEC_REPORT_PATH}'
- '[[ -f $NEW_FLAKY_RSPEC_REPORT_PATH ]] || echo "{}" > ${NEW_FLAKY_RSPEC_REPORT_PATH}'
- scripts/gitaly-test-spawn
- knapsack rspec "--color --format documentation"
@@ -233,7 +234,7 @@ retrieve-tests-metadata:
- wget -O $KNAPSACK_SPINACH_SUITE_REPORT_PATH http://${TESTS_METADATA_S3_BUCKET}.s3.amazonaws.com/$KNAPSACK_SPINACH_SUITE_REPORT_PATH || rm $KNAPSACK_SPINACH_SUITE_REPORT_PATH
- '[[ -f $KNAPSACK_RSPEC_SUITE_REPORT_PATH ]] || echo "{}" > ${KNAPSACK_RSPEC_SUITE_REPORT_PATH}'
- '[[ -f $KNAPSACK_SPINACH_SUITE_REPORT_PATH ]] || echo "{}" > ${KNAPSACK_SPINACH_SUITE_REPORT_PATH}'
- - mkdir -p rspec_flaky/${CI_PROJECT_NAME}/
+ - mkdir -p rspec_flaky/
- wget -O $FLAKY_RSPEC_SUITE_REPORT_PATH http://${TESTS_METADATA_S3_BUCKET}.s3.amazonaws.com/$FLAKY_RSPEC_SUITE_REPORT_PATH || rm $FLAKY_RSPEC_SUITE_REPORT_PATH
- '[[ -f $FLAKY_RSPEC_SUITE_REPORT_PATH ]] || echo "{}" > ${FLAKY_RSPEC_SUITE_REPORT_PATH}'
@@ -252,22 +253,21 @@ update-tests-metadata:
- retry gem install fog-aws mime-types
- scripts/merge-reports ${KNAPSACK_RSPEC_SUITE_REPORT_PATH} knapsack/${CI_PROJECT_NAME}/rspec-pg_node_*.json
- scripts/merge-reports ${KNAPSACK_SPINACH_SUITE_REPORT_PATH} knapsack/${CI_PROJECT_NAME}/spinach-pg_node_*.json
- - scripts/merge-reports ${FLAKY_RSPEC_SUITE_REPORT_PATH} rspec_flaky/${CI_PROJECT_NAME}/all_node_*.json
+ - scripts/merge-reports ${FLAKY_RSPEC_SUITE_REPORT_PATH} rspec_flaky/all_*_*.json
- '[[ -z ${TESTS_METADATA_S3_BUCKET} ]] || scripts/sync-reports put $TESTS_METADATA_S3_BUCKET $KNAPSACK_RSPEC_SUITE_REPORT_PATH $KNAPSACK_SPINACH_SUITE_REPORT_PATH'
- '[[ -z ${TESTS_METADATA_S3_BUCKET} ]] || scripts/sync-reports put $TESTS_METADATA_S3_BUCKET $FLAKY_RSPEC_SUITE_REPORT_PATH'
- rm -f knapsack/${CI_PROJECT_NAME}/*_node_*.json
- - rm -f rspec_flaky/${CI_PROJECT_NAME}/*_node_*.json
+ - rm -f rspec_flaky/all_*.json rspec_flaky/new_*.json
flaky-examples-check:
<<: *dedicated-runner
image: ruby:2.3-alpine
services: []
before_script: []
- cache: {}
variables:
SETUP_DB: "false"
USE_BUNDLE_INSTALL: "false"
- NEW_FLAKY_SPECS_REPORT: rspec_flaky/${CI_PROJECT_NAME}/new_rspec_flaky_examples.json
+ NEW_FLAKY_SPECS_REPORT: rspec_flaky/report-new.json
stage: post-test
allow_failure: yes
only:
@@ -281,7 +281,7 @@ flaky-examples-check:
- rspec_flaky/
script:
- '[[ -f $NEW_FLAKY_SPECS_REPORT ]] || echo "{}" > ${NEW_FLAKY_SPECS_REPORT}'
- - scripts/merge-reports $NEW_FLAKY_SPECS_REPORT rspec_flaky/${CI_PROJECT_NAME}/new_node_*.json
+ - scripts/merge-reports ${NEW_FLAKY_SPECS_REPORT} rspec_flaky/new_*_*.json
- scripts/detect-new-flaky-examples $NEW_FLAKY_SPECS_REPORT
setup-test-env:
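
The hunks above consolidate the flaky-spec reports into a flat `rspec_flaky/` directory and merge the per-node `all_*_*.json` files into `rspec_flaky/report-suite.json`. As a rough illustration only (not the actual `scripts/merge-reports` implementation), the merge step boils down to folding several JSON hashes into one:

```ruby
require 'json'

# Fold per-node flaky-example reports (e.g. rspec_flaky/all_rspec-pg_0_20_report.json,
# a hypothetical filename matching the pattern exported above) into one hash;
# later files win on key collisions, which is enough for this sketch.
def merge_flaky_reports(paths)
  paths.each_with_object({}) do |path, merged|
    merged.merge!(JSON.parse(File.read(path))) if File.exist?(path)
  end
end

# merged = merge_flaky_reports(Dir.glob('rspec_flaky/all_*_*.json'))
# File.write('rspec_flaky/report-suite.json', JSON.pretty_generate(merged))
```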
diff --git a/GITLAB_WORKHORSE_VERSION b/GITLAB_WORKHORSE_VERSION
index fd2a01863fd..944880fa15e 100644
--- a/GITLAB_WORKHORSE_VERSION
+++ b/GITLAB_WORKHORSE_VERSION
@@ -1 +1 @@
-3.1.0
+3.2.0
diff --git a/Gemfile b/Gemfile
index 44c459c497f..5645d1ea4ff 100644
--- a/Gemfile
+++ b/Gemfile
@@ -23,7 +23,7 @@ gem 'faraday', '~> 0.12'
# Authentication libraries
gem 'devise', '~> 4.2'
gem 'doorkeeper', '~> 4.2.0'
-gem 'doorkeeper-openid_connect', '~> 1.1.0'
+gem 'doorkeeper-openid_connect', '~> 1.2.0'
gem 'omniauth', '~> 1.4.2'
gem 'omniauth-auth0', '~> 1.4.1'
gem 'omniauth-azure-oauth2', '~> 0.0.9'
diff --git a/Gemfile.lock b/Gemfile.lock
index 4622fd141ba..6040596dda5 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -80,7 +80,7 @@ GEM
coderay (>= 1.0.0)
erubis (>= 2.6.6)
rack (>= 0.9.0)
- bindata (2.3.5)
+ bindata (2.4.1)
binding_of_caller (0.7.2)
debug_inspector (>= 0.0.1)
bootstrap-sass (3.3.6)
@@ -166,9 +166,9 @@ GEM
docile (1.1.5)
domain_name (0.5.20161021)
unf (>= 0.0.5, < 1.0.0)
- doorkeeper (4.2.0)
+ doorkeeper (4.2.6)
railties (>= 4.2)
- doorkeeper-openid_connect (1.1.2)
+ doorkeeper-openid_connect (1.2.0)
doorkeeper (~> 4.0)
json-jwt (~> 1.6)
dropzonejs-rails (0.7.2)
@@ -410,7 +410,7 @@ GEM
railties (>= 4.2.0)
thor (>= 0.14, < 2.0)
json (1.8.6)
- json-jwt (1.7.1)
+ json-jwt (1.7.2)
activesupport
bindata
multi_json (>= 1.3)
@@ -681,7 +681,7 @@ GEM
rainbow (2.2.2)
rake
raindrops (0.18.0)
- rake (12.0.0)
+ rake (12.1.0)
rblineprof (0.3.6)
debugger-ruby_core_source (~> 1.3)
rbnacl (4.0.2)
@@ -1002,7 +1002,7 @@ DEPENDENCIES
devise-two-factor (~> 3.0.0)
diffy (~> 3.1.0)
doorkeeper (~> 4.2.0)
- doorkeeper-openid_connect (~> 1.1.0)
+ doorkeeper-openid_connect (~> 1.2.0)
dropzonejs-rails (~> 0.7.1)
email_reply_trimmer (~> 0.1)
email_spec (~> 1.6.0)
diff --git a/app/assets/javascripts/cycle_analytics/components/total_time_component.vue b/app/assets/javascripts/cycle_analytics/components/total_time_component.vue
index 9941b997b3f..62efd4f9c28 100644
--- a/app/assets/javascripts/cycle_analytics/components/total_time_component.vue
+++ b/app/assets/javascripts/cycle_analytics/components/total_time_component.vue
@@ -20,7 +20,7 @@
<template v-if="time.days">{{ time.days }} <span>{{ n__('day', 'days', time.days) }}</span></template>
<template v-if="time.hours">{{ time.hours }} <span>{{ n__('Time|hr', 'Time|hrs', time.hours) }}</span></template>
<template v-if="time.mins && !time.days">{{ time.mins }} <span>{{ n__('Time|min', 'Time|mins', time.mins) }}</span></template>
- <template v-if="time.seconds && hasDa === 1 || time.seconds === 0">{{ time.seconds }} <span>{{ s__('Time|s') }}</span></template>
+ <template v-if="time.seconds && hasData === 1 || time.seconds === 0">{{ time.seconds }} <span>{{ s__('Time|s') }}</span></template>
</template>
<template v-else>
--
diff --git a/app/assets/javascripts/locale/index.js b/app/assets/javascripts/locale/index.js
index ce05b3eabec..1003b9ba0af 100644
--- a/app/assets/javascripts/locale/index.js
+++ b/app/assets/javascripts/locale/index.js
@@ -4,6 +4,7 @@ import sprintf from './sprintf';
const langAttribute = document.querySelector('html').getAttribute('lang');
const lang = (langAttribute || 'en').replace(/-/g, '_');
const locale = new Jed(window.translations || {});
+delete window.translations;
/**
Translates `text`
diff --git a/app/assets/javascripts/monitoring/components/graph.vue b/app/assets/javascripts/monitoring/components/graph.vue
index 33611c408ef..5aa3865f96a 100644
--- a/app/assets/javascripts/monitoring/components/graph.vue
+++ b/app/assets/javascripts/monitoring/components/graph.vue
@@ -3,7 +3,7 @@
import GraphLegend from './graph/legend.vue';
import GraphFlag from './graph/flag.vue';
import GraphDeployment from './graph/deployment.vue';
- import GraphPath from './graph_path.vue';
+ import GraphPath from './graph/path.vue';
import MonitoringMixin from '../mixins/monitoring_mixins';
import eventHub from '../event_hub';
import measurements from '../utils/measurements';
@@ -69,7 +69,7 @@
},
computed: {
- outterViewBox() {
+ outerViewBox() {
return `0 0 ${this.baseGraphWidth} ${this.baseGraphHeight}`;
},
@@ -137,17 +137,19 @@
},
renderAxesPaths() {
- this.timeSeries = createTimeSeries(this.graphData.queries[0],
- this.graphWidth,
- this.graphHeight,
- this.graphHeightOffset);
+ this.timeSeries = createTimeSeries(
+ this.graphData.queries[0],
+ this.graphWidth,
+ this.graphHeight,
+ this.graphHeightOffset,
+ );
if (this.timeSeries.length > 3) {
this.baseGraphHeight = this.baseGraphHeight += (this.timeSeries.length - 3) * 20;
}
const axisXScale = d3.time.scale()
- .range([0, this.graphWidth]);
+ .range([0, this.graphWidth - 70]);
const axisYScale = d3.scale.linear()
.range([this.graphHeight - this.graphHeightOffset, 0]);
@@ -214,7 +216,7 @@
class="prometheus-svg-container"
:style="paddingBottomRootSvg">
<svg
- :viewBox="outterViewBox"
+ :viewBox="outerViewBox"
ref="baseSvg">
<g
class="x-axis"
diff --git a/app/assets/javascripts/monitoring/components/graph_path.vue b/app/assets/javascripts/monitoring/components/graph/path.vue
index 043f1bf66bb..043f1bf66bb 100644
--- a/app/assets/javascripts/monitoring/components/graph_path.vue
+++ b/app/assets/javascripts/monitoring/components/graph/path.vue
diff --git a/app/assets/javascripts/pipelines/components/pipeline_url.vue b/app/assets/javascripts/pipelines/components/pipeline_url.vue
index 76b97af39f1..9da0aac50a1 100644
--- a/app/assets/javascripts/pipelines/components/pipeline_url.vue
+++ b/app/assets/javascripts/pipelines/components/pipeline_url.vue
@@ -72,6 +72,13 @@
:title="pipeline.yaml_errors">
yaml invalid
</span>
+ <span
+ v-if="pipeline.flags.failure_reason"
+ v-tooltip
+ class="js-pipeline-url-failure label label-danger"
+ :title="pipeline.failure_reason">
+ error
+ </span>
<a
v-if="pipeline.flags.auto_devops"
tabindex="0"
diff --git a/app/assets/stylesheets/pages/environments.scss b/app/assets/stylesheets/pages/environments.scss
index 6a9c2578d95..3b5e411e2c5 100644
--- a/app/assets/stylesheets/pages/environments.scss
+++ b/app/assets/stylesheets/pages/environments.scss
@@ -291,8 +291,14 @@
fill: $black;
}
- .tick > text {
- font-size: 12px;
+ .tick {
+ > line {
+ stroke: $gray-darker;
+ }
+
+ > text {
+ font-size: 12px;
+ }
}
.text-metric-title {
diff --git a/app/assets/stylesheets/pages/profile.scss b/app/assets/stylesheets/pages/profile.scss
index 6c521bb06ee..eab39f698c3 100644
--- a/app/assets/stylesheets/pages/profile.scss
+++ b/app/assets/stylesheets/pages/profile.scss
@@ -108,6 +108,15 @@
}
}
+.subkeys-list {
+ @include basic-list;
+
+ li {
+ padding: 3px 0;
+ border: none;
+ }
+}
+
.key-list-item {
.key-list-item-info {
@media (min-width: $screen-sm-min) {
diff --git a/app/controllers/profiles/gpg_keys_controller.rb b/app/controllers/profiles/gpg_keys_controller.rb
index 689c76059f6..38e3eacd229 100644
--- a/app/controllers/profiles/gpg_keys_controller.rb
+++ b/app/controllers/profiles/gpg_keys_controller.rb
@@ -2,7 +2,7 @@ class Profiles::GpgKeysController < Profiles::ApplicationController
before_action :set_gpg_key, only: [:destroy, :revoke]
def index
- @gpg_keys = current_user.gpg_keys
+ @gpg_keys = current_user.gpg_keys.with_subkeys
@gpg_key = GpgKey.new
end
diff --git a/app/models/ci/pipeline.rb b/app/models/ci/pipeline.rb
index 3d5acc00f8f..cf3ce3c9e54 100644
--- a/app/models/ci/pipeline.rb
+++ b/app/models/ci/pipeline.rb
@@ -5,6 +5,7 @@ module Ci
include Importable
include AfterCommitQueue
include Presentable
+ include Gitlab::OptimisticLocking
belongs_to :project
belongs_to :user
@@ -58,6 +59,11 @@ module Ci
auto_devops_source: 2
}
+ enum failure_reason: {
+ unknown_failure: 0,
+ config_error: 1
+ }
+
state_machine :status, initial: :created do
event :enqueue do
transition created: :pending
@@ -109,6 +115,12 @@ module Ci
pipeline.auto_canceled_by = nil
end
+ before_transition any => :failed do |pipeline, transition|
+ transition.args.first.try do |reason|
+ pipeline.failure_reason = reason
+ end
+ end
+
after_transition [:created, :pending] => :running do |pipeline|
pipeline.run_after_commit { PipelineMetricsWorker.perform_async(pipeline.id) }
end
@@ -263,7 +275,7 @@ module Ci
end
def cancel_running
- Gitlab::OptimisticLocking.retry_lock(cancelable_statuses) do |cancelable|
+ retry_optimistic_lock(cancelable_statuses) do |cancelable|
cancelable.find_each do |job|
yield(job) if block_given?
job.cancel
@@ -312,6 +324,10 @@ module Ci
@stage_seeds ||= config_processor.stage_seeds(self)
end
+ def seeds_size
+ @seeds_size ||= stage_seeds.sum(&:size)
+ end
+
def has_kubernetes_active?
project.kubernetes_service&.active?
end
@@ -403,7 +419,7 @@ module Ci
end
def update_status
- Gitlab::OptimisticLocking.retry_lock(self) do
+ retry_optimistic_lock(self) do
case latest_builds_status
when 'pending' then enqueue
when 'running' then run
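
The new `failure_reason` enum works together with the `before_transition any => :failed` hook: a symbolic reason passed to `drop!` (as `Validate::Config` now does with `:config_error`, further down in this diff) is captured and stored on the pipeline. A minimal, framework-free sketch of that flow, using a hypothetical `PipelineSketch` class in place of the ActiveRecord model and state machine:

```ruby
# Simplified stand-in for Ci::Pipeline: no ActiveRecord, no state_machines gem,
# just the reason-capture behaviour the hunks above add.
class PipelineSketch
  FAILURE_REASONS = { unknown_failure: 0, config_error: 1 }.freeze

  attr_reader :status, :failure_reason

  def initialize
    @status = :created
    @failure_reason = nil
  end

  # Mirrors `drop!(:config_error)`: the transition argument is picked up by the
  # `before_transition any => :failed` hook and persisted as the enum value.
  def drop!(reason = nil)
    @failure_reason = reason if reason && FAILURE_REASONS.key?(reason)
    @status = :failed
  end
end

pipeline = PipelineSketch.new
pipeline.drop!(:config_error)
puts pipeline.failure_reason # => config_error
```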
diff --git a/app/models/concerns/has_status.rb b/app/models/concerns/has_status.rb
index 3803e18a96e..7c3ed96bc28 100644
--- a/app/models/concerns/has_status.rb
+++ b/app/models/concerns/has_status.rb
@@ -81,6 +81,7 @@ module HasStatus
scope :canceled, -> { where(status: 'canceled') }
scope :skipped, -> { where(status: 'skipped') }
scope :manual, -> { where(status: 'manual') }
+ scope :alive, -> { where(status: [:created, :pending, :running]) }
scope :created_or_pending, -> { where(status: [:created, :pending]) }
scope :running_or_pending, -> { where(status: [:running, :pending]) }
scope :finished, -> { where(status: [:success, :failed, :canceled]) }
diff --git a/app/models/gpg_key.rb b/app/models/gpg_key.rb
index 54bd5b68777..44eda741679 100644
--- a/app/models/gpg_key.rb
+++ b/app/models/gpg_key.rb
@@ -9,6 +9,9 @@ class GpgKey < ActiveRecord::Base
belongs_to :user
has_many :gpg_signatures
+ has_many :subkeys, class_name: 'GpgKeySubkey'
+
+ scope :with_subkeys, -> { includes(:subkeys) }
validates :user, presence: true
@@ -36,10 +39,12 @@ class GpgKey < ActiveRecord::Base
before_validation :extract_fingerprint, :extract_primary_keyid
after_commit :update_invalid_gpg_signatures, on: :create
+ after_create :generate_subkeys
def primary_keyid
super&.upcase
end
+ alias_method :keyid, :primary_keyid
def fingerprint
super&.upcase
@@ -49,6 +54,10 @@ class GpgKey < ActiveRecord::Base
super(value&.strip)
end
+ def keyids
+ [keyid].concat(subkeys.map(&:keyid))
+ end
+
def user_infos
@user_infos ||= Gitlab::Gpg.user_infos_from_key(key)
end
@@ -82,10 +91,11 @@ class GpgKey < ActiveRecord::Base
def revoke
GpgSignature
- .where(gpg_key: self)
+ .with_key_and_subkeys(self)
.where.not(verification_status: GpgSignature.verification_statuses[:unknown_key])
.update_all(
gpg_key_id: nil,
+ gpg_key_subkey_id: nil,
verification_status: GpgSignature.verification_statuses[:unknown_key],
updated_at: Time.zone.now
)
@@ -106,4 +116,12 @@ class GpgKey < ActiveRecord::Base
# only allows one key
self.primary_keyid = Gitlab::Gpg.primary_keyids_from_key(key).first
end
+
+ def generate_subkeys
+ gpg_subkeys = Gitlab::Gpg.subkeys_from_key(key)
+
+ gpg_subkeys[primary_keyid]&.each do |subkey_data|
+ subkeys.create!(keyid: subkey_data[:keyid], fingerprint: subkey_data[:fingerprint])
+ end
+ end
end
diff --git a/app/models/gpg_key_subkey.rb b/app/models/gpg_key_subkey.rb
new file mode 100644
index 00000000000..b57922aba30
--- /dev/null
+++ b/app/models/gpg_key_subkey.rb
@@ -0,0 +1,22 @@
+class GpgKeySubkey < ActiveRecord::Base
+ include ShaAttribute
+
+ sha_attribute :keyid
+ sha_attribute :fingerprint
+
+ belongs_to :gpg_key
+
+ validates :gpg_key_id, presence: true
+ validates :fingerprint, :keyid, presence: true, uniqueness: true
+
+ delegate :key, :user, :user_infos, :verified?, :verified_user_infos,
+ :verified_and_belongs_to_email?, to: :gpg_key
+
+ def keyid
+ super&.upcase
+ end
+
+ def fingerprint
+ super&.upcase
+ end
+end
diff --git a/app/models/gpg_signature.rb b/app/models/gpg_signature.rb
index 1f047a32c84..675e7a2456d 100644
--- a/app/models/gpg_signature.rb
+++ b/app/models/gpg_signature.rb
@@ -15,11 +15,42 @@ class GpgSignature < ActiveRecord::Base
belongs_to :project
belongs_to :gpg_key
+ belongs_to :gpg_key_subkey
validates :commit_sha, presence: true
validates :project_id, presence: true
validates :gpg_key_primary_keyid, presence: true
+ def self.with_key_and_subkeys(gpg_key)
+ subkey_ids = gpg_key.subkeys.pluck(:id)
+
+ where(
+ arel_table[:gpg_key_id].eq(gpg_key.id).or(
+ arel_table[:gpg_key_subkey_id].in(subkey_ids)
+ )
+ )
+ end
+
+ def gpg_key=(model)
+ case model
+ when GpgKey
+ super
+ when GpgKeySubkey
+ self.gpg_key_subkey = model
+ when NilClass
+ super
+ self.gpg_key_subkey = nil
+ end
+ end
+
+ def gpg_key
+ if gpg_key_id
+ super
+ elsif gpg_key_subkey_id
+ gpg_key_subkey
+ end
+ end
+
def gpg_key_primary_keyid
super&.upcase
end
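
With the new `belongs_to :gpg_key_subkey` association, the `gpg_key=` writer dispatches on the class of the assigned record so a subkey lands in `gpg_key_subkey_id` instead of `gpg_key_id`, and the reader falls back accordingly. A self-contained sketch of that dispatch, with `KeyStub` and `SubkeyStub` as hypothetical stand-ins for `GpgKey` and `GpgKeySubkey`:

```ruby
# Plain-Ruby illustration of the class-based dispatch in GpgSignature#gpg_key=.
KeyStub    = Struct.new(:id)
SubkeyStub = Struct.new(:id)

class SignatureSketch
  attr_accessor :gpg_key_id, :gpg_key_subkey_id

  def assign_key(model)
    case model
    when KeyStub
      self.gpg_key_id = model.id        # primary key goes to gpg_key_id
    when SubkeyStub
      self.gpg_key_subkey_id = model.id # subkey goes to gpg_key_subkey_id
    when NilClass
      self.gpg_key_id = nil
      self.gpg_key_subkey_id = nil
    end
  end
end

signature = SignatureSketch.new
signature.assign_key(SubkeyStub.new(42))
p [signature.gpg_key_id, signature.gpg_key_subkey_id] # => [nil, 42]
```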
diff --git a/app/presenters/ci/pipeline_presenter.rb b/app/presenters/ci/pipeline_presenter.rb
index a542bdd8295..099b4720fb6 100644
--- a/app/presenters/ci/pipeline_presenter.rb
+++ b/app/presenters/ci/pipeline_presenter.rb
@@ -1,7 +1,18 @@
module Ci
class PipelinePresenter < Gitlab::View::Presenter::Delegated
+ FAILURE_REASONS = {
+ config_error: 'CI/CD YAML configuration error!'
+ }.freeze
+
presents :pipeline
+ def failure_reason
+ return unless pipeline.failure_reason?
+
+ FAILURE_REASONS[pipeline.failure_reason.to_sym] ||
+ pipeline.failure_reason
+ end
+
def status_title
if auto_canceled?
"Pipeline is redundant and is auto-canceled by Pipeline ##{auto_canceled_by_id}"
diff --git a/app/serializers/pipeline_entity.rb b/app/serializers/pipeline_entity.rb
index 357fc71f877..6457294b285 100644
--- a/app/serializers/pipeline_entity.rb
+++ b/app/serializers/pipeline_entity.rb
@@ -20,6 +20,7 @@ class PipelineEntity < Grape::Entity
expose :has_yaml_errors?, as: :yaml_errors
expose :can_retry?, as: :retryable
expose :can_cancel?, as: :cancelable
+ expose :failure_reason?, as: :failure_reason
end
expose :details do
@@ -44,6 +45,11 @@ class PipelineEntity < Grape::Entity
end
expose :commit, using: CommitEntity
+ expose :yaml_errors, if: -> (pipeline, _) { pipeline.has_yaml_errors? }
+
+ expose :failure_reason, if: -> (pipeline, _) { pipeline.failure_reason? } do |pipeline|
+ pipeline.present.failure_reason
+ end
expose :retry_path, if: -> (*) { can_retry? } do |pipeline|
retry_project_pipeline_path(pipeline.project, pipeline)
@@ -53,8 +59,6 @@ class PipelineEntity < Grape::Entity
cancel_project_pipeline_path(pipeline.project, pipeline)
end
- expose :yaml_errors, if: -> (pipeline, _) { pipeline.has_yaml_errors? }
-
private
alias_method :pipeline, :object
diff --git a/app/views/layouts/_head.html.haml b/app/views/layouts/_head.html.haml
index 0d5350f873b..f1b32274664 100644
--- a/app/views/layouts/_head.html.haml
+++ b/app/views/layouts/_head.html.haml
@@ -37,7 +37,7 @@
- if content_for?(:library_javascripts)
= yield :library_javascripts
- = javascript_include_tag asset_path("locale/#{I18n.locale.to_s || I18n.default_locale.to_s}/app.js")
+ = javascript_include_tag asset_path("locale/#{I18n.locale.to_s || I18n.default_locale.to_s}/app.js") unless I18n.locale == :en
= webpack_bundle_tag "webpack_runtime"
= webpack_bundle_tag "common"
= webpack_bundle_tag "main"
diff --git a/app/views/profiles/gpg_keys/_key.html.haml b/app/views/profiles/gpg_keys/_key.html.haml
index 970e92aadaa..5ed517c1ef6 100644
--- a/app/views/profiles/gpg_keys/_key.html.haml
+++ b/app/views/profiles/gpg_keys/_key.html.haml
@@ -7,6 +7,13 @@
.description
%code= key.fingerprint
+ - if key.subkeys.present?
+ .subkeys
+ %span.bold Subkeys:
+ %ul.subkeys-list
+ - key.subkeys.each do |subkey|
+ %li
+ %code= subkey.fingerprint
.pull-right
%span.key-created-at
created #{time_ago_with_tooltip(key.created_at)}
diff --git a/app/views/projects/snippets/show.html.haml b/app/views/projects/snippets/show.html.haml
index fda068f08c2..7062c5b765e 100644
--- a/app/views/projects/snippets/show.html.haml
+++ b/app/views/projects/snippets/show.html.haml
@@ -1,5 +1,5 @@
- @content_class = "limit-container-width limited-inner-width-container" unless fluid_layout
-- add_to_breadcrumbs "Snippets", dashboard_snippets_path
+- add_to_breadcrumbs "Snippets", project_snippets_path(@project)
- breadcrumb_title @snippet.to_reference
- page_title "#{@snippet.title} (#{@snippet.to_reference})", "Snippets"
diff --git a/changelogs/unreleased/36829-add-ability-to-verify-gpg-subkeys.yml b/changelogs/unreleased/36829-add-ability-to-verify-gpg-subkeys.yml
new file mode 100644
index 00000000000..ee6a7287e86
--- /dev/null
+++ b/changelogs/unreleased/36829-add-ability-to-verify-gpg-subkeys.yml
@@ -0,0 +1,5 @@
+---
+title: Add support for GPG subkeys in signature verification
+merge_request: 14517
+author:
+type: added
diff --git a/changelogs/unreleased/37105-monitoring-graph-axes-labels-are-inaccurate-and-inconsistent.yml b/changelogs/unreleased/37105-monitoring-graph-axes-labels-are-inaccurate-and-inconsistent.yml
new file mode 100644
index 00000000000..3364b1d46b3
--- /dev/null
+++ b/changelogs/unreleased/37105-monitoring-graph-axes-labels-are-inaccurate-and-inconsistent.yml
@@ -0,0 +1,5 @@
+---
+title: Fix incorrect X-axis labels in Prometheus graphs
+merge_request: 14258
+author:
+type: fixed
diff --git a/changelogs/unreleased/38696-fix-project-snippets-breadcrumb-link.yml b/changelogs/unreleased/38696-fix-project-snippets-breadcrumb-link.yml
new file mode 100644
index 00000000000..18b1645d7a9
--- /dev/null
+++ b/changelogs/unreleased/38696-fix-project-snippets-breadcrumb-link.yml
@@ -0,0 +1,5 @@
+---
+title: Fix project snippets breadcrumb link
+merge_request:
+author:
+type: fixed
diff --git a/changelogs/unreleased/fix-update-doorkeeper-openid-connect.yml b/changelogs/unreleased/fix-update-doorkeeper-openid-connect.yml
new file mode 100644
index 00000000000..c57fceec92f
--- /dev/null
+++ b/changelogs/unreleased/fix-update-doorkeeper-openid-connect.yml
@@ -0,0 +1,5 @@
+---
+title: Upgrade doorkeeper-openid_connect
+merge_request: 14372
+author: Markus Koller
+type: other
diff --git a/changelogs/unreleased/fl-fix-ca-time-component.yml b/changelogs/unreleased/fl-fix-ca-time-component.yml
new file mode 100644
index 00000000000..ecd377409ca
--- /dev/null
+++ b/changelogs/unreleased/fl-fix-ca-time-component.yml
@@ -0,0 +1,5 @@
+---
+title: Fix typo in cycle analytics breaking time component
+merge_request:
+author:
+type: fixed
diff --git a/changelogs/unreleased/valid-branch-name-dash-bug.yml b/changelogs/unreleased/valid-branch-name-dash-bug.yml
new file mode 100644
index 00000000000..89e4578b3e5
--- /dev/null
+++ b/changelogs/unreleased/valid-branch-name-dash-bug.yml
@@ -0,0 +1,5 @@
+---
+title: Prevent branches or tags from starting with invalid characters (e.g. -, .)
+merge_request:
+author:
+type: fixed
diff --git a/config/initializers/doorkeeper_openid_connect.rb b/config/initializers/doorkeeper_openid_connect.rb
index c58f425b19b..af174def047 100644
--- a/config/initializers/doorkeeper_openid_connect.rb
+++ b/config/initializers/doorkeeper_openid_connect.rb
@@ -1,7 +1,7 @@
Doorkeeper::OpenidConnect.configure do
issuer Gitlab.config.gitlab.url
- jws_private_key Rails.application.secrets.jws_private_key
+ signing_key Rails.application.secrets.openid_connect_signing_key
resource_owner_from_access_token do |access_token|
User.active.find_by(id: access_token.resource_owner_id)
diff --git a/config/initializers/secret_token.rb b/config/initializers/secret_token.rb
index f9c1d2165d3..750a5b34f3b 100644
--- a/config/initializers/secret_token.rb
+++ b/config/initializers/secret_token.rb
@@ -25,7 +25,7 @@ def create_tokens
secret_key_base: file_secret_key || generate_new_secure_token,
otp_key_base: env_secret_key || file_secret_key || generate_new_secure_token,
db_key_base: generate_new_secure_token,
- jws_private_key: generate_new_rsa_private_key
+ openid_connect_signing_key: generate_new_rsa_private_key
}
missing_secrets = set_missing_keys(defaults)
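
The secret is renamed from `jws_private_key` to `openid_connect_signing_key`, but it is still an RSA private key stored in `config/secrets.yml`. A hedged sketch of generating such a key (the 2048-bit size is an assumption here, not taken from this diff):

```ruby
require 'openssl'

# Produce a PEM-encoded RSA private key of the kind kept under
# production.openid_connect_signing_key in config/secrets.yml.
pem = OpenSSL::PKey::RSA.new(2048).to_pem
puts pem.lines.first # => "-----BEGIN RSA PRIVATE KEY-----\n"
```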
diff --git a/config/initializers/sentry.rb b/config/initializers/sentry.rb
index 62d0967009a..b2da3b3dc19 100644
--- a/config/initializers/sentry.rb
+++ b/config/initializers/sentry.rb
@@ -2,7 +2,7 @@
require 'gitlab/current_settings'
-if Rails.env.production?
+def configure_sentry
# allow it to fail: it may do so when create_from_defaults is executed before migrations are actually done
begin
sentry_enabled = Gitlab::CurrentSettings.current_application_settings.sentry_enabled
@@ -23,3 +23,5 @@ if Rails.env.production?
end
end
end
+
+configure_sentry if Rails.env.production?
diff --git a/db/migrate/20170927161718_create_gpg_key_subkeys.rb b/db/migrate/20170927161718_create_gpg_key_subkeys.rb
new file mode 100644
index 00000000000..c03c40416a8
--- /dev/null
+++ b/db/migrate/20170927161718_create_gpg_key_subkeys.rb
@@ -0,0 +1,23 @@
+class CreateGpgKeySubkeys < ActiveRecord::Migration
+ DOWNTIME = false
+
+ def up
+ create_table :gpg_key_subkeys do |t|
+ t.references :gpg_key, null: false, index: true, foreign_key: { on_delete: :cascade }
+
+ t.binary :keyid
+ t.binary :fingerprint
+
+ t.index :keyid, unique: true, length: Gitlab::Database.mysql? ? 20 : nil
+ t.index :fingerprint, unique: true, length: Gitlab::Database.mysql? ? 20 : nil
+ end
+
+ add_reference :gpg_signatures, :gpg_key_subkey, index: true, foreign_key: { on_delete: :nullify }
+ end
+
+ def down
+ remove_reference(:gpg_signatures, :gpg_key_subkey, index: true, foreign_key: true)
+
+ drop_table :gpg_key_subkeys
+ end
+end
diff --git a/db/migrate/20170929080234_add_failure_reason_to_pipelines.rb b/db/migrate/20170929080234_add_failure_reason_to_pipelines.rb
new file mode 100644
index 00000000000..82adddbc1ec
--- /dev/null
+++ b/db/migrate/20170929080234_add_failure_reason_to_pipelines.rb
@@ -0,0 +1,9 @@
+class AddFailureReasonToPipelines < ActiveRecord::Migration
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ def change
+ add_column :ci_pipelines, :failure_reason, :integer
+ end
+end
diff --git a/db/post_migrate/20171005130944_schedule_create_gpg_key_subkeys_from_gpg_keys.rb b/db/post_migrate/20171005130944_schedule_create_gpg_key_subkeys_from_gpg_keys.rb
new file mode 100644
index 00000000000..01d56fbd490
--- /dev/null
+++ b/db/post_migrate/20171005130944_schedule_create_gpg_key_subkeys_from_gpg_keys.rb
@@ -0,0 +1,28 @@
+# See http://doc.gitlab.com/ce/development/migration_style_guide.html
+# for more information on how to write migrations for GitLab.
+
+class ScheduleCreateGpgKeySubkeysFromGpgKeys < ActiveRecord::Migration
+ disable_ddl_transaction!
+
+ DOWNTIME = false
+ MIGRATION = 'CreateGpgKeySubkeysFromGpgKeys'
+
+ class GpgKey < ActiveRecord::Base
+ self.table_name = 'gpg_keys'
+
+ include EachBatch
+ end
+
+ def up
+ GpgKey.select(:id).each_batch do |gpg_keys|
+ jobs = gpg_keys.pluck(:id).map do |id|
+ [MIGRATION, [id]]
+ end
+
+ BackgroundMigrationWorker.perform_bulk(jobs)
+ end
+ end
+
+ def down
+ end
+end
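
The post-deploy migration walks `gpg_keys` in batches and enqueues one background-migration job per key. The payload handed to `BackgroundMigrationWorker.perform_bulk` is just an array of `[migration_name, args]` pairs, as this small sketch with made-up ids shows:

```ruby
# Illustration only: the ids below are invented; the constant matches the
# migration class scheduled above.
MIGRATION = 'CreateGpgKeySubkeysFromGpgKeys'.freeze

gpg_key_ids_in_batch = [1, 2, 3]

jobs = gpg_key_ids_in_batch.map { |id| [MIGRATION, [id]] }

p jobs
# => [["CreateGpgKeySubkeysFromGpgKeys", [1]],
#     ["CreateGpgKeySubkeysFromGpgKeys", [2]],
#     ["CreateGpgKeySubkeysFromGpgKeys", [3]]]
```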
diff --git a/db/schema.rb b/db/schema.rb
index 5f8f4bb7d5e..c9420a39088 100644
--- a/db/schema.rb
+++ b/db/schema.rb
@@ -11,7 +11,7 @@
#
# It's strongly recommended that you check this file into your version control system.
-ActiveRecord::Schema.define(version: 20171004121444) do
+ActiveRecord::Schema.define(version: 20171005130944) do
# These are extensions that must be enabled in order to support this database
enable_extension "plpgsql"
@@ -342,6 +342,7 @@ ActiveRecord::Schema.define(version: 20171004121444) do
t.integer "source"
t.integer "config_source"
t.boolean "protected"
+ t.integer "failure_reason"
end
add_index "ci_pipelines", ["auto_canceled_by_id"], name: "index_ci_pipelines_on_auto_canceled_by_id", using: :btree
@@ -608,6 +609,16 @@ ActiveRecord::Schema.define(version: 20171004121444) do
add_index "gcp_clusters", ["project_id"], name: "index_gcp_clusters_on_project_id", unique: true, using: :btree
+ create_table "gpg_key_subkeys", force: :cascade do |t|
+ t.integer "gpg_key_id", null: false
+ t.binary "keyid"
+ t.binary "fingerprint"
+ end
+
+ add_index "gpg_key_subkeys", ["fingerprint"], name: "index_gpg_key_subkeys_on_fingerprint", unique: true, using: :btree
+ add_index "gpg_key_subkeys", ["gpg_key_id"], name: "index_gpg_key_subkeys_on_gpg_key_id", using: :btree
+ add_index "gpg_key_subkeys", ["keyid"], name: "index_gpg_key_subkeys_on_keyid", unique: true, using: :btree
+
create_table "gpg_keys", force: :cascade do |t|
t.datetime_with_timezone "created_at", null: false
t.datetime_with_timezone "updated_at", null: false
@@ -631,11 +642,13 @@ ActiveRecord::Schema.define(version: 20171004121444) do
t.text "gpg_key_user_name"
t.text "gpg_key_user_email"
t.integer "verification_status", limit: 2, default: 0, null: false
+ t.integer "gpg_key_subkey_id"
end
add_index "gpg_signatures", ["commit_sha"], name: "index_gpg_signatures_on_commit_sha", unique: true, using: :btree
add_index "gpg_signatures", ["gpg_key_id"], name: "index_gpg_signatures_on_gpg_key_id", using: :btree
add_index "gpg_signatures", ["gpg_key_primary_keyid"], name: "index_gpg_signatures_on_gpg_key_primary_keyid", using: :btree
+ add_index "gpg_signatures", ["gpg_key_subkey_id"], name: "index_gpg_signatures_on_gpg_key_subkey_id", using: :btree
add_index "gpg_signatures", ["project_id"], name: "index_gpg_signatures_on_project_id", using: :btree
create_table "identities", force: :cascade do |t|
@@ -1758,7 +1771,9 @@ ActiveRecord::Schema.define(version: 20171004121444) do
add_foreign_key "gcp_clusters", "projects", on_delete: :cascade
add_foreign_key "gcp_clusters", "services", on_delete: :nullify
add_foreign_key "gcp_clusters", "users", on_delete: :nullify
+ add_foreign_key "gpg_key_subkeys", "gpg_keys", on_delete: :cascade
add_foreign_key "gpg_keys", "users", on_delete: :cascade
+ add_foreign_key "gpg_signatures", "gpg_key_subkeys", on_delete: :nullify
add_foreign_key "gpg_signatures", "gpg_keys", on_delete: :nullify
add_foreign_key "gpg_signatures", "projects", on_delete: :cascade
add_foreign_key "issue_assignees", "issues", name: "fk_b7d881734a", on_delete: :cascade
diff --git a/doc/install/installation.md b/doc/install/installation.md
index 200cd94f43c..af6c797dc00 100644
--- a/doc/install/installation.md
+++ b/doc/install/installation.md
@@ -133,9 +133,9 @@ Remove the old Ruby 1.8 if present:
Download Ruby and compile it:
mkdir /tmp/ruby && cd /tmp/ruby
- curl --remote-name --progress https://cache.ruby-lang.org/pub/ruby/2.3/ruby-2.3.3.tar.gz
- echo '1014ee699071aa2ddd501907d18cbe15399c997d ruby-2.3.3.tar.gz' | shasum -c - && tar xzf ruby-2.3.3.tar.gz
- cd ruby-2.3.3
+ curl --remote-name --progress https://cache.ruby-lang.org/pub/ruby/2.3/ruby-2.3.5.tar.gz
+ echo '3247e217d6745c27ef23bdc77b6abdb4b57a118f ruby-2.3.5.tar.gz' | shasum -c - && tar xzf ruby-2.3.5.tar.gz
+ cd ruby-2.3.5
./configure --disable-install-rdoc
make
sudo make install
diff --git a/doc/update/10.0-to-10.1.md b/doc/update/10.0-to-10.1.md
new file mode 100644
index 00000000000..4a9384f3ad6
--- /dev/null
+++ b/doc/update/10.0-to-10.1.md
@@ -0,0 +1,356 @@
+# From 10.0 to 10.1
+
+Make sure you view this update guide from the tag (version) of GitLab you would
+like to install. In most cases this should be the highest numbered production
+tag (without rc in it). You can select the tag in the version dropdown at the
+top left corner of GitLab (below the menu bar).
+
+If the highest number stable branch is unclear please check the
+[GitLab Blog](https://about.gitlab.com/blog/archives.html) for installation
+guide links by version.
+
+### 1. Stop server
+
+```bash
+sudo service gitlab stop
+```
+
+### 2. Backup
+
+```bash
+cd /home/git/gitlab
+
+sudo -u git -H bundle exec rake gitlab:backup:create RAILS_ENV=production
+```
+
+### 3. Update Ruby
+
+NOTE: GitLab 9.0 and higher only support Ruby 2.3.x and dropped support for Ruby 2.1.x. Be
+sure to upgrade your interpreter if necessary.
+
+You can check which version you are running with `ruby -v`.
+
+Download and compile Ruby:
+
+```bash
+mkdir /tmp/ruby && cd /tmp/ruby
+curl --remote-name --progress https://cache.ruby-lang.org/pub/ruby/2.3/ruby-2.3.5.tar.gz
+echo '3247e217d6745c27ef23bdc77b6abdb4b57a118f ruby-2.3.5.tar.gz' | shasum -c - && tar xzf ruby-2.3.5.tar.gz
+cd ruby-2.3.5
+./configure --disable-install-rdoc
+make
+sudo make install
+```
+
+Install Bundler:
+
+```bash
+sudo gem install bundler --no-ri --no-rdoc
+```
+
+### 4. Update Node
+
+GitLab now runs [webpack](http://webpack.js.org) to compile frontend assets and
+it has a minimum requirement of node v4.3.0.
+
+You can check which version you are running with `node -v`. If you are running
+a version older than `v4.3.0` you will need to update to a newer version. You
+can find instructions to install from community maintained packages or compile
+from source at the nodejs.org website.
+
+<https://nodejs.org/en/download/>
+
+
+Since 8.17, GitLab requires the use of yarn `>= v0.17.0` to manage
+JavaScript dependencies.
+
+```bash
+curl --silent --show-error https://dl.yarnpkg.com/debian/pubkey.gpg | sudo apt-key add -
+echo "deb https://dl.yarnpkg.com/debian/ stable main" | sudo tee /etc/apt/sources.list.d/yarn.list
+sudo apt-get update
+sudo apt-get install yarn
+```
+
+More information can be found on the [yarn website](https://yarnpkg.com/en/docs/install).
+
+### 5. Update Go
+
+NOTE: GitLab 9.2 and higher only supports Go 1.8.3 and dropped support for Go
+1.5.x through 1.7.x. Be sure to upgrade your installation if necessary.
+
+You can check which version you are running with `go version`.
+
+Download and install Go:
+
+```bash
+# Remove former Go installation folder
+sudo rm -rf /usr/local/go
+
+curl --remote-name --progress https://storage.googleapis.com/golang/go1.8.3.linux-amd64.tar.gz
+echo '1862f4c3d3907e59b04a757cfda0ea7aa9ef39274af99a784f5be843c80c6772 go1.8.3.linux-amd64.tar.gz' | shasum -a256 -c - && \
+ sudo tar -C /usr/local -xzf go1.8.3.linux-amd64.tar.gz
+sudo ln -sf /usr/local/go/bin/{go,godoc,gofmt} /usr/local/bin/
+rm go1.8.3.linux-amd64.tar.gz
+```
+
+### 6. Get latest code
+
+```bash
+cd /home/git/gitlab
+
+sudo -u git -H git fetch --all
+sudo -u git -H git checkout -- db/schema.rb # local changes will be restored automatically
+sudo -u git -H git checkout -- locale
+```
+
+For GitLab Community Edition:
+
+```bash
+cd /home/git/gitlab
+
+sudo -u git -H git checkout 10-1-stable
+```
+
+OR
+
+For GitLab Enterprise Edition:
+
+```bash
+cd /home/git/gitlab
+
+sudo -u git -H git checkout 10-1-stable-ee
+```
+
+### 7. Update gitlab-shell
+
+```bash
+cd /home/git/gitlab-shell
+
+sudo -u git -H git fetch --all --tags
+sudo -u git -H git checkout v$(</home/git/gitlab/GITLAB_SHELL_VERSION)
+sudo -u git -H bin/compile
+```
+
+### 8. Update gitlab-workhorse
+
+Install and compile gitlab-workhorse. GitLab-Workhorse uses
+[GNU Make](https://www.gnu.org/software/make/).
+If you are not using Linux you may have to run `gmake` instead of
+`make` below.
+
+```bash
+cd /home/git/gitlab-workhorse
+
+sudo -u git -H git fetch --all --tags
+sudo -u git -H git checkout v$(</home/git/gitlab/GITLAB_WORKHORSE_VERSION)
+sudo -u git -H make
+```
+
+### 9. Update Gitaly
+
+#### New Gitaly configuration options required
+
+In order to function, Gitaly needs some additional configuration information. Below we assume you installed Gitaly in `/home/git/gitaly` and GitLab Shell in `/home/git/gitlab-shell`.
+
+```shell
+echo '
+[gitaly-ruby]
+dir = "/home/git/gitaly/ruby"
+
+[gitlab-shell]
+dir = "/home/git/gitlab-shell"
+' | sudo -u git tee -a /home/git/gitaly/config.toml
+```
+
+#### Check Gitaly configuration
+
+Due to a bug in the `rake gitlab:gitaly:install` script your Gitaly
+configuration file may contain syntax errors. The block name
+`[[storages]]`, which may occur more than once in your `config.toml`
+file, should be `[[storage]]` instead.
+
+```shell
+sudo -u git -H sed -i.pre-10.1 's/\[\[storages\]\]/[[storage]]/' /home/git/gitaly/config.toml
+```
+
+#### Compile Gitaly
+
+```shell
+cd /home/git/gitaly
+sudo -u git -H git fetch --all --tags
+sudo -u git -H git checkout v$(</home/git/gitlab/GITALY_SERVER_VERSION)
+sudo -u git -H make
+```
+
+### 10. Update MySQL permissions
+
+If you are using MySQL you need to grant the GitLab user the necessary
+permissions on the database:
+
+```bash
+mysql -u root -p -e "GRANT TRIGGER ON \`gitlabhq_production\`.* TO 'git'@'localhost';"
+```
+
+If you use MySQL with replication, or just have MySQL configured with binary logging,
+you will need to also run the following on all of your MySQL servers:
+
+```bash
+mysql -u root -p -e "SET GLOBAL log_bin_trust_function_creators = 1;"
+```
+
+You can make this setting permanent by adding it to your `my.cnf`:
+
+```
+log_bin_trust_function_creators=1
+```
+
+### 11. Update configuration files
+
+#### New configuration options for `gitlab.yml`
+
+There might be configuration options available for [`gitlab.yml`][yaml]. View them with the command below and apply them manually to your current `gitlab.yml`:
+
+```sh
+cd /home/git/gitlab
+
+git diff origin/10-0-stable:config/gitlab.yml.example origin/10-1-stable:config/gitlab.yml.example
+```
+
+#### Nginx configuration
+
+Ensure you're still up-to-date with the latest NGINX configuration changes:
+
+```sh
+cd /home/git/gitlab
+
+# For HTTPS configurations
+git diff origin/10-0-stable:lib/support/nginx/gitlab-ssl origin/10-1-stable:lib/support/nginx/gitlab-ssl
+
+# For HTTP configurations
+git diff origin/10-0-stable:lib/support/nginx/gitlab origin/10-1-stable:lib/support/nginx/gitlab
+```
+
+If you are using Strict-Transport-Security in your installation, you must enable it in your Nginx
+configuration to continue using it, as the GitLab application no longer handles setting it.
+
+If you are using Apache instead of NGINX please see the updated [Apache templates].
+Also note that because Apache does not support upstreams behind Unix sockets you
+will need to let gitlab-workhorse listen on a TCP port. You can do this
+via [/etc/default/gitlab].
+
+[Apache templates]: https://gitlab.com/gitlab-org/gitlab-recipes/tree/master/web-server/apache
+[/etc/default/gitlab]: https://gitlab.com/gitlab-org/gitlab-ce/blob/10-1-stable/lib/support/init.d/gitlab.default.example#L38
+
+#### SMTP configuration
+
+If you're installing from source and use SMTP to deliver mail, you will need to add the following line
+to config/initializers/smtp_settings.rb:
+
+```ruby
+ActionMailer::Base.delivery_method = :smtp
+```
+
+See [smtp_settings.rb.sample] as an example.
+
+[smtp_settings.rb.sample]: https://gitlab.com/gitlab-org/gitlab-ce/blob/10-1-stable/config/initializers/smtp_settings.rb.sample#L13
+
+#### Init script
+
+There might be new configuration options available for [`gitlab.default.example`][gl-example]. View them with the command below and apply them manually to your current `/etc/default/gitlab`:
+
+```sh
+cd /home/git/gitlab
+
+git diff origin/10-0-stable:lib/support/init.d/gitlab.default.example origin/10-1-stable:lib/support/init.d/gitlab.default.example
+```
+
+Ensure you're still up-to-date with the latest init script changes:
+
+```bash
+cd /home/git/gitlab
+
+sudo cp lib/support/init.d/gitlab /etc/init.d/gitlab
+```
+
+For Ubuntu 16.04.1 LTS:
+
+```bash
+sudo systemctl daemon-reload
+```
+
+### 12. Install libs, migrations, etc.
+
+```bash
+cd /home/git/gitlab
+
+# MySQL installations (note: the line below states '--without postgres')
+sudo -u git -H bundle install --without postgres development test --deployment
+
+# PostgreSQL installations (note: the line below states '--without mysql')
+sudo -u git -H bundle install --without mysql development test --deployment
+
+# Optional: clean up old gems
+sudo -u git -H bundle clean
+
+# Run database migrations
+sudo -u git -H bundle exec rake db:migrate RAILS_ENV=production
+
+# Compile GetText PO files
+
+sudo -u git -H bundle exec rake gettext:compile RAILS_ENV=production
+
+# Update node dependencies and recompile assets
+sudo -u git -H bundle exec rake yarn:install gitlab:assets:clean gitlab:assets:compile RAILS_ENV=production NODE_ENV=production
+
+# Clean up cache
+sudo -u git -H bundle exec rake cache:clear RAILS_ENV=production
+```
+
+**MySQL installations**: Run through the `MySQL strings limits` and `Tables and data conversion to utf8mb4` [tasks](../install/database_mysql.md).
+
+### 13. Start application
+
+```bash
+sudo service gitlab start
+sudo service nginx restart
+```
+
+### 14. Check application status
+
+Check if GitLab and its environment are configured correctly:
+
+```bash
+cd /home/git/gitlab
+
+sudo -u git -H bundle exec rake gitlab:env:info RAILS_ENV=production
+```
+
+To make sure you didn't miss anything run a more thorough check:
+
+```bash
+cd /home/git/gitlab
+
+sudo -u git -H bundle exec rake gitlab:check RAILS_ENV=production
+```
+
+If all items are green, then congratulations, the upgrade is complete!
+
+## Things went south? Revert to previous version (10.0)
+
+### 1. Revert the code to the previous version
+
+Follow the [upgrade guide from 9.5 to 10.0](9.5-to-10.0.md), except for the
+database migration (the backup is already migrated to the previous version).
+
+### 2. Restore from the backup
+
+```bash
+cd /home/git/gitlab
+
+sudo -u git -H bundle exec rake gitlab:backup:restore RAILS_ENV=production
+```
+
+If you have more than one backup `*.tar` file, please add `BACKUP=timestamp_of_backup` to the command above.
+
+[yaml]: https://gitlab.com/gitlab-org/gitlab-ce/blob/10-1-stable/config/gitlab.yml.example
+[gl-example]: https://gitlab.com/gitlab-org/gitlab-ce/blob/10-1-stable/lib/support/init.d/gitlab.default.example
diff --git a/doc/update/mysql_to_postgresql.md b/doc/update/mysql_to_postgresql.md
index 5dc8e6f65f8..a3eba38c91a 100644
--- a/doc/update/mysql_to_postgresql.md
+++ b/doc/update/mysql_to_postgresql.md
@@ -1,80 +1,113 @@
-*** NOTE: These instructions should be considered deprecated. In GitLab 10.0 we will be releasing new migration instructions using [pgloader](http://pgloader.io/).
+---
+last_updated: 2017-10-05
+---
-# Migrating GitLab from MySQL to Postgres
-*Make sure you view this [guide from the `master` branch](https://gitlab.com/gitlab-org/gitlab-ce/blob/master/doc/update/mysql_to_postgresql.md#migrating-gitlab-from-mysql-to-postgres) for the most up to date instructions.*
+# Migrating from MySQL to PostgreSQL
-If you are replacing MySQL with Postgres while keeping GitLab on the same server all you need to do is to export from MySQL, convert the resulting SQL file, and import it into Postgres. If you are also moving GitLab to another server, or if you are switching to omnibus-gitlab, you may want to use a GitLab backup file. The second part of this documents explains the procedure to do this.
+> **Note:** This guide assumes you have a working Omnibus GitLab instance with
+> MySQL and want to migrate to the bundled PostgreSQL database.
-## Export from MySQL and import into Postgres
+## Prerequisites
-Use this if you are keeping GitLab on the same server.
+First, we'll need to enable the bundled PostgreSQL database with an up-to-date
+schema. Next, we'll use [pgloader](http://pgloader.io) to migrate the data
+from the old MySQL database to the new PostgreSQL one.
-```
-sudo service gitlab stop
+Here's what you'll need to have installed:
-# Update /home/git/gitlab/config/database.yml
+- pgloader 3.4.1+
+- Omnibus GitLab
+- MySQL
-git clone https://github.com/gitlabhq/mysql-postgresql-converter.git -b gitlab
-cd mysql-postgresql-converter
-mysqldump --compatible=postgresql --default-character-set=utf8 -r gitlabhq_production.mysql -u root gitlabhq_production -p
-python db_converter.py gitlabhq_production.mysql gitlabhq_production.psql
-ed -s gitlabhq_production.psql < move_drop_indexes.ed
+## Enable bundled PostgreSQL database
-# Import the database dump as the application database user
-sudo -u git psql -f gitlabhq_production.psql -d gitlabhq_production
+1. Stop GitLab:
-# Install gems for PostgreSQL (note: the line below states '--without ... mysql')
-sudo -u git -H bundle install --without development test mysql --deployment
+ ``` bash
+ sudo gitlab-ctl stop
+ ```
-sudo service gitlab start
-```
+1. Edit `/etc/gitlab/gitlab.rb` to enable bundled PostgreSQL:
-## Converting a GitLab backup file from MySQL to Postgres
-**Note:** Please make sure to have Python 2.7.x (or higher) installed.
+ ```
+ postgresql['enable'] = true
+ ```
-GitLab backup files (`<timestamp>_gitlab_backup.tar`) contain a SQL dump. Using the lanyrd database converter we can replace a MySQL database dump inside the tar file with a Postgres database dump. This can be useful if you are moving to another server.
+1. Edit `/etc/gitlab/gitlab.rb` to use the bundled PostgreSQL. Please check
+ all the settings beginning with `db_`, such as `gitlab_rails['db_adapter']`
+ and the like. You could just comment all of them out so that we'll use
+ the defaults.
-```
-# Stop GitLab
-sudo service gitlab stop
+1. [Reconfigure GitLab] for the changes to take effect:
+
+ ``` bash
+ sudo gitlab-ctl reconfigure
+ ```
+
+1. Start Unicorn and PostgreSQL so that we can prepare the schema:
+
+ ``` bash
+ sudo gitlab-ctl start unicorn
+ sudo gitlab-ctl start postgresql
+ ```
+
+1. Run the following commands to prepare the schema:
+
+ ``` bash
+ sudo gitlab-rake db:create db:migrate
+ ```
+
+1. Stop Unicorn in case it's interfering with the next step:
-# Create the backup
-cd /home/git/gitlab
-sudo -u git -H bundle exec rake gitlab:backup:create RAILS_ENV=production
+ ``` bash
+ sudo gitlab-ctl stop unicorn
+ ```
-# Note the filename of the backup that was created. We will call it
-# TIMESTAMP_gitlab_backup.tar below.
+After these steps, you'll have a fresh PostgreSQL database with an up-to-date schema.
-# Move the backup file we will convert to its own directory
-sudo -u git -H mkdir -p tmp/backups/postgresql
-sudo -u git -H mv tmp/backups/TIMESTAMP_gitlab_backup.tar tmp/backups/postgresql/
+## Migrate data from MySQL to PostgreSQL
-# Create a separate database dump with PostgreSQL compatibility
-cd tmp/backups/postgresql
-sudo -u git -H mysqldump --compatible=postgresql --default-character-set=utf8 -r gitlabhq_production.mysql -u root gitlabhq_production -p
+Now, you can use pgloader to migrate the data from MySQL to PostgreSQL:
-# Clone the database converter
-sudo -u git -H git clone https://github.com/gitlabhq/mysql-postgresql-converter.git -b gitlab
+1. Save the following snippet in a `commands.load` file, and edit with your
+ database `username`, `password` and `host`:
-# Convert gitlabhq_production.mysql
-sudo -u git -H mkdir db
-sudo -u git -H python mysql-postgresql-converter/db_converter.py gitlabhq_production.mysql db/database.sql
-sudo -u git -H ed -s db/database.sql < mysql-postgresql-converter/move_drop_indexes.ed
+ ```
+ LOAD DATABASE
+ FROM mysql://username:password@host/gitlabhq_production
+ INTO postgresql://gitlab-psql@unix://var/opt/gitlab/postgresql:/gitlabhq_production
-# Compress database backup
-# Warning: If you have Gitlab 7.12.0 or older skip this step and import the database.sql directly into the backup with:
-# sudo -u git -H tar rf TIMESTAMP_gitlab_backup.tar db/database.sql
-# The compressed databasedump is not supported at 7.12.0 and older.
-sudo -u git -H gzip db/database.sql
+ WITH include no drop, truncate, disable triggers, create no tables,
+ create no indexes, preserve index names, no foreign keys,
+ data only
-# Replace the MySQL dump in TIMESTAMP_gitlab_backup.tar.
+ ALTER SCHEMA 'gitlabhq_production' RENAME TO 'public'
-# Warning: if you forget to replace TIMESTAMP below, tar will create a new file
-# 'TIMESTAMP_gitlab_backup.tar' without giving an error.
+ ;
+ ```
-sudo -u git -H tar rf TIMESTAMP_gitlab_backup.tar db/database.sql.gz
+1. Start the migration:
-# Done! TIMESTAMP_gitlab_backup.tar can now be restored into a Postgres GitLab
-# installation.
-# See https://gitlab.com/gitlab-org/gitlab-ce/blob/master/doc/raketasks/backup_restore.md for more information about backups.
+ ``` bash
+ sudo -u gitlab-psql pgloader commands.load
+ ```
+
+1. Once the migration finishes, start GitLab:
+
+ ``` bash
+ sudo gitlab-ctl start
+ ```
+
+Now, you can verify that everything worked by visiting GitLab.
+
+## Troubleshooting
+
+### Experiencing 500 errors after the migration
+
+If you experience 500 errors after the migration, try to clear the cache:
+
+``` bash
+sudo gitlab-rake cache:clear
```
+
+[reconfigure GitLab]: ../administration/restart_gitlab.md#omnibus-gitlab-reconfigure
diff --git a/lib/api/helpers.rb b/lib/api/helpers.rb
index 4964a76bef6..a87297a604c 100644
--- a/lib/api/helpers.rb
+++ b/lib/api/helpers.rb
@@ -287,7 +287,7 @@ module API
if sentry_enabled? && report_exception?(exception)
define_params_for_grape_middleware
sentry_context
- Raven.capture_exception(exception)
+ Raven.capture_exception(exception, extra: params)
end
# lifted from https://github.com/rails/rails/blob/master/actionpack/lib/action_dispatch/middleware/debug_exceptions.rb#L60
diff --git a/lib/gitlab/background_migration/create_gpg_key_subkeys_from_gpg_keys.rb b/lib/gitlab/background_migration/create_gpg_key_subkeys_from_gpg_keys.rb
new file mode 100644
index 00000000000..e94719db72e
--- /dev/null
+++ b/lib/gitlab/background_migration/create_gpg_key_subkeys_from_gpg_keys.rb
@@ -0,0 +1,53 @@
+class Gitlab::BackgroundMigration::CreateGpgKeySubkeysFromGpgKeys
+ class GpgKey < ActiveRecord::Base
+ self.table_name = 'gpg_keys'
+
+ include EachBatch
+ include ShaAttribute
+
+ sha_attribute :primary_keyid
+ sha_attribute :fingerprint
+
+ has_many :subkeys, class_name: 'GpgKeySubkey'
+ end
+
+ class GpgKeySubkey < ActiveRecord::Base
+ self.table_name = 'gpg_key_subkeys'
+
+ include ShaAttribute
+
+ sha_attribute :keyid
+ sha_attribute :fingerprint
+ end
+
+ def perform(gpg_key_id)
+ gpg_key = GpgKey.find_by(id: gpg_key_id)
+
+ return if gpg_key.nil?
+ return if gpg_key.subkeys.any?
+
+ create_subkeys(gpg_key)
+ update_signatures(gpg_key)
+ end
+
+ private
+
+ def create_subkeys(gpg_key)
+ gpg_subkeys = Gitlab::Gpg.subkeys_from_key(gpg_key.key)
+
+ gpg_subkeys[gpg_key.primary_keyid.upcase]&.each do |subkey_data|
+ gpg_key.subkeys.build(keyid: subkey_data[:keyid], fingerprint: subkey_data[:fingerprint])
+ end
+
+ # Improve latency by doing all INSERTs in a single call
+ GpgKey.transaction do
+ gpg_key.save!
+ end
+ end
+
+ def update_signatures(gpg_key)
+ return unless gpg_key.subkeys.exists?
+
+ InvalidGpgSignatureUpdateWorker.perform_async(gpg_key.id)
+ end
+end
diff --git a/lib/gitlab/ci/pipeline/chain/validate/config.rb b/lib/gitlab/ci/pipeline/chain/validate/config.rb
index 489bcd79655..075504bcce5 100644
--- a/lib/gitlab/ci/pipeline/chain/validate/config.rb
+++ b/lib/gitlab/ci/pipeline/chain/validate/config.rb
@@ -13,7 +13,7 @@ module Gitlab
end
if @command.save_incompleted && @pipeline.has_yaml_errors?
- @pipeline.drop
+ @pipeline.drop!(:config_error)
end
return error(@pipeline.yaml_errors)
diff --git a/lib/gitlab/ci/stage/seed.rb b/lib/gitlab/ci/stage/seed.rb
index e19aae35a81..bc97aa63b02 100644
--- a/lib/gitlab/ci/stage/seed.rb
+++ b/lib/gitlab/ci/stage/seed.rb
@@ -3,7 +3,9 @@ module Gitlab
module Stage
class Seed
attr_reader :pipeline
+
delegate :project, to: :pipeline
+ delegate :size, to: :@jobs
def initialize(pipeline, stage, jobs)
@pipeline = pipeline
diff --git a/lib/gitlab/ee_compat_check.rb b/lib/gitlab/ee_compat_check.rb
index c5a8ea12245..c4c60d1dfee 100644
--- a/lib/gitlab/ee_compat_check.rb
+++ b/lib/gitlab/ee_compat_check.rb
@@ -2,7 +2,7 @@
module Gitlab
# Checks if a set of migrations requires downtime or not.
class EeCompatCheck
- CE_REPO = 'https://gitlab.com/gitlab-org/gitlab-ce.git'.freeze
+ DEFAULT_CE_REPO = 'https://gitlab.com/gitlab-org/gitlab-ce.git'.freeze
EE_REPO = 'https://gitlab.com/gitlab-org/gitlab-ee.git'.freeze
CHECK_DIR = Rails.root.join('ee_compat_check')
IGNORED_FILES_REGEX = /(VERSION|CHANGELOG\.md:\d+)/.freeze
@@ -20,7 +20,7 @@ module Gitlab
attr_reader :ee_repo_dir, :patches_dir, :ce_repo, :ce_branch, :ee_branch_found
attr_reader :failed_files
- def initialize(branch:, ce_repo: CE_REPO)
+ def initialize(branch:, ce_repo: DEFAULT_CE_REPO)
@ee_repo_dir = CHECK_DIR.join('ee-repo')
@patches_dir = CHECK_DIR.join('patches')
@ce_branch = branch
@@ -132,7 +132,7 @@ module Gitlab
def check_patch(patch_path)
step("Checking out master", %w[git checkout master])
step("Resetting to latest master", %w[git reset --hard origin/master])
- step("Fetching CE/#{ce_branch}", %W[git fetch #{CE_REPO} #{ce_branch}])
+ step("Fetching CE/#{ce_branch}", %W[git fetch #{ce_repo} #{ce_branch}])
step(
"Checking if #{patch_path} applies cleanly to EE/master",
# Don't use --check here because it can result in a 0-exit status even
diff --git a/lib/gitlab/git_ref_validator.rb b/lib/gitlab/git_ref_validator.rb
index a3c6b21a6a1..2e3e4fc3f1f 100644
--- a/lib/gitlab/git_ref_validator.rb
+++ b/lib/gitlab/git_ref_validator.rb
@@ -11,7 +11,7 @@ module Gitlab
return false if ref_name.start_with?('refs/remotes/')
Gitlab::Utils.system_silent(
- %W(#{Gitlab.config.git.bin_path} check-ref-format refs/#{ref_name}))
+ %W(#{Gitlab.config.git.bin_path} check-ref-format --branch #{ref_name}))
end
end
end
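Passing --branch makes git itself enforce branch-name rules, which is what now rejects names such as a bare dash or a leading dash (see the new cases in spec/lib/gitlab/git_ref_validator_spec.rb below). Illustrative call, assuming the same helper as above:

    Gitlab::Utils.system_silent(
      %W(#{Gitlab.config.git.bin_path} check-ref-format --branch -branch)) # => false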
diff --git a/lib/gitlab/gpg.rb b/lib/gitlab/gpg.rb
index 0d5039ddf5f..413872d7e08 100644
--- a/lib/gitlab/gpg.rb
+++ b/lib/gitlab/gpg.rb
@@ -34,6 +34,21 @@ module Gitlab
end
end
+ def subkeys_from_key(key)
+ using_tmp_keychain do
+ fingerprints = CurrentKeyChain.fingerprints_from_key(key)
+ raw_keys = GPGME::Key.find(:public, fingerprints)
+
+ raw_keys.each_with_object({}) do |raw_key, grouped_subkeys|
+ primary_subkey_id = raw_key.primary_subkey.keyid
+
+ grouped_subkeys[primary_subkey_id] = raw_key.subkeys[1..-1].map do |s|
+ { keyid: s.keyid, fingerprint: s.fingerprint }
+ end
+ end
+ end
+ end
+
def user_infos_from_key(key)
using_tmp_keychain do
fingerprints = CurrentKeyChain.fingerprints_from_key(key)
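The new Gitlab::Gpg.subkeys_from_key returns the subkeys grouped by the primary key's keyid, skipping the primary subkey itself. Illustrative shape with made-up key ids:

    Gitlab::Gpg.subkeys_from_key(public_key)
    # => {
    #      "4F4840A503964251" => [
    #        { keyid: "E38FFCAF75ABD92A", fingerprint: "0522dd29b98f167cd8421752e38ffcaf75abd92a" }
    #      ]
    #    }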
diff --git a/lib/gitlab/gpg/commit.rb b/lib/gitlab/gpg/commit.rb
index 86bd9f5b125..0f4ba6f83fc 100644
--- a/lib/gitlab/gpg/commit.rb
+++ b/lib/gitlab/gpg/commit.rb
@@ -43,7 +43,9 @@ module Gitlab
# key belonging to the keyid.
# This way we can add the key to the temporary keychain and extract
# the proper signature.
- gpg_key = GpgKey.find_by(primary_keyid: verified_signature.fingerprint)
+ # NOTE: the invoked method is #fingerprint but it only returns
+ # 16 characters (the format used by keyid) instead of 40.
+ gpg_key = find_gpg_key(verified_signature.fingerprint)
if gpg_key
Gitlab::Gpg::CurrentKeyChain.add(gpg_key.key)
@@ -74,7 +76,7 @@ module Gitlab
commit_sha: @commit.sha,
project: @commit.project,
gpg_key: gpg_key,
- gpg_key_primary_keyid: gpg_key&.primary_keyid || verified_signature.fingerprint,
+ gpg_key_primary_keyid: gpg_key&.keyid || verified_signature.fingerprint,
gpg_key_user_name: user_infos[:name],
gpg_key_user_email: user_infos[:email],
verification_status: verification_status
@@ -98,6 +100,10 @@ module Gitlab
def user_infos(gpg_key)
gpg_key&.verified_user_infos&.first || gpg_key&.user_infos&.first || {}
end
+
+ def find_gpg_key(keyid)
+ GpgKey.find_by(primary_keyid: keyid) || GpgKeySubkey.find_by(keyid: keyid)
+ end
end
end
end
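Background for the NOTE above: for a v4 OpenPGP key the 16-character keyid is the low 64 bits of the fingerprint, i.e. its last 16 hex characters, which is why the same value can be matched against both primary keyids and subkey keyids. Small illustration using the subkey fingerprint that appears in the specs:

    fingerprint = '0522DD29B98F167CD8421752E38FFCAF75ABD92A'
    keyid = fingerprint[-16..-1] # => "E38FFCAF75ABD92A"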
diff --git a/lib/gitlab/gpg/invalid_gpg_signature_updater.rb b/lib/gitlab/gpg/invalid_gpg_signature_updater.rb
index e085eab26c9..b7fb9dde2bc 100644
--- a/lib/gitlab/gpg/invalid_gpg_signature_updater.rb
+++ b/lib/gitlab/gpg/invalid_gpg_signature_updater.rb
@@ -9,7 +9,7 @@ module Gitlab
GpgSignature
.select(:id, :commit_sha, :project_id)
.where('gpg_key_id IS NULL OR verification_status <> ?', GpgSignature.verification_statuses[:verified])
- .where(gpg_key_primary_keyid: @gpg_key.primary_keyid)
+ .where(gpg_key_primary_keyid: @gpg_key.keyids)
.find_each { |sig| sig.gpg_commit.update_signature!(sig) }
end
end
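GpgKey#keyids is defined in app/models/gpg_key.rb (part of this change, not shown here); the assumption is that it returns the primary keyid plus all subkey keyids, so signatures made with any of them are re-checked:

    # Assumed shape of the model method, for illustration only.
    def keyids
      [primary_keyid] + subkeys.map(&:keyid)
    end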
diff --git a/lib/rspec_flaky/config.rb b/lib/rspec_flaky/config.rb
new file mode 100644
index 00000000000..a17ae55910e
--- /dev/null
+++ b/lib/rspec_flaky/config.rb
@@ -0,0 +1,21 @@
+require 'json'
+
+module RspecFlaky
+ class Config
+ def self.generate_report?
+ ENV['FLAKY_RSPEC_GENERATE_REPORT'] == 'true'
+ end
+
+ def self.suite_flaky_examples_report_path
+ ENV['SUITE_FLAKY_RSPEC_REPORT_PATH'] || Rails.root.join("rspec_flaky/suite-report.json")
+ end
+
+ def self.flaky_examples_report_path
+ ENV['FLAKY_RSPEC_REPORT_PATH'] || Rails.root.join("rspec_flaky/report.json")
+ end
+
+ def self.new_flaky_examples_report_path
+ ENV['NEW_FLAKY_RSPEC_REPORT_PATH'] || Rails.root.join("rspec_flaky/new-report.json")
+ end
+ end
+end
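Usage sketch: the report locations are controlled entirely through environment variables, so a CI job can redirect them without touching the code (paths below are illustrative):

    # FLAKY_RSPEC_GENERATE_REPORT=true FLAKY_RSPEC_REPORT_PATH=tmp/flaky/report.json bin/rspec
    RspecFlaky::Config.generate_report?           # => true
    RspecFlaky::Config.flaky_examples_report_path # => "tmp/flaky/report.json"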
diff --git a/lib/rspec_flaky/flaky_example.rb b/lib/rspec_flaky/flaky_example.rb
index f81fb90e870..6be24014d89 100644
--- a/lib/rspec_flaky/flaky_example.rb
+++ b/lib/rspec_flaky/flaky_example.rb
@@ -9,24 +9,21 @@ module RspecFlaky
line: example.line,
description: example.description,
last_attempts_count: example.attempts,
- flaky_reports: 1)
+ flaky_reports: 0)
else
super
end
end
- def first_flaky_at
- self[:first_flaky_at] || Time.now
- end
-
- def last_flaky_at
- Time.now
- end
+ def update_flakiness!(last_attempts_count: nil)
+ self.first_flaky_at ||= Time.now
+ self.last_flaky_at = Time.now
+ self.flaky_reports += 1
+ self.last_attempts_count = last_attempts_count if last_attempts_count
- def last_flaky_job
- return unless ENV['CI_PROJECT_URL'] && ENV['CI_JOB_ID']
-
- "#{ENV['CI_PROJECT_URL']}/-/jobs/#{ENV['CI_JOB_ID']}"
+ if ENV['CI_PROJECT_URL'] && ENV['CI_JOB_ID']
+ self.last_flaky_job = "#{ENV['CI_PROJECT_URL']}/-/jobs/#{ENV['CI_JOB_ID']}"
+ end
end
def to_h
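A minimal usage sketch of the reworked API: a FlakyExample built from a report hash starts from the stored counters, and update_flakiness! bumps them in place instead of the caller mutating attributes:

    flaky = RspecFlaky::FlakyExample.new(
      example_id: 'spec/foo/bar_spec.rb:2',
      first_flaky_at: Time.at(1234),
      flaky_reports: 1)

    flaky.update_flakiness!(last_attempts_count: 2)
    flaky.flaky_reports       # => 2
    flaky.last_attempts_count # => 2
    flaky.last_flaky_at       # => the current time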
diff --git a/lib/rspec_flaky/flaky_examples_collection.rb b/lib/rspec_flaky/flaky_examples_collection.rb
new file mode 100644
index 00000000000..973c95b0212
--- /dev/null
+++ b/lib/rspec_flaky/flaky_examples_collection.rb
@@ -0,0 +1,37 @@
+require 'json'
+
+module RspecFlaky
+ class FlakyExamplesCollection < SimpleDelegator
+ def self.from_json(json)
+ new(JSON.parse(json))
+ end
+
+ def initialize(collection = {})
+ unless collection.is_a?(Hash)
+ raise ArgumentError, "`collection` must be a Hash, #{collection.class} given!"
+ end
+
+ collection_of_flaky_examples =
+ collection.map do |uid, example|
+ [
+ uid,
+ example.is_a?(RspecFlaky::FlakyExample) ? example : RspecFlaky::FlakyExample.new(example)
+ ]
+ end
+
+ super(Hash[collection_of_flaky_examples])
+ end
+
+ def to_report
+ Hash[map { |uid, example| [uid, example.to_h] }].deep_symbolize_keys
+ end
+
+ def -(other)
+ unless other.respond_to?(:key?)
+ raise ArgumentError, "`other` must respond to `#key?`, #{other.class} does not!"
+ end
+
+ self.class.new(reject { |uid, _| other.key?(uid) })
+ end
+ end
+end
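Usage sketch of the collection subtraction, which is how the listener later derives the examples that only became flaky in the current run:

    suite   = RspecFlaky::FlakyExamplesCollection.new(a: { example_id: 'spec/a_spec.rb:1' })
    current = RspecFlaky::FlakyExamplesCollection.new(
      a: { example_id: 'spec/a_spec.rb:1' },
      b: { example_id: 'spec/b_spec.rb:2' })

    (current - suite).to_report.keys # => [:b]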
diff --git a/lib/rspec_flaky/listener.rb b/lib/rspec_flaky/listener.rb
index ec2fbd9e36c..4a5bfec9967 100644
--- a/lib/rspec_flaky/listener.rb
+++ b/lib/rspec_flaky/listener.rb
@@ -2,11 +2,15 @@ require 'json'
module RspecFlaky
class Listener
- attr_reader :all_flaky_examples, :new_flaky_examples
-
- def initialize
- @new_flaky_examples = {}
- @all_flaky_examples = init_all_flaky_examples
+ # - suite_flaky_examples: contains all the currently tracked flaky examples
+ # for the whole RSpec suite
+ # - flaky_examples: contains the examples detected as flaky during the
+ # current RSpec run
+ attr_reader :suite_flaky_examples, :flaky_examples
+
+ def initialize(suite_flaky_examples_json = nil)
+ @flaky_examples = FlakyExamplesCollection.new
+ @suite_flaky_examples = init_suite_flaky_examples(suite_flaky_examples_json)
end
def example_passed(notification)
@@ -14,29 +18,21 @@ module RspecFlaky
return unless current_example.attempts > 1
- flaky_example_hash = all_flaky_examples[current_example.uid]
-
- all_flaky_examples[current_example.uid] =
- if flaky_example_hash
- FlakyExample.new(flaky_example_hash).tap do |ex|
- ex.last_attempts_count = current_example.attempts
- ex.flaky_reports += 1
- end
- else
- FlakyExample.new(current_example).tap do |ex|
- new_flaky_examples[current_example.uid] = ex
- end
- end
+ flaky_example = suite_flaky_examples.fetch(current_example.uid) { FlakyExample.new(current_example) }
+ flaky_example.update_flakiness!(last_attempts_count: current_example.attempts)
+
+ flaky_examples[current_example.uid] = flaky_example
end
def dump_summary(_)
- write_report_file(all_flaky_examples, all_flaky_examples_report_path)
+ write_report_file(flaky_examples, RspecFlaky::Config.flaky_examples_report_path)
+ new_flaky_examples = flaky_examples - suite_flaky_examples
if new_flaky_examples.any?
Rails.logger.warn "\nNew flaky examples detected:\n"
- Rails.logger.warn JSON.pretty_generate(to_report(new_flaky_examples))
+ Rails.logger.warn JSON.pretty_generate(new_flaky_examples.to_report)
- write_report_file(new_flaky_examples, new_flaky_examples_report_path)
+ write_report_file(new_flaky_examples, RspecFlaky::Config.new_flaky_examples_report_path)
end
end
@@ -46,30 +42,23 @@ module RspecFlaky
private
- def init_all_flaky_examples
- return {} unless File.exist?(all_flaky_examples_report_path)
+ def init_suite_flaky_examples(suite_flaky_examples_json = nil)
+ unless suite_flaky_examples_json
+ return {} unless File.exist?(RspecFlaky::Config.suite_flaky_examples_report_path)
- all_flaky_examples = JSON.parse(File.read(all_flaky_examples_report_path))
+ suite_flaky_examples_json = File.read(RspecFlaky::Config.suite_flaky_examples_report_path)
+ end
- Hash[(all_flaky_examples || {}).map { |k, ex| [k, FlakyExample.new(ex)] }]
+ FlakyExamplesCollection.from_json(suite_flaky_examples_json)
end
- def write_report_file(examples, file_path)
- return unless ENV['FLAKY_RSPEC_GENERATE_REPORT'] == 'true'
+ def write_report_file(examples_collection, file_path)
+ return unless RspecFlaky::Config.generate_report?
report_path_dir = File.dirname(file_path)
FileUtils.mkdir_p(report_path_dir) unless Dir.exist?(report_path_dir)
- File.write(file_path, JSON.pretty_generate(to_report(examples)))
- end
-
- def all_flaky_examples_report_path
- @all_flaky_examples_report_path ||= ENV['ALL_FLAKY_RSPEC_REPORT_PATH'] ||
- Rails.root.join("rspec_flaky/all-report.json")
- end
- def new_flaky_examples_report_path
- @new_flaky_examples_report_path ||= ENV['NEW_FLAKY_RSPEC_REPORT_PATH'] ||
- Rails.root.join("rspec_flaky/new-report.json")
+ File.write(file_path, JSON.pretty_generate(examples_collection.to_report))
end
end
end
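The listener is registered from the RSpec configuration; the actual hook lives in spec/spec_helper.rb, which is not part of this diff, so the wiring below is a sketch:

    RSpec.configure do |config|
      config.reporter.register_listener(
        RspecFlaky::Listener.new, :example_passed, :dump_summary)
    end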
diff --git a/lib/tasks/gitlab/dev.rake b/lib/tasks/gitlab/dev.rake
index 3eade7bf553..b4d05f5995a 100644
--- a/lib/tasks/gitlab/dev.rake
+++ b/lib/tasks/gitlab/dev.rake
@@ -4,7 +4,10 @@ namespace :gitlab do
task :ee_compat_check, [:branch] => :environment do |_, args|
opts =
if ENV['CI']
- { branch: ENV['CI_COMMIT_REF_NAME'] }
+ {
+ ce_repo: ENV['CI_REPOSITORY_URL'],
+ branch: ENV['CI_COMMIT_REF_NAME']
+ }
else
unless args[:branch]
puts "Must specify a branch as an argument".color(:red)
diff --git a/spec/factories/ci/pipelines.rb b/spec/factories/ci/pipelines.rb
index e5ea6b41ea3..f994c2df821 100644
--- a/spec/factories/ci/pipelines.rb
+++ b/spec/factories/ci/pipelines.rb
@@ -47,6 +47,7 @@ FactoryGirl.define do
trait :invalid do
config(rspec: nil)
+ failure_reason :config_error
end
trait :blocked do
diff --git a/spec/factories/gpg_key_subkeys.rb b/spec/factories/gpg_key_subkeys.rb
new file mode 100644
index 00000000000..66ecb44d84b
--- /dev/null
+++ b/spec/factories/gpg_key_subkeys.rb
@@ -0,0 +1,10 @@
+require_relative '../support/gpg_helpers'
+
+FactoryGirl.define do
+ factory :gpg_key_subkey do
+ gpg_key
+
+ sequence(:keyid) { |n| "keyid-#{n}" }
+ sequence(:fingerprint) { |n| "fingerprint-#{n}" }
+ end
+end
diff --git a/spec/factories/gpg_keys.rb b/spec/factories/gpg_keys.rb
index 1258dce8940..93218e5763e 100644
--- a/spec/factories/gpg_keys.rb
+++ b/spec/factories/gpg_keys.rb
@@ -4,5 +4,9 @@ FactoryGirl.define do
factory :gpg_key do
key GpgHelpers::User1.public_key
user
+
+ factory :gpg_key_with_subkeys do
+ key GpgHelpers::User1.public_key_with_extra_signing_key
+ end
end
end
diff --git a/spec/factories/gpg_signature.rb b/spec/factories/gpg_signature.rb
index c0beecf0bea..e9798ff6a41 100644
--- a/spec/factories/gpg_signature.rb
+++ b/spec/factories/gpg_signature.rb
@@ -5,7 +5,7 @@ FactoryGirl.define do
commit_sha { Digest::SHA1.hexdigest(SecureRandom.hex) }
project
gpg_key
- gpg_key_primary_keyid { gpg_key.primary_keyid }
+ gpg_key_primary_keyid { gpg_key.keyid }
verification_status :verified
end
end
diff --git a/spec/features/profiles/gpg_keys_spec.rb b/spec/features/profiles/gpg_keys_spec.rb
index b0f6848bc4b..59233e92f93 100644
--- a/spec/features/profiles/gpg_keys_spec.rb
+++ b/spec/features/profiles/gpg_keys_spec.rb
@@ -20,6 +20,18 @@ feature 'Profile > GPG Keys' do
expect(page).to have_content('bette.cartwright@example.net Unverified')
expect(page).to have_content(GpgHelpers::User2.fingerprint)
end
+
+ scenario 'with multiple subkeys' do
+ fill_in('Key', with: GpgHelpers::User3.public_key)
+ click_button('Add key')
+
+ expect(page).to have_content('john.doe@example.com Unverified')
+ expect(page).to have_content(GpgHelpers::User3.fingerprint)
+
+ GpgHelpers::User3.subkey_fingerprints.each do |fingerprint|
+ expect(page).to have_content(fingerprint)
+ end
+ end
end
scenario 'User sees their key' do
diff --git a/spec/features/projects/pipelines/pipelines_spec.rb b/spec/features/projects/pipelines/pipelines_spec.rb
index f7b40cb1820..92486d2bc57 100644
--- a/spec/features/projects/pipelines/pipelines_spec.rb
+++ b/spec/features/projects/pipelines/pipelines_spec.rb
@@ -162,6 +162,16 @@ describe 'Pipelines', :js do
expect(page).to have_selector(
%Q{span[data-original-title="#{pipeline.yaml_errors}"]})
end
+
+ it 'contains badge that indicates failure reason' do
+ expect(page).to have_content 'error'
+ end
+
+ it 'contains badge with tooltip which contains failure reason' do
+ expect(pipeline.failure_reason?).to eq true
+ expect(page).to have_selector(
+ %Q{span[data-original-title="#{pipeline.present.failure_reason}"]})
+ end
end
context 'with manual actions' do
diff --git a/spec/initializers/secret_token_spec.rb b/spec/initializers/secret_token_spec.rb
index 84ad55e9f98..d56e14e0e0b 100644
--- a/spec/initializers/secret_token_spec.rb
+++ b/spec/initializers/secret_token_spec.rb
@@ -36,10 +36,10 @@ describe 'create_tokens' do
expect(keys).to all(match(HEX_KEY))
end
- it 'generates an RSA key for jws_private_key' do
+ it 'generates an RSA key for openid_connect_signing_key' do
create_tokens
- keys = secrets.values_at(:jws_private_key)
+ keys = secrets.values_at(:openid_connect_signing_key)
expect(keys.uniq).to eq(keys)
expect(keys).to all(match(RSA_KEY))
@@ -49,7 +49,7 @@ describe 'create_tokens' do
expect(self).to receive(:warn_missing_secret).with('secret_key_base')
expect(self).to receive(:warn_missing_secret).with('otp_key_base')
expect(self).to receive(:warn_missing_secret).with('db_key_base')
- expect(self).to receive(:warn_missing_secret).with('jws_private_key')
+ expect(self).to receive(:warn_missing_secret).with('openid_connect_signing_key')
create_tokens
end
@@ -61,7 +61,7 @@ describe 'create_tokens' do
expect(new_secrets['secret_key_base']).to eq(secrets.secret_key_base)
expect(new_secrets['otp_key_base']).to eq(secrets.otp_key_base)
expect(new_secrets['db_key_base']).to eq(secrets.db_key_base)
- expect(new_secrets['jws_private_key']).to eq(secrets.jws_private_key)
+ expect(new_secrets['openid_connect_signing_key']).to eq(secrets.openid_connect_signing_key)
end
create_tokens
@@ -77,7 +77,7 @@ describe 'create_tokens' do
context 'when the other secrets all exist' do
before do
secrets.db_key_base = 'db_key_base'
- secrets.jws_private_key = 'jws_private_key'
+ secrets.openid_connect_signing_key = 'openid_connect_signing_key'
allow(File).to receive(:exist?).with('.secret').and_return(true)
allow(File).to receive(:read).with('.secret').and_return('file_key')
@@ -88,7 +88,7 @@ describe 'create_tokens' do
stub_env('SECRET_KEY_BASE', 'env_key')
secrets.secret_key_base = 'secret_key_base'
secrets.otp_key_base = 'otp_key_base'
- secrets.jws_private_key = 'jws_private_key'
+ secrets.openid_connect_signing_key = 'openid_connect_signing_key'
end
it 'does not issue a warning' do
@@ -114,7 +114,7 @@ describe 'create_tokens' do
before do
secrets.secret_key_base = 'secret_key_base'
secrets.otp_key_base = 'otp_key_base'
- secrets.jws_private_key = 'jws_private_key'
+ secrets.openid_connect_signing_key = 'openid_connect_signing_key'
end
it 'does not write any files' do
@@ -129,7 +129,7 @@ describe 'create_tokens' do
expect(secrets.secret_key_base).to eq('secret_key_base')
expect(secrets.otp_key_base).to eq('otp_key_base')
expect(secrets.db_key_base).to eq('db_key_base')
- expect(secrets.jws_private_key).to eq('jws_private_key')
+ expect(secrets.openid_connect_signing_key).to eq('openid_connect_signing_key')
end
it 'deletes the .secret file' do
@@ -153,7 +153,7 @@ describe 'create_tokens' do
expect(new_secrets['secret_key_base']).to eq('file_key')
expect(new_secrets['otp_key_base']).to eq('file_key')
expect(new_secrets['db_key_base']).to eq('db_key_base')
- expect(new_secrets['jws_private_key']).to eq('jws_private_key')
+ expect(new_secrets['openid_connect_signing_key']).to eq('openid_connect_signing_key')
end
create_tokens
diff --git a/spec/javascripts/cycle_analytics/total_time_component_spec.js b/spec/javascripts/cycle_analytics/total_time_component_spec.js
new file mode 100644
index 00000000000..31b65fd1cde
--- /dev/null
+++ b/spec/javascripts/cycle_analytics/total_time_component_spec.js
@@ -0,0 +1,58 @@
+import Vue from 'vue';
+import component from '~/cycle_analytics/components/total_time_component.vue';
+import mountComponent from '../helpers/vue_mount_component_helper';
+
+describe('Total time component', () => {
+ let vm;
+ let Component;
+
+ beforeEach(() => {
+ Component = Vue.extend(component);
+ });
+
+ afterEach(() => {
+ vm.$destroy();
+ });
+
+ describe('With data', () => {
+ it('should render information for days and hours', () => {
+ vm = mountComponent(Component, {
+ time: {
+ days: 3,
+ hours: 4,
+ },
+ });
+
+ expect(vm.$el.textContent.trim()).toEqual('3 days 4 hrs');
+ });
+
+ it('should render information for hours and minutes', () => {
+ vm = mountComponent(Component, {
+ time: {
+ hours: 4,
+ mins: 35,
+ },
+ });
+
+ expect(vm.$el.textContent.trim()).toEqual('4 hrs 35 mins');
+ });
+
+ it('should render information for seconds', () => {
+ vm = mountComponent(Component, {
+ time: {
+ seconds: 45,
+ },
+ });
+
+ expect(vm.$el.textContent.trim()).toEqual('45 s');
+ });
+ });
+
+ describe('Without data', () => {
+ it('should render no information', () => {
+ vm = mountComponent(Component);
+
+ expect(vm.$el.textContent.trim()).toEqual('--');
+ });
+ });
+});
diff --git a/spec/javascripts/monitoring/graph_path_spec.js b/spec/javascripts/monitoring/graph_path_spec.js
index a4844636d09..81825a3ae87 100644
--- a/spec/javascripts/monitoring/graph_path_spec.js
+++ b/spec/javascripts/monitoring/graph_path_spec.js
@@ -1,5 +1,5 @@
import Vue from 'vue';
-import GraphPath from '~/monitoring/components/graph_path.vue';
+import GraphPath from '~/monitoring/components/graph/path.vue';
import createTimeSeries from '~/monitoring/utils/multiple_time_series';
import { singleRowMetricsMultipleSeries, convertDatesMultipleSeries } from './mock_data';
diff --git a/spec/javascripts/monitoring/graph_spec.js b/spec/javascripts/monitoring/graph_spec.js
index 7213b3bfa30..fd79abe241a 100644
--- a/spec/javascripts/monitoring/graph_spec.js
+++ b/spec/javascripts/monitoring/graph_spec.js
@@ -44,7 +44,7 @@ describe('Graph', () => {
.not.toEqual(-1);
});
- it('outterViewBox gets a width and height property based on the DOM size of the element', () => {
+ it('outerViewBox gets a width and height property based on the DOM size of the element', () => {
const component = createComponent({
graphData: convertedMetrics[1],
classType: 'col-md-6',
@@ -52,8 +52,8 @@ describe('Graph', () => {
deploymentData,
});
- const viewBoxArray = component.outterViewBox.split(' ');
- expect(typeof component.outterViewBox).toEqual('string');
+ const viewBoxArray = component.outerViewBox.split(' ');
+ expect(typeof component.outerViewBox).toEqual('string');
expect(viewBoxArray[2]).toEqual(component.graphWidth.toString());
expect(viewBoxArray[3]).toEqual(component.graphHeight.toString());
});
diff --git a/spec/javascripts/pipelines/pipeline_url_spec.js b/spec/javascripts/pipelines/pipeline_url_spec.js
index 256fdbe743c..4a4f2259d23 100644
--- a/spec/javascripts/pipelines/pipeline_url_spec.js
+++ b/spec/javascripts/pipelines/pipeline_url_spec.js
@@ -125,4 +125,23 @@ describe('Pipeline Url Component', () => {
component.$el.querySelector('.js-pipeline-url-autodevops').textContent.trim(),
).toEqual('Auto DevOps');
});
+
+ it('should render error badge when pipeline has a failure reason set', () => {
+ const component = new PipelineUrlComponent({
+ propsData: {
+ pipeline: {
+ id: 1,
+ path: 'foo',
+ flags: {
+ failure_reason: true,
+ },
+ failure_reason: 'some reason',
+ },
+ autoDevopsHelpPath: 'foo',
+ },
+ }).$mount();
+
+ expect(component.$el.querySelector('.js-pipeline-url-failure').textContent).toContain('error');
+ expect(component.$el.querySelector('.js-pipeline-url-failure').getAttribute('data-original-title')).toContain('some reason');
+ });
});
diff --git a/spec/lib/gitlab/background_migration/create_gpg_key_subkeys_from_gpg_keys_spec.rb b/spec/lib/gitlab/background_migration/create_gpg_key_subkeys_from_gpg_keys_spec.rb
new file mode 100644
index 00000000000..26d48cc8201
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/create_gpg_key_subkeys_from_gpg_keys_spec.rb
@@ -0,0 +1,32 @@
+require 'spec_helper'
+
+describe Gitlab::BackgroundMigration::CreateGpgKeySubkeysFromGpgKeys, :migration, schema: 20171005130944 do
+ context 'when GpgKey exists' do
+ let!(:gpg_key) { create(:gpg_key, key: GpgHelpers::User3.public_key) }
+
+ before do
+ GpgKeySubkey.destroy_all
+ end
+
+ it 'generates the subkeys' do
+ expect do
+ described_class.new.perform(gpg_key.id)
+ end.to change { gpg_key.subkeys.count }.from(0).to(2)
+ end
+
+ it 'schedules the signature update worker' do
+ expect(InvalidGpgSignatureUpdateWorker).to receive(:perform_async).with(gpg_key.id)
+
+ described_class.new.perform(gpg_key.id)
+ end
+ end
+
+ context 'when GpgKey does not exist' do
+ it 'does not do anything' do
+ expect(Gitlab::Gpg).not_to receive(:subkeys_from_key)
+ expect(InvalidGpgSignatureUpdateWorker).not_to receive(:perform_async)
+
+ described_class.new.perform(123)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/validate/config_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/validate/config_spec.rb
index 3740df88f42..8357af38f92 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/validate/config_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/validate/config_spec.rb
@@ -55,6 +55,10 @@ describe Gitlab::Ci::Pipeline::Chain::Validate::Config do
it 'fails the pipeline' do
expect(pipeline.reload).to be_failed
end
+
+ it 'sets a config error failure reason' do
+ expect(pipeline.reload.config_error?).to eq true
+ end
end
context 'when saving incomplete pipeline is not allowed' do
diff --git a/spec/lib/gitlab/ci/stage/seed_spec.rb b/spec/lib/gitlab/ci/stage/seed_spec.rb
index 9ecd128faca..3fe8d50c49a 100644
--- a/spec/lib/gitlab/ci/stage/seed_spec.rb
+++ b/spec/lib/gitlab/ci/stage/seed_spec.rb
@@ -11,6 +11,12 @@ describe Gitlab::Ci::Stage::Seed do
described_class.new(pipeline, 'test', builds)
end
+ describe '#size' do
+ it 'returns the number of jobs in the stage' do
+ expect(subject.size).to eq 2
+ end
+ end
+
describe '#stage' do
it 'returns hash attributes of a stage' do
expect(subject.stage).to be_a Hash
diff --git a/spec/lib/gitlab/git_ref_validator_spec.rb b/spec/lib/gitlab/git_ref_validator_spec.rb
index e1fa8ae03f8..ba7fb168a3b 100644
--- a/spec/lib/gitlab/git_ref_validator_spec.rb
+++ b/spec/lib/gitlab/git_ref_validator_spec.rb
@@ -4,6 +4,7 @@ describe Gitlab::GitRefValidator do
it { expect(described_class.validate('feature/new')).to be_truthy }
it { expect(described_class.validate('implement_@all')).to be_truthy }
it { expect(described_class.validate('my_new_feature')).to be_truthy }
+ it { expect(described_class.validate('my-branch')).to be_truthy }
it { expect(described_class.validate('#1')).to be_truthy }
it { expect(described_class.validate('feature/refs/heads/foo')).to be_truthy }
it { expect(described_class.validate('feature/~new/')).to be_falsey }
@@ -22,4 +23,8 @@ describe Gitlab::GitRefValidator do
it { expect(described_class.validate('refs/remotes/')).to be_falsey }
it { expect(described_class.validate('refs/heads/feature')).to be_falsey }
it { expect(described_class.validate('refs/remotes/origin')).to be_falsey }
+ it { expect(described_class.validate('-')).to be_falsey }
+ it { expect(described_class.validate('-branch')).to be_falsey }
+ it { expect(described_class.validate('.tag')).to be_falsey }
+ it { expect(described_class.validate('my branch')).to be_falsey }
end
diff --git a/spec/lib/gitlab/gpg/commit_spec.rb b/spec/lib/gitlab/gpg/commit_spec.rb
index b07462e4978..a6c99bc07d4 100644
--- a/spec/lib/gitlab/gpg/commit_spec.rb
+++ b/spec/lib/gitlab/gpg/commit_spec.rb
@@ -63,6 +63,45 @@ describe Gitlab::Gpg::Commit do
it_behaves_like 'returns the cached signature on second call'
end
+ context 'commit signed with a subkey' do
+ let!(:commit) { create :commit, project: project, sha: commit_sha, committer_email: GpgHelpers::User3.emails.first }
+
+ let!(:user) { create(:user, email: GpgHelpers::User3.emails.first) }
+
+ let!(:gpg_key) do
+ create :gpg_key, key: GpgHelpers::User3.public_key, user: user
+ end
+
+ let(:gpg_key_subkey) do
+ gpg_key.subkeys.find_by(fingerprint: '0522DD29B98F167CD8421752E38FFCAF75ABD92A')
+ end
+
+ before do
+ allow(Rugged::Commit).to receive(:extract_signature)
+ .with(Rugged::Repository, commit_sha)
+ .and_return(
+ [
+ GpgHelpers::User3.signed_commit_signature,
+ GpgHelpers::User3.signed_commit_base_data
+ ]
+ )
+ end
+
+ it 'returns a valid signature' do
+ expect(described_class.new(commit).signature).to have_attributes(
+ commit_sha: commit_sha,
+ project: project,
+ gpg_key: gpg_key_subkey,
+ gpg_key_primary_keyid: gpg_key_subkey.keyid,
+ gpg_key_user_name: GpgHelpers::User3.names.first,
+ gpg_key_user_email: GpgHelpers::User3.emails.first,
+ verification_status: 'verified'
+ )
+ end
+
+ it_behaves_like 'returns the cached signature on second call'
+ end
+
context 'user email does not match the committer email, but is the same user' do
let!(:commit) { create :commit, project: project, sha: commit_sha, committer_email: GpgHelpers::User2.emails.first }
diff --git a/spec/lib/gitlab/gpg/invalid_gpg_signature_updater_spec.rb b/spec/lib/gitlab/gpg/invalid_gpg_signature_updater_spec.rb
index b9fd4d02156..d6000af0ecd 100644
--- a/spec/lib/gitlab/gpg/invalid_gpg_signature_updater_spec.rb
+++ b/spec/lib/gitlab/gpg/invalid_gpg_signature_updater_spec.rb
@@ -2,17 +2,16 @@ require 'rails_helper'
RSpec.describe Gitlab::Gpg::InvalidGpgSignatureUpdater do
describe '#run' do
- let!(:commit_sha) { '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33' }
- let!(:project) { create :project, :repository, path: 'sample-project' }
+ let(:signature) { [GpgHelpers::User1.signed_commit_signature, GpgHelpers::User1.signed_commit_base_data] }
+ let(:committer_email) { GpgHelpers::User1.emails.first }
+ let!(:commit_sha) { '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33' }
+ let!(:project) { create :project, :repository, path: 'sample-project' }
let!(:raw_commit) do
raw_commit = double(
:raw_commit,
- signature: [
- GpgHelpers::User1.signed_commit_signature,
- GpgHelpers::User1.signed_commit_base_data
- ],
+ signature: signature,
sha: commit_sha,
- committer_email: GpgHelpers::User1.emails.first
+ committer_email: committer_email
)
allow(raw_commit).to receive :save!
@@ -29,12 +28,7 @@ RSpec.describe Gitlab::Gpg::InvalidGpgSignatureUpdater do
allow(Rugged::Commit).to receive(:extract_signature)
.with(Rugged::Repository, commit_sha)
- .and_return(
- [
- GpgHelpers::User1.signed_commit_signature,
- GpgHelpers::User1.signed_commit_base_data
- ]
- )
+ .and_return(signature)
end
context 'gpg signature did have an associated gpg key which was removed later' do
@@ -183,5 +177,34 @@ RSpec.describe Gitlab::Gpg::InvalidGpgSignatureUpdater do
)
end
end
+
+ context 'gpg signature did not have an associated gpg subkey' do
+ let(:signature) { [GpgHelpers::User3.signed_commit_signature, GpgHelpers::User3.signed_commit_base_data] }
+ let(:committer_email) { GpgHelpers::User3.emails.first }
+ let!(:user) { create :user, email: GpgHelpers::User3.emails.first }
+
+ let!(:invalid_gpg_signature) do
+ create :gpg_signature,
+ project: project,
+ commit_sha: commit_sha,
+ gpg_key: nil,
+ gpg_key_primary_keyid: GpgHelpers::User3.subkey_fingerprints.last[24..-1],
+ verification_status: 'unknown_key'
+ end
+
+ it 'updates the signature to being valid when the missing gpg key is added' do
+ # InvalidGpgSignatureUpdater is called by the after_create hook
+ gpg_key = create(:gpg_key, key: GpgHelpers::User3.public_key, user: user)
+ subkey = gpg_key.subkeys.last
+
+ expect(invalid_gpg_signature.reload).to have_attributes(
+ project: project,
+ commit_sha: commit_sha,
+ gpg_key_subkey_id: subkey.id,
+ gpg_key_primary_keyid: subkey.keyid,
+ verification_status: 'verified'
+ )
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/gpg_spec.rb b/spec/lib/gitlab/gpg_spec.rb
index 11a2aea1915..ab9a166db00 100644
--- a/spec/lib/gitlab/gpg_spec.rb
+++ b/spec/lib/gitlab/gpg_spec.rb
@@ -28,6 +28,23 @@ describe Gitlab::Gpg do
end
end
+ describe '.subkeys_from_key' do
+ it 'returns the subkeys by primary key' do
+ all_subkeys = described_class.subkeys_from_key(GpgHelpers::User1.public_key)
+ subkeys = all_subkeys[GpgHelpers::User1.primary_keyid]
+
+ expect(subkeys).to be_present
+ expect(subkeys.first[:keyid]).to be_present
+ expect(subkeys.first[:fingerprint]).to be_present
+ end
+
+ it 'returns an empty array when there are no subkeys' do
+ all_subkeys = described_class.subkeys_from_key(GpgHelpers::User4.public_key)
+
+ expect(all_subkeys[GpgHelpers::User4.primary_keyid]).to be_empty
+ end
+ end
+
describe '.user_infos_from_key' do
it 'returns the names and emails' do
user_infos = described_class.user_infos_from_key(GpgHelpers::User1.public_key)
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index d96ea7b5417..e41300441d6 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -224,6 +224,7 @@ Ci::Pipeline:
- auto_canceled_by_id
- pipeline_schedule_id
- config_source
+- failure_reason
- protected
Ci::Stage:
- id
diff --git a/spec/lib/rspec_flaky/config_spec.rb b/spec/lib/rspec_flaky/config_spec.rb
new file mode 100644
index 00000000000..83556787e85
--- /dev/null
+++ b/spec/lib/rspec_flaky/config_spec.rb
@@ -0,0 +1,102 @@
+require 'spec_helper'
+
+describe RspecFlaky::Config, :aggregate_failures do
+ before do
+ # Stub these env variables otherwise specs don't behave the same on the CI
+ stub_env('FLAKY_RSPEC_GENERATE_REPORT', nil)
+ stub_env('SUITE_FLAKY_RSPEC_REPORT_PATH', nil)
+ stub_env('FLAKY_RSPEC_REPORT_PATH', nil)
+ stub_env('NEW_FLAKY_RSPEC_REPORT_PATH', nil)
+ end
+
+ describe '.generate_report?' do
+ context "when ENV['FLAKY_RSPEC_GENERATE_REPORT'] is not set" do
+ it 'returns false' do
+ expect(described_class).not_to be_generate_report
+ end
+ end
+
+ context "when ENV['FLAKY_RSPEC_GENERATE_REPORT'] is set to 'false'" do
+ before do
+ stub_env('FLAKY_RSPEC_GENERATE_REPORT', 'false')
+ end
+
+ it 'returns false' do
+ expect(described_class).not_to be_generate_report
+ end
+ end
+
+ context "when ENV['FLAKY_RSPEC_GENERATE_REPORT'] is set to 'true'" do
+ before do
+ stub_env('FLAKY_RSPEC_GENERATE_REPORT', 'true')
+ end
+
+ it 'returns true' do
+ expect(described_class).to be_generate_report
+ end
+ end
+ end
+
+ describe '.suite_flaky_examples_report_path' do
+ context "when ENV['SUITE_FLAKY_RSPEC_REPORT_PATH'] is not set" do
+ it 'returns the default path' do
+ expect(Rails.root).to receive(:join).with('rspec_flaky/suite-report.json')
+ .and_return('root/rspec_flaky/suite-report.json')
+
+ expect(described_class.suite_flaky_examples_report_path).to eq('root/rspec_flaky/suite-report.json')
+ end
+ end
+
+ context "when ENV['SUITE_FLAKY_RSPEC_REPORT_PATH'] is set" do
+ before do
+ stub_env('SUITE_FLAKY_RSPEC_REPORT_PATH', 'foo/suite-report.json')
+ end
+
+ it 'returns the value of the env variable' do
+ expect(described_class.suite_flaky_examples_report_path).to eq('foo/suite-report.json')
+ end
+ end
+ end
+
+ describe '.flaky_examples_report_path' do
+ context "when ENV['FLAKY_RSPEC_REPORT_PATH'] is not set" do
+ it 'returns the default path' do
+ expect(Rails.root).to receive(:join).with('rspec_flaky/report.json')
+ .and_return('root/rspec_flaky/report.json')
+
+ expect(described_class.flaky_examples_report_path).to eq('root/rspec_flaky/report.json')
+ end
+ end
+
+ context "when ENV['FLAKY_RSPEC_REPORT_PATH'] is set" do
+ before do
+ stub_env('FLAKY_RSPEC_REPORT_PATH', 'foo/report.json')
+ end
+
+ it 'returns the value of the env variable' do
+ expect(described_class.flaky_examples_report_path).to eq('foo/report.json')
+ end
+ end
+ end
+
+ describe '.new_flaky_examples_report_path' do
+ context "when ENV['NEW_FLAKY_RSPEC_REPORT_PATH'] is not set" do
+ it 'returns the default path' do
+ expect(Rails.root).to receive(:join).with('rspec_flaky/new-report.json')
+ .and_return('root/rspec_flaky/new-report.json')
+
+ expect(described_class.new_flaky_examples_report_path).to eq('root/rspec_flaky/new-report.json')
+ end
+ end
+
+ context "when ENV['NEW_FLAKY_RSPEC_REPORT_PATH'] is set" do
+ before do
+ stub_env('NEW_FLAKY_RSPEC_REPORT_PATH', 'foo/new-report.json')
+ end
+
+ it 'returns the value of the env variable' do
+ expect(described_class.new_flaky_examples_report_path).to eq('foo/new-report.json')
+ end
+ end
+ end
+end
diff --git a/spec/lib/rspec_flaky/flaky_example_spec.rb b/spec/lib/rspec_flaky/flaky_example_spec.rb
index cbfc1e538ab..d19c34bebb3 100644
--- a/spec/lib/rspec_flaky/flaky_example_spec.rb
+++ b/spec/lib/rspec_flaky/flaky_example_spec.rb
@@ -1,6 +1,6 @@
require 'spec_helper'
-describe RspecFlaky::FlakyExample do
+describe RspecFlaky::FlakyExample, :aggregate_failures do
let(:flaky_example_attrs) do
{
example_id: 'spec/foo/bar_spec.rb:2',
@@ -9,6 +9,7 @@ describe RspecFlaky::FlakyExample do
description: 'hello world',
first_flaky_at: 1234,
last_flaky_at: 2345,
+ last_flaky_job: 'https://gitlab.com/gitlab-org/gitlab-ce/-/jobs/12',
last_attempts_count: 2,
flaky_reports: 1
}
@@ -27,57 +28,78 @@ describe RspecFlaky::FlakyExample do
end
let(:example) { double(example_attrs) }
+ before do
+ # Stub these env variables otherwise specs don't behave the same on the CI
+ stub_env('CI_PROJECT_URL', nil)
+ stub_env('CI_JOB_ID', nil)
+ end
+
describe '#initialize' do
shared_examples 'a valid FlakyExample instance' do
- it 'returns valid attributes' do
- flaky_example = described_class.new(args)
+ let(:flaky_example) { described_class.new(args) }
+ it 'returns valid attributes' do
expect(flaky_example.uid).to eq(flaky_example_attrs[:uid])
- expect(flaky_example.example_id).to eq(flaky_example_attrs[:example_id])
+ expect(flaky_example.file).to eq(flaky_example_attrs[:file])
+ expect(flaky_example.line).to eq(flaky_example_attrs[:line])
+ expect(flaky_example.description).to eq(flaky_example_attrs[:description])
+ expect(flaky_example.first_flaky_at).to eq(expected_first_flaky_at)
+ expect(flaky_example.last_flaky_at).to eq(expected_last_flaky_at)
+ expect(flaky_example.last_attempts_count).to eq(flaky_example_attrs[:last_attempts_count])
+ expect(flaky_example.flaky_reports).to eq(expected_flaky_reports)
end
end
context 'when given an Rspec::Example' do
- let(:args) { example }
-
- it_behaves_like 'a valid FlakyExample instance'
+ it_behaves_like 'a valid FlakyExample instance' do
+ let(:args) { example }
+ let(:expected_first_flaky_at) { nil }
+ let(:expected_last_flaky_at) { nil }
+ let(:expected_flaky_reports) { 0 }
+ end
end
context 'when given a hash' do
- let(:args) { flaky_example_attrs }
-
- it_behaves_like 'a valid FlakyExample instance'
+ it_behaves_like 'a valid FlakyExample instance' do
+ let(:args) { flaky_example_attrs }
+ let(:expected_flaky_reports) { flaky_example_attrs[:flaky_reports] }
+ let(:expected_first_flaky_at) { flaky_example_attrs[:first_flaky_at] }
+ let(:expected_last_flaky_at) { flaky_example_attrs[:last_flaky_at] }
+ end
end
end
- describe '#to_h' do
- before do
- # Stub these env variables otherwise specs don't behave the same on the CI
- stub_env('CI_PROJECT_URL', nil)
- stub_env('CI_JOB_ID', nil)
- end
+ describe '#update_flakiness!' do
+ shared_examples 'an up-to-date FlakyExample instance' do
+ let(:flaky_example) { described_class.new(args) }
- shared_examples 'a valid FlakyExample hash' do
- let(:additional_attrs) { {} }
+ it 'updates the first_flaky_at' do
+ now = Time.now
+ expected_first_flaky_at = flaky_example.first_flaky_at ? flaky_example.first_flaky_at : now
+ Timecop.freeze(now) { flaky_example.update_flakiness! }
- it 'returns a valid hash' do
- flaky_example = described_class.new(args)
- final_hash = flaky_example_attrs
- .merge(last_flaky_at: instance_of(Time), last_flaky_job: nil)
- .merge(additional_attrs)
+ expect(flaky_example.first_flaky_at).to eq(expected_first_flaky_at)
+ end
+
+ it 'updates the last_flaky_at' do
+ now = Time.now
+ Timecop.freeze(now) { flaky_example.update_flakiness! }
- expect(flaky_example.to_h).to match(hash_including(final_hash))
+ expect(flaky_example.last_flaky_at).to eq(now)
end
- end
- context 'when given an Rspec::Example' do
- let(:args) { example }
+ it 'updates the flaky_reports' do
+ expected_flaky_reports = flaky_example.first_flaky_at ? flaky_example.flaky_reports + 1 : 1
+
+ expect { flaky_example.update_flakiness! }.to change { flaky_example.flaky_reports }.by(1)
+ expect(flaky_example.flaky_reports).to eq(expected_flaky_reports)
+ end
+
+ context 'when passed a :last_attempts_count' do
+ it 'updates the last_attempts_count' do
+ flaky_example.update_flakiness!(last_attempts_count: 42)
- context 'when run locally' do
- it_behaves_like 'a valid FlakyExample hash' do
- let(:additional_attrs) do
- { first_flaky_at: instance_of(Time) }
- end
+ expect(flaky_example.last_attempts_count).to eq(42)
end
end
@@ -87,10 +109,45 @@ describe RspecFlaky::FlakyExample do
stub_env('CI_JOB_ID', 42)
end
- it_behaves_like 'a valid FlakyExample hash' do
- let(:additional_attrs) do
- { first_flaky_at: instance_of(Time), last_flaky_job: "https://gitlab.com/gitlab-org/gitlab-ce/-/jobs/42" }
- end
+ it 'updates the last_flaky_job' do
+ flaky_example.update_flakiness!
+
+ expect(flaky_example.last_flaky_job).to eq('https://gitlab.com/gitlab-org/gitlab-ce/-/jobs/42')
+ end
+ end
+ end
+
+ context 'when given an Rspec::Example' do
+ it_behaves_like 'an up-to-date FlakyExample instance' do
+ let(:args) { example }
+ end
+ end
+
+ context 'when given a hash' do
+ it_behaves_like 'an up-to-date FlakyExample instance' do
+ let(:args) { flaky_example_attrs }
+ end
+ end
+ end
+
+ describe '#to_h' do
+ shared_examples 'a valid FlakyExample hash' do
+ let(:additional_attrs) { {} }
+
+ it 'returns a valid hash' do
+ flaky_example = described_class.new(args)
+ final_hash = flaky_example_attrs.merge(additional_attrs)
+
+ expect(flaky_example.to_h).to eq(final_hash)
+ end
+ end
+
+ context 'when given an Rspec::Example' do
+ let(:args) { example }
+
+ it_behaves_like 'a valid FlakyExample hash' do
+ let(:additional_attrs) do
+ { first_flaky_at: nil, last_flaky_at: nil, last_flaky_job: nil, flaky_reports: 0 }
end
end
end
diff --git a/spec/lib/rspec_flaky/flaky_examples_collection_spec.rb b/spec/lib/rspec_flaky/flaky_examples_collection_spec.rb
new file mode 100644
index 00000000000..06a8ba0d02e
--- /dev/null
+++ b/spec/lib/rspec_flaky/flaky_examples_collection_spec.rb
@@ -0,0 +1,79 @@
+require 'spec_helper'
+
+describe RspecFlaky::FlakyExamplesCollection, :aggregate_failures do
+ let(:collection_hash) do
+ {
+ a: { example_id: 'spec/foo/bar_spec.rb:2' },
+ b: { example_id: 'spec/foo/baz_spec.rb:3' }
+ }
+ end
+ let(:collection_report) do
+ {
+ a: {
+ example_id: 'spec/foo/bar_spec.rb:2',
+ first_flaky_at: nil,
+ last_flaky_at: nil,
+ last_flaky_job: nil
+ },
+ b: {
+ example_id: 'spec/foo/baz_spec.rb:3',
+ first_flaky_at: nil,
+ last_flaky_at: nil,
+ last_flaky_job: nil
+ }
+ }
+ end
+
+ describe '.from_json' do
+ it 'accepts a JSON' do
+ collection = described_class.from_json(JSON.pretty_generate(collection_hash))
+
+ expect(collection.to_report).to eq(described_class.new(collection_hash).to_report)
+ end
+ end
+
+ describe '#initialize' do
+ it 'accepts no argument' do
+ expect { described_class.new }.not_to raise_error
+ end
+
+ it 'accepts a hash' do
+ expect { described_class.new(collection_hash) }.not_to raise_error
+ end
+
+ it 'does not accept anything else' do
+ expect { described_class.new([1, 2, 3]) }.to raise_error(ArgumentError, "`collection` must be a Hash, Array given!")
+ end
+ end
+
+ describe '#to_report' do
+ it 'calls #to_h on the values' do
+ collection = described_class.new(collection_hash)
+
+ expect(collection.to_report).to eq(collection_report)
+ end
+ end
+
+ describe '#-' do
+ it 'returns only examples that are not present in the given collection' do
+ collection1 = described_class.new(collection_hash)
+ collection2 = described_class.new(
+ a: { example_id: 'spec/foo/bar_spec.rb:2' },
+ c: { example_id: 'spec/bar/baz_spec.rb:4' })
+
+ expect((collection2 - collection1).to_report).to eq(
+ c: {
+ example_id: 'spec/bar/baz_spec.rb:4',
+ first_flaky_at: nil,
+ last_flaky_at: nil,
+ last_flaky_job: nil
+ })
+ end
+
+ it 'fails if the given collection does not respond to `#key?`' do
+ collection = described_class.new(collection_hash)
+
+ expect { collection - [1, 2, 3] }.to raise_error(ArgumentError, "`other` must respond to `#key?`, Array does not!")
+ end
+ end
+end
diff --git a/spec/lib/rspec_flaky/listener_spec.rb b/spec/lib/rspec_flaky/listener_spec.rb
index 0e193bf408b..7590ea9576d 100644
--- a/spec/lib/rspec_flaky/listener_spec.rb
+++ b/spec/lib/rspec_flaky/listener_spec.rb
@@ -1,22 +1,35 @@
require 'spec_helper'
-describe RspecFlaky::Listener do
- let(:flaky_example_report) do
+describe RspecFlaky::Listener, :aggregate_failures do
+ let(:already_flaky_example_uid) { '6e869794f4cfd2badd93eb68719371d1' }
+ let(:suite_flaky_example_report) do
{
- 'abc123' => {
+ already_flaky_example_uid => {
example_id: 'spec/foo/bar_spec.rb:2',
file: 'spec/foo/bar_spec.rb',
line: 2,
description: 'hello world',
first_flaky_at: 1234,
- last_flaky_at: instance_of(Time),
- last_attempts_count: 2,
+ last_flaky_at: 4321,
+ last_attempts_count: 3,
flaky_reports: 1,
last_flaky_job: nil
}
}
end
- let(:example_attrs) do
+ let(:already_flaky_example_attrs) do
+ {
+ id: 'spec/foo/bar_spec.rb:2',
+ metadata: {
+ file_path: 'spec/foo/bar_spec.rb',
+ line_number: 2,
+ full_description: 'hello world'
+ },
+ execution_result: double(status: 'passed', exception: nil)
+ }
+ end
+ let(:already_flaky_example) { RspecFlaky::FlakyExample.new(suite_flaky_example_report[already_flaky_example_uid]) }
+ let(:new_example_attrs) do
{
id: 'spec/foo/baz_spec.rb:3',
metadata: {
@@ -36,14 +49,14 @@ describe RspecFlaky::Listener do
describe '#initialize' do
shared_examples 'a valid Listener instance' do
- let(:expected_all_flaky_examples) { {} }
+ let(:expected_suite_flaky_examples) { {} }
it 'returns a valid Listener instance' do
listener = described_class.new
- expect(listener.to_report(listener.all_flaky_examples))
- .to match(hash_including(expected_all_flaky_examples))
- expect(listener.new_flaky_examples).to eq({})
+ expect(listener.to_report(listener.suite_flaky_examples))
+ .to eq(expected_suite_flaky_examples)
+ expect(listener.flaky_examples).to eq({})
end
end
@@ -51,16 +64,16 @@ describe RspecFlaky::Listener do
it_behaves_like 'a valid Listener instance'
end
- context 'when a report file exists and set by ALL_FLAKY_RSPEC_REPORT_PATH' do
+ context 'when a report file exists and set by SUITE_FLAKY_RSPEC_REPORT_PATH' do
let(:report_file) do
Tempfile.new(%w[rspec_flaky_report .json]).tap do |f|
- f.write(JSON.pretty_generate(flaky_example_report))
+ f.write(JSON.pretty_generate(suite_flaky_example_report))
f.rewind
end
end
before do
- stub_env('ALL_FLAKY_RSPEC_REPORT_PATH', report_file.path)
+ stub_env('SUITE_FLAKY_RSPEC_REPORT_PATH', report_file.path)
end
after do
@@ -69,74 +82,122 @@ describe RspecFlaky::Listener do
end
it_behaves_like 'a valid Listener instance' do
- let(:expected_all_flaky_examples) { flaky_example_report }
+ let(:expected_suite_flaky_examples) { suite_flaky_example_report }
end
end
end
describe '#example_passed' do
- let(:rspec_example) { double(example_attrs) }
+ let(:rspec_example) { double(new_example_attrs) }
let(:notification) { double(example: rspec_example) }
+ let(:listener) { described_class.new(suite_flaky_example_report.to_json) }
shared_examples 'a non-flaky example' do
it 'does not change the flaky examples hash' do
- expect { subject.example_passed(notification) }
- .not_to change { subject.all_flaky_examples }
+ expect { listener.example_passed(notification) }
+ .not_to change { listener.flaky_examples }
end
end
- describe 'when the RSpec example does not respond to attempts' do
- it_behaves_like 'a non-flaky example'
- end
+ shared_examples 'an existing flaky example' do
+ let(:expected_flaky_example) do
+ {
+ example_id: 'spec/foo/bar_spec.rb:2',
+ file: 'spec/foo/bar_spec.rb',
+ line: 2,
+ description: 'hello world',
+ first_flaky_at: 1234,
+ last_attempts_count: 2,
+ flaky_reports: 2,
+ last_flaky_job: nil
+ }
+ end
- describe 'when the RSpec example has 1 attempt' do
- let(:rspec_example) { double(example_attrs.merge(attempts: 1)) }
+ it 'changes the flaky examples hash' do
+ new_example = RspecFlaky::Example.new(rspec_example)
- it_behaves_like 'a non-flaky example'
+ now = Time.now
+ Timecop.freeze(now) do
+ expect { listener.example_passed(notification) }
+ .to change { listener.flaky_examples[new_example.uid].to_h }
+ end
+
+ expect(listener.flaky_examples[new_example.uid].to_h)
+ .to eq(expected_flaky_example.merge(last_flaky_at: now))
+ end
end
- describe 'when the RSpec example has 2 attempts' do
- let(:rspec_example) { double(example_attrs.merge(attempts: 2)) }
- let(:expected_new_flaky_example) do
+ shared_examples 'a new flaky example' do
+ let(:expected_flaky_example) do
{
example_id: 'spec/foo/baz_spec.rb:3',
file: 'spec/foo/baz_spec.rb',
line: 3,
description: 'hello GitLab',
- first_flaky_at: instance_of(Time),
- last_flaky_at: instance_of(Time),
last_attempts_count: 2,
flaky_reports: 1,
last_flaky_job: nil
}
end
- it 'does not change the flaky examples hash' do
- expect { subject.example_passed(notification) }
- .to change { subject.all_flaky_examples }
-
+ it 'changes the flaky examples hash' do
new_example = RspecFlaky::Example.new(rspec_example)
- expect(subject.all_flaky_examples[new_example.uid].to_h)
- .to match(hash_including(expected_new_flaky_example))
+ now = Time.now
+ Timecop.freeze(now) do
+ expect { listener.example_passed(notification) }
+ .to change { listener.flaky_examples[new_example.uid].to_h }
+ end
+
+ expect(listener.flaky_examples[new_example.uid].to_h)
+ .to eq(expected_flaky_example.merge(first_flaky_at: now, last_flaky_at: now))
+ end
+ end
+
+ describe 'when the RSpec example does not respond to attempts' do
+ it_behaves_like 'a non-flaky example'
+ end
+
+ describe 'when the RSpec example has 1 attempt' do
+ let(:rspec_example) { double(new_example_attrs.merge(attempts: 1)) }
+
+ it_behaves_like 'a non-flaky example'
+ end
+
+ describe 'when the RSpec example has 2 attempts' do
+ let(:rspec_example) { double(new_example_attrs.merge(attempts: 2)) }
+
+ it_behaves_like 'a new flaky example'
+
+ context 'with an existing flaky example' do
+ let(:rspec_example) { double(already_flaky_example_attrs.merge(attempts: 2)) }
+
+ it_behaves_like 'an existing flaky example'
end
end
end
describe '#dump_summary' do
- let(:rspec_example) { double(example_attrs) }
- let(:notification) { double(example: rspec_example) }
+ let(:listener) { described_class.new(suite_flaky_example_report.to_json) }
+ let(:new_flaky_rspec_example) { double(new_example_attrs.merge(attempts: 2)) }
+ let(:already_flaky_rspec_example) { double(already_flaky_example_attrs.merge(attempts: 2)) }
+ let(:notification_new_flaky_rspec_example) { double(example: new_flaky_rspec_example) }
+ let(:notification_already_flaky_rspec_example) { double(example: already_flaky_rspec_example) }
- context 'when a report file path is set by ALL_FLAKY_RSPEC_REPORT_PATH' do
+ context 'when a report file path is set by FLAKY_RSPEC_REPORT_PATH' do
let(:report_file_path) { Rails.root.join('tmp', 'rspec_flaky_report.json') }
+ let(:new_report_file_path) { Rails.root.join('tmp', 'rspec_flaky_new_report.json') }
before do
- stub_env('ALL_FLAKY_RSPEC_REPORT_PATH', report_file_path)
+ stub_env('FLAKY_RSPEC_REPORT_PATH', report_file_path)
+ stub_env('NEW_FLAKY_RSPEC_REPORT_PATH', new_report_file_path)
FileUtils.rm(report_file_path) if File.exist?(report_file_path)
+ FileUtils.rm(new_report_file_path) if File.exist?(new_report_file_path)
end
after do
FileUtils.rm(report_file_path) if File.exist?(report_file_path)
+ FileUtils.rm(new_report_file_path) if File.exist?(new_report_file_path)
end
context 'when FLAKY_RSPEC_GENERATE_REPORT == "false"' do
@@ -144,12 +205,13 @@ describe RspecFlaky::Listener do
stub_env('FLAKY_RSPEC_GENERATE_REPORT', 'false')
end
- it 'does not write the report file' do
- subject.example_passed(notification)
+ it 'does not write any report file' do
+ listener.example_passed(notification_new_flaky_rspec_example)
- subject.dump_summary(nil)
+ listener.dump_summary(nil)
expect(File.exist?(report_file_path)).to be(false)
+ expect(File.exist?(new_report_file_path)).to be(false)
end
end
@@ -158,21 +220,39 @@ describe RspecFlaky::Listener do
stub_env('FLAKY_RSPEC_GENERATE_REPORT', 'true')
end
- it 'writes the report file' do
- subject.example_passed(notification)
+ around do |example|
+ Timecop.freeze { example.run }
+ end
+
+ it 'writes the report files' do
+ listener.example_passed(notification_new_flaky_rspec_example)
+ listener.example_passed(notification_already_flaky_rspec_example)
- subject.dump_summary(nil)
+ listener.dump_summary(nil)
expect(File.exist?(report_file_path)).to be(true)
+ expect(File.exist?(new_report_file_path)).to be(true)
+
+ expect(File.read(report_file_path))
+ .to eq(JSON.pretty_generate(listener.to_report(listener.flaky_examples)))
+
+ new_example = RspecFlaky::Example.new(notification_new_flaky_rspec_example)
+ new_flaky_example = RspecFlaky::FlakyExample.new(new_example)
+ new_flaky_example.update_flakiness!
+
+ expect(File.read(new_report_file_path))
+ .to eq(JSON.pretty_generate(listener.to_report(new_example.uid => new_flaky_example)))
end
end
end
end
describe '#to_report' do
+ let(:listener) { described_class.new(suite_flaky_example_report.to_json) }
+
it 'transforms the internal hash to a JSON-ready hash' do
- expect(subject.to_report('abc123' => RspecFlaky::FlakyExample.new(flaky_example_report['abc123'])))
- .to match(hash_including(flaky_example_report))
+ expect(listener.to_report(already_flaky_example_uid => already_flaky_example))
+ .to match(hash_including(suite_flaky_example_report))
end
end
end
diff --git a/spec/migrations/schedule_create_gpg_key_subkeys_from_gpg_keys_spec.rb b/spec/migrations/schedule_create_gpg_key_subkeys_from_gpg_keys_spec.rb
new file mode 100644
index 00000000000..0e884a7d910
--- /dev/null
+++ b/spec/migrations/schedule_create_gpg_key_subkeys_from_gpg_keys_spec.rb
@@ -0,0 +1,43 @@
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20171005130944_schedule_create_gpg_key_subkeys_from_gpg_keys')
+
+describe ScheduleCreateGpgKeySubkeysFromGpgKeys, :migration, :sidekiq do
+ matcher :be_scheduled_migration do |*expected|
+ match do |migration|
+ BackgroundMigrationWorker.jobs.any? do |job|
+ job['args'] == [migration, expected]
+ end
+ end
+
+ failure_message do |migration|
+ "Migration `#{migration}` with args `#{expected.inspect}` not scheduled!"
+ end
+ end
+
+ before do
+ create(:gpg_key, id: 1, key: GpgHelpers::User1.public_key)
+ create(:gpg_key, id: 2, key: GpgHelpers::User3.public_key)
+ # Delete all subkeys so they can be recreated
+ GpgKeySubkey.destroy_all
+ end
+
+ it 'correctly schedules background migrations' do
+ Sidekiq::Testing.fake! do
+ migrate!
+
+ expect(described_class::MIGRATION).to be_scheduled_migration(1)
+ expect(described_class::MIGRATION).to be_scheduled_migration(2)
+ expect(BackgroundMigrationWorker.jobs.size).to eq(2)
+ end
+ end
+
+ it 'schedules background migrations' do
+ Sidekiq::Testing.inline! do
+ expect(GpgKeySubkey.count).to eq(0)
+
+ migrate!
+
+ expect(GpgKeySubkey.count).to eq(3)
+ end
+ end
+end
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index 9c1e460ab20..2c9e7013b77 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -238,7 +238,7 @@ describe Ci::Pipeline, :mailer do
describe '#stage_seeds' do
let(:pipeline) do
- create(:ci_pipeline, config: { rspec: { script: 'rake' } })
+ build(:ci_pipeline, config: { rspec: { script: 'rake' } })
end
it 'returns preseeded stage seeds object' do
@@ -247,6 +247,14 @@ describe Ci::Pipeline, :mailer do
end
end
+ describe '#seeds_size' do
+ let(:pipeline) { build(:ci_pipeline_with_one_job) }
+
+ it 'returns the number of jobs in stage seeds' do
+ expect(pipeline.seeds_size).to eq 1
+ end
+ end
+
describe '#legacy_stages' do
subject { pipeline.legacy_stages }
diff --git a/spec/models/concerns/has_status_spec.rb b/spec/models/concerns/has_status_spec.rb
index a38f2553eb1..6866b43432c 100644
--- a/spec/models/concerns/has_status_spec.rb
+++ b/spec/models/concerns/has_status_spec.rb
@@ -231,6 +231,18 @@ describe HasStatus do
end
end
+ describe '.alive' do
+ subject { CommitStatus.alive }
+
+ %i[running pending created].each do |status|
+ it_behaves_like 'containing the job', status
+ end
+
+ %i[failed success].each do |status|
+ it_behaves_like 'not containing the job', status
+ end
+ end
+
describe '.created_or_pending' do
subject { CommitStatus.created_or_pending }
diff --git a/spec/models/gpg_key_spec.rb b/spec/models/gpg_key_spec.rb
index 743f2cfcab5..33e6f1de3d1 100644
--- a/spec/models/gpg_key_spec.rb
+++ b/spec/models/gpg_key_spec.rb
@@ -3,6 +3,7 @@ require 'rails_helper'
describe GpgKey do
describe "associations" do
it { is_expected.to belong_to(:user) }
+ it { is_expected.to have_many(:subkeys) }
end
describe "validation" do
@@ -38,6 +39,14 @@ describe GpgKey do
expect(gpg_key.primary_keyid).to eq GpgHelpers::User1.primary_keyid
end
end
+
+ describe 'generate_subkeys' do
+ it 'extracts the subkeys from the gpg key' do
+ gpg_key = create(:gpg_key, key: GpgHelpers::User1.public_key_with_extra_signing_key)
+
+ expect(gpg_key.subkeys.count).to eq(2)
+ end
+ end
end
describe '#key=' do
@@ -182,5 +191,29 @@ describe GpgKey do
expect(unrelated_gpg_key.destroyed?).to be false
end
+
+ it 'deletes all the associated subkeys' do
+ gpg_key = create :gpg_key, key: GpgHelpers::User3.public_key
+
+ expect(gpg_key.subkeys).to be_present
+
+ gpg_key.revoke
+
+ expect(gpg_key.subkeys(true)).to be_blank
+ end
+
+ it 'invalidates all signatures associated to the subkeys' do
+ gpg_key = create :gpg_key, key: GpgHelpers::User3.public_key
+ gpg_key_subkey = gpg_key.subkeys.last
+ gpg_signature = create :gpg_signature, verification_status: :verified, gpg_key: gpg_key_subkey
+
+ gpg_key.revoke
+
+ expect(gpg_signature.reload).to have_attributes(
+ verification_status: 'unknown_key',
+ gpg_key: nil,
+ gpg_key_subkey: nil
+ )
+ end
end
end
diff --git a/spec/models/gpg_key_subkey_spec.rb b/spec/models/gpg_key_subkey_spec.rb
new file mode 100644
index 00000000000..3c86837f47f
--- /dev/null
+++ b/spec/models/gpg_key_subkey_spec.rb
@@ -0,0 +1,15 @@
+require 'rails_helper'
+
+describe GpgKeySubkey do
+ subject { build(:gpg_key_subkey) }
+
+ describe 'associations' do
+ it { is_expected.to belong_to(:gpg_key) }
+ end
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:gpg_key_id) }
+ it { is_expected.to validate_presence_of(:fingerprint) }
+ it { is_expected.to validate_presence_of(:keyid) }
+ end
+end
diff --git a/spec/models/gpg_signature_spec.rb b/spec/models/gpg_signature_spec.rb
index c58fd46762a..db033016c37 100644
--- a/spec/models/gpg_signature_spec.rb
+++ b/spec/models/gpg_signature_spec.rb
@@ -1,9 +1,13 @@
require 'rails_helper'
RSpec.describe GpgSignature do
+ let(:gpg_key) { create(:gpg_key) }
+ let(:gpg_key_subkey) { create(:gpg_key_subkey) }
+
describe 'associations' do
it { is_expected.to belong_to(:project) }
it { is_expected.to belong_to(:gpg_key) }
+ it { is_expected.to belong_to(:gpg_key_subkey) }
end
describe 'validation' do
@@ -25,4 +29,26 @@ RSpec.describe GpgSignature do
gpg_signature.commit
end
end
+
+ describe '#gpg_key=' do
+ it 'supports the assignment of a GpgKey' do
+ gpg_signature = create(:gpg_signature, gpg_key: gpg_key)
+
+ expect(gpg_signature.gpg_key).to be_an_instance_of(GpgKey)
+ end
+
+ it 'supports the assignment of a GpgKeySubkey' do
+ gpg_signature = create(:gpg_signature, gpg_key: gpg_key_subkey)
+
+ expect(gpg_signature.gpg_key).to be_an_instance_of(GpgKeySubkey)
+ end
+
+ it 'clears gpg_key_id and gpg_key_subkey_id when passing nil' do
+ gpg_signature = create(:gpg_signature, gpg_key: gpg_key_subkey)
+ gpg_signature.update_attribute(:gpg_key, nil)
+
+ expect(gpg_signature.gpg_key_id).to be_nil
+ expect(gpg_signature.gpg_key_subkey_id).to be_nil
+ end
+ end
end
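Editor's note: the three examples above pin down a custom writer on GpgSignature that accepts either a GpgKey or a GpgKeySubkey and clears both foreign keys when given nil. The diff does not include app/models/gpg_signature.rb itself, so the following is only a minimal sketch of a writer/reader pair that would satisfy these expectations; the dispatch logic and the reader fallback are assumptions, not the repository's actual implementation.

# Editorial sketch only -- not part of this diff. Assumes ActiveRecord models
# named GpgKey and GpgKeySubkey with gpg_key_id / gpg_key_subkey_id columns.
class GpgSignature < ActiveRecord::Base
  belongs_to :gpg_key
  belongs_to :gpg_key_subkey

  # Accept either key type; setting one side clears the other so the two
  # foreign keys stay mutually exclusive, and nil clears both.
  def gpg_key=(model)
    case model
    when GpgKey
      super(model)
      self.gpg_key_subkey = nil
    when GpgKeySubkey
      self.gpg_key_subkey = model
      super(nil)
    when NilClass
      super(nil)
      self.gpg_key_subkey = nil
    end
  end

  # Fall back to the subkey so callers can keep reading #gpg_key regardless
  # of which kind of key actually verified the signature.
  def gpg_key
    super || gpg_key_subkey
  end
end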
diff --git a/spec/presenters/ci/pipeline_presenter_spec.rb b/spec/presenters/ci/pipeline_presenter_spec.rb
index e4886a8f019..f7ceaf844be 100644
--- a/spec/presenters/ci/pipeline_presenter_spec.rb
+++ b/spec/presenters/ci/pipeline_presenter_spec.rb
@@ -51,4 +51,21 @@ describe Ci::PipelinePresenter do
end
end
end
+
+ context '#failure_reason' do
+ context 'when pipeline has failure reason' do
+ it 'represents a failure reason sentence' do
+ pipeline.failure_reason = :config_error
+
+ expect(presenter.failure_reason)
+ .to eq 'CI/CD YAML configuration error!'
+ end
+ end
+
+ context 'when pipeline does not have failure reason' do
+ it 'returns nil' do
+ expect(presenter.failure_reason).to be_nil
+ end
+ end
+ end
end
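Editor's note: the presenter spec above expects the pipeline's failure_reason enum to be rendered as a human-readable sentence, and nil to be returned when no reason is set. Below is a minimal sketch of such a method; the constant name and the SimpleDelegator wrapper are assumptions for illustration, not the contents of app/presenters/ci/pipeline_presenter.rb.

# Editorial sketch only -- not part of this diff.
require 'delegate'

class PipelinePresenterSketch < SimpleDelegator
  FAILURE_REASONS = {
    'config_error' => 'CI/CD YAML configuration error!'
  }.freeze

  # Map the enum value to a sentence; fall back to the raw value for reasons
  # this table does not know about, and return nil when nothing is set.
  def failure_reason
    reason = __getobj__.failure_reason
    return unless reason

    FAILURE_REASONS.fetch(reason.to_s, reason)
  end
end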
diff --git a/spec/requests/api/helpers_spec.rb b/spec/requests/api/helpers_spec.rb
index 060c8902471..862920ad7c3 100644
--- a/spec/requests/api/helpers_spec.rb
+++ b/spec/requests/api/helpers_spec.rb
@@ -1,4 +1,6 @@
require 'spec_helper'
+require 'raven/transports/dummy'
+require_relative '../../../config/initializers/sentry'
describe API::Helpers do
include API::APIGuard::HelperMethods
@@ -476,7 +478,7 @@ describe API::Helpers do
allow(exception).to receive(:backtrace).and_return(caller)
expect_any_instance_of(self.class).to receive(:sentry_context)
- expect(Raven).to receive(:capture_exception).with(exception)
+ expect(Raven).to receive(:capture_exception).with(exception, extra: {})
handle_api_exception(exception)
end
@@ -501,6 +503,30 @@ describe API::Helpers do
expect(json_response['message']).to start_with("\nRuntimeError (Runtime Error!):")
end
end
+
+ context 'extra information' do
+ # Sentry events are an array of the form [auth_header, data, options]
+ let(:event_data) { Raven.client.transport.events.first[1] }
+
+ before do
+ stub_application_setting(
+ sentry_enabled: true,
+ sentry_dsn: "dummy://12345:67890@sentry.localdomain/sentry/42"
+ )
+ configure_sentry
+ Raven.client.configuration.encoding = 'json'
+ end
+
+ it 'sends the params, excluding confidential values' do
+ expect(Gitlab::Sentry).to receive(:enabled?).twice.and_return(true)
+ expect(ProjectsFinder).to receive(:new).and_raise('Runtime Error!')
+
+ get api('/projects', user), password: 'dont_send_this', other_param: 'send_this'
+
+ expect(event_data).to include('other_param=send_this')
+ expect(event_data).to include('password=********')
+ end
+ end
end
describe '.authenticate_non_get!' do
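Editor's note: the new 'extra information' context asserts that request parameters reach the Sentry event with confidential values masked ('password=********') while harmless ones pass through. The sketch below shows that masking step as a standalone helper rather than the actual change to lib/api/helpers.rb; the helper name and the filter list are assumptions.

# Editorial sketch only -- not part of this diff.
FILTERED_PARAMS = %w[password private_token].freeze

# Render params as a query string, masking any key on the filter list,
# so 'password=dont_send_this' becomes 'password=********'.
def filtered_query_string(params)
  params.map do |key, value|
    value = '********' if FILTERED_PARAMS.include?(key.to_s)
    "#{key}=#{value}"
  end.join('&')
end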
diff --git a/spec/serializers/pipeline_entity_spec.rb b/spec/serializers/pipeline_entity_spec.rb
index f8df461bc81..248552d1858 100644
--- a/spec/serializers/pipeline_entity_spec.rb
+++ b/spec/serializers/pipeline_entity_spec.rb
@@ -108,5 +108,18 @@ describe PipelineEntity do
expect(subject[:ref][:path]).to be_nil
end
end
+
+ context 'when pipeline has a failure reason set' do
+ let(:pipeline) { create(:ci_empty_pipeline) }
+
+ before do
+ pipeline.drop!(:config_error)
+ end
+
+ it 'has a correct failure reason' do
+ expect(subject[:failure_reason])
+ .to eq 'CI/CD YAML configuration error!'
+ end
+ end
end
end
diff --git a/spec/services/gpg_keys/create_service_spec.rb b/spec/services/gpg_keys/create_service_spec.rb
index 20382a3a618..1cd2625531e 100644
--- a/spec/services/gpg_keys/create_service_spec.rb
+++ b/spec/services/gpg_keys/create_service_spec.rb
@@ -18,4 +18,14 @@ describe GpgKeys::CreateService do
it 'creates a gpg key' do
expect { subject.execute }.to change { user.gpg_keys.where(params).count }.by(1)
end
+
+ context 'when the public key contains subkeys' do
+ let(:params) { attributes_for(:gpg_key_with_subkeys) }
+
+ it 'generates the gpg subkeys' do
+ gpg_key = subject.execute
+
+ expect(gpg_key.subkeys.count).to eq(2)
+ end
+ end
end
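Editor's note: the create-service spec relies on subkeys being extracted as a side effect of persisting the key rather than on any extra call in the service itself. A sketch of the kind of model callback that would produce that behaviour follows; the helper Gitlab::Gpg.subkeys_from_key and the shape of its return value are assumptions inferred from the specs, not the code added in app/models/gpg_key.rb or lib/gitlab/gpg.rb.

# Editorial sketch only -- not part of this diff. Column names match the
# GpgKeySubkey validations exercised earlier in this diff.
class GpgKey < ActiveRecord::Base
  has_many :subkeys, class_name: 'GpgKeySubkey'

  after_create :generate_subkeys

  private

  # Hypothetical helper, assumed to return { keyid => fingerprint } pairs
  # parsed from the uploaded public key; one GpgKeySubkey row per subkey.
  def generate_subkeys
    Gitlab::Gpg.subkeys_from_key(key).each do |keyid, fingerprint|
      subkeys.create!(keyid: keyid, fingerprint: fingerprint)
    end
  end
end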
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index 576e4ae1d38..48cacba6a8a 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -81,7 +81,10 @@ RSpec.configure do |config|
if ENV['CI']
# This includes the first try, i.e. tests will be run 4 times before failing.
config.default_retry_count = 4
- config.reporter.register_listener(RspecFlaky::Listener.new, :example_passed, :dump_summary)
+ config.reporter.register_listener(
+ RspecFlaky::Listener.new,
+ :example_passed,
+ :dump_summary)
end
config.before(:suite) do
diff --git a/spec/support/gpg_helpers.rb b/spec/support/gpg_helpers.rb
index 65b38626a51..3f7279a50e0 100644
--- a/spec/support/gpg_helpers.rb
+++ b/spec/support/gpg_helpers.rb
@@ -92,6 +92,46 @@ module GpgHelpers
KEY
end
+ def public_key_with_extra_signing_key
+ <<~KEY.strip
+ -----BEGIN PGP PUBLIC KEY BLOCK-----
+ Version: GnuPG v1
+
+ mI0EWK7VJwEEANSFayuVYenl7sBKUjmIxwDRc3jd+K+FWUZgknLgiLcevaLh/mxV
+ 98dLxDKGDHHNKc/B7Y4qdlZYv1wfNQVuIbd8dqUQFOOkH7ukbgcGBTxH+2IM67y+
+ QBH618luS5Gz1d4bd0YoFf/xZGEh9G5xicz7TiXYzLKjnMjHu2EmbFePABEBAAG0
+ LU5hbm5pZSBCZXJuaGFyZCA8bmFubmllLmJlcm5oYXJkQGV4YW1wbGUuY29tPoi4
+ BBMBAgAiBQJYrtUnAhsDBgsJCAcDAgYVCAIJCgsEFgIDAQIeAQIXgAAKCRDM++Gf
+ AKyLHaeSA/99oUWpb02PlfkALcx5RncboMHkgczYEU9wOFIgvXIReswThCMOvPZa
+ piui+ItyJfV3ijJfO8IvbbFcvU7jjGA073Bb7tbzAEOQLA16mWgBLQlGaRWbHDW4
+ uwFxvkJKA0GzEsadEXeniESaZPc4rOXKPO3+/MSQWS2bmvvGsBTEuriNBFiu1ScB
+ BADIXkITf+kKCkD+n8tMsdTLInefu8KrJ8p7YRYCCabEXnWRsDb5zxUAG2VXCVUh
+ Yl6QXQybkNiBaduS+uxilz7gtYZUMFJvQ09+fV7D2N9B7u/1bGdIYz+cDFJnEJit
+ LY4w/nju2Sno5CL5Ead8sZuslKetSXPYHR/kbW462EOw5wARAQABiJ8EGAECAAkF
+ Aliu1ScCGwwACgkQzPvhnwCsix2WRQQAtOXpBS60myrBUXhlcqabDQgSTw+Spbgb
+ 61hEMckyzpk7SfMNLz0EbYMvj9SU6znBG8RGeUljPTVMxPGr9yIpoFMSPKAUi/0K
+ AgRmH3tVpxlMipwXjST1Jukk2eHckt/3jGw3E1ElMSFtULe6u5p4gu578hHukEwT
+ IKzj0ZyC7DK5AQ0EWcx23AEIANwpAq85bT10JCBuNhOMyF2jKVt5wHbI9wBtjWYG
+ fgJFBkRvm6IsbmR0Y5DSBvF/of0UX1iGMfx6mvCDJkb1okquhCUef6MONWRpzXYE
+ CIZDm1TXu6yv0D35tkLfPo+/sY9UHHp1zGRcPAU46e8ztRwoD+zEJwy7lobLHGOL
+ 9OdWtCGjsutLOTqKRK4jsifr8n3rePU09rejhDkRONNs7ufn9GRcWMN7RWiFDtpU
+ gNe84AJ38qaXPU8GHNTrDtDtRRPmn68ezMmE1qTNsxQxD4Isexe5Wsfc4+ElaP9s
+ zaHgij7npX1HS9RpmhnOa2h1ESroM9cqDh3IJVhf+eP6/uMAEQEAAYkBxAQYAQIA
+ DwUCWcx23AIbAgUJAeEzgAEpCRDM++GfAKyLHcBdIAQZAQIABgUCWcx23AAKCRDk
+ garE0uOuES7DCAC2Kgl6zO+NqIBIS6McgcEN0sGyvOvZ8Ps4hBiMwCyDAnsIRAUi
+ v4KZMtQMAyl9njJ3YjPWBsdieuTz45O06DDnrzJpZO5rUGJjAcEue4zvRRWIyu3H
+ qHC8MsvkslsNCygJHoWlknm+HucroskTNtxHQ+FdKZ6Tey+twl1u+PhV8PQVyFkl
+ 4G1chO90EP4dvYrye26CC+ik2JkvC7Vy5M+U0PJikme8pFMjcdNks25BnAKcdqKU
+ AU8RTkSjoYvb8qSmZyldJjYjQRkTPRX1ZdaOID1EdiWl+s5cn0Oypo3z7BChcEMx
+ IWB/gmXQJQXVr5qNQnJObyMO/RczXYi9qNnyGMED/2EJJERLR0nefjHQalwDKQVP
+ s5lX1OKAcf2CrV6ZarckqaQgtqjZbuV2C2mrOHUs5uojlXaopj5gA4yJSGDcYhj1
+ Rg9jdHWBtkHBj3eL32ZqrHDs3ap8ErZMmPE8A+mn9TTnQS+FY2QF7vBjJKM3qPT7
+ DMVGWrg4m1NF8N6yMPMP
+ =RB1y
+ -----END PGP PUBLIC KEY BLOCK-----
+ KEY
+ end
+
def primary_keyid
fingerprint[-16..-1]
end
@@ -201,4 +241,277 @@ module GpgHelpers
['bette.cartwright@example.com', 'bette.cartwright@example.net']
end
end
+
+ # GPG Key with extra signing key
+ module User3
+ extend self
+
+ def signed_commit_signature
+ <<~SIGNATURE
+ -----BEGIN PGP SIGNATURE-----
+
+ iQEzBAABCAAdFiEEBSLdKbmPFnzYQhdS44/8r3Wr2SoFAlnNlT8ACgkQ44/8r3Wr
+ 2SqP1wf9FC4J2S8LIHs/fpxgkYzsyCp5lCbS7JuoD4pqmI2KWyBx+vi9/3mZPCsm
+ Fj9f0vFEtNOb39GNGZbaA8DdGw30/WAS6kI6yco0WSK53KHrLw9Kqd+3e/NAVSsl
+ 991Gq4n8X1U5izSH+gZOMtEEUBGqIlZKgRrEh7lhNcz0G7JTF2VCE4NNtZdq7GDA
+ N6jOQxDGUwi9wQBYORQzIBc3NihfhGloII1hXf0XzrgUY3zNYHTT7QipCxKAmH54
+ skwW+wi8RpBedar4saf7fs5xZbP/0yyVz98MJMdHBL68++Xt1AIHoqrb7eWszqnd
+ PCo/fnz1iHKCig602KLj0/zhADcNkg==
+ =LsTi
+ -----END PGP SIGNATURE-----
+ SIGNATURE
+ end
+
+ def signed_commit_base_data
+ <<~SIGNEDDATA
+ tree 86ec18bfe87ad42a782fdabd8310f9b7ac750f51
+ parent 2d1096e3a0ecf1d2baf6dee036cc80775d4940ba
+ author John Doe <john.doe@example.com> 1506645311 -0500
+ committer John Doe <john.doe@example.com> 1506645311 -0500
+
+ Commit signed with subkey by John Doe
+ SIGNEDDATA
+ end
+
+ def public_key
+ <<~KEY.strip
+ -----BEGIN PGP PUBLIC KEY BLOCK-----
+
+ mQENBFnNgbIBCAC9/WblcR4s/pFTwh9cm2iS59YRhtGfbrnfNE6QMIFIRFaK0u6J
+ LDy+scipXLoGg7X0XNFLW6Y457UUpkaPDVLPuVMONhMXdVqndGVvk9jq3D4oDtRa
+ ukucuXr9F84lHnjL3EosmAUiK3xBmHOGHm8+bvKbgPOdvre48YxoJ1POTen+chfo
+ YtLUfnC9EEGY/bn00aaXm3NV+zZK2zio5bFX9YLWSNh/JaXxuJsLoHR/lVrU7CLt
+ FCaGcPQ9SU46LHPshEYWO7ZsjEYJsYYOIOEzfcfR47T2EDTa6mVc++gC5TCoY3Ql
+ jccgm+EM0LvyEHwupEpxzCg2VsT0yoedhUhtABEBAAG0H0pvaG4gRG9lIDxqb2hu
+ LmRvZUBleGFtcGxlLmNvbT6JAVQEEwEIAD4WIQTqP4uIlyqP1HSHLy8RWzrxqtPt
+ ugUCWc2BsgIbAwUJA8JnAAULCQgHAgYVCAkKCwIEFgIDAQIeAQIXgAAKCRARWzrx
+ qtPturMDCACc1Pi1sLJFcCnJEc9sCInCO4LH8fntNjRLN3MTPU5YCYmFM3fAl5ly
+ vXPZ4jNWZxKbQVeFnkDOg5Ti8bzmFEMc8KbZuguktVFizxnLdFCTTRO89i3HDVSN
+ bIosrs5HJwRKOzul6i2whn3dsr8/P8WJczYjZGiw29hGwH3md4Thn/aAGbzjoCEF
+ IfIb1kccyHMJkaj79S8B2agsbEJLuTSfsXC3kGZIJyKG1DNmDUHW/ZE6ro/Kkhik
+ 3w6Jw14cMsKUIOBkOgsD/gXgX9xxWjYHmKrbCXucTKUevNlaCy5tzwrC0Am3prl9
+ OJj3macOA8hNaTVDflEHNCwHOwqnVQYyuQENBFnNgbIBCAC59MmKc0cqPRPTpCZ5
+ arKRoj23SNKWMDWaxSELdU91Wd/NJk4wF25urk9BtBuwjzaBMqb/xPD88yJC3ucs
+ 2LwCyAb5C/dHcPOpys8Pd+KrdHDR3zAMjcASsizlW/qFI9MtjhcU9Yd6iTUejAZG
+ NEC76WALJ3SLYzCaDkHFjWpH3Xq6ck3/9jpL3csn/5GLCsZQUDYGrZSXvHAIigwW
+ Xo6tMs5LCCO9CZg2qGDpvqlzcmy6CRkf0h/UFYJzGqfbJtxeCIxa93WIPE8eGwao
+ aneDlNtIoYiP6krC3OLsaPWT58QltNKaQuZSpjwtQBHa4JIt55vx+FcvRb7Kflgf
+ fT8bABEBAAGJATwEGAEIACYWIQTqP4uIlyqP1HSHLy8RWzrxqtPtugUCWc2BsgIb
+ DAUJA8JnAAAKCRARWzrxqtPtuqJjCACj+Z4qtgMpJXx3u58wCzkVLl5IylD/tEPA
+ cNIrj8QS8ec+woTJaMGVCh96VC2FPl8KR4Hjhy0yaupyPbTI6VWib63S/NcDfG7r
+ tviRFG2Gf8yduERebyC0cpgnmjVgFfJs7N3K3ncz6myOr9idNI05OC9poL73sDUv
+ jRXhm7uy938bT/R4MQdpYuxucgU3MiwvfG5ht+oJ4Yp+/IrR2PTqRGqMCsodnroa
+ TBKq2kW565TtCvrFkNub/ytorDbIVN9VrIMcuTiOv8sLoQRDJO9HvWUhYAqMY6Uh
+ dy12jR9FrquZnGsDKKs9V0Y6J4Wi8vnmdgWVZUc40pfXq3APAB6suQENBFnNgeAB
+ CADFqQRxLHxLIQ7B72diTMI2tPk9d5c67k+Gzkrg1QYoxBLdRCmhM4ydYqZzvIz4
+ 1npkK20w4somOCwvyAOjO46IGb3f/Wk8mY8o5HMpI1miAfze0YTZKzRo2DmrvwbV
+ /h8jvZNCISwtrOgaaszWSVSuEQQCA1jf4qixfCb3ReETvZc3MTZXhw8IUbszXh5d
+ a6CYqq/fr5Zw4Dc19ZSoHSTh0Wn03mEm/kaYtia/wt1I+xVvTSaC2Pf/kUtr7UEf
+ 3NMc0YF0s4KgeW8KwjHa7Sz9wzydLPRH5kJ26SDUGUhrFf1jNLIegtDsoISV/86G
+ ztXcVq5GY6lXMwmsggRe++7xABEBAAGJAmwEGAEIACAWIQTqP4uIlyqP1HSHLy8R
+ WzrxqtPtugUCWc2B4AIbAgFACRARWzrxqtPtusB0IAQZAQgAHRYhBAUi3Sm5jxZ8
+ 2EIXUuOP/K91q9kqBQJZzYHgAAoJEOOP/K91q9kqlHcH+wbvD14ITYDMfgIfy67O
+ 4Qcmgf1qzGXhpsABz/i/EPgRD990eNBI0YGuvoKRJfetEGn7LerrrCB8Z+ICFPHF
+ rzXoe10zm+QTREck0OB8nPFRycJ+Fbl6JX+cnkEx27Mmg1aVb7+H5LMDaWO1KjLs
+ g2wIDo/jrDfW7NoZzy4XTd7jFCOt4fftL/dhiujQmhLzugZXCxRISOVdmgilDJQo
+ Tz1sEm34ida98JFjdzSgkUvJ/pFTZ21ThCNxlUf01Hr2Pdcg1e2/97sZocMFTY2J
+ KwmiW2LG3B05/VrRtdvsCVj8G49coxgPPKty+m71ovAXdS/CvVqE7TefCplsYJ1d
+ V3abwwf/Xl2SxzbAKbrYMgZfdGzpPg2u6982WvfGIVfjFJh9veHZAbfkPcjdAD2X
+ e67Y4BeKG2OQQqeOY2y+81A7PaehgHzbFHJG/4RjqB66efrZAg4DgIdbr4oyMoUJ
+ VVsl0wfYSIvnd4kvWXYICVwk53HLA3wIowZAsJ1LT2byAKbUzayLzTekrTzGcwQk
+ g2XT798ev2QuR5Ki5x8MULBFX4Lhd03+uGOxjhNPQD6DAAHCQLaXQhaGuyMgt5hD
+ t0nF3yuw3Eg4Ygcbtm24rZXOHJc9bDKeRiWZz1dIyYYVJmHSQwOVXkAwJlF1sIgy
+ /jQYeOgFDKq20x86WulkvsUtBoaZJg==
+ =Q5Z7
+ -----END PGP PUBLIC KEY BLOCK-----
+ KEY
+ end
+
+ def secret_key
+ <<~SECRET
+ -----BEGIN PGP PRIVATE KEY BLOCK-----
+
+ lQPGBFnNgbIBCAC9/WblcR4s/pFTwh9cm2iS59YRhtGfbrnfNE6QMIFIRFaK0u6J
+ LDy+scipXLoGg7X0XNFLW6Y457UUpkaPDVLPuVMONhMXdVqndGVvk9jq3D4oDtRa
+ ukucuXr9F84lHnjL3EosmAUiK3xBmHOGHm8+bvKbgPOdvre48YxoJ1POTen+chfo
+ YtLUfnC9EEGY/bn00aaXm3NV+zZK2zio5bFX9YLWSNh/JaXxuJsLoHR/lVrU7CLt
+ FCaGcPQ9SU46LHPshEYWO7ZsjEYJsYYOIOEzfcfR47T2EDTa6mVc++gC5TCoY3Ql
+ jccgm+EM0LvyEHwupEpxzCg2VsT0yoedhUhtABEBAAH+BwMCOqjIWtWBMo3mjIP1
+ OnIbZ+YJxSUZ/B8wU2loMv4XiKmeXLbjD6h3jojxYlnreSHA9QvoY8uNaWElL/n2
+ jv6bxluivk8tA9FWJVv4HaSlMDd2J2YmUW17r8z9Kvm7b7pFVSrEoYV93Wdj5FJ7
+ ciKrFhYNSD7tH1sHwkrFAbiv6aHyk9h48YmR3kx2wBvz+pWk7M2srCJx2b6DXkj/
+ fsj1c/vnzUUGooOJgOvYAWrpg/rJUNxSsFypAHf8Xtk+xt8S1aZ9jaCmYk6I1B2L
+ m00HP43cXUpKcmETW1zXvfMLKjjoUEAJhSJhbCwiEzGL4ojQTarl8qbb+MisakEJ
+ DkPYtrhiiuVzUIFfqE86yO0UKidtzBmJAW3c6zeiUATvACzU09aGyUY1cJi93oXD
+ w4PCyVZ+nMvGD1wx+gyYdDINwpX4y6od9RDr06DGCzwu+S2vxsu1T8LdSv52fhBr
+ U0FY3Z3VN1ytay4SHi/8Y9VBYQFBh7R7Ch0gEMxLVKXVNqOXHUdGrKWV/WmyLKuZ
+ W9DEnWU4Mpz/di5jU8EDW7EB9DZZhVk3mQw3nuAZrBGD4azmmD5mgSgLeBGmKZ1e
+ q/9IWO44mRBBUtNv+rAkmmYF3MCNHuc7EMj+c/IgBUC7d5qBzGWA3UJ0vKX4xcIQ
+ X/PnU+nGxNvBrdqQaMLczeg28SerojxuX79prOsoySctLAbajd9HshW5SfOZ0rvb
+ BNHPqolQDijYEHGxANh4BbamRMGi60Rop7vJsZOLAemz17x/mvCtAHISOJT77/IM
+ oWC+IksJ5XsA/klJGe/tkx11aRQDDmKvIJXmMuRdvnIR23UBbzRQlWWq0l6CdoF6
+ 6SQ9BJBFq0WY32No9WZAPnDO3buUzWc1Y3uwn/+h7TVYVyTlEqzpYJ9FoJwBHbor
+ 0663eoyz6+AUtB9Kb2huIERvZSA8am9obi5kb2VAZXhhbXBsZS5jb20+iQFUBBMB
+ CAA+FiEE6j+LiJcqj9R0hy8vEVs68arT7boFAlnNgbICGwMFCQPCZwAFCwkIBwIG
+ FQgJCgsCBBYCAwECHgECF4AACgkQEVs68arT7bqzAwgAnNT4tbCyRXApyRHPbAiJ
+ wjuCx/H57TY0SzdzEz1OWAmJhTN3wJeZcr1z2eIzVmcSm0FXhZ5AzoOU4vG85hRD
+ HPCm2boLpLVRYs8Zy3RQk00TvPYtxw1UjWyKLK7ORycESjs7peotsIZ93bK/Pz/F
+ iXM2I2RosNvYRsB95neE4Z/2gBm846AhBSHyG9ZHHMhzCZGo+/UvAdmoLGxCS7k0
+ n7Fwt5BmSCcihtQzZg1B1v2ROq6PypIYpN8OicNeHDLClCDgZDoLA/4F4F/ccVo2
+ B5iq2wl7nEylHrzZWgsubc8KwtAJt6a5fTiY95mnDgPITWk1Q35RBzQsBzsKp1UG
+ Mp0DxgRZzYGyAQgAufTJinNHKj0T06QmeWqykaI9t0jSljA1msUhC3VPdVnfzSZO
+ MBdubq5PQbQbsI82gTKm/8Tw/PMiQt7nLNi8AsgG+Qv3R3DzqcrPD3fiq3Rw0d8w
+ DI3AErIs5Vv6hSPTLY4XFPWHeok1HowGRjRAu+lgCyd0i2Mwmg5BxY1qR916unJN
+ //Y6S93LJ/+RiwrGUFA2Bq2Ul7xwCIoMFl6OrTLOSwgjvQmYNqhg6b6pc3JsugkZ
+ H9If1BWCcxqn2ybcXgiMWvd1iDxPHhsGqGp3g5TbSKGIj+pKwtzi7Gj1k+fEJbTS
+ mkLmUqY8LUAR2uCSLeeb8fhXL0W+yn5YH30/GwARAQAB/gcDAuYn/gmAA3OC5p5Q
+ Pat5kE7MtmSvSPmdjVA2o+6RtqZf81JqtAgtDVDwj7SPFsH6ue5P+iAn9938YYek
+ WQU2+0GXeUbSJt+u4VAchgwA5mYsEnEr1/E5KEfWPWO3jJol1rJG99adrjkMxvug
+ QJmwieqhu0368w1FU0tKstxYbr3Tz3nPCPDJoigMEUkXiFklDCUgeNk0g+zd5ytE
+ lXuuLYcGZX7njxL5jD+cMIKqua5zv8WbvNf/BhM1nCarxp4qzKWim8J8jY+iR+/E
+ qOar4aliGRez0j+qh/r8ywgPwfOO89zrKrMfaclL7dN9yuecmBHKWZvfrP5JKMHj
+ yTU3nRMhUGbfVUaaZI2Ccz2rNOU4oF9wuzpzQi8qOysZixRmH61Nw3ULIKoQgiWp
+ 0p5A3L94OaEfZEq3plVaIXI2YWYFSEAlIAc2dq4GxynousLdhNACi9bHhXrfFUhK
+ ckw1QlbhguO/j63/x8ygsmLZVwHG0fJZtMhT3+EGam9cuMBibIYyu3ufJRy7kMKt
+ kmyuk02X+hYJ7w8Pu6b8zYHBXbsEKamipMgd4oKtc8WnXILZo4lwDSArgs7ZVCBa
+ vGBbpTOsr54WjsyuCdX/wv0F2l31J87UxVtTKXx/+nfMfCE02zd+NsTgqvgqmkaA
+ Sy3qvv326kJNx7p+5hRwDzlAZ7vGJjj5TwCbGYDvctIf6MFrGDRNYwrGwNkPc3TG
+ rturfeL/ioua0Smj8LIbOv9Ir93gUIseNpxv8tXV/lffdIplcw802b3aXIKyv4fq
+ b9y3Oq/pDHFukKuBe9WTXJvjT0+ME+a0C8KIb/sts95pmjZsgN1kPmvuT0ReQwUR
+ eGrqz387bnVUzo4RgM3IERs/0EYzPzE8A2vc1e4/87b5J+1Xnov8Phd29vW8Td5l
+ ApiFrFO2r+/Np4kBPAQYAQgAJhYhBOo/i4iXKo/UdIcvLxFbOvGq0+26BQJZzYGy
+ AhsMBQkDwmcAAAoJEBFbOvGq0+26omMIAKP5niq2AyklfHe7nzALORUuXkjKUP+0
+ Q8Bw0iuPxBLx5z7ChMlowZUKH3pULYU+XwpHgeOHLTJq6nI9tMjpVaJvrdL81wN8
+ buu2+JEUbYZ/zJ24RF5vILRymCeaNWAV8mzs3credzPqbI6v2J00jTk4L2mgvvew
+ NS+NFeGbu7L3fxtP9HgxB2li7G5yBTcyLC98bmG36gnhin78itHY9OpEaowKyh2e
+ uhpMEqraRbnrlO0K+sWQ25v/K2isNshU31Wsgxy5OI6/ywuhBEMk70e9ZSFgCoxj
+ pSF3LXaNH0Wuq5mcawMoqz1XRjonhaLy+eZ2BZVlRzjSl9ercA8AHqydA8YEWc2B
+ 4AEIAMWpBHEsfEshDsHvZ2JMwja0+T13lzruT4bOSuDVBijEEt1EKaEzjJ1ipnO8
+ jPjWemQrbTDiyiY4LC/IA6M7jogZvd/9aTyZjyjkcykjWaIB/N7RhNkrNGjYOau/
+ BtX+HyO9k0IhLC2s6BpqzNZJVK4RBAIDWN/iqLF8JvdF4RO9lzcxNleHDwhRuzNe
+ Hl1roJiqr9+vlnDgNzX1lKgdJOHRafTeYSb+Rpi2Jr/C3Uj7FW9NJoLY9/+RS2vt
+ QR/c0xzRgXSzgqB5bwrCMdrtLP3DPJ0s9EfmQnbpINQZSGsV/WM0sh6C0OyghJX/
+ zobO1dxWrkZjqVczCayCBF777vEAEQEAAf4HAwKESvCIDq5QNeadnSvpkZemItPO
+ lDf+7Wiue2gt776D5xkVyT7WkgTQv+IGWGtqz7pCCO2rMp/F9u1BghdjY46jtrK6
+ MMFKta4YENUhRliH6M2YmRjq5p7xZgH6UOnDlqsafbfyUx30t59tbQj+07aMnH5J
+ LMm37nVkDvo3wpPQPuo7L6qizYsrHrQKeJZ8636u41UjC99lVH7vXzqXw68FJImi
+ XdMZbEVBIprYfCDem+fD6gJBA4JBqWJMxuFMfhWp+1WtYoeNojDm4KxBzc2fvYV/
+ HOIUfLFBvACD/UwU5ovllHN39/O8SMgyLm9ymx2/qXcdIkUz4l7fhOCY1OW12DMu
+ 5OFrrTteGK/Sj4Z8pYRdMdaKyjIlxuVzEQGWsU5+J2ALao5atEHguqwlD3cKh3G8
+ 1sA/l5eTFDt84erYv1MVStV0BhZaCE4mNL4WpnQGDdW05yoGq9jIyLcurb/k/atU
+ TUkAF1csgNlJlR3IP+7U9xfHkjMO5+SV82xoNf9nBjz06TRdnvOSKsMNKp0RxC/L
+ Hbiee9o7Rxqdiyv0ly6bCCymwfvlsEIqo3YKssBfe3XI5yQI2hF9QZaH1ywzmgaH
+ o+rbME/XxddRJueS79eipT7K05Z3ulSHTXzpDw+jZcdUV0Ac72Q9FTDPMl3xc6NW
+ DrYwWw/3+kyZ4SkP56l7KlGczTyNPvU9iou4Cj/cAZk/pHx68Chq8ZZNznFm/bIF
+ gWt3fqE/n+y78B6MI8qTjGJOR0jycxrLH82Z2F+FpMShI2C5NnOa/Ilkv3e2Q5U6
+ MOAwaCIz6RHhcI99O/yta2vLelWZqn2g86rLzTG0HlIABTCPYotwetHh0hsrkSv9
+ Kh6rOzGB4i8lRqcBVY+alMSiRBlzkwpL4YUcO6f3vEDncQ9evE1AQCpD4jUJjB1H
+ JSSJAmwEGAEIACAWIQTqP4uIlyqP1HSHLy8RWzrxqtPtugUCWc2B4AIbAgFACRAR
+ WzrxqtPtusB0IAQZAQgAHRYhBAUi3Sm5jxZ82EIXUuOP/K91q9kqBQJZzYHgAAoJ
+ EOOP/K91q9kqlHcH+wbvD14ITYDMfgIfy67O4Qcmgf1qzGXhpsABz/i/EPgRD990
+ eNBI0YGuvoKRJfetEGn7LerrrCB8Z+ICFPHFrzXoe10zm+QTREck0OB8nPFRycJ+
+ Fbl6JX+cnkEx27Mmg1aVb7+H5LMDaWO1KjLsg2wIDo/jrDfW7NoZzy4XTd7jFCOt
+ 4fftL/dhiujQmhLzugZXCxRISOVdmgilDJQoTz1sEm34ida98JFjdzSgkUvJ/pFT
+ Z21ThCNxlUf01Hr2Pdcg1e2/97sZocMFTY2JKwmiW2LG3B05/VrRtdvsCVj8G49c
+ oxgPPKty+m71ovAXdS/CvVqE7TefCplsYJ1dV3abwwf/Xl2SxzbAKbrYMgZfdGzp
+ Pg2u6982WvfGIVfjFJh9veHZAbfkPcjdAD2Xe67Y4BeKG2OQQqeOY2y+81A7Paeh
+ gHzbFHJG/4RjqB66efrZAg4DgIdbr4oyMoUJVVsl0wfYSIvnd4kvWXYICVwk53HL
+ A3wIowZAsJ1LT2byAKbUzayLzTekrTzGcwQkg2XT798ev2QuR5Ki5x8MULBFX4Lh
+ d03+uGOxjhNPQD6DAAHCQLaXQhaGuyMgt5hDt0nF3yuw3Eg4Ygcbtm24rZXOHJc9
+ bDKeRiWZz1dIyYYVJmHSQwOVXkAwJlF1sIgy/jQYeOgFDKq20x86WulkvsUtBoaZ
+ Jg==
+ =TKlF
+ -----END PGP PRIVATE KEY BLOCK-----
+ SECRET
+ end
+
+ def fingerprint
+ 'EA3F8B88972A8FD474872F2F115B3AF1AAD3EDBA'
+ end
+
+ def subkey_fingerprints
+ %w(159AD5DDF199591D67D2B87AA3CEC5C0A7C270EC 0522DD29B98F167CD8421752E38FFCAF75ABD92A)
+ end
+
+ def names
+ ['John Doe']
+ end
+
+ def emails
+ ['john.doe@example.com']
+ end
+ end
+
+ # GPG Key containing just the main key
+ module User4
+ extend self
+
+ def public_key
+ <<~KEY.strip
+ -----BEGIN PGP PUBLIC KEY BLOCK-----
+
+ mQENBFnWcesBCAC6Y8FXl9ZJ9HPa6dIYcgQrvjIQcwoQCUEsaXNRpc+206RPCIXK
+ aIYr0nTD8GeovMuUONXTj+DdueQU2GAAqHHOqvDDVXqRrW3xfWnSwix7sTuhG1Ew
+ PLHYmjLENqaTsdyliEo3N8VWy2k0QRbC3R6xvop4Ooa87D5vcATIl0gYFtSiHIL+
+ TervYvTG9Eq1qSLZHbe2x4IzeqX2luikPKokL7j8FTZaCmC5MezIUur1ulfyYY/j
+ SkST/1aUFc5QXJJSZA0MYJWZX6x7Y3l7yl0dkHqmK8OTuo8RPWd3ybEiuvRsOL8K
+ GAv/PmVJRGDAf7GGbwXXsE9MiZ5GzVPxHnexABEBAAG0G0pvaG4gRG9lIDxqb2hu
+ QGV4YW1wbGUuY29tPokBTgQTAQgAOBYhBAh0izYM0lwuzJnVlAcBbPnhOj+bBQJZ
+ 1nHrAhsDBQsJCAcCBhUICQoLAgQWAgMBAh4BAheAAAoJEAcBbPnhOj+bkywH/i4w
+ OwpDxoTjUQlPlqGAGuzvWaPzSJndawgmMTr68oRsD+wlQmQQTR5eqxCpUIyV4aYb
+ D697RYzoqbT4mlU49ymzfKSAxFe88r1XQWdm81DcofHVPmw2GBrIqaX3Du4Z7xkI
+ Q9/S43orwknh5FoVwU8Nau7qBuv9vbw2apSkuA1oBj3spQ8hqwLavACyQ+fQloAT
+ hSDNqPiCZj6L0dwM1HYiqVoN3Q7qjgzzeBzlXzljJoWblhxllvMK20bVoa7H+uR2
+ lczFHfsX8VTIMjyTGP7R3oHN91DEahlQybVVNLmNSDKZM2P/0d28BRUmWxQJ4Ws3
+ J4hOWDKnLMed3VOIWzM=
+ =xVuW
+ -----END PGP PUBLIC KEY BLOCK-----
+ KEY
+ end
+
+ def secret_key
+ <<~KEY.strip
+ -----BEGIN PGP PRIVATE KEY BLOCK-----
+
+ lQPGBFnWcesBCAC6Y8FXl9ZJ9HPa6dIYcgQrvjIQcwoQCUEsaXNRpc+206RPCIXK
+ aIYr0nTD8GeovMuUONXTj+DdueQU2GAAqHHOqvDDVXqRrW3xfWnSwix7sTuhG1Ew
+ PLHYmjLENqaTsdyliEo3N8VWy2k0QRbC3R6xvop4Ooa87D5vcATIl0gYFtSiHIL+
+ TervYvTG9Eq1qSLZHbe2x4IzeqX2luikPKokL7j8FTZaCmC5MezIUur1ulfyYY/j
+ SkST/1aUFc5QXJJSZA0MYJWZX6x7Y3l7yl0dkHqmK8OTuo8RPWd3ybEiuvRsOL8K
+ GAv/PmVJRGDAf7GGbwXXsE9MiZ5GzVPxHnexABEBAAH+BwMC4UwgHgH5Cp7meY39
+ G5Q3GV2xtwADoaAvlOvPOLPK2fQqxQfb4WN4eZECp2wQuMRBMj52c4i9yphab1mQ
+ vOzoPIRGvkcJoxG++OxQ0kRk0C0gX6wM6SGVdb1nQnfZnoJCCU3IwCaSGktkLDs1
+ jwdI+VmXJbSugUbd25bakHQcE2BaNHuRBlQWQfFbhGBy0+uMfNDBZ6FRipBu47hO
+ f/wm/xXuV8N8BSgvNR/qtAqSQI34CdsnWAhMYm9rqmTNyt0nq4dveX+E0YzVn4lH
+ lOEa7cpYeuBwIL8L3EvSPNCICiJlF3gVqiYzyqRElnCkv1OGc0x3W5onY/agHgGZ
+ KYyi/ubOdqqDgBR+eMt0JKSGH2EPxUAGFPY5F37u4erdxH86GzIinAExLSmADiVR
+ KtxluZP6S2KLbETN5uVbrfa+HVcMbbUZaBHHtL+YbY8PqaFUIvIUR1HM2SK7IrFw
+ KuQ8ibRgooyP7VgMNiPzlFpY4NXUv+FXIrNJ6ELuIaENi0izJ7aIbVBM8SijDz6u
+ 5EEmodnDvmU2hmQNZJ17TxggE7oeT0rKdDGHM5zBvqZ3deqE9sgKx/aTKcj61ID3
+ M80ZkHPDFazUCohLpYgFN20bYYSmxU4LeNFy8YEiuic8QQKaAFxSf9Lf87UFQwyF
+ dduI1RWEbjMsbEJXwlmGM02ssQHsgoVKwZxijq5A5R1Ul6LowazQ8obPiwRS4NZ4
+ Z+QKDon79MMXiFEeh1jeG/MKKWPxFg3pdtCWhC7WdH4hfkBsCVKf+T58yB2Gzziy
+ fOHvAl7v3PtdZgf1xikF8spGYGCWo4B2lxC79xIflKAb2U6myb5I4dpUYxzxoMxT
+ zxHwxEie3NxzZGUyXSt3LqYe2r4CxWnOCXWjIxxRlLue1BE5Za1ycnDRjgUO24+Z
+ uDQne6KLkhAotBtKb2huIERvZSA8am9obkBleGFtcGxlLmNvbT6JAU4EEwEIADgW
+ IQQIdIs2DNJcLsyZ1ZQHAWz54To/mwUCWdZx6wIbAwULCQgHAgYVCAkKCwIEFgID
+ AQIeAQIXgAAKCRAHAWz54To/m5MsB/4uMDsKQ8aE41EJT5ahgBrs71mj80iZ3WsI
+ JjE6+vKEbA/sJUJkEE0eXqsQqVCMleGmGw+ve0WM6Km0+JpVOPcps3ykgMRXvPK9
+ V0FnZvNQ3KHx1T5sNhgayKml9w7uGe8ZCEPf0uN6K8JJ4eRaFcFPDWru6gbr/b28
+ NmqUpLgNaAY97KUPIasC2rwAskPn0JaAE4Ugzaj4gmY+i9HcDNR2IqlaDd0O6o4M
+ 83gc5V85YyaFm5YcZZbzCttG1aGux/rkdpXMxR37F/FUyDI8kxj+0d6BzfdQxGoZ
+ UMm1VTS5jUgymTNj/9HdvAUVJlsUCeFrNyeITlgypyzHnd1TiFsz
+ =/37z
+ -----END PGP PRIVATE KEY BLOCK-----
+ KEY
+ end
+
+ def primary_keyid
+ fingerprint[-16..-1]
+ end
+
+ def fingerprint
+ '08748B360CD25C2ECC99D59407016CF9E13A3F9B'
+ end
+ end
end