gitlab.com/gitlab-org/gitlab-foss.git
-rw-r--r--  app/assets/javascripts/graphql_shared/possible_types.json  1
-rw-r--r--  app/graphql/resolvers/base_resolver.rb  9
-rw-r--r--  app/graphql/resolvers/work_items/work_item_discussions_resolver.rb  65
-rw-r--r--  app/graphql/types/base_field.rb  7
-rw-r--r--  app/graphql/types/notes/note_type.rb  8
-rw-r--r--  app/graphql/types/work_items/notes_filter_type_enum.rb  20
-rw-r--r--  app/graphql/types/work_items/widget_interface.rb  5
-rw-r--r--  app/graphql/types/work_items/widgets/notes_type.rb  26
-rw-r--r--  app/helpers/web_hooks/web_hooks_helper.rb  1
-rw-r--r--  app/models/ml/candidate.rb  1
-rw-r--r--  app/models/ml/candidate_metadata.rb  14
-rw-r--r--  app/models/ml/experiment.rb  1
-rw-r--r--  app/models/ml/experiment_metadata.rb  14
-rw-r--r--  app/models/synthetic_note.rb  1
-rw-r--r--  app/models/work_items/type.rb  55
-rw-r--r--  app/models/work_items/widgets/notes.rb  14
-rw-r--r--  app/services/ml/experiment_tracking/candidate_repository.rb  59
-rw-r--r--  app/services/ml/experiment_tracking/experiment_repository.rb  41
-rw-r--r--  config/feature_flags/development/webhooks_failed_callout.yml  8
-rw-r--r--  db/docs/ml_candidate_metadata.yml  11
-rw-r--r--  db/docs/ml_experiment_metadata.yml  11
-rw-r--r--  db/docs/views/postgres_autovacuum_activity.yml  10
-rw-r--r--  db/docs/views/postgres_constraints.yml  10
-rw-r--r--  db/docs/views/postgres_foreign_keys.yml  10
-rw-r--r--  db/docs/views/postgres_index_bloat_estimates.yml  10
-rw-r--r--  db/docs/views/postgres_indexes.yml  10
-rw-r--r--  db/docs/views/postgres_partitioned_tables.yml  10
-rw-r--r--  db/docs/views/postgres_partitions.yml  10
-rw-r--r--  db/migrate/20221116160204_create_ml_experiment_metadata_and_ml_candidate_metadata.rb  29
-rw-r--r--  db/schema_migrations/20221116160204  1
-rw-r--r--  db/structure.sql  62
-rw-r--r--  doc/administration/server_hooks.md  19
-rw-r--r--  doc/api/bulk_imports.md  1
-rw-r--r--  doc/api/graphql/reference/index.md  40
-rw-r--r--  doc/development/database/adding_database_indexes.md  5
-rw-r--r--  doc/development/database/database_dictionary.md  33
-rw-r--r--  doc/development/migration_style_guide.md  4
-rw-r--r--  doc/development/testing_guide/frontend_testing.md  18
-rw-r--r--  doc/topics/offline/quick_start_guide.md  10
-rw-r--r--  doc/update/index.md  17
-rw-r--r--  doc/user/application_security/api_fuzzing/index.md  59
-rw-r--r--  doc/user/application_security/dast_api/index.md  59
-rw-r--r--  doc/user/gitlab_com/index.md  2
-rw-r--r--  doc/user/project/integrations/mlflow_client.md  4
-rw-r--r--  doc/user/project/integrations/webhooks.md  9
-rw-r--r--  doc/user/project/ml/experiment_tracking/index.md  1
-rw-r--r--  lib/api/api.rb  2
-rw-r--r--  lib/api/entities/ml/mlflow/experiment.rb  1
-rw-r--r--  lib/api/entities/ml/mlflow/key_value.rb (renamed from lib/api/entities/ml/mlflow/run_param.rb)  2
-rw-r--r--  lib/api/entities/ml/mlflow/run.rb  3
-rw-r--r--  lib/api/ml/mlflow.rb  42
-rw-r--r--  lib/api/usage_data.rb  29
-rw-r--r--  lib/gitlab/database/gitlab_schemas.yml  2
-rw-r--r--  lib/gitlab/graphql/extensions/forward_only_externally_paginated_array_extension.rb  19
-rw-r--r--  qa/Gemfile  2
-rw-r--r--  qa/Gemfile.lock  9
-rw-r--r--  spec/db/docs_spec.rb  100
-rw-r--r--  spec/factories/ml/candidate_metadata.rb  10
-rw-r--r--  spec/factories/ml/candidates.rb  6
-rw-r--r--  spec/factories/ml/experiment_metadata.rb  10
-rw-r--r--  spec/factories/ml/experiments.rb  8
-rw-r--r--  spec/factories/resource_milestone_events.rb (renamed from spec/factories/resource_milestone_event.rb)  0
-rw-r--r--  spec/factories/resource_state_events.rb (renamed from spec/factories/resource_state_event.rb)  0
-rw-r--r--  spec/features/reportable_note/issue_spec.rb  2
-rw-r--r--  spec/features/reportable_note/merge_request_spec.rb  2
-rw-r--r--  spec/features/reportable_note/snippets_spec.rb  2
-rw-r--r--  spec/features/search/user_searches_for_code_spec.rb  2
-rw-r--r--  spec/features/search/user_searches_for_comments_spec.rb  2
-rw-r--r--  spec/features/search/user_searches_for_commits_spec.rb  2
-rw-r--r--  spec/features/search/user_searches_for_issues_spec.rb  2
-rw-r--r--  spec/features/search/user_searches_for_merge_requests_spec.rb  2
-rw-r--r--  spec/features/search/user_searches_for_milestones_spec.rb  3
-rw-r--r--  spec/features/search/user_searches_for_projects_spec.rb  2
-rw-r--r--  spec/features/search/user_searches_for_users_spec.rb  2
-rw-r--r--  spec/features/search/user_searches_for_wiki_pages_spec.rb  3
-rw-r--r--  spec/features/search/user_uses_header_search_field_spec.rb  2
-rw-r--r--  spec/features/search/user_uses_search_filters_spec.rb  2
-rw-r--r--  spec/fixtures/api/schemas/ml/get_experiment.json  18
-rw-r--r--  spec/graphql/types/work_items/notes_filter_type_enum_spec.rb  13
-rw-r--r--  spec/graphql/types/work_items/widget_interface_spec.rb  1
-rw-r--r--  spec/graphql/types/work_items/widgets/notes_type_spec.rb  11
-rw-r--r--  spec/helpers/web_hooks/web_hooks_helper_spec.rb  26
-rw-r--r--  spec/models/milestone_note_spec.rb  2
-rw-r--r--  spec/models/ml/candidate_metadata_spec.rb  20
-rw-r--r--  spec/models/ml/candidate_spec.rb  1
-rw-r--r--  spec/models/ml/experiment_metadata_spec.rb  20
-rw-r--r--  spec/models/ml/experiment_spec.rb  1
-rw-r--r--  spec/models/state_note_spec.rb  7
-rw-r--r--  spec/models/work_items/type_spec.rb  3
-rw-r--r--  spec/models/work_items/widgets/notes_spec.rb  20
-rw-r--r--  spec/requests/api/graphql/project/work_items_spec.rb  76
-rw-r--r--  spec/requests/api/ml/mlflow_spec.rb  133
-rw-r--r--  spec/services/ml/experiment_tracking/candidate_repository_spec.rb  78
-rw-r--r--  spec/services/ml/experiment_tracking/experiment_repository_spec.rb  39
-rw-r--r--  spec/support/shared_examples/models/label_note_shared_examples.rb  2
95 files changed, 1304 insertions, 257 deletions
diff --git a/app/assets/javascripts/graphql_shared/possible_types.json b/app/assets/javascripts/graphql_shared/possible_types.json
index 2fb26366575..ff73dd78490 100644
--- a/app/assets/javascripts/graphql_shared/possible_types.json
+++ b/app/assets/javascripts/graphql_shared/possible_types.json
@@ -149,6 +149,7 @@
"WorkItemWidgetIteration",
"WorkItemWidgetLabels",
"WorkItemWidgetMilestone",
+ "WorkItemWidgetNotes",
"WorkItemWidgetStartAndDueDate",
"WorkItemWidgetStatus",
"WorkItemWidgetWeight"
diff --git a/app/graphql/resolvers/base_resolver.rb b/app/graphql/resolvers/base_resolver.rb
index 2b54a3fdd55..6f847221f1b 100644
--- a/app/graphql/resolvers/base_resolver.rb
+++ b/app/graphql/resolvers/base_resolver.rb
@@ -15,6 +15,13 @@ module Resolvers
@calls_gitaly = true
end
+ # This is a flag to allow us to use `complexity_multiplier` to compute complexity for connection
+ # fields (see BaseField#connection_complexity_multiplier) in resolvers that do external connection pagination,
+ # thus disabling the default `connection` option (see the self.field_options method above).
+ def self.calculate_ext_conn_complexity
+ false
+ end
+
def self.field_options
extra_options = {
requires_argument: @requires_argument,
@@ -116,7 +123,7 @@ module Resolvers
# When fetching many items, additional complexity is added to the field
# depending on how many items is fetched. For each item we add 1% of the
# original complexity - this means that loading 100 items (our default
- # maxp_age_size limit) doubles the original complexity.
+ # max_page_size limit) doubles the original complexity.
#
# Complexity is not increased when searching by specific ID(s), because
# complexity difference is minimal in this case.
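A worked example of the scaling described in the comment above; the base complexity and page size are illustrative numbers, not values taken from the schema.

```ruby
# Per-item scaling as described in the comment: 1% of the original complexity
# per fetched item, so a full default page (100 items) doubles the cost.
base_complexity = 1      # illustrative field complexity
per_item_factor = 0.01   # 1% per item
page_size = 100          # default max_page_size limit

total = base_complexity + (base_complexity * per_item_factor * page_size)
# => 2.0, i.e. double the original complexity
```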
diff --git a/app/graphql/resolvers/work_items/work_item_discussions_resolver.rb b/app/graphql/resolvers/work_items/work_item_discussions_resolver.rb
new file mode 100644
index 00000000000..15f17ceb8c6
--- /dev/null
+++ b/app/graphql/resolvers/work_items/work_item_discussions_resolver.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+module Resolvers
+ module WorkItems
+ class WorkItemDiscussionsResolver < BaseResolver
+ include Gitlab::Graphql::Authorize::AuthorizeResource
+ extension Gitlab::Graphql::Extensions::ForwardOnlyExternallyPaginatedArrayExtension
+
+ authorize :read_work_item
+ authorizes_object!
+
+ alias_method :notes_widget, :object
+
+ argument :filter, Types::WorkItems::NotesFilterTypeEnum,
+ required: false,
+ default_value: Types::WorkItems::NotesFilterTypeEnum.default_value,
+ description: 'Type of notes collection: ALL_NOTES, ONLY_COMMENTS, ONLY_ACTIVITY.'
+
+ type Types::Notes::DiscussionType.connection_type, null: true
+
+ def resolve(**args)
+ finder = Issuable::DiscussionsListService.new(current_user, work_item, params(args))
+
+ Gitlab::Graphql::ExternallyPaginatedArray.new(
+ finder.paginator.cursor_for_previous_page,
+ finder.paginator.cursor_for_next_page,
+ *finder.execute
+ )
+ end
+
+ def self.field_options
+ # We manage the pagination manually through an external array, so opt out of the connection field extension.
+ super.merge(connection: false)
+ end
+
+ def self.calculate_ext_conn_complexity
+ true
+ end
+
+ def self.complexity_multiplier(args)
+ 0.05
+ end
+
+ private
+
+ def work_item
+ notes_widget.work_item
+ end
+ strong_memoize_attr :work_item
+
+ def params(args)
+ {
+ notes_filter: args[:filter],
+ cursor: args[:after],
+ per_page: self.class.nodes_limit(args, @field, context: context)
+ }
+ end
+
+ def self.nodes_limit(args, field, **kwargs)
+ page_size = field&.max_page_size || kwargs[:context]&.schema&.default_max_page_size
+ [args[:first], page_size].compact.min
+ end
+ end
+ end
+end
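A minimal sketch of how `nodes_limit` above caps the page size handed to `Issuable::DiscussionsListService`; the limits used here are assumptions for illustration.

```ruby
# Mirrors WorkItemDiscussionsResolver.nodes_limit: take the smaller of the
# client's `first` argument and the field/schema page-size limit.
def nodes_limit(args, max_page_size:, default_max_page_size: 100)
  page_size = max_page_size || default_max_page_size
  [args[:first], page_size].compact.min
end

nodes_limit({ first: 5 }, max_page_size: 20)    # => 5   (client asked for fewer)
nodes_limit({}, max_page_size: 20)              # => 20  (field limit wins)
nodes_limit({ first: 500 }, max_page_size: nil) # => 100 (schema default caps it)
```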
diff --git a/app/graphql/types/base_field.rb b/app/graphql/types/base_field.rb
index 36ba3399754..615c143a0b9 100644
--- a/app/graphql/types/base_field.rb
+++ b/app/graphql/types/base_field.rb
@@ -135,15 +135,16 @@ module Types
:resolver_complexity, args, child_complexity: child_complexity
).to_i
complexity += 1 if calls_gitaly?
- complexity += complexity * connection_complexity_multiplier(ctx, args)
+ ext_conn = resolver&.try(:calculate_ext_conn_complexity)
+ complexity += complexity * connection_complexity_multiplier(ctx, args, calculate_ext_conn_complexity: ext_conn)
complexity.to_i
end
end
- def connection_complexity_multiplier(ctx, args)
+ def connection_complexity_multiplier(ctx, args, calculate_ext_conn_complexity:)
# Resolvers may add extra complexity depending on number of items being loaded.
- return 0 unless connection?
+ return 0 if !connection? && !calculate_ext_conn_complexity
page_size = max_page_size || ctx.schema.default_max_page_size
limit_value = [args[:first], args[:last], page_size].compact.min
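A sketch of the updated guard: the per-item multiplier now applies either to real connection fields or to resolvers that opt in via `calculate_ext_conn_complexity` (such as `WorkItemDiscussionsResolver` above). This is a simplification, not the actual implementation.

```ruby
# Simplified decision made by connection_complexity_multiplier after this change.
def apply_connection_multiplier?(connection_field:, ext_conn_opt_in:)
  connection_field || ext_conn_opt_in
end

apply_connection_multiplier?(connection_field: false, ext_conn_opt_in: true)  # => true  (externally paginated resolver)
apply_connection_multiplier?(connection_field: false, ext_conn_opt_in: false) # => false (no extra cost added)
```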
diff --git a/app/graphql/types/notes/note_type.rb b/app/graphql/types/notes/note_type.rb
index eef5ce40bde..8caaf9982cd 100644
--- a/app/graphql/types/notes/note_type.rb
+++ b/app/graphql/types/notes/note_type.rb
@@ -77,6 +77,14 @@ module Types
def author
Gitlab::Graphql::Loaders::BatchModelLoader.new(User, object.author_id).find
end
+
+ # We now also support SyntheticNote notes as a NoteType, but SyntheticNote does not have a real note ID,
+ # as SyntheticNote is generated dynamically from a ResourceEvent instance.
+ def id
+ return super unless object.is_a?(SyntheticNote)
+
+ ::Gitlab::GlobalId.build(object, model_name: object.class.to_s, id: "not-persisted")
+ end
end
end
end
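A rough illustration of the `id` override above; the GID strings are assumptions based on GlobalID's usual `gid://gitlab/<Model>/<id>` format.

```ruby
# Persisted notes keep their normal global ID; synthetic notes (built on the
# fly from resource events) get a placeholder ID instead.
def note_global_id(note)
  if note.is_a?(SyntheticNote)
    ::Gitlab::GlobalId.build(note, model_name: note.class.to_s, id: "not-persisted")
  else
    note.to_global_id
  end
end

# note_global_id(persisted_note).to_s # e.g. "gid://gitlab/Note/42"
# note_global_id(milestone_note).to_s # e.g. "gid://gitlab/MilestoneNote/not-persisted"
```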
diff --git a/app/graphql/types/work_items/notes_filter_type_enum.rb b/app/graphql/types/work_items/notes_filter_type_enum.rb
new file mode 100644
index 00000000000..93fb4689f0b
--- /dev/null
+++ b/app/graphql/types/work_items/notes_filter_type_enum.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+module Types
+ module WorkItems
+ class NotesFilterTypeEnum < BaseEnum
+ graphql_name 'NotesFilterType'
+ description 'Work item notes collection type.'
+
+ ::UserPreference::NOTES_FILTERS.each_pair do |key, value|
+ value key.upcase,
+ value: value,
+ description: UserPreference.notes_filters.invert[::UserPreference::NOTES_FILTERS[key]]
+ end
+
+ def self.default_value
+ ::UserPreference::NOTES_FILTERS[:all_notes]
+ end
+ end
+ end
+end
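Assuming `UserPreference::NOTES_FILTERS` keeps its usual three filters, the loop above yields one enum value per filter; the comments below spell out the result.

```ruby
# Resulting enum values (descriptions come from UserPreference.notes_filters):
#   ALL_NOTES     # "Show all activity."
#   ONLY_COMMENTS # "Show comments only."
#   ONLY_ACTIVITY # "Show history only."
# The resolver's default comes from the class method added above:
Types::WorkItems::NotesFilterTypeEnum.default_value == ::UserPreference::NOTES_FILTERS[:all_notes]
# => true
```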
diff --git a/app/graphql/types/work_items/widget_interface.rb b/app/graphql/types/work_items/widget_interface.rb
index b85d0a23535..672a78f12e1 100644
--- a/app/graphql/types/work_items/widget_interface.rb
+++ b/app/graphql/types/work_items/widget_interface.rb
@@ -17,7 +17,8 @@ module Types
::Types::WorkItems::Widgets::LabelsType,
::Types::WorkItems::Widgets::AssigneesType,
::Types::WorkItems::Widgets::StartAndDueDateType,
- ::Types::WorkItems::Widgets::MilestoneType
+ ::Types::WorkItems::Widgets::MilestoneType,
+ ::Types::WorkItems::Widgets::NotesType
].freeze
def self.ce_orphan_types
@@ -41,6 +42,8 @@ module Types
::Types::WorkItems::Widgets::StartAndDueDateType
when ::WorkItems::Widgets::Milestone
::Types::WorkItems::Widgets::MilestoneType
+ when ::WorkItems::Widgets::Notes
+ ::Types::WorkItems::Widgets::NotesType
else
raise "Unknown GraphQL type for widget #{object}"
end
diff --git a/app/graphql/types/work_items/widgets/notes_type.rb b/app/graphql/types/work_items/widgets/notes_type.rb
new file mode 100644
index 00000000000..7da2777beee
--- /dev/null
+++ b/app/graphql/types/work_items/widgets/notes_type.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+module Types
+ module WorkItems
+ module Widgets
+ # Disabling widget level authorization as it might be too granular
+ # and we already authorize the parent work item
+ # rubocop:disable Graphql/AuthorizeTypes
+ class NotesType < BaseObject
+ graphql_name 'WorkItemWidgetNotes'
+ description 'Represents a notes widget'
+
+ implements Types::WorkItems::WidgetInterface
+
+ # This field loads user comments, system notes and resource events as a discussion for a work item,
+ # raising the complexity considerably. To discourage fetching this field as part of fetching
+ # a list of issues, we raise its complexity.
+ field :discussions, Types::Notes::DiscussionType.connection_type,
+ null: true,
+ description: "Notes on this work item.",
+ resolver: Resolvers::WorkItems::WorkItemDiscussionsResolver
+ end
+ # rubocop:enable Graphql/AuthorizeTypes
+ end
+ end
+end
diff --git a/app/helpers/web_hooks/web_hooks_helper.rb b/app/helpers/web_hooks/web_hooks_helper.rb
index 2d74b008e10..bda9bf58fb7 100644
--- a/app/helpers/web_hooks/web_hooks_helper.rb
+++ b/app/helpers/web_hooks/web_hooks_helper.rb
@@ -7,7 +7,6 @@ module WebHooks
def show_project_hook_failed_callout?(project:)
return false if project_hook_page?
return false unless current_user
- return false unless Feature.enabled?(:webhooks_failed_callout, project)
return false unless Ability.allowed?(current_user, :read_web_hooks, project)
# Assumes include of Users::CalloutsHelper
diff --git a/app/models/ml/candidate.rb b/app/models/ml/candidate.rb
index f7da4418624..56b468f8286 100644
--- a/app/models/ml/candidate.rb
+++ b/app/models/ml/candidate.rb
@@ -11,6 +11,7 @@ module Ml
belongs_to :user
has_many :metrics, class_name: 'Ml::CandidateMetric'
has_many :params, class_name: 'Ml::CandidateParam'
+ has_many :metadata, class_name: 'Ml::CandidateMetadata'
has_many :latest_metrics, -> { latest }, class_name: 'Ml::CandidateMetric', inverse_of: :candidate
attribute :iid, default: -> { SecureRandom.uuid }
diff --git a/app/models/ml/candidate_metadata.rb b/app/models/ml/candidate_metadata.rb
new file mode 100644
index 00000000000..06b893c211f
--- /dev/null
+++ b/app/models/ml/candidate_metadata.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+module Ml
+ class CandidateMetadata < ApplicationRecord
+ validates :candidate, presence: true
+ validates :name,
+ length: { maximum: 250 },
+ presence: true,
+ uniqueness: { scope: :candidate, message: ->(candidate, _) { "'#{candidate.name}' already taken" } }
+ validates :value, length: { maximum: 5000 }, presence: true
+
+ belongs_to :candidate, class_name: 'Ml::Candidate'
+ end
+end
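A minimal usage sketch for the new model, assuming a candidate fetched through the existing repository scope; the tag name and values are illustrative.

```ruby
candidate = Ml::Candidate.with_project_id_and_iid(project.id, iid)

candidate.metadata.create!(name: 'git_sha', value: 'abc123')
candidate.metadata.create!(name: 'git_sha', value: 'other')
# => raises ActiveRecord::RecordInvalid, because name must be unique per candidate
```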
diff --git a/app/models/ml/experiment.rb b/app/models/ml/experiment.rb
index 05b238b960d..0a326b0e005 100644
--- a/app/models/ml/experiment.rb
+++ b/app/models/ml/experiment.rb
@@ -10,6 +10,7 @@ module Ml
belongs_to :project
belongs_to :user
has_many :candidates, class_name: 'Ml::Candidate'
+ has_many :metadata, class_name: 'Ml::ExperimentMetadata'
has_internal_id :iid, scope: :project
diff --git a/app/models/ml/experiment_metadata.rb b/app/models/ml/experiment_metadata.rb
new file mode 100644
index 00000000000..93496807e1a
--- /dev/null
+++ b/app/models/ml/experiment_metadata.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+module Ml
+ class ExperimentMetadata < ApplicationRecord
+ validates :experiment, presence: true
+ validates :name,
+ length: { maximum: 250 },
+ presence: true,
+ uniqueness: { scope: :experiment, message: ->(exp, _) { "'#{exp.name}' already taken" } }
+ validates :value, length: { maximum: 5000 }, presence: true
+
+ belongs_to :experiment, class_name: 'Ml::Experiment'
+ end
+end
diff --git a/app/models/synthetic_note.rb b/app/models/synthetic_note.rb
index dea7165af9f..a60c0d2f3bc 100644
--- a/app/models/synthetic_note.rb
+++ b/app/models/synthetic_note.rb
@@ -10,6 +10,7 @@ class SyntheticNote < Note
system: true,
author: event.user,
created_at: event.created_at,
+ updated_at: event.created_at,
discussion_id: event.discussion_id,
noteable: resource,
event: event,
diff --git a/app/models/work_items/type.rb b/app/models/work_items/type.rb
index 51ec5676ae6..9d2f3b552e6 100644
--- a/app/models/work_items/type.rb
+++ b/app/models/work_items/type.rb
@@ -36,15 +36,52 @@ module WorkItems
}.freeze
WIDGETS_FOR_TYPE = {
- issue: [Widgets::Assignees, Widgets::Labels, Widgets::Description, Widgets::Hierarchy, Widgets::StartAndDueDate,
- Widgets::Milestone],
- incident: [Widgets::Description, Widgets::Hierarchy],
- test_case: [Widgets::Description],
- requirement: [Widgets::Description],
- task: [Widgets::Assignees, Widgets::Labels, Widgets::Description, Widgets::Hierarchy, Widgets::StartAndDueDate,
- Widgets::Milestone],
- objective: [Widgets::Assignees, Widgets::Labels, Widgets::Description, Widgets::Hierarchy, Widgets::Milestone],
- key_result: [Widgets::Assignees, Widgets::Labels, Widgets::Description, Widgets::StartAndDueDate]
+ issue: [
+ Widgets::Assignees,
+ Widgets::Labels,
+ Widgets::Description,
+ Widgets::Hierarchy,
+ Widgets::StartAndDueDate,
+ Widgets::Milestone,
+ Widgets::Notes
+ ],
+ incident: [
+ Widgets::Description,
+ Widgets::Hierarchy,
+ Widgets::Notes
+ ],
+ test_case: [
+ Widgets::Description,
+ Widgets::Notes
+ ],
+ requirement: [
+ Widgets::Description,
+ Widgets::Notes
+ ],
+ task: [
+ Widgets::Assignees,
+ Widgets::Labels,
+ Widgets::Description,
+ Widgets::Hierarchy,
+ Widgets::StartAndDueDate,
+ Widgets::Milestone,
+ Widgets::Notes
+ ],
+ objective: [
+ Widgets::Assignees,
+ Widgets::Labels,
+ Widgets::Description,
+ Widgets::Hierarchy,
+ Widgets::Milestone,
+ Widgets::Notes
+ ],
+ key_result: [
+ Widgets::Assignees,
+ Widgets::Labels,
+ Widgets::Description,
+ Widgets::StartAndDueDate,
+ Widgets::Notes
+ ]
}.freeze
WI_TYPES_WITH_CREATED_HEADER = %w[issue incident].freeze
diff --git a/app/models/work_items/widgets/notes.rb b/app/models/work_items/widgets/notes.rb
new file mode 100644
index 00000000000..bde94ea8f43
--- /dev/null
+++ b/app/models/work_items/widgets/notes.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+module WorkItems
+ module Widgets
+ class Notes < Base
+ delegate :notes, to: :work_item
+ delegate_missing_to :work_item
+
+ def declarative_policy_delegate
+ work_item
+ end
+ end
+ end
+end
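A sketch of how the widget behaves, assuming `Widgets::Base` is constructed with the work item like the other widgets in this interface.

```ruby
widget = WorkItems::Widgets::Notes.new(work_item) # constructor assumed from Widgets::Base

widget.notes                                    # delegated to work_item.notes
widget.declarative_policy_delegate == work_item # => true, so authorization checks the parent work item
```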
diff --git a/app/services/ml/experiment_tracking/candidate_repository.rb b/app/services/ml/experiment_tracking/candidate_repository.rb
index b6f87995185..1dbeb30145b 100644
--- a/app/services/ml/experiment_tracking/candidate_repository.rb
+++ b/app/services/ml/experiment_tracking/candidate_repository.rb
@@ -14,11 +14,15 @@ module Ml
::Ml::Candidate.with_project_id_and_iid(project.id, iid)
end
- def create!(experiment, start_time)
- experiment.candidates.create!(
+ def create!(experiment, start_time, tags = nil)
+ candidate = experiment.candidates.create!(
user: user,
start_time: start_time || 0
)
+
+ add_tags(candidate, tags)
+
+ candidate
end
def update(candidate, status, end_time)
@@ -41,36 +45,21 @@ module Ml
candidate.params.create!(name: name, value: value)
end
- def add_metrics(candidate, metric_definitions)
- return unless candidate.present?
-
- metrics = metric_definitions.map do |metric|
- {
- candidate_id: candidate.id,
- name: metric[:key],
- value: metric[:value],
- tracked_at: metric[:timestamp],
- step: metric[:step],
- **timestamps
- }
- end
+ def add_tag!(candidate, name, value)
+ candidate.metadata.create!(name: name, value: value)
+ end
- ::Ml::CandidateMetric.insert_all(metrics, returning: false) unless metrics.empty?
+ def add_metrics(candidate, metric_definitions)
+ extra_keys = { tracked_at: :timestamp, step: :step }
+ insert_many(candidate, metric_definitions, ::Ml::CandidateMetric, extra_keys)
end
def add_params(candidate, param_definitions)
- return unless candidate.present?
-
- parameters = param_definitions.map do |p|
- {
- candidate_id: candidate.id,
- name: p[:key],
- value: p[:value],
- **timestamps
- }
- end
+ insert_many(candidate, param_definitions, ::Ml::CandidateParam)
+ end
- ::Ml::CandidateParam.insert_all(parameters, returning: false) unless parameters.empty?
+ def add_tags(candidate, tag_definitions)
+ insert_many(candidate, tag_definitions, ::Ml::CandidateMetadata)
end
private
@@ -80,6 +69,22 @@ module Ml
{ created_at: current_time, updated_at: current_time }
end
+
+ def insert_many(candidate, definitions, entity_class, extra_keys = {})
+ return unless candidate.present? && definitions.present?
+
+ entities = definitions.map do |d|
+ {
+ candidate_id: candidate.id,
+ name: d[:key],
+ value: d[:value],
+ **extra_keys.transform_values { |old_key| d[old_key] },
+ **timestamps
+ }
+ end
+
+ entity_class.insert_all(entities, returning: false) unless entities.empty?
+ end
end
end
end
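A worked example of the row-building that `insert_many` above performs; plain Ruby with illustrative values, timestamps omitted.

```ruby
# add_metrics remaps :timestamp/:step to the tracked_at/step columns via extra_keys;
# add_params and add_tags use the same helper with no extra keys.
extra_keys = { tracked_at: :timestamp, step: :step }
definition = { key: 'auc', value: 0.91, timestamp: 1668600000, step: 1 }

row = {
  candidate_id: 7, # candidate.id
  name: definition[:key],
  value: definition[:value],
  **extra_keys.transform_values { |old_key| definition[old_key] }
}
# => { candidate_id: 7, name: "auc", value: 0.91, tracked_at: 1668600000, step: 1 }
# Rows like this are bulk-inserted with Ml::CandidateMetric.insert_all(rows, returning: false).
```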
diff --git a/app/services/ml/experiment_tracking/experiment_repository.rb b/app/services/ml/experiment_tracking/experiment_repository.rb
index 891674adc2a..90f4cf1abec 100644
--- a/app/services/ml/experiment_tracking/experiment_repository.rb
+++ b/app/services/ml/experiment_tracking/experiment_repository.rb
@@ -20,10 +20,43 @@ module Ml
::Ml::Experiment.by_project_id(project.id)
end
- def create!(name)
- ::Ml::Experiment.create!(name: name,
- user: user,
- project: project)
+ def create!(name, tags = nil)
+ experiment = ::Ml::Experiment.create!(name: name,
+ user: user,
+ project: project)
+
+ add_tags(experiment, tags)
+
+ experiment
+ end
+
+ def add_tag!(experiment, key, value)
+ return unless experiment.present?
+
+ experiment.metadata.create!(name: key, value: value)
+ end
+
+ private
+
+ def timestamps
+ current_time = Time.zone.now
+
+ { created_at: current_time, updated_at: current_time }
+ end
+
+ def add_tags(experiment, tag_definitions)
+ return unless experiment.present? && tag_definitions.present?
+
+ entities = tag_definitions.map do |d|
+ {
+ experiment_id: experiment.id,
+ name: d[:key],
+ value: d[:value],
+ **timestamps
+ }
+ end
+
+ ::Ml::ExperimentMetadata.insert_all(entities, returning: false) unless entities.empty?
end
end
end
diff --git a/config/feature_flags/development/webhooks_failed_callout.yml b/config/feature_flags/development/webhooks_failed_callout.yml
deleted file mode 100644
index 11de5a793f6..00000000000
--- a/config/feature_flags/development/webhooks_failed_callout.yml
+++ /dev/null
@@ -1,8 +0,0 @@
----
-name: webhooks_failed_callout
-introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/91092
-rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/365535
-milestone: '15.2'
-type: development
-group: group::integrations
-default_enabled: false
diff --git a/db/docs/ml_candidate_metadata.yml b/db/docs/ml_candidate_metadata.yml
new file mode 100644
index 00000000000..485544f0f59
--- /dev/null
+++ b/db/docs/ml_candidate_metadata.yml
@@ -0,0 +1,11 @@
+---
+table_name: ml_candidate_metadata
+classes:
+ - Ml::CandidateMetadata
+feature_categories:
+ - mlops
+ - incubation
+gitlab_schema: gitlab_main
+description: A Candidate Metadata record holds extra information about the candidate
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/104267
+milestone: '15.7'
diff --git a/db/docs/ml_experiment_metadata.yml b/db/docs/ml_experiment_metadata.yml
new file mode 100644
index 00000000000..a77781cb601
--- /dev/null
+++ b/db/docs/ml_experiment_metadata.yml
@@ -0,0 +1,11 @@
+---
+table_name: ml_experiment_metadata
+classes:
+ - Ml::ExperimentMetadata
+feature_categories:
+ - mlops
+ - incubation
+gitlab_schema: gitlab_main
+description: An Experiment Metadata record holds extra information about the experiment
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/104267
+milestone: '15.7'
diff --git a/db/docs/views/postgres_autovacuum_activity.yml b/db/docs/views/postgres_autovacuum_activity.yml
new file mode 100644
index 00000000000..68878ceaaef
--- /dev/null
+++ b/db/docs/views/postgres_autovacuum_activity.yml
@@ -0,0 +1,10 @@
+---
+view_name: postgres_autovacuum_activity
+description: TODO
+classes:
+- Gitlab::Database::PostgresAutovacuumActivity
+feature_categories:
+- database
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/85103
+milestone: '15.0'
+gitlab_schema: gitlab_shared
diff --git a/db/docs/views/postgres_constraints.yml b/db/docs/views/postgres_constraints.yml
new file mode 100644
index 00000000000..133b4430ef2
--- /dev/null
+++ b/db/docs/views/postgres_constraints.yml
@@ -0,0 +1,10 @@
+---
+view_name: postgres_constraints
+description: TODO
+classes:
+- Gitlab::Database::PostgresConstraint
+feature_categories:
+- database
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/96815
+milestone: '15.4'
+gitlab_schema: gitlab_shared
diff --git a/db/docs/views/postgres_foreign_keys.yml b/db/docs/views/postgres_foreign_keys.yml
new file mode 100644
index 00000000000..0124d854681
--- /dev/null
+++ b/db/docs/views/postgres_foreign_keys.yml
@@ -0,0 +1,10 @@
+---
+view_name: postgres_foreign_keys
+description: TODO
+classes:
+- Gitlab::Database::PostgresForeignKey
+feature_categories:
+- database
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/66473
+milestone: '14.2'
+gitlab_schema: gitlab_shared
diff --git a/db/docs/views/postgres_index_bloat_estimates.yml b/db/docs/views/postgres_index_bloat_estimates.yml
new file mode 100644
index 00000000000..ac3fc462b58
--- /dev/null
+++ b/db/docs/views/postgres_index_bloat_estimates.yml
@@ -0,0 +1,10 @@
+---
+view_name: postgres_index_bloat_estimates
+description: TODO
+classes:
+- Gitlab::Database::PostgresIndexBloatEstimate
+feature_categories:
+- database
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/48698
+milestone: '13.7'
+gitlab_schema: gitlab_shared
diff --git a/db/docs/views/postgres_indexes.yml b/db/docs/views/postgres_indexes.yml
new file mode 100644
index 00000000000..b6c7a399216
--- /dev/null
+++ b/db/docs/views/postgres_indexes.yml
@@ -0,0 +1,10 @@
+---
+view_name: postgres_indexes
+description: TODO
+classes:
+- Gitlab::Database::PostgresIndex
+feature_categories:
+- database
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/42967
+milestone: '13.5'
+gitlab_schema: gitlab_shared
diff --git a/db/docs/views/postgres_partitioned_tables.yml b/db/docs/views/postgres_partitioned_tables.yml
new file mode 100644
index 00000000000..ddec7550e80
--- /dev/null
+++ b/db/docs/views/postgres_partitioned_tables.yml
@@ -0,0 +1,10 @@
+---
+view_name: postgres_partitioned_tables
+description: TODO
+classes:
+- Gitlab::Database::PostgresPartitionedTables
+feature_categories:
+- database
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/45591
+milestone: '13.6'
+gitlab_schema: gitlab_shared
diff --git a/db/docs/views/postgres_partitions.yml b/db/docs/views/postgres_partitions.yml
new file mode 100644
index 00000000000..4cb72f71956
--- /dev/null
+++ b/db/docs/views/postgres_partitions.yml
@@ -0,0 +1,10 @@
+---
+view_name: postgres_partitions
+description: TODO
+classes:
+- Gitlab::Database::PostgresPartition
+feature_categories:
+- database
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/45592
+milestone: '13.6'
+gitlab_schema: gitlab_shared
diff --git a/db/migrate/20221116160204_create_ml_experiment_metadata_and_ml_candidate_metadata.rb b/db/migrate/20221116160204_create_ml_experiment_metadata_and_ml_candidate_metadata.rb
new file mode 100644
index 00000000000..288d1e4be98
--- /dev/null
+++ b/db/migrate/20221116160204_create_ml_experiment_metadata_and_ml_candidate_metadata.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+class CreateMlExperimentMetadataAndMlCandidateMetadata < Gitlab::Database::Migration[2.0]
+ def change
+ create_table :ml_experiment_metadata do |t|
+ t.timestamps_with_timezone null: false
+ t.references :experiment,
+ foreign_key: { to_table: :ml_experiments, on_delete: :cascade },
+ index: false,
+ null: false
+ t.text :name, limit: 255, null: false
+ t.text :value, limit: 5000, null: false
+
+ t.index [:experiment_id, :name], unique: true
+ end
+
+ create_table :ml_candidate_metadata do |t|
+ t.timestamps_with_timezone null: false
+ t.references :candidate,
+ foreign_key: { to_table: :ml_candidates, on_delete: :cascade },
+ index: false,
+ null: false
+ t.text :name, limit: 255, null: false, index: true
+ t.text :value, limit: 5000, null: false
+
+ t.index [:candidate_id, :name], unique: true
+ end
+ end
+end
diff --git a/db/schema_migrations/20221116160204 b/db/schema_migrations/20221116160204
new file mode 100644
index 00000000000..3b697bb9108
--- /dev/null
+++ b/db/schema_migrations/20221116160204
@@ -0,0 +1 @@
+09e6935b54925d65dfe11c5aaf7c2b711fee204b817cdaddd6fb4066206721d6 \ No newline at end of file
diff --git a/db/structure.sql b/db/structure.sql
index ab7b632b04a..8aff1de1b0f 100644
--- a/db/structure.sql
+++ b/db/structure.sql
@@ -17894,6 +17894,26 @@ CREATE SEQUENCE milestones_id_seq
ALTER SEQUENCE milestones_id_seq OWNED BY milestones.id;
+CREATE TABLE ml_candidate_metadata (
+ id bigint NOT NULL,
+ created_at timestamp with time zone NOT NULL,
+ updated_at timestamp with time zone NOT NULL,
+ candidate_id bigint NOT NULL,
+ name text NOT NULL,
+ value text NOT NULL,
+ CONSTRAINT check_6b38a286a5 CHECK ((char_length(name) <= 255)),
+ CONSTRAINT check_9453f4a8e9 CHECK ((char_length(value) <= 5000))
+);
+
+CREATE SEQUENCE ml_candidate_metadata_id_seq
+ START WITH 1
+ INCREMENT BY 1
+ NO MINVALUE
+ NO MAXVALUE
+ CACHE 1;
+
+ALTER SEQUENCE ml_candidate_metadata_id_seq OWNED BY ml_candidate_metadata.id;
+
CREATE TABLE ml_candidate_metrics (
id bigint NOT NULL,
created_at timestamp with time zone NOT NULL,
@@ -17957,6 +17977,26 @@ CREATE SEQUENCE ml_candidates_id_seq
ALTER SEQUENCE ml_candidates_id_seq OWNED BY ml_candidates.id;
+CREATE TABLE ml_experiment_metadata (
+ id bigint NOT NULL,
+ created_at timestamp with time zone NOT NULL,
+ updated_at timestamp with time zone NOT NULL,
+ experiment_id bigint NOT NULL,
+ name text NOT NULL,
+ value text NOT NULL,
+ CONSTRAINT check_112fe5002d CHECK ((char_length(name) <= 255)),
+ CONSTRAINT check_a91c633d68 CHECK ((char_length(value) <= 5000))
+);
+
+CREATE SEQUENCE ml_experiment_metadata_id_seq
+ START WITH 1
+ INCREMENT BY 1
+ NO MINVALUE
+ NO MAXVALUE
+ CACHE 1;
+
+ALTER SEQUENCE ml_experiment_metadata_id_seq OWNED BY ml_experiment_metadata.id;
+
CREATE TABLE ml_experiments (
id bigint NOT NULL,
created_at timestamp with time zone NOT NULL,
@@ -24138,12 +24178,16 @@ ALTER TABLE ONLY metrics_users_starred_dashboards ALTER COLUMN id SET DEFAULT ne
ALTER TABLE ONLY milestones ALTER COLUMN id SET DEFAULT nextval('milestones_id_seq'::regclass);
+ALTER TABLE ONLY ml_candidate_metadata ALTER COLUMN id SET DEFAULT nextval('ml_candidate_metadata_id_seq'::regclass);
+
ALTER TABLE ONLY ml_candidate_metrics ALTER COLUMN id SET DEFAULT nextval('ml_candidate_metrics_id_seq'::regclass);
ALTER TABLE ONLY ml_candidate_params ALTER COLUMN id SET DEFAULT nextval('ml_candidate_params_id_seq'::regclass);
ALTER TABLE ONLY ml_candidates ALTER COLUMN id SET DEFAULT nextval('ml_candidates_id_seq'::regclass);
+ALTER TABLE ONLY ml_experiment_metadata ALTER COLUMN id SET DEFAULT nextval('ml_experiment_metadata_id_seq'::regclass);
+
ALTER TABLE ONLY ml_experiments ALTER COLUMN id SET DEFAULT nextval('ml_experiments_id_seq'::regclass);
ALTER TABLE ONLY namespace_admin_notes ALTER COLUMN id SET DEFAULT nextval('namespace_admin_notes_id_seq'::regclass);
@@ -26207,6 +26251,9 @@ ALTER TABLE ONLY milestone_releases
ALTER TABLE ONLY milestones
ADD CONSTRAINT milestones_pkey PRIMARY KEY (id);
+ALTER TABLE ONLY ml_candidate_metadata
+ ADD CONSTRAINT ml_candidate_metadata_pkey PRIMARY KEY (id);
+
ALTER TABLE ONLY ml_candidate_metrics
ADD CONSTRAINT ml_candidate_metrics_pkey PRIMARY KEY (id);
@@ -26216,6 +26263,9 @@ ALTER TABLE ONLY ml_candidate_params
ALTER TABLE ONLY ml_candidates
ADD CONSTRAINT ml_candidates_pkey PRIMARY KEY (id);
+ALTER TABLE ONLY ml_experiment_metadata
+ ADD CONSTRAINT ml_experiment_metadata_pkey PRIMARY KEY (id);
+
ALTER TABLE ONLY ml_experiments
ADD CONSTRAINT ml_experiments_pkey PRIMARY KEY (id);
@@ -29902,6 +29952,10 @@ CREATE INDEX index_milestones_on_title_trigram ON milestones USING gin (title gi
CREATE INDEX index_mirror_data_non_scheduled_or_started ON project_mirror_data USING btree (next_execution_timestamp, retry_count) WHERE ((status)::text <> ALL ('{scheduled,started}'::text[]));
+CREATE UNIQUE INDEX index_ml_candidate_metadata_on_candidate_id_and_name ON ml_candidate_metadata USING btree (candidate_id, name);
+
+CREATE INDEX index_ml_candidate_metadata_on_name ON ml_candidate_metadata USING btree (name);
+
CREATE INDEX index_ml_candidate_metrics_on_candidate_id ON ml_candidate_metrics USING btree (candidate_id);
CREATE INDEX index_ml_candidate_params_on_candidate_id ON ml_candidate_params USING btree (candidate_id);
@@ -29912,6 +29966,8 @@ CREATE UNIQUE INDEX index_ml_candidates_on_experiment_id_and_iid ON ml_candidate
CREATE INDEX index_ml_candidates_on_user_id ON ml_candidates USING btree (user_id);
+CREATE UNIQUE INDEX index_ml_experiment_metadata_on_experiment_id_and_name ON ml_experiment_metadata USING btree (experiment_id, name);
+
CREATE UNIQUE INDEX index_ml_experiments_on_project_id_and_iid ON ml_experiments USING btree (project_id, iid);
CREATE UNIQUE INDEX index_ml_experiments_on_project_id_and_name ON ml_experiments USING btree (project_id, name);
@@ -34352,6 +34408,9 @@ ALTER TABLE ONLY ci_pipeline_metadata
ALTER TABLE ONLY project_repository_storage_moves
ADD CONSTRAINT fk_rails_5106dbd44a FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
+ALTER TABLE ONLY ml_candidate_metadata
+ ADD CONSTRAINT fk_rails_5117dddf22 FOREIGN KEY (candidate_id) REFERENCES ml_candidates(id) ON DELETE CASCADE;
+
ALTER TABLE ONLY bulk_import_configurations
ADD CONSTRAINT fk_rails_536b96bff1 FOREIGN KEY (bulk_import_id) REFERENCES bulk_imports(id) ON DELETE CASCADE;
@@ -34529,6 +34588,9 @@ ALTER TABLE ONLY plan_limits
ALTER TABLE ONLY operations_feature_flags_issues
ADD CONSTRAINT fk_rails_6a8856ca4f FOREIGN KEY (feature_flag_id) REFERENCES operations_feature_flags(id) ON DELETE CASCADE;
+ALTER TABLE ONLY ml_experiment_metadata
+ ADD CONSTRAINT fk_rails_6b39844d44 FOREIGN KEY (experiment_id) REFERENCES ml_experiments(id) ON DELETE CASCADE;
+
ALTER TABLE ONLY error_tracking_errors
ADD CONSTRAINT fk_rails_6b41f837ba FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
diff --git a/doc/administration/server_hooks.md b/doc/administration/server_hooks.md
index 448becb32dc..3d4f39b5ff0 100644
--- a/doc/administration/server_hooks.md
+++ b/doc/administration/server_hooks.md
@@ -47,15 +47,30 @@ To create server hooks for a repository:
`pre-receive` server hook, the filename should be `pre-receive` with no extension.
- To create many server hooks, create a directory for the hooks that matches the hook type. For example, for a
`pre-receive` server hook, the directory name should be `pre-receive.d`. Put the files for the hook in that directory.
-1. Make the server hook files executable and ensure that they are owned by the Git user.
+1. **Make the server hook files executable** and ensure that they are owned by the Git user.
1. Write the code to make the server hook function as expected. Git server hooks can be in any programming language. Ensure
the [shebang](https://en.wikipedia.org/wiki/Shebang_(Unix)) at the top reflects the language type. For
example, if the script is in Ruby the shebang is probably `#!/usr/bin/env ruby`.
-1. Make the hook file executable, ensure that it's owned by the Git user, and ensure it does not match the backup file
+1. Ensure the hook file does not match the backup file
pattern (`*~`).
If the server hook code is properly implemented, it should execute when the Git hook is next triggered.
+### Gitaly Cluster
+
+If you use [Gitaly Cluster](gitaly/index.md), the scripts must be copied to every Gitaly node that has a replica of the repository. Every Gitaly node
+needs a copy because any node can be made a primary at any time. Server hooks only run on primary nodes.
+
+The location to copy the scripts to depends on where repositories are stored:
+
+- In GitLab 15.2 and earlier, Gitaly Cluster uses the [hashed storage path](repository_storage_types.md#hashed-storage)
+ reported by the GitLab application.
+- In GitLab 15.3 and later, new repositories are created using
+ [Praefect-generated replica paths](gitaly/index.md#praefect-generated-replica-paths-gitlab-150-and-later),
+ which are not the hashed storage path. The replica path can be identified by
+ [querying the Praefect repository metadata](../administration/gitaly/troubleshooting.md#view-repository-metadata)
+ using `-relative-path` to specify the expected GitLab hashed storage path.
+
## Create global server hooks for all repositories
To create a Git hook that applies to all repositories, set a global server hook. Global server hooks also apply to:
diff --git a/doc/api/bulk_imports.md b/doc/api/bulk_imports.md
index e18a77df6df..1e0096a6bdd 100644
--- a/doc/api/bulk_imports.md
+++ b/doc/api/bulk_imports.md
@@ -33,6 +33,7 @@ POST /bulk_imports
```shell
curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/bulk_imports" \
+ --header "Content-Type: application/json" \
--data '{
"configuration": {
"url": "http://gitlab.example/",
diff --git a/doc/api/graphql/reference/index.md b/doc/api/graphql/reference/index.md
index 1b606cc50bf..2d2b1d7adcd 100644
--- a/doc/api/graphql/reference/index.md
+++ b/doc/api/graphql/reference/index.md
@@ -20666,6 +20666,34 @@ Represents a milestone widget.
| <a id="workitemwidgetmilestonemilestone"></a>`milestone` | [`Milestone`](#milestone) | Milestone of the work item. |
| <a id="workitemwidgetmilestonetype"></a>`type` | [`WorkItemWidgetType`](#workitemwidgettype) | Widget type. |
+### `WorkItemWidgetNotes`
+
+Represents a notes widget.
+
+#### Fields
+
+| Name | Type | Description |
+| ---- | ---- | ----------- |
+| <a id="workitemwidgetnotestype"></a>`type` | [`WorkItemWidgetType`](#workitemwidgettype) | Widget type. |
+
+#### Fields with arguments
+
+##### `WorkItemWidgetNotes.discussions`
+
+Notes on this work item.
+
+Returns [`DiscussionConnection`](#discussionconnection).
+
+This field returns a [connection](#connections). It accepts the
+four standard [pagination arguments](#connection-pagination-arguments):
+`before: String`, `after: String`, `first: Int`, `last: Int`.
+
+###### Arguments
+
+| Name | Type | Description |
+| ---- | ---- | ----------- |
+| <a id="workitemwidgetnotesdiscussionsfilter"></a>`filter` | [`NotesFilterType`](#notesfiltertype) | Type of notes collection: ALL_NOTES, ONLY_COMMENTS, ONLY_ACTIVITY. |
+
### `WorkItemWidgetStartAndDueDate`
Represents a start and due date widget.
@@ -22025,6 +22053,16 @@ Kind of the network policy.
| <a id="networkpolicykindciliumnetworkpolicy"></a>`CiliumNetworkPolicy` | Policy kind of Cilium Network Policy. |
| <a id="networkpolicykindnetworkpolicy"></a>`NetworkPolicy` | Policy kind of Network Policy. |
+### `NotesFilterType`
+
+Work item notes collection type.
+
+| Value | Description |
+| ----- | ----------- |
+| <a id="notesfiltertypeall_notes"></a>`ALL_NOTES` | Show all activity. |
+| <a id="notesfiltertypeonly_activity"></a>`ONLY_ACTIVITY` | Show history only. |
+| <a id="notesfiltertypeonly_comments"></a>`ONLY_COMMENTS` | Show comments only. |
+
### `OncallRotationUnitEnum`
Rotation length unit of an on-call rotation.
@@ -22816,6 +22854,7 @@ Type of a work item widget.
| <a id="workitemwidgettypeiteration"></a>`ITERATION` | Iteration widget. |
| <a id="workitemwidgettypelabels"></a>`LABELS` | Labels widget. |
| <a id="workitemwidgettypemilestone"></a>`MILESTONE` | Milestone widget. |
+| <a id="workitemwidgettypenotes"></a>`NOTES` | Notes widget. |
| <a id="workitemwidgettypestart_and_due_date"></a>`START_AND_DUE_DATE` | Start And Due Date widget. |
| <a id="workitemwidgettypestatus"></a>`STATUS` | Status widget. |
| <a id="workitemwidgettypeweight"></a>`WEIGHT` | Weight widget. |
@@ -24111,6 +24150,7 @@ Implementations:
- [`WorkItemWidgetIteration`](#workitemwidgetiteration)
- [`WorkItemWidgetLabels`](#workitemwidgetlabels)
- [`WorkItemWidgetMilestone`](#workitemwidgetmilestone)
+- [`WorkItemWidgetNotes`](#workitemwidgetnotes)
- [`WorkItemWidgetStartAndDueDate`](#workitemwidgetstartandduedate)
- [`WorkItemWidgetStatus`](#workitemwidgetstatus)
- [`WorkItemWidgetWeight`](#workitemwidgetweight)
diff --git a/doc/development/database/adding_database_indexes.md b/doc/development/database/adding_database_indexes.md
index d4cd807ef22..053747bac8c 100644
--- a/doc/development/database/adding_database_indexes.md
+++ b/doc/development/database/adding_database_indexes.md
@@ -107,11 +107,10 @@ determining whether existing indexes are still required. More information on
the meaning of the various columns can be found at
<https://www.postgresql.org/docs/current/monitoring-stats.html>.
-To determine if an index is still being used on production, use the following
-Thanos query with your index name:
+To determine if an index is still being used on production, use [Thanos](https://thanos-query.ops.gitlab.net/graph?g0.expr=sum%20by%20(type)(rate(pg_stat_user_indexes_idx_scan%7Benv%3D%22gprd%22%2C%20indexrelname%3D%22INSERT%20INDEX%20NAME%20HERE%22%7D%5B30d%5D))&g0.tab=1&g0.stacked=0&g0.range_input=1h&g0.max_source_resolution=0s&g0.deduplicate=1&g0.partial_response=0&g0.store_matches=%5B%5D):
```sql
-sum(rate(pg_stat_user_indexes_idx_tup_read{env="gprd", indexrelname="index_ci_name", type="patroni-ci"}[5m]))
+sum by (type)(rate(pg_stat_user_indexes_idx_scan{env="gprd", indexrelname="INSERT INDEX NAME HERE"}[30d]))
```
Because the query output relies on the actual usage of your database, it
diff --git a/doc/development/database/database_dictionary.md b/doc/development/database/database_dictionary.md
index 474a6cbe093..afbd9a5aba8 100644
--- a/doc/development/database/database_dictionary.md
+++ b/doc/development/database/database_dictionary.md
@@ -30,14 +30,14 @@ milestone: '13.0'
## Schema
-| Attribute | Type | Required | Description |
-|----------------------|---------------|----------|--------------------------------------------------------------------------|
-| `table_name` | String | yes | Database table name |
-| `classes` | Array(String) | no | List of classes that respond to `.table_name` with the `table_name` |
-| `feature_categories` | Array(String) | yes | List of feature categories using this table |
-| `description` | String | no | Text description of the information stored in the table and it's purpose |
-| `introduced_by_url` | URL | no | URL to the merge request or commit which introduced this table |
-| `milestone` | String | no | The milestone that introduced this table |
+| Attribute | Type | Required | Description |
+|----------------------------|---------------|----------|-----------------------------------------------------------------------------------|
+| `table_name` / `view_name` | String | yes | Database table name or view name |
+| `classes` | Array(String) | no | List of classes that are associated to this table or view. |
+| `feature_categories` | Array(String) | yes | List of feature categories using this table or view. |
+| `description` | String | no | Text description of the information stored in the table or view, and its purpose. |
+| `introduced_by_url` | URL | no | URL to the merge request or commit which introduced this table or view. |
+| `milestone` | String | no | The milestone that introduced this table or view. |
## Adding tables
@@ -52,3 +52,20 @@ Include this file in the commit with the migration that creates the table.
When dropping a table, you must remove the metadata file from `db/docs/` for `main` and `ci` databases.
For the `geo` database, you must remove the file from `ee/db/docs/`.
Use the same commit with the migration that drops the table.
+
+## Adding views
+
+When adding a new view, you should:
+
+1. Create a new file for this view in the appropriate directory:
+ - `main` database: `db/docs/views/`
+ - `ci` database: `db/docs/views/`
+ - `geo` database: `ee/db/docs/views/`
+1. Name the file `<view_name>.yml`, and include as much information as you know about the view.
+1. Include this file in the commit with the migration that creates the view.
+
+## Dropping views
+
+When dropping a view, you must remove the metadata file from `db/docs/views/`.
+For the `geo` database, you must remove the file from `ee/db/docs/views/`.
+Use the same commit with the migration that drops the view.
diff --git a/doc/development/migration_style_guide.md b/doc/development/migration_style_guide.md
index faf49b26788..6e505fa0d19 100644
--- a/doc/development/migration_style_guide.md
+++ b/doc/development/migration_style_guide.md
@@ -611,10 +611,10 @@ class MyMigration < Gitlab::Database::Migration[2.0]
end
```
-Verify the index is not being used anymore with this Thanos query:
+You can verify that the index is not being used with [Thanos](https://thanos-query.ops.gitlab.net/graph?g0.expr=sum%20by%20(type)(rate(pg_stat_user_indexes_idx_scan%7Benv%3D%22gprd%22%2C%20indexrelname%3D%22INSERT%20INDEX%20NAME%20HERE%22%7D%5B30d%5D))&g0.tab=1&g0.stacked=0&g0.range_input=1h&g0.max_source_resolution=0s&g0.deduplicate=1&g0.partial_response=0&g0.store_matches=%5B%5D):
```sql
-sum by (type)(rate(pg_stat_user_indexes_idx_scan{env="gprd", indexrelname="index_groups_on_parent_id_id"}[5m]))
+sum by (type)(rate(pg_stat_user_indexes_idx_scan{env="gprd", indexrelname="INSERT INDEX NAME HERE"}[30d]))
```
Note that it is not necessary to check if the index exists prior to
diff --git a/doc/development/testing_guide/frontend_testing.md b/doc/development/testing_guide/frontend_testing.md
index 56bbcb815d1..87d8d493512 100644
--- a/doc/development/testing_guide/frontend_testing.md
+++ b/doc/development/testing_guide/frontend_testing.md
@@ -522,7 +522,7 @@ it('waits for an event', () => {
### Ensuring that tests are isolated
-Tests are normally architected in a pattern which requires a recurring setup and breakdown of the component under test. This is done by making use of the `beforeEach` and `afterEach` hooks.
+Tests are normally architected in a pattern which requires a recurring setup of the component under test. This is often achieved by making use of the `beforeEach` hook.
Example
@@ -532,16 +532,22 @@ Example
beforeEach(() => {
wrapper = mount(Component);
});
+```
+
+With [enableAutoDestroy](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/100389), it is no longer necessary to manually call `wrapper.destroy()`.
+However, some mocks, spies, and fixtures do need to be torn down, and we can leverage the `afterEach` hook.
+
+Example
+
+```javascript
+ let wrapper;
afterEach(() => {
- wrapper.destroy();
+ fakeApollo = null;
+ store = null;
});
```
-When looking at this initially you'd suspect that the component is setup before each test and then broken down afterwards, providing isolation between tests.
-
-This is however not entirely true as the `destroy` method does not remove everything which has been mutated on the `wrapper` object. For functional components, destroy only removes the rendered DOM elements from the document.
-
### Jest best practices
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/34209) in GitLab 13.2.
diff --git a/doc/topics/offline/quick_start_guide.md b/doc/topics/offline/quick_start_guide.md
index bb2f651786c..015fd9fc720 100644
--- a/doc/topics/offline/quick_start_guide.md
+++ b/doc/topics/offline/quick_start_guide.md
@@ -205,3 +205,13 @@ users are on the most up-to-date instances of GitLab. These two services can be
environments so that they do not attempt and fail to reach out to GitLab services.
Learn more about [disabling usage statistics](../../user/admin_area/settings/usage_statistics.md#enable-or-disable-usage-statistics).
+
+### Configure NTP
+
+In GitLab 15.4 and 15.5, Gitaly Cluster doesn't function if `pool.ntp.org` is unreachable.
+[Customize the time server setting](../../administration/gitaly/praefect.md#customize-time-server-setting) on the Gitaly
+and Praefect servers so they can use an accessible NTP server.
+
+On offline instances, the [GitLab Geo check Rake task](../../administration/geo/replication/troubleshooting.md#can-geo-detect-the-current-site-correctly)
+always fails because it uses `pool.ntp.org`. This error can be ignored but you can
+[read more about how to work around it](../../administration/geo/replication/troubleshooting.md#message-machine-clock-is-synchronized--exception).
diff --git a/doc/update/index.md b/doc/update/index.md
index 29b6a1fc9c2..84dfcdeea95 100644
--- a/doc/update/index.md
+++ b/doc/update/index.md
@@ -506,6 +506,8 @@ and [Helm Chart deployments](https://docs.gitlab.com/charts/). They come with ap
sidekiq['routing_rules'] = [['*', 'default']]
```
+- New Git repositories created in Gitaly cluster [no longer use the `@hashed` storage path](#change-to-praefect-generated-replica-paths-in-gitlab-153). Server
+ hooks for new repositories must be copied into a different location.
- The structure of `/etc/gitlab/gitlab-secrets.json` was modified in [GitLab 15.4](https://gitlab.com/gitlab-org/omnibus-gitlab/-/merge_requests/6310),
and new configuration was added to `gitlab_pages`, `grafana`, and `mattermost` sections.
In a highly available or GitLab Geo environment, secrets need to be the same on all nodes.
@@ -521,6 +523,8 @@ and [Helm Chart deployments](https://docs.gitlab.com/charts/). They come with ap
- [Incorrect deletion of object storage files on Geo secondary sites](https://gitlab.com/gitlab-org/gitlab/-/issues/371397) can occur in certain situations. See [Geo: Incorrect object storage LFS file deletion on secondary site issue in GitLab 15.0.0 to 15.3.2](#geo-incorrect-object-storage-lfs-file-deletion-on-secondary-sites-in-gitlab-1500-to-1532).
- LFS transfers can [redirect to the primary from secondary site mid-session](https://gitlab.com/gitlab-org/gitlab/-/issues/371571) causing failed pull and clone requests when [Geo proxying](../administration/geo/secondary_proxy/index.md) is enabled. Geo proxying is enabled by default in GitLab 15.1 and later. See [Geo: LFS transfer redirect to primary from secondary site mid-session issue in GitLab 15.1.0 to 15.3.2](#geo-lfs-transfers-redirect-to-primary-from-secondary-site-mid-session-in-gitlab-1510-to-1532) for more details.
+- New Git repositories created in Gitaly cluster [no longer use the `@hashed` storage path](#change-to-praefect-generated-replica-paths-in-gitlab-153). Server
+ hooks for new repositories must be copied into a different location.
### 15.2.0
@@ -1197,6 +1201,19 @@ After upgraded to 11.11.8 you can safely upgrade to 12.0.Z.
See our [documentation on upgrade paths](../policy/maintenance.md#upgrade-recommendations)
for more information.
+### Change to Praefect-generated replica paths in GitLab 15.3
+
+New Git repositories created in Gitaly cluster no longer use the `@hashed` storage path.
+
+Praefect now generates replica paths for use by Gitaly cluster.
+This change is a prerequisite for Gitaly cluster atomically creating, deleting, and
+renaming Git repositories.
+
+To identify the replica path, [query the Praefect repository metadata](../administration/gitaly/troubleshooting.md#view-repository-metadata)
+and pass the `@hashed` storage path to `-relative-path`.
+
+With this information, you can correctly install [server hooks](../administration/server_hooks.md).
+
### Maintenance mode issue in GitLab 13.9 to 14.4
When [Maintenance mode](../administration/maintenance_mode/index.md) is enabled, users cannot sign in with SSO, SAML, or LDAP.
diff --git a/doc/user/application_security/api_fuzzing/index.md b/doc/user/application_security/api_fuzzing/index.md
index 03eed6fdbf8..b14a42a7fb0 100644
--- a/doc/user/application_security/api_fuzzing/index.md
+++ b/doc/user/application_security/api_fuzzing/index.md
@@ -1551,13 +1551,13 @@ When testing an API it can be useful to exclude certain paths. For example, you
To verify the paths are excluded, review the `Tested Operations` and `Excluded Operations` portion of the job output. You should not see any excluded paths listed under `Tested Operations`.
```plaintext
-2021-05-27 21:51:08 [INF] API Security: --[ Tested Operations ]-------------------------
-2021-05-27 21:51:08 [INF] API Security: 201 POST http://target:7777/api/users CREATED
-2021-05-27 21:51:08 [INF] API Security: ------------------------------------------------
-2021-05-27 21:51:08 [INF] API Security: --[ Excluded Operations ]-----------------------
-2021-05-27 21:51:08 [INF] API Security: GET http://target:7777/api/messages
-2021-05-27 21:51:08 [INF] API Security: POST http://target:7777/api/messages
-2021-05-27 21:51:08 [INF] API Security: ------------------------------------------------
+2021-05-27 21:51:08 [INF] API Fuzzing: --[ Tested Operations ]-------------------------
+2021-05-27 21:51:08 [INF] API Fuzzing: 201 POST http://target:7777/api/users CREATED
+2021-05-27 21:51:08 [INF] API Fuzzing: ------------------------------------------------
+2021-05-27 21:51:08 [INF] API Fuzzing: --[ Excluded Operations ]-----------------------
+2021-05-27 21:51:08 [INF] API Fuzzing: GET http://target:7777/api/messages
+2021-05-27 21:51:08 [INF] API Fuzzing: POST http://target:7777/api/messages
+2021-05-27 21:51:08 [INF] API Fuzzing: ------------------------------------------------
```
#### Examples of excluding paths
@@ -1821,13 +1821,13 @@ As an alternative to excluding by paths, you can filter by any other component i
In your job output you can check if any URLs matched any provided regular expression from `FUZZAPI_EXCLUDE_URLS`. Matching operations are listed in the **Excluded Operations** section. Operations listed in the **Excluded Operations** should not be listed in the **Tested Operations** section. For example the following portion of a job output:
```plaintext
-2021-05-27 21:51:08 [INF] API Security: --[ Tested Operations ]-------------------------
-2021-05-27 21:51:08 [INF] API Security: 201 POST http://target:7777/api/users CREATED
-2021-05-27 21:51:08 [INF] API Security: ------------------------------------------------
-2021-05-27 21:51:08 [INF] API Security: --[ Excluded Operations ]-----------------------
-2021-05-27 21:51:08 [INF] API Security: GET http://target:7777/api/messages
-2021-05-27 21:51:08 [INF] API Security: POST http://target:7777/api/messages
-2021-05-27 21:51:08 [INF] API Security: ------------------------------------------------
+2021-05-27 21:51:08 [INF] API Fuzzing: --[ Tested Operations ]-------------------------
+2021-05-27 21:51:08 [INF] API Fuzzing: 201 POST http://target:7777/api/users CREATED
+2021-05-27 21:51:08 [INF] API Fuzzing: ------------------------------------------------
+2021-05-27 21:51:08 [INF] API Fuzzing: --[ Excluded Operations ]-----------------------
+2021-05-27 21:51:08 [INF] API Fuzzing: GET http://target:7777/api/messages
+2021-05-27 21:51:08 [INF] API Fuzzing: POST http://target:7777/api/messages
+2021-05-27 21:51:08 [INF] API Fuzzing: ------------------------------------------------
```
NOTE:
@@ -2242,18 +2242,18 @@ The first step to resolving performance issues is to understand what is contribu
The API Fuzzing job output contains helpful information about how fast we are testing, how fast each operation being tested responds, and summary information. Let's take a look at some sample output to see how it can be used in tracking down performance issues:
```shell
-API Security: Loaded 10 operations from: assets/har-large-response/large_responses.har
-API Security:
-API Security: Testing operation [1/10]: 'GET http://target:7777/api/large_response_json'.
-API Security: - Parameters: (Headers: 4, Query: 0, Body: 0)
-API Security: - Request body size: 0 Bytes (0 bytes)
-API Security:
-API Security: Finished testing operation 'GET http://target:7777/api/large_response_json'.
-API Security: - Excluded Parameters: (Headers: 0, Query: 0, Body: 0)
-API Security: - Performed 767 requests
-API Security: - Average response body size: 130 MB
-API Security: - Average call time: 2 seconds and 82.69 milliseconds (2.082693 seconds)
-API Security: - Time to complete: 14 minutes, 8 seconds and 788.36 milliseconds (848.788358 seconds)
+API Fuzzing: Loaded 10 operations from: assets/har-large-response/large_responses.har
+API Fuzzing:
+API Fuzzing: Testing operation [1/10]: 'GET http://target:7777/api/large_response_json'.
+API Fuzzing: - Parameters: (Headers: 4, Query: 0, Body: 0)
+API Fuzzing: - Request body size: 0 Bytes (0 bytes)
+API Fuzzing:
+API Fuzzing: Finished testing operation 'GET http://target:7777/api/large_response_json'.
+API Fuzzing: - Excluded Parameters: (Headers: 0, Query: 0, Body: 0)
+API Fuzzing: - Performed 767 requests
+API Fuzzing: - Average response body size: 130 MB
+API Fuzzing: - Average call time: 2 seconds and 82.69 milliseconds (2.082693 seconds)
+API Fuzzing: - Time to complete: 14 minutes, 8 seconds and 788.36 milliseconds (848.788358 seconds)
```
This job console output snippet starts by telling us how many operations were found (10), followed by a notification that testing has started on a specific operation, and then a summary once testing of that operation has completed. The summary is the most interesting part of this log output. In the summary, we can see that it took API Fuzzing 767 requests to fully test this operation and its related fields. We can also see that the average response time was about 2 seconds and that this single operation took roughly 14 minutes to complete.
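To put those numbers in perspective, here is a minimal back-of-the-envelope sketch using only the figures from the log excerpt above; it assumes the remaining operations behave similarly, which is a rough simplification.

```python
# Figures taken from the job output excerpt above.
operations_loaded = 10
requests_for_operation = 767
time_for_operation_s = 848.788358  # 14 minutes, 8 seconds, 788.36 milliseconds

requests_per_second = requests_for_operation / time_for_operation_s
projected_total_hours = operations_loaded * time_for_operation_s / 3600

print(f"~{requests_per_second:.2f} requests per second for this operation")
print(f"~{projected_total_hours:.1f} hours if all {operations_loaded} operations took this long")
```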
@@ -2443,7 +2443,7 @@ See the following documentation sections for assistance:
See [Performance Tuning and Testing Speed](#performance-tuning-and-testing-speed)
-### Error waiting for API Security 'http://127.0.0.1:5000' to become available
+### Error waiting for API Fuzzing 'http://127.0.0.1:5000' to become available
A bug exists in versions of the API Fuzzing analyzer prior to v1.6.196 that can cause a background process to fail under certain conditions. The solution is to update to a newer version of the API Fuzzing analyzer.
@@ -2456,6 +2456,11 @@ If the issue is occurring with versions v1.6.196 or greater, contact Support and
1. The `gl-api-security-scanner.log` file available as a job artifact. In the right-hand panel of the job details page, select the **Browse** button.
1. The `apifuzzer_fuzz` job definition from your `.gitlab-ci.yml` file.
+**Error message**
+
+- In [GitLab 15.6 and later](https://gitlab.com/gitlab-org/gitlab/-/issues/376078), `Error waiting for API Fuzzing 'http://127.0.0.1:5000' to become available`.
+- In GitLab 15.5 and earlier, `Error waiting for API Security 'http://127.0.0.1:5000' to become available`.
+
### `Failed to start session with scanner. Please retry, and if the problem persists reach out to support.`
The API Fuzzing engine outputs an error message when it cannot establish a connection with the scanner application component. The error message is shown in the job output window of the `apifuzzer_fuzz` job. A common cause of this issue is that the background component cannot use the selected port because it's already in use. This error can occur intermittently if timing plays a part (a race condition). This issue occurs most often in Kubernetes environments when other services are mapped into the container, causing port conflicts.
diff --git a/doc/user/application_security/dast_api/index.md b/doc/user/application_security/dast_api/index.md
index d77be0f0ca9..996338d54c4 100644
--- a/doc/user/application_security/dast_api/index.md
+++ b/doc/user/application_security/dast_api/index.md
@@ -1497,13 +1497,13 @@ When testing an API it can be useful to exclude certain paths. For example, you
To verify the paths are excluded, review the `Tested Operations` and `Excluded Operations` portions of the job output. You should not see any excluded paths listed under `Tested Operations`.
```plaintext
-2021-05-27 21:51:08 [INF] API Security: --[ Tested Operations ]-------------------------
-2021-05-27 21:51:08 [INF] API Security: 201 POST http://target:7777/api/users CREATED
-2021-05-27 21:51:08 [INF] API Security: ------------------------------------------------
-2021-05-27 21:51:08 [INF] API Security: --[ Excluded Operations ]-----------------------
-2021-05-27 21:51:08 [INF] API Security: GET http://target:7777/api/messages
-2021-05-27 21:51:08 [INF] API Security: POST http://target:7777/api/messages
-2021-05-27 21:51:08 [INF] API Security: ------------------------------------------------
+2021-05-27 21:51:08 [INF] DAST API: --[ Tested Operations ]-------------------------
+2021-05-27 21:51:08 [INF] DAST API: 201 POST http://target:7777/api/users CREATED
+2021-05-27 21:51:08 [INF] DAST API: ------------------------------------------------
+2021-05-27 21:51:08 [INF] DAST API: --[ Excluded Operations ]-----------------------
+2021-05-27 21:51:08 [INF] DAST API: GET http://target:7777/api/messages
+2021-05-27 21:51:08 [INF] DAST API: POST http://target:7777/api/messages
+2021-05-27 21:51:08 [INF] DAST API: ------------------------------------------------
```
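If you prefer to verify this programmatically rather than by eye, the sketch below parses a saved copy of the job log in the format shown above and reports any operation that appears in both sections. The log file name is a placeholder, and the script is only an illustration of the check, not a supported tool.

```python
import re

def operations_by_section(log_text):
    """Group 'METHOD URL' entries under each '--[ Section ]--' banner in the log format shown above."""
    sections = {}
    current = None
    for line in log_text.splitlines():
        banner = re.search(r"--\[ (.+?) \]", line)
        if banner:
            current = banner.group(1)
            sections[current] = set()
        elif current:
            match = re.search(r"\b(GET|POST|PUT|PATCH|DELETE)\s+(\S+)", line)
            if match:
                sections[current].add(f"{match.group(1)} {match.group(2)}")
    return sections

with open("dast_api_job.log") as log_file:  # placeholder file name
    sections = operations_by_section(log_file.read())

overlap = sections.get("Tested Operations", set()) & sections.get("Excluded Operations", set())
print("Operations listed in both sections:", sorted(overlap) or "none")
```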
### Examples
@@ -1780,13 +1780,13 @@ As an alternative to excluding by paths, you can filter by any other component i
In your job output you can check whether any URLs matched the regular expressions provided in `DAST_API_EXCLUDE_URLS`. Matching operations are listed in the **Excluded Operations** section. Operations listed in the **Excluded Operations** section should not appear in the **Tested Operations** section. For example, see the following portion of a job output:
```plaintext
-2021-05-27 21:51:08 [INF] API Security: --[ Tested Operations ]-------------------------
-2021-05-27 21:51:08 [INF] API Security: 201 POST http://target:7777/api/users CREATED
-2021-05-27 21:51:08 [INF] API Security: ------------------------------------------------
-2021-05-27 21:51:08 [INF] API Security: --[ Excluded Operations ]-----------------------
-2021-05-27 21:51:08 [INF] API Security: GET http://target:7777/api/messages
-2021-05-27 21:51:08 [INF] API Security: POST http://target:7777/api/messages
-2021-05-27 21:51:08 [INF] API Security: ------------------------------------------------
+2021-05-27 21:51:08 [INF] DAST API: --[ Tested Operations ]-------------------------
+2021-05-27 21:51:08 [INF] DAST API: 201 POST http://target:7777/api/users CREATED
+2021-05-27 21:51:08 [INF] DAST API: ------------------------------------------------
+2021-05-27 21:51:08 [INF] DAST API: --[ Excluded Operations ]-----------------------
+2021-05-27 21:51:08 [INF] DAST API: GET http://target:7777/api/messages
+2021-05-27 21:51:08 [INF] DAST API: POST http://target:7777/api/messages
+2021-05-27 21:51:08 [INF] DAST API: ------------------------------------------------
```
NOTE:
@@ -2083,18 +2083,18 @@ The first step to resolving performance issues is to understand what is contribu
The DAST API job output contains helpful information about how fast we are testing, how fast each operation being tested responds, and summary information. Let's take a look at some sample output to see how it can be used in tracking down performance issues:
```shell
-API Security: Loaded 10 operations from: assets/har-large-response/large_responses.har
-API Security:
-API Security: Testing operation [1/10]: 'GET http://target:7777/api/large_response_json'.
-API Security: - Parameters: (Headers: 4, Query: 0, Body: 0)
-API Security: - Request body size: 0 Bytes (0 bytes)
-API Security:
-API Security: Finished testing operation 'GET http://target:7777/api/large_response_json'.
-API Security: - Excluded Parameters: (Headers: 0, Query: 0, Body: 0)
-API Security: - Performed 767 requests
-API Security: - Average response body size: 130 MB
-API Security: - Average call time: 2 seconds and 82.69 milliseconds (2.082693 seconds)
-API Security: - Time to complete: 14 minutes, 8 seconds and 788.36 milliseconds (848.788358 seconds)
+DAST API: Loaded 10 operations from: assets/har-large-response/large_responses.har
+DAST API:
+DAST API: Testing operation [1/10]: 'GET http://target:7777/api/large_response_json'.
+DAST API: - Parameters: (Headers: 4, Query: 0, Body: 0)
+DAST API: - Request body size: 0 Bytes (0 bytes)
+DAST API:
+DAST API: Finished testing operation 'GET http://target:7777/api/large_response_json'.
+DAST API: - Excluded Parameters: (Headers: 0, Query: 0, Body: 0)
+DAST API: - Performed 767 requests
+DAST API: - Average response body size: 130 MB
+DAST API: - Average call time: 2 seconds and 82.69 milliseconds (2.082693 seconds)
+DAST API: - Time to complete: 14 minutes, 8 seconds and 788.36 milliseconds (848.788358 seconds)
```
This job console output snippet starts by telling us how many operations were found (10), followed by a notification that testing has started on a specific operation, and then a summary once testing of that operation has completed. The summary is the most interesting part of this log output. In the summary, we can see that it took DAST API 767 requests to fully test this operation and its related fields. We can also see that the average response time was about 2 seconds and that this single operation took roughly 14 minutes to complete.
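Another way to read that summary is in terms of data transferred. The short sketch below uses only the figures from the log excerpt above to show why large response bodies dominate the scan time for this operation; it is a rough estimate, not an analyzer feature.

```python
# Figures taken from the job output excerpt above.
requests_performed = 767
average_response_body_mb = 130

total_transferred_gb = requests_performed * average_response_body_mb / 1000
print(f"~{total_transferred_gb:.0f} GB of response bodies downloaded for a single operation")
```

At roughly 100 GB of response data for one operation, it becomes clear why excluding particularly slow or large operations can make such a large difference to overall scan time.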
@@ -2281,7 +2281,7 @@ See the following documentation sections for assistance:
See [Performance Tuning and Testing Speed](#performance-tuning-and-testing-speed)
-### Error waiting for API Security 'http://127.0.0.1:5000' to become available
+### Error waiting for DAST API 'http://127.0.0.1:5000' to become available
A bug exists in versions of the DAST API analyzer prior to v1.6.196 that can cause a background process to fail under certain conditions. The solution is to update to a newer version of the DAST API analyzer.
@@ -2294,6 +2294,11 @@ If the issue is occurring with versions v1.6.196 or greater, contact Support and
1. The `gl-api-security-scanner.log` file available as a job artifact. In the right-hand panel of the job details page, select the **Browse** button.
1. The `dast_api` job definition from your `.gitlab-ci.yml` file.
+**Error message**
+
+- In [GitLab 15.6 and later](https://gitlab.com/gitlab-org/gitlab/-/issues/376078), `Error waiting for DAST API 'http://127.0.0.1:5000' to become available`.
+- In GitLab 15.5 and earlier, `Error waiting for API Security 'http://127.0.0.1:5000' to become available`.
+
### `Failed to start scanner session (version header not found)`
The DAST API engine outputs an error message when it cannot establish a connection with the scanner application component. The error message is shown in the job output window of the `dast_api` job. A common cause of this issue is changing the `DAST_API_API` variable from its default.
diff --git a/doc/user/gitlab_com/index.md b/doc/user/gitlab_com/index.md
index d3d50ee1a8f..0adda54bf06 100644
--- a/doc/user/gitlab_com/index.md
+++ b/doc/user/gitlab_com/index.md
@@ -189,7 +189,7 @@ the default value [is the same as for self-managed instances](../admin_area/sett
|-------------------------------|--------------------|
| [Repository size including LFS](../admin_area/settings/account_and_limit_settings.md#repository-size-limit) | 10 GB |
| [Maximum import size](../project/settings/import_export.md#maximum-import-file-size) | 5 GB |
-| Maximum attachment size | 10 MB |
+| Maximum attachment size | 100 MB |
If you are near or over the repository size limit, you can either
[reduce your repository size with Git](../project/repository/reducing_the_repo_size_using_git.md)
diff --git a/doc/user/project/integrations/mlflow_client.md b/doc/user/project/integrations/mlflow_client.md
index 82bfd08e926..111037a63d7 100644
--- a/doc/user/project/integrations/mlflow_client.md
+++ b/doc/user/project/integrations/mlflow_client.md
@@ -49,7 +49,7 @@ that can be explored by selecting an experiment.
- The API GitLab supports is the one defined in MLFlow version 1.28.0.
- API endpoints not listed above are not supported.
-- During creation of experiments and runs, tags are ExperimentTags and RunTags are ignored.
+- During creation of experiments and runs, ExperimentTags and RunTags are stored, even though they are not displayed.
- MLFlow Model Registry is not supported.
## Supported methods and caveats
@@ -57,7 +57,7 @@ that can be explored by selecting an experiment.
This is a list of methods we support from the MLFlow client. Other methods might be supported but were not
tested. More information can be found in the [MLFlow Documentation](https://www.mlflow.org/docs/1.28.0/python_api/mlflow.html).
-### `set_experiment`
+### `set_experiment()`
Accepts both `experiment_name` and `experiment_id`.
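A minimal sketch of how this fits into a typical client session is shown below. It assumes the MLflow tracking URI and token environment variables for this integration are already configured; the experiment name, parameter, metric, and tag values are placeholders.

```python
import mlflow

# Assumes MLFLOW_TRACKING_URI and MLFLOW_TRACKING_TOKEN are already set for the project.
mlflow.set_experiment("demo-experiment")  # accepts experiment_name (or experiment_id)

with mlflow.start_run():
    mlflow.log_param("learning_rate", 0.01)
    mlflow.log_metric("accuracy", 0.93)
    # Tags are stored by GitLab, even though they are not displayed yet.
    mlflow.set_tag("framework", "placeholder")
```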
diff --git a/doc/user/project/integrations/webhooks.md b/doc/user/project/integrations/webhooks.md
index 5819878d728..53c396fa9af 100644
--- a/doc/user/project/integrations/webhooks.md
+++ b/doc/user/project/integrations/webhooks.md
@@ -331,13 +331,8 @@ GitLab expects a response in [10 seconds](../../../user/gitlab_com/index.md#othe
### Re-enable disabled webhooks
-> Introduced in GitLab 15.2 [with a flag](../../../administration/feature_flags.md) named `webhooks_failed_callout`. Disabled by default.
-
-FLAG:
-On self-managed GitLab, by default this feature is not available. To make it available,
-ask an administrator to [enable the feature flag](../../../administration/feature_flags.md) named `webhooks_failed_callout`.
-On GitLab.com, this feature is not available.
-The feature is not ready for production use.
+> - Introduced in GitLab 15.2 [with a flag](../../../administration/feature_flags.md) named `webhooks_failed_callout`. Disabled by default.
+> - [Generally available](https://gitlab.com/gitlab-org/gitlab/-/issues/365535) in GitLab 15.7. Feature flag `webhooks_failed_callout` removed.
If a webhook is failing, a banner displays at the top of the edit page explaining
why it is disabled, and when it will be automatically re-enabled. For example:
diff --git a/doc/user/project/ml/experiment_tracking/index.md b/doc/user/project/ml/experiment_tracking/index.md
index e274bd7f38e..d7d737572e0 100644
--- a/doc/user/project/ml/experiment_tracking/index.md
+++ b/doc/user/project/ml/experiment_tracking/index.md
@@ -64,7 +64,6 @@ package registry. The package name for a candidate is `ml_candidate_<candidate_i
### Limitations and future
- Searching experiments, searching trials, visual comparison of trials, and creating, deleting, and updating experiments and trials through the GitLab UI are under development.
-- No support for experiment and trial metadata that do not classify as parameters or metrics.
## Disabling or enabling the Feature
diff --git a/lib/api/api.rb b/lib/api/api.rb
index 6d4d94caaa9..f5a6482adbb 100644
--- a/lib/api/api.rb
+++ b/lib/api/api.rb
@@ -274,6 +274,7 @@ module API
mount ::API::Terraform::StateVersion
mount ::API::Topics
mount ::API::Unleash
+ mount ::API::UsageData
mount ::API::UsageDataNonSqlMetrics
mount ::API::UserCounts
mount ::API::Wikis
@@ -330,6 +331,7 @@ module API
mount ::API::Templates
mount ::API::Todos
mount ::API::UsageData
+ mount ::API::UsageDataNonSqlMetrics
mount ::API::UsageDataQueries
mount ::API::Users
mount ::API::Ml::Mlflow
diff --git a/lib/api/entities/ml/mlflow/experiment.rb b/lib/api/entities/ml/mlflow/experiment.rb
index 54e0fe63985..51650c36d98 100644
--- a/lib/api/entities/ml/mlflow/experiment.rb
+++ b/lib/api/entities/ml/mlflow/experiment.rb
@@ -9,6 +9,7 @@ module API
expose :name
expose(:lifecycle_stage) { |experiment| experiment.deleted_on? ? 'deleted' : 'active' }
expose(:artifact_location) { |experiment| 'not_implemented' }
+ expose :metadata, as: :tags, using: KeyValue
end
end
end
diff --git a/lib/api/entities/ml/mlflow/run_param.rb b/lib/api/entities/ml/mlflow/key_value.rb
index 75fee738f8b..cf2c32f6f44 100644
--- a/lib/api/entities/ml/mlflow/run_param.rb
+++ b/lib/api/entities/ml/mlflow/key_value.rb
@@ -4,7 +4,7 @@ module API
module Entities
module Ml
module Mlflow
- class RunParam < Grape::Entity
+ class KeyValue < Grape::Entity
expose :name, as: :key
expose :value
end
diff --git a/lib/api/entities/ml/mlflow/run.rb b/lib/api/entities/ml/mlflow/run.rb
index 8b16c67611f..01d85e8862b 100644
--- a/lib/api/entities/ml/mlflow/run.rb
+++ b/lib/api/entities/ml/mlflow/run.rb
@@ -9,7 +9,8 @@ module API
expose :itself, using: RunInfo, as: :info
expose :data do
expose :metrics, using: Metric
- expose :params, using: RunParam
+ expose :params, using: KeyValue
+ expose :metadata, as: :tags, using: KeyValue
end
end
end
diff --git a/lib/api/ml/mlflow.rb b/lib/api/ml/mlflow.rb
index 56bfac1530e..54bbe0ee465 100644
--- a/lib/api/ml/mlflow.rb
+++ b/lib/api/ml/mlflow.rb
@@ -126,14 +126,31 @@ module API
end
params do
requires :name, type: String, desc: 'Experiment name'
+ optional :tags, type: Array, desc: 'Tags with information about the experiment'
optional :artifact_location, type: String, desc: 'This will be ignored'
- optional :tags, type: Array, desc: 'This will be ignored'
end
post 'create', urgency: :low do
- present experiment_repository.create!(params[:name]), with: Entities::Ml::Mlflow::NewExperiment
+ present experiment_repository.create!(params[:name], params[:tags]),
+ with: Entities::Ml::Mlflow::NewExperiment
rescue ActiveRecord::RecordInvalid
resource_already_exists!
end
+
+ desc 'Sets a tag for an experiment.' do
+ summary 'Sets a tag for an experiment. '
+
+ detail 'https://www.mlflow.org/docs/1.28.0/rest-api.html#set-experiment-tag'
+ end
+ params do
+ requires :experiment_id, type: String, desc: 'ID of the experiment.'
+ requires :key, type: String, desc: 'Name for the tag.'
+ requires :value, type: String, desc: 'Value for the tag.'
+ end
+ post 'set-experiment-tag', urgency: :low do
+ bad_request! unless experiment_repository.add_tag!(experiment, params[:key], params[:value])
+
+ {}
+ end
end
resource :runs do
@@ -148,10 +165,10 @@ module API
desc: 'Unix timestamp in milliseconds of when the run started.',
default: 0
optional :user_id, type: String, desc: 'This will be ignored'
- optional :tags, type: Array, desc: 'This will be ignored'
+ optional :tags, type: Array, desc: 'Tags are stored, but not displayed'
end
post 'create', urgency: :low do
- present candidate_repository.create!(experiment, params[:start_time]),
+ present candidate_repository.create!(experiment, params[:start_time], params[:tags]),
with: Entities::Ml::Mlflow::Run, packages_url: packages_url
end
@@ -229,6 +246,22 @@ module API
{}
end
+ desc 'Sets a tag for a run.' do
+ summary 'Sets a tag for a run. '
+
+ detail 'https://www.mlflow.org/docs/1.28.0/rest-api.html#set-tag'
+ end
+ params do
+ requires :run_id, type: String, desc: 'UUID of the run.'
+ requires :key, type: String, desc: 'Name for the tag.'
+ requires :value, type: String, desc: 'Value for the tag.'
+ end
+ post 'set-tag', urgency: :low do
+ bad_request! unless candidate_repository.add_tag!(candidate, params[:key], params[:value])
+
+ {}
+ end
+
desc 'Logs multiple parameters and metrics.' do
summary 'Log a batch of metrics and params for a run. Validation errors will block the entire batch, '\
'duplicate errors will be ignored.'
@@ -251,6 +284,7 @@ module API
post 'log-batch', urgency: :low do
candidate_repository.add_metrics(candidate, params[:metrics])
candidate_repository.add_params(candidate, params[:params])
+ candidate_repository.add_tags(candidate, params[:tags])
{}
end
diff --git a/lib/api/usage_data.rb b/lib/api/usage_data.rb
index 9e446aff605..3e2023d769f 100644
--- a/lib/api/usage_data.rb
+++ b/lib/api/usage_data.rb
@@ -12,11 +12,18 @@ module API
forbidden!('Invalid CSRF token is provided') unless verified_request?
end
- desc 'Track usage data events' do
+ desc 'Track usage data event' do
detail 'This feature was introduced in GitLab 13.4.'
+ success code: 200
+ failure [
+ { code: 403, message: 'Invalid CSRF token is provided' },
+ { code: 404, message: 'Not found' }
+ ]
+ tags %w[usage_data]
end
params do
- requires :event, type: String, desc: 'The event name that should be tracked'
+ requires :event, type: String, desc: 'The event name that should be tracked',
+ documentation: { example: 'i_quickactions_page' }
end
post 'increment_counter' do
event_name = params[:event]
@@ -26,8 +33,17 @@ module API
status :ok
end
+ desc 'Track usage data event for the current user' do
+ success code: 200
+ failure [
+ { code: 403, message: 'Invalid CSRF token is provided' },
+ { code: 404, message: 'Not found' }
+ ]
+ tags %w[usage_data]
+ end
params do
- requires :event, type: String, desc: 'The event name that should be tracked'
+ requires :event, type: String, desc: 'The event name that should be tracked',
+ documentation: { example: 'i_quickactions_page' }
end
post 'increment_unique_users', urgency: :low do
event_name = params[:event]
@@ -39,6 +55,13 @@ module API
desc 'Get a list of all metric definitions' do
detail 'This feature was introduced in GitLab 13.11.'
+ success code: 200
+ failure [
+ { code: 403, message: 'Invalid CSRF token is provided' },
+ { code: 404, message: 'Not found' }
+ ]
+ produces ['application/yaml']
+ tags %w[usage_data metrics]
end
get 'metric_definitions', urgency: :low do
content_type 'application/yaml'
diff --git a/lib/gitlab/database/gitlab_schemas.yml b/lib/gitlab/database/gitlab_schemas.yml
index 462f006fabd..53434d464ee 100644
--- a/lib/gitlab/database/gitlab_schemas.yml
+++ b/lib/gitlab/database/gitlab_schemas.yml
@@ -336,8 +336,10 @@ milestone_releases: :gitlab_main
milestones: :gitlab_main
ml_candidates: :gitlab_main
ml_experiments: :gitlab_main
+ml_experiment_metadata: :gitlab_main
ml_candidate_metrics: :gitlab_main
ml_candidate_params: :gitlab_main
+ml_candidate_metadata: :gitlab_main
namespace_admin_notes: :gitlab_main
namespace_aggregation_schedules: :gitlab_main
namespace_bans: :gitlab_main
diff --git a/lib/gitlab/graphql/extensions/forward_only_externally_paginated_array_extension.rb b/lib/gitlab/graphql/extensions/forward_only_externally_paginated_array_extension.rb
new file mode 100644
index 00000000000..651b4266756
--- /dev/null
+++ b/lib/gitlab/graphql/extensions/forward_only_externally_paginated_array_extension.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+module Gitlab
+ module Graphql
+ module Extensions
+      # This extension is meant for resolvers that only support forward-looking pagination. To limit
+      # confusion about which GraphQL pagination arguments are allowed on the field, we limit them to just `first` and `after`.
+ class ForwardOnlyExternallyPaginatedArrayExtension < ExternallyPaginatedArrayExtension
+ def apply
+ field.argument :after, GraphQL::Types::String,
+ description: "Returns the elements in the list that come after the specified cursor.",
+ required: false
+ field.argument :first, GraphQL::Types::Int,
+ description: "Returns the first _n_ elements from the list.",
+ required: false
+ end
+ end
+ end
+ end
+end
diff --git a/qa/Gemfile b/qa/Gemfile
index c9498085196..55eca70571e 100644
--- a/qa/Gemfile
+++ b/qa/Gemfile
@@ -4,7 +4,7 @@ source 'https://rubygems.org'
gem 'gitlab-qa', '~> 8', '>= 8.13.1', require: 'gitlab/qa'
gem 'activesupport', '~> 6.1.4.7' # This should stay in sync with the root's Gemfile
-gem 'allure-rspec', '~> 2.19.0'
+gem 'allure-rspec', '~> 2.20.0'
gem 'capybara', '~> 3.38.0'
gem 'capybara-screenshot', '~> 1.0.26'
gem 'rake', '~> 13', '>= 13.0.6'
diff --git a/qa/Gemfile.lock b/qa/Gemfile.lock
index 1742f6086a3..52da243550d 100644
--- a/qa/Gemfile.lock
+++ b/qa/Gemfile.lock
@@ -15,13 +15,14 @@ GEM
rack-test (>= 1.1.0, < 2.0)
rest-client (>= 2.0.2, < 3.0)
rspec (~> 3.8)
- allure-rspec (2.19.0)
- allure-ruby-commons (= 2.19.0)
+ allure-rspec (2.20.0)
+ allure-ruby-commons (= 2.20.0)
rspec-core (>= 3.8, < 4)
- allure-ruby-commons (2.19.0)
+ allure-ruby-commons (2.20.0)
mime-types (>= 3.3, < 4)
oj (>= 3.10, < 4)
require_all (>= 2, < 4)
+ rspec-expectations (~> 3.12)
uuid (>= 2.3, < 3)
ast (2.4.2)
binding_ninja (0.2.3)
@@ -300,7 +301,7 @@ PLATFORMS
DEPENDENCIES
activesupport (~> 6.1.4.7)
airborne (~> 0.3.7)
- allure-rspec (~> 2.19.0)
+ allure-rspec (~> 2.20.0)
capybara (~> 3.38.0)
capybara-screenshot (~> 1.0.26)
chemlab (~> 0.10)
diff --git a/spec/db/docs_spec.rb b/spec/db/docs_spec.rb
index 18a07a20fd0..0c7bbac64ce 100644
--- a/spec/db/docs_spec.rb
+++ b/spec/db/docs_spec.rb
@@ -2,109 +2,95 @@
require 'spec_helper'
-RSpec.describe 'Database Documentation' do
- context 'for each table' do
- # TODO: https://gitlab.com/gitlab-org/gitlab/-/issues/366834
- let(:database_base_models) { Gitlab::Database.database_base_models.select { |k, _| k != 'geo' } }
-
- let(:all_tables) do
- database_base_models.flat_map { |_, m| m.connection.tables }.sort.uniq
- end
-
- let(:metadata_required_fields) do
- %i(
- feature_categories
- table_name
- )
- end
+RSpec.shared_examples 'validate dictionary' do |objects, directory_path, required_fields|
+ context 'for each object' do
+ let(:directory_path) { directory_path }
let(:metadata_allowed_fields) do
- metadata_required_fields + %i(
+ required_fields + %i[
classes
description
introduced_by_url
milestone
gitlab_schema
- )
+ ]
end
let(:metadata) do
- all_tables.each_with_object({}) do |table_name, hash|
- next unless File.exist?(table_metadata_file_path(table_name))
+ objects.each_with_object({}) do |object_name, hash|
+ next unless File.exist?(object_metadata_file_path(object_name))
- hash[table_name] ||= load_table_metadata(table_name)
+ hash[object_name] ||= load_object_metadata(required_fields, object_name)
end
end
- let(:tables_without_metadata) do
- all_tables.reject { |t| metadata.has_key?(t) }
+ let(:objects_without_metadata) do
+ objects.reject { |t| metadata.has_key?(t) }
end
- let(:tables_without_valid_metadata) do
+ let(:objects_without_valid_metadata) do
metadata.select { |_, t| t.has_key?(:error) }.keys
end
- let(:tables_with_disallowed_fields) do
+ let(:objects_with_disallowed_fields) do
metadata.select { |_, t| t.has_key?(:disallowed_fields) }.keys
end
- let(:tables_with_missing_required_fields) do
+ let(:objects_with_missing_required_fields) do
metadata.select { |_, t| t.has_key?(:missing_required_fields) }.keys
end
it 'has a metadata file' do
- expect(tables_without_metadata).to be_empty, multiline_error(
+ expect(objects_without_metadata).to be_empty, multiline_error(
'Missing metadata files',
- tables_without_metadata.map { |t| " #{table_metadata_file(t)}" }
+ objects_without_metadata.map { |t| " #{object_metadata_file(t)}" }
)
end
it 'has a valid metadata file' do
- expect(tables_without_valid_metadata).to be_empty, table_metadata_errors(
+ expect(objects_without_valid_metadata).to be_empty, object_metadata_errors(
'Table metadata files with errors',
:error,
- tables_without_valid_metadata
+ objects_without_valid_metadata
)
end
it 'has a valid metadata file with allowed fields' do
- expect(tables_with_disallowed_fields).to be_empty, table_metadata_errors(
+ expect(objects_with_disallowed_fields).to be_empty, object_metadata_errors(
'Table metadata files with disallowed fields',
:disallowed_fields,
- tables_with_disallowed_fields
+ objects_with_disallowed_fields
)
end
it 'has a valid metadata file without missing fields' do
- expect(tables_with_missing_required_fields).to be_empty, table_metadata_errors(
+ expect(objects_with_missing_required_fields).to be_empty, object_metadata_errors(
'Table metadata files with missing fields',
:missing_required_fields,
- tables_with_missing_required_fields
+ objects_with_missing_required_fields
)
end
end
private
- def table_metadata_file(table_name)
- File.join('db', 'docs', "#{table_name}.yml")
+ def object_metadata_file(object_name)
+ File.join(directory_path, "#{object_name}.yml")
end
- def table_metadata_file_path(table_name)
- Rails.root.join(table_metadata_file(table_name))
+ def object_metadata_file_path(object_name)
+ Rails.root.join(object_metadata_file(object_name))
end
- def load_table_metadata(table_name)
+ def load_object_metadata(required_fields, object_name)
result = {}
begin
- result[:metadata] = YAML.safe_load(File.read(table_metadata_file_path(table_name))).deep_symbolize_keys
+ result[:metadata] = YAML.safe_load(File.read(object_metadata_file_path(object_name))).deep_symbolize_keys
disallowed_fields = (result[:metadata].keys - metadata_allowed_fields)
- unless disallowed_fields.empty?
- result[:disallowed_fields] = "fields not allowed: #{disallowed_fields.join(', ')}"
- end
+ result[:disallowed_fields] = "fields not allowed: #{disallowed_fields.join(', ')}" unless disallowed_fields.empty?
- missing_required_fields = (metadata_required_fields - result[:metadata].reject { |_, v| v.blank? }.keys)
+ missing_required_fields = (required_fields - result[:metadata].reject { |_, v| v.blank? }.keys)
unless missing_required_fields.empty?
result[:missing_required_fields] = "missing required fields: #{missing_required_fields.join(', ')}"
end
@@ -114,11 +100,12 @@ RSpec.describe 'Database Documentation' do
result
end
- def table_metadata_errors(title, field, tables)
- lines = tables.map do |table_name|
+ # rubocop:disable Naming/HeredocDelimiterNaming
+ def object_metadata_errors(title, field, objects)
+ lines = objects.map do |object_name|
<<~EOM
- #{table_metadata_file(table_name)}
- #{metadata[table_name][field]}
+ #{object_metadata_file(object_name)}
+ #{metadata[object_name][field]}
EOM
end
@@ -132,4 +119,23 @@ RSpec.describe 'Database Documentation' do
#{lines.join("\n")}
EOM
end
+ # rubocop:enable Naming/HeredocDelimiterNaming
+end
+
+RSpec.describe 'Views documentation', feature_category: :database do
+ database_base_models = Gitlab::Database.database_base_models.select { |k, _| k != 'geo' }
+ views = database_base_models.flat_map { |_, m| m.connection.views }.sort.uniq
+ directory_path = File.join('db', 'docs', 'views')
+ required_fields = %i[feature_categories view_name]
+
+ include_examples 'validate dictionary', views, directory_path, required_fields
+end
+
+RSpec.describe 'Tables documentation', feature_category: :database do
+ database_base_models = Gitlab::Database.database_base_models.select { |k, _| k != 'geo' }
+ tables = database_base_models.flat_map { |_, m| m.connection.tables }.sort.uniq
+ directory_path = File.join('db', 'docs')
+ required_fields = %i[feature_categories table_name]
+
+ include_examples 'validate dictionary', tables, directory_path, required_fields
end
diff --git a/spec/factories/ml/candidate_metadata.rb b/spec/factories/ml/candidate_metadata.rb
new file mode 100644
index 00000000000..e941ae4deb8
--- /dev/null
+++ b/spec/factories/ml/candidate_metadata.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :ml_candidate_metadata, class: '::Ml::CandidateMetadata' do
+ association :candidate, factory: :ml_candidates
+
+ sequence(:name) { |n| "metadata_#{n}" }
+ sequence(:value) { |n| "value#{n}" }
+ end
+end
diff --git a/spec/factories/ml/candidates.rb b/spec/factories/ml/candidates.rb
index 4fbcdc46103..2daed36d777 100644
--- a/spec/factories/ml/candidates.rb
+++ b/spec/factories/ml/candidates.rb
@@ -10,5 +10,11 @@ FactoryBot.define do
candidate.params = FactoryBot.create_list(:ml_candidate_params, 2, candidate: candidate )
end
end
+
+ trait :with_metadata do
+ after(:create) do |candidate|
+ candidate.metadata = FactoryBot.create_list(:ml_candidate_metadata, 2, candidate: candidate )
+ end
+ end
end
end
diff --git a/spec/factories/ml/experiment_metadata.rb b/spec/factories/ml/experiment_metadata.rb
new file mode 100644
index 00000000000..d3ece9630a4
--- /dev/null
+++ b/spec/factories/ml/experiment_metadata.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :ml_experiment_metadata, class: '::Ml::ExperimentMetadata' do
+ association :experiment, factory: :ml_experiments
+
+ sequence(:name) { |n| "metadata_#{n}" }
+ sequence(:value) { |n| "value#{n}" }
+ end
+end
diff --git a/spec/factories/ml/experiments.rb b/spec/factories/ml/experiments.rb
index e4f5a0da6cf..0acb4c5c5fc 100644
--- a/spec/factories/ml/experiments.rb
+++ b/spec/factories/ml/experiments.rb
@@ -4,6 +4,12 @@ FactoryBot.define do
sequence(:name) { |n| "experiment#{n}" }
project
- user
+ user { project&.creator }
+
+ trait :with_metadata do
+ after(:create) do |e|
+ e.metadata = FactoryBot.create_list(:ml_experiment_metadata, 2, experiment: e) # rubocop:disable StrategyInCallback
+ end
+ end
end
end
diff --git a/spec/factories/resource_milestone_event.rb b/spec/factories/resource_milestone_events.rb
index a3944e013da..a3944e013da 100644
--- a/spec/factories/resource_milestone_event.rb
+++ b/spec/factories/resource_milestone_events.rb
diff --git a/spec/factories/resource_state_event.rb b/spec/factories/resource_state_events.rb
index 926c6dd8cbc..926c6dd8cbc 100644
--- a/spec/factories/resource_state_event.rb
+++ b/spec/factories/resource_state_events.rb
diff --git a/spec/features/reportable_note/issue_spec.rb b/spec/features/reportable_note/issue_spec.rb
index 80c321d0f5a..55e7f5897bc 100644
--- a/spec/features/reportable_note/issue_spec.rb
+++ b/spec/features/reportable_note/issue_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Reportable note on issue', :js do
+RSpec.describe 'Reportable note on issue', :js, feature_category: :team_planning do
let(:user) { create(:user) }
let(:project) { create(:project) }
let(:issue) { create(:issue, project: project) }
diff --git a/spec/features/reportable_note/merge_request_spec.rb b/spec/features/reportable_note/merge_request_spec.rb
index 58a39bac707..39048495e5d 100644
--- a/spec/features/reportable_note/merge_request_spec.rb
+++ b/spec/features/reportable_note/merge_request_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Reportable note on merge request', :js do
+RSpec.describe 'Reportable note on merge request', :js, feature_category: :team_planning do
let(:user) { create(:user) }
let(:project) { create(:project, :repository) }
let(:merge_request) { create(:merge_request, source_project: project) }
diff --git a/spec/features/reportable_note/snippets_spec.rb b/spec/features/reportable_note/snippets_spec.rb
index 92bf304ac86..7e8c2c2f989 100644
--- a/spec/features/reportable_note/snippets_spec.rb
+++ b/spec/features/reportable_note/snippets_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Reportable note on snippets', :js do
+RSpec.describe 'Reportable note on snippets', :js, feature_category: :team_planning do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project) }
diff --git a/spec/features/search/user_searches_for_code_spec.rb b/spec/features/search/user_searches_for_code_spec.rb
index ee74ac84a73..14d67bac85f 100644
--- a/spec/features/search/user_searches_for_code_spec.rb
+++ b/spec/features/search/user_searches_for_code_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User searches for code', :js, :disable_rate_limiter do
+RSpec.describe 'User searches for code', :js, :disable_rate_limiter, feature_category: :global_search do
using RSpec::Parameterized::TableSyntax
let_it_be(:user) { create(:user) }
diff --git a/spec/features/search/user_searches_for_comments_spec.rb b/spec/features/search/user_searches_for_comments_spec.rb
index 3c39e9f41d4..d7f6143d173 100644
--- a/spec/features/search/user_searches_for_comments_spec.rb
+++ b/spec/features/search/user_searches_for_comments_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User searches for comments', :js, :disable_rate_limiter do
+RSpec.describe 'User searches for comments', :js, :disable_rate_limiter, feature_category: :global_search do
using RSpec::Parameterized::TableSyntax
let_it_be(:project) { create(:project, :repository) }
diff --git a/spec/features/search/user_searches_for_commits_spec.rb b/spec/features/search/user_searches_for_commits_spec.rb
index e5d86c27942..1fd62a01c78 100644
--- a/spec/features/search/user_searches_for_commits_spec.rb
+++ b/spec/features/search/user_searches_for_commits_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User searches for commits', :js, :clean_gitlab_redis_rate_limiting do
+RSpec.describe 'User searches for commits', :js, :clean_gitlab_redis_rate_limiting, feature_category: :global_search do
using RSpec::Parameterized::TableSyntax
let_it_be(:user) { create(:user) }
diff --git a/spec/features/search/user_searches_for_issues_spec.rb b/spec/features/search/user_searches_for_issues_spec.rb
index 22d48bd38f2..6ebbe86d1a9 100644
--- a/spec/features/search/user_searches_for_issues_spec.rb
+++ b/spec/features/search/user_searches_for_issues_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User searches for issues', :js, :clean_gitlab_redis_rate_limiting do
+RSpec.describe 'User searches for issues', :js, :clean_gitlab_redis_rate_limiting, feature_category: :global_search do
using RSpec::Parameterized::TableSyntax
let_it_be(:user) { create(:user) }
diff --git a/spec/features/search/user_searches_for_merge_requests_spec.rb b/spec/features/search/user_searches_for_merge_requests_spec.rb
index 9bbf2cf16d8..69f62a4c1e2 100644
--- a/spec/features/search/user_searches_for_merge_requests_spec.rb
+++ b/spec/features/search/user_searches_for_merge_requests_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User searches for merge requests', :js, :clean_gitlab_redis_rate_limiting do
+RSpec.describe 'User searches for merge requests', :js, :clean_gitlab_redis_rate_limiting, feature_category: :global_search do
using RSpec::Parameterized::TableSyntax
let(:user) { create(:user) }
diff --git a/spec/features/search/user_searches_for_milestones_spec.rb b/spec/features/search/user_searches_for_milestones_spec.rb
index 702d4e60022..e87c2176380 100644
--- a/spec/features/search/user_searches_for_milestones_spec.rb
+++ b/spec/features/search/user_searches_for_milestones_spec.rb
@@ -2,7 +2,8 @@
require 'spec_helper'
-RSpec.describe 'User searches for milestones', :js, :clean_gitlab_redis_rate_limiting do
+RSpec.describe 'User searches for milestones', :js, :clean_gitlab_redis_rate_limiting,
+feature_category: :global_search do
using RSpec::Parameterized::TableSyntax
let_it_be(:user) { create(:user) }
diff --git a/spec/features/search/user_searches_for_projects_spec.rb b/spec/features/search/user_searches_for_projects_spec.rb
index 15c6224b61b..48a94161927 100644
--- a/spec/features/search/user_searches_for_projects_spec.rb
+++ b/spec/features/search/user_searches_for_projects_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User searches for projects', :js, :disable_rate_limiter do
+RSpec.describe 'User searches for projects', :js, :disable_rate_limiter, feature_category: :global_search do
let!(:project) { create(:project, :public, name: 'Shop') }
context 'when signed out' do
diff --git a/spec/features/search/user_searches_for_users_spec.rb b/spec/features/search/user_searches_for_users_spec.rb
index 1d649b42c8c..4737cef98c7 100644
--- a/spec/features/search/user_searches_for_users_spec.rb
+++ b/spec/features/search/user_searches_for_users_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User searches for users', :js, :clean_gitlab_redis_rate_limiting do
+RSpec.describe 'User searches for users', :js, :clean_gitlab_redis_rate_limiting, feature_category: :global_search do
let_it_be(:user1) { create(:user, username: 'gob_bluth', name: 'Gob Bluth') }
let_it_be(:user2) { create(:user, username: 'michael_bluth', name: 'Michael Bluth') }
let_it_be(:user3) { create(:user, username: 'gob_2018', name: 'George Oscar Bluth') }
diff --git a/spec/features/search/user_searches_for_wiki_pages_spec.rb b/spec/features/search/user_searches_for_wiki_pages_spec.rb
index 0f20ad0aa07..c7dc3e34bb7 100644
--- a/spec/features/search/user_searches_for_wiki_pages_spec.rb
+++ b/spec/features/search/user_searches_for_wiki_pages_spec.rb
@@ -2,7 +2,8 @@
require 'spec_helper'
-RSpec.describe 'User searches for wiki pages', :js, :clean_gitlab_redis_rate_limiting do
+RSpec.describe 'User searches for wiki pages', :js, :clean_gitlab_redis_rate_limiting,
+feature_category: :global_search do
using RSpec::Parameterized::TableSyntax
let_it_be(:user) { create(:user) }
diff --git a/spec/features/search/user_uses_header_search_field_spec.rb b/spec/features/search/user_uses_header_search_field_spec.rb
index 04f22cd2a31..334a192bec4 100644
--- a/spec/features/search/user_uses_header_search_field_spec.rb
+++ b/spec/features/search/user_uses_header_search_field_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User uses header search field', :js, :disable_rate_limiter do
+RSpec.describe 'User uses header search field', :js, :disable_rate_limiter, feature_category: :global_search do
include FilteredSearchHelpers
let_it_be(:project) { create(:project, :repository) }
diff --git a/spec/features/search/user_uses_search_filters_spec.rb b/spec/features/search/user_uses_search_filters_spec.rb
index 24f6c70e64c..2e3aaab563d 100644
--- a/spec/features/search/user_uses_search_filters_spec.rb
+++ b/spec/features/search/user_uses_search_filters_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User uses search filters', :js do
+RSpec.describe 'User uses search filters', :js, feature_category: :global_search do
let(:group) { create(:group) }
let!(:group_project) { create(:project, group: group) }
let(:project) { create(:project, namespace: user.namespace) }
diff --git a/spec/fixtures/api/schemas/ml/get_experiment.json b/spec/fixtures/api/schemas/ml/get_experiment.json
index 482455a89e1..3402415b962 100644
--- a/spec/fixtures/api/schemas/ml/get_experiment.json
+++ b/spec/fixtures/api/schemas/ml/get_experiment.json
@@ -29,6 +29,24 @@
"deleted"
]
}
+ },
+ "tags": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "required": [
+ "key",
+ "value"
+ ],
+ "properties": {
+ "key": {
+ "type": "string"
+ },
+ "value": {
+ "type": "string"
+ }
+ }
+ }
}
}
}
diff --git a/spec/graphql/types/work_items/notes_filter_type_enum_spec.rb b/spec/graphql/types/work_items/notes_filter_type_enum_spec.rb
new file mode 100644
index 00000000000..13ce559c529
--- /dev/null
+++ b/spec/graphql/types/work_items/notes_filter_type_enum_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['NotesFilterType'], feature_category: :team_planning do
+ specify { expect(described_class.graphql_name).to eq('NotesFilterType') }
+
+ it 'exposes all the existing widget type values' do
+ expect(described_class.values.transform_values(&:value)).to include(
+ "ALL_NOTES" => 0, "ONLY_ACTIVITY" => 2, "ONLY_COMMENTS" => 1
+ )
+ end
+end
diff --git a/spec/graphql/types/work_items/widget_interface_spec.rb b/spec/graphql/types/work_items/widget_interface_spec.rb
index b9e8edacf15..a2b12ed52dc 100644
--- a/spec/graphql/types/work_items/widget_interface_spec.rb
+++ b/spec/graphql/types/work_items/widget_interface_spec.rb
@@ -19,6 +19,7 @@ RSpec.describe Types::WorkItems::WidgetInterface do
WorkItems::Widgets::Hierarchy | Types::WorkItems::Widgets::HierarchyType
WorkItems::Widgets::Assignees | Types::WorkItems::Widgets::AssigneesType
WorkItems::Widgets::Labels | Types::WorkItems::Widgets::LabelsType
+ WorkItems::Widgets::Notes | Types::WorkItems::Widgets::NotesType
end
with_them do
diff --git a/spec/graphql/types/work_items/widgets/notes_type_spec.rb b/spec/graphql/types/work_items/widgets/notes_type_spec.rb
new file mode 100644
index 00000000000..3ac61a59a9c
--- /dev/null
+++ b/spec/graphql/types/work_items/widgets/notes_type_spec.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::WorkItems::Widgets::NotesType, feature_category: :team_planning do
+ it 'exposes the expected fields' do
+ expected_fields = %i[discussions type]
+
+ expect(described_class).to have_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/helpers/web_hooks/web_hooks_helper_spec.rb b/spec/helpers/web_hooks/web_hooks_helper_spec.rb
index d0ae898ad1d..bcd9d2df1dc 100644
--- a/spec/helpers/web_hooks/web_hooks_helper_spec.rb
+++ b/spec/helpers/web_hooks/web_hooks_helper_spec.rb
@@ -7,29 +7,16 @@ RSpec.describe WebHooks::WebHooksHelper do
let(:current_user) { nil }
let(:callout_dismissed) { false }
- let(:webhooks_failed_callout) { false }
before do
allow(helper).to receive(:current_user).and_return(current_user)
allow(helper).to receive(:web_hook_disabled_dismissed?).with(project).and_return(callout_dismissed)
-
- stub_feature_flags(
- webhooks_failed_callout: webhooks_failed_callout
- )
end
shared_context 'user is logged in' do
let(:current_user) { create(:user) }
end
- shared_context 'webhooks_failed_callout is enabled' do
- let(:webhooks_failed_callout) { true }
- end
-
- shared_context 'webhooks_failed_callout is enabled for this project' do
- let(:webhooks_failed_callout) { project }
- end
-
shared_context 'the user has permission' do
before do
project.add_maintainer(current_user)
@@ -49,7 +36,6 @@ RSpec.describe WebHooks::WebHooksHelper do
describe '#show_project_hook_failed_callout?' do
context 'all conditions are met' do
include_context 'user is logged in'
- include_context 'webhooks_failed_callout is enabled'
include_context 'the user has permission'
include_context 'a hook has failed'
@@ -74,21 +60,9 @@ RSpec.describe WebHooks::WebHooksHelper do
end
end
- context 'all conditions are met, project scoped flags' do
- include_context 'user is logged in'
- include_context 'webhooks_failed_callout is enabled for this project'
- include_context 'the user has permission'
- include_context 'a hook has failed'
-
- it 'is true' do
- expect(helper).to be_show_project_hook_failed_callout(project: project)
- end
- end
-
context 'one condition is not met' do
contexts = [
'user is logged in',
- 'webhooks_failed_callout is enabled',
'the user has permission',
'a hook has failed'
]
diff --git a/spec/models/milestone_note_spec.rb b/spec/models/milestone_note_spec.rb
index db1a7ca05f8..9371cef7540 100644
--- a/spec/models/milestone_note_spec.rb
+++ b/spec/models/milestone_note_spec.rb
@@ -20,6 +20,8 @@ RSpec.describe MilestoneNote do
it 'creates the expected note' do
expect(subject.note_html).to include('removed milestone')
expect(subject.note_html).not_to include('changed milestone to')
+ expect(subject.created_at).to eq(event.created_at)
+ expect(subject.updated_at).to eq(event.created_at)
end
end
end
diff --git a/spec/models/ml/candidate_metadata_spec.rb b/spec/models/ml/candidate_metadata_spec.rb
new file mode 100644
index 00000000000..94e21a910be
--- /dev/null
+++ b/spec/models/ml/candidate_metadata_spec.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ml::CandidateMetadata, feature_category: :mlops do
+ describe 'associations' do
+ it { is_expected.to belong_to(:candidate) }
+ end
+
+ describe 'uniqueness of name' do
+ let_it_be(:metadata) { create(:ml_candidate_metadata, name: 'some_metadata') }
+ let_it_be(:candidate) { metadata.candidate }
+
+ it 'is unique within candidate' do
+ expect do
+ candidate.metadata.create!(name: 'some_metadata', value: 'blah')
+ end.to raise_error.with_message(/Name 'some_metadata' already taken/)
+ end
+ end
+end
diff --git a/spec/models/ml/candidate_spec.rb b/spec/models/ml/candidate_spec.rb
index b35496363fe..8a1e18d55c1 100644
--- a/spec/models/ml/candidate_spec.rb
+++ b/spec/models/ml/candidate_spec.rb
@@ -10,6 +10,7 @@ RSpec.describe Ml::Candidate, factory_default: :keep do
it { is_expected.to belong_to(:user) }
it { is_expected.to have_many(:params) }
it { is_expected.to have_many(:metrics) }
+ it { is_expected.to have_many(:metadata) }
end
describe '.artifact_root' do
diff --git a/spec/models/ml/experiment_metadata_spec.rb b/spec/models/ml/experiment_metadata_spec.rb
new file mode 100644
index 00000000000..e989d495a1c
--- /dev/null
+++ b/spec/models/ml/experiment_metadata_spec.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ml::ExperimentMetadata, feature_category: :mlops do
+ describe 'associations' do
+ it { is_expected.to belong_to(:experiment) }
+ end
+
+ describe 'uniqueness of name' do
+ let_it_be(:metadata) { create(:ml_experiment_metadata, name: 'some_metadata') }
+ let_it_be(:experiment) { metadata.experiment }
+
+ it 'is unique within experiment' do
+ expect do
+ experiment.metadata.create!(name: 'some_metadata', value: 'blah')
+ end.to raise_error.with_message(/Name 'some_metadata' already taken/)
+ end
+ end
+end
diff --git a/spec/models/ml/experiment_spec.rb b/spec/models/ml/experiment_spec.rb
index 789bb3aa88a..52e9f9217f5 100644
--- a/spec/models/ml/experiment_spec.rb
+++ b/spec/models/ml/experiment_spec.rb
@@ -13,6 +13,7 @@ RSpec.describe Ml::Experiment do
it { is_expected.to belong_to(:project) }
it { is_expected.to belong_to(:user) }
it { is_expected.to have_many(:candidates) }
+ it { is_expected.to have_many(:metadata) }
end
describe '#by_project_id_and_iid' do
diff --git a/spec/models/state_note_spec.rb b/spec/models/state_note_spec.rb
index e91150695b0..0afdf6bbcb9 100644
--- a/spec/models/state_note_spec.rb
+++ b/spec/models/state_note_spec.rb
@@ -19,6 +19,7 @@ RSpec.describe StateNote do
it 'contains the expected values' do
expect(subject.author).to eq(author)
expect(subject.created_at).to eq(event.created_at)
+ expect(subject.updated_at).to eq(event.created_at)
expect(subject.note).to eq(state)
end
end
@@ -33,7 +34,8 @@ RSpec.describe StateNote do
it 'contains the expected values' do
expect(subject.author).to eq(author)
- expect(subject.created_at).to eq(subject.created_at)
+ expect(subject.created_at).to eq(event.created_at)
+ expect(subject.updated_at).to eq(event.created_at)
expect(subject.note).to eq("closed via commit #{commit.id}")
end
end
@@ -45,6 +47,7 @@ RSpec.describe StateNote do
it 'contains the expected values' do
expect(subject.author).to eq(author)
expect(subject.created_at).to eq(event.created_at)
+ expect(subject.updated_at).to eq(event.created_at)
expect(subject.note).to eq("closed via merge request !#{merge_request.iid}")
end
end
@@ -55,6 +58,7 @@ RSpec.describe StateNote do
it 'contains the expected values' do
expect(subject.author).to eq(author)
expect(subject.created_at).to eq(event.created_at)
+ expect(subject.updated_at).to eq(event.created_at)
expect(subject.note).to eq('resolved the corresponding error and closed the issue')
end
end
@@ -65,6 +69,7 @@ RSpec.describe StateNote do
it 'contains the expected values' do
expect(subject.author).to eq(author)
expect(subject.created_at).to eq(event.created_at)
+ expect(subject.updated_at).to eq(event.created_at)
expect(subject.note).to eq('automatically closed this incident because the alert resolved')
end
end
diff --git a/spec/models/work_items/type_spec.rb b/spec/models/work_items/type_spec.rb
index 1d8c5e79bf2..c69411cf877 100644
--- a/spec/models/work_items/type_spec.rb
+++ b/spec/models/work_items/type_spec.rb
@@ -70,7 +70,8 @@ RSpec.describe WorkItems::Type do
::WorkItems::Widgets::Labels,
::WorkItems::Widgets::Assignees,
::WorkItems::Widgets::StartAndDueDate,
- ::WorkItems::Widgets::Milestone
+ ::WorkItems::Widgets::Milestone,
+ ::WorkItems::Widgets::Notes
)
end
end
diff --git a/spec/models/work_items/widgets/notes_spec.rb b/spec/models/work_items/widgets/notes_spec.rb
new file mode 100644
index 00000000000..cc98f1ebe54
--- /dev/null
+++ b/spec/models/work_items/widgets/notes_spec.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe WorkItems::Widgets::Notes, feature_category: :team_planning do
+ let_it_be(:work_item) { create(:work_item) }
+ let_it_be(:note) { create(:note, noteable: work_item, project: work_item.project) }
+
+ describe '.type' do
+ it { expect(described_class.type).to eq(:notes) }
+ end
+
+ describe '#type' do
+ it { expect(described_class.new(work_item).type).to eq(:notes) }
+ end
+
+ describe '#notes' do
+ it { expect(described_class.new(work_item).notes).to eq(work_item.notes) }
+ end
+end
diff --git a/spec/requests/api/graphql/project/work_items_spec.rb b/spec/requests/api/graphql/project/work_items_spec.rb
index 6d20799c9ec..a42b8f93ce0 100644
--- a/spec/requests/api/graphql/project/work_items_spec.rb
+++ b/spec/requests/api/graphql/project/work_items_spec.rb
@@ -188,6 +188,60 @@ RSpec.describe 'getting a work item list for a project' do
end
end
+ describe 'fetching work item notes widget' do
+ let(:item_filter_params) { { iid: item2.iid.to_s } }
+ let(:fields) do
+ <<~GRAPHQL
+ edges {
+ node {
+ widgets {
+ type
+ ... on WorkItemWidgetNotes {
+ system: discussions(filter: ONLY_ACTIVITY, first: 10) { nodes { id notes { nodes { id system internal body } } } },
+ comments: discussions(filter: ONLY_COMMENTS, first: 10) { nodes { id notes { nodes { id system internal body } } } },
+ all_notes: discussions(filter: ALL_NOTES, first: 10) { nodes { id notes { nodes { id system internal body } } } }
+ }
+ }
+ }
+ }
+ GRAPHQL
+ end
+
+ before do
+ create_notes(item1, "some note1")
+ create_notes(item2, "some note2")
+ end
+
+ shared_examples 'fetches work item notes' do |user_comments_count:, system_notes_count:|
+ it "fetches notes" do
+ post_graphql(query, current_user: current_user)
+
+ all_widgets = graphql_dig_at(items_data, :node, :widgets)
+ notes_widget = all_widgets.find { |x| x["type"] == "NOTES" }
+
+ all_notes = graphql_dig_at(notes_widget["all_notes"], :nodes)
+ system_notes = graphql_dig_at(notes_widget["system"], :nodes)
+ comments = graphql_dig_at(notes_widget["comments"], :nodes)
+
+ expect(comments.count).to eq(user_comments_count)
+ expect(system_notes.count).to eq(system_notes_count)
+ expect(all_notes.count).to eq(user_comments_count + system_notes_count)
+ end
+ end
+
+ context 'when user has permission to view internal notes' do
+ before do
+ project.add_developer(current_user)
+ end
+
+ it_behaves_like 'fetches work item notes', user_comments_count: 2, system_notes_count: 5
+ end
+
+ context 'when user cannot view internal notes' do
+ it_behaves_like 'fetches work item notes', user_comments_count: 1, system_notes_count: 5
+ end
+ end
+
def item_ids
graphql_dig_at(items_data, :node, :id)
end
@@ -199,4 +253,26 @@ RSpec.describe 'getting a work item list for a project' do
query_graphql_field('workItems', params, fields)
)
end
+
+ def create_notes(work_item, note_body)
+ create(:note, system: true, project: work_item.project, noteable: work_item)
+
+ disc_start = create(:discussion_note_on_issue, noteable: work_item, project: work_item.project, note: note_body)
+ create(:note,
+ discussion_id: disc_start.discussion_id, noteable: work_item,
+ project: work_item.project, note: "reply on #{note_body}")
+
+ create(:resource_label_event, user: current_user, issue: work_item, label: label1, action: 'add')
+ create(:resource_label_event, user: current_user, issue: work_item, label: label1, action: 'remove')
+
+ create(:resource_milestone_event, issue: work_item, milestone: milestone1, action: 'add')
+ create(:resource_milestone_event, issue: work_item, milestone: milestone1, action: 'remove')
+
+ # confidential notes are currently available only on issues and epics
+ conf_disc_start = create(:discussion_note_on_issue, :confidential,
+ noteable: work_item, project: work_item.project, note: "confidential #{note_body}")
+ create(:note, :confidential,
+ discussion_id: conf_disc_start.discussion_id, noteable: work_item,
+ project: work_item.project, note: "reply on confidential #{note_body}")
+ end
end
diff --git a/spec/requests/api/ml/mlflow_spec.rb b/spec/requests/api/ml/mlflow_spec.rb
index 9448f009742..9538d2b5e8d 100644
--- a/spec/requests/api/ml/mlflow_spec.rb
+++ b/spec/requests/api/ml/mlflow_spec.rb
@@ -12,12 +12,13 @@ RSpec.describe API::Ml::Mlflow do
let_it_be(:developer) { create(:user).tap { |u| project.add_developer(u) } }
let_it_be(:another_project) { build(:project).tap { |p| p.add_developer(developer) } }
let_it_be(:experiment) do
- create(:ml_experiments, user: project.creator, project: project)
+ create(:ml_experiments, :with_metadata, project: project)
end
let_it_be(:candidate) do
create(:ml_candidates,
- :with_metrics_and_params, user: experiment.user, start_time: 1234, experiment: experiment)
+ :with_metrics_and_params, :with_metadata,
+ user: experiment.user, start_time: 1234, experiment: experiment)
end
let_it_be(:tokens) do
@@ -151,7 +152,17 @@ RSpec.describe API::Ml::Mlflow do
'experiment_id' => experiment_iid,
'name' => experiment.name,
'lifecycle_stage' => 'active',
- 'artifact_location' => 'not_implemented'
+ 'artifact_location' => 'not_implemented',
+ 'tags' => [
+ {
+ 'key' => experiment.metadata[0].name,
+ 'value' => experiment.metadata[0].value
+ },
+ {
+ 'key' => experiment.metadata[1].name,
+ 'value' => experiment.metadata[1].value
+ }
+ ]
}
})
end
@@ -187,7 +198,17 @@ RSpec.describe API::Ml::Mlflow do
'experiment_id' => experiment.iid.to_s,
'name' => experiment.name,
'lifecycle_stage' => 'active',
- 'artifact_location' => 'not_implemented'
+ 'artifact_location' => 'not_implemented',
+ 'tags' => [
+ {
+ 'key' => experiment.metadata[0].name,
+ 'value' => experiment.metadata[0].value
+ },
+ {
+ 'key' => experiment.metadata[1].name,
+ 'value' => experiment.metadata[1].value
+ }
+ ]
]
})
end
@@ -220,7 +241,17 @@ RSpec.describe API::Ml::Mlflow do
'experiment_id' => experiment.iid.to_s,
'name' => experiment_name,
'lifecycle_stage' => 'active',
- 'artifact_location' => 'not_implemented'
+ 'artifact_location' => 'not_implemented',
+ 'tags' => [
+ {
+ 'key' => experiment.metadata[0].name,
+ 'value' => experiment.metadata[0].value
+ },
+ {
+ 'key' => experiment.metadata[1].name,
+ 'value' => experiment.metadata[1].value
+ }
+ ]
}
})
end
@@ -284,10 +315,44 @@ RSpec.describe API::Ml::Mlflow do
end
end
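+ # Experiment tags set through the MLflow API are persisted on the experiment's metadata association.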
+ describe 'POST /projects/:id/ml/mlflow/api/2.0/mlflow/experiments/set-experiment-tag' do
+ let(:route) { "/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/experiments/set-experiment-tag" }
+ let(:default_params) { { experiment_id: experiment.iid.to_s, key: 'some_key', value: 'value' } }
+ let(:params) { default_params }
+ let(:request) { post api(route), params: params, headers: headers }
+
+ it 'logs the tag', :aggregate_failures do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to be_empty
+ expect(experiment.reload.metadata.map(&:name)).to include('some_key')
+ end
+
+ describe 'Error Cases' do
+ context 'when tag was already set' do
+ let(:params) { default_params.merge(key: experiment.metadata[0].name) }
+
+ it_behaves_like 'Bad Request'
+ end
+
+ it_behaves_like 'shared error cases'
+ it_behaves_like 'Requires api scope'
+ it_behaves_like 'Bad Request on missing required', [:key, :value]
+ end
+ end
+
describe 'Runs' do
describe 'POST /projects/:id/ml/mlflow/api/2.0/mlflow/runs/create' do
let(:route) { "/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/runs/create" }
- let(:params) { { experiment_id: experiment.iid.to_s, start_time: Time.now.to_i } }
+ let(:params) do
+ {
+ experiment_id: experiment.iid.to_s,
+ start_time: Time.now.to_i,
+ tags: [
+ { key: 'hello', value: 'world' }
+ ]
+ }
+ end
+
let(:request) { post api(route), params: params, headers: headers }
it 'creates the run', :aggregate_failures do
@@ -295,14 +360,18 @@ RSpec.describe API::Ml::Mlflow do
'experiment_id' => params[:experiment_id],
'user_id' => current_user.id.to_s,
'start_time' => params[:start_time],
- 'status' => "RUNNING",
- 'lifecycle_stage' => "active"
+ 'status' => 'RUNNING',
+ 'lifecycle_stage' => 'active'
}
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('ml/run')
expect(json_response['run']).to include('info' => hash_including(**expected_properties),
- 'data' => { 'metrics' => [], 'params' => [] })
+ 'data' => {
+ 'metrics' => [],
+ 'params' => [],
+ 'tags' => [{ 'key' => 'hello', 'value' => 'world' }]
+ })
end
describe 'Error States' do
@@ -355,6 +424,10 @@ RSpec.describe API::Ml::Mlflow do
'params' => [
{ 'key' => candidate.params[0].name, 'value' => candidate.params[0].value },
{ 'key' => candidate.params[1].name, 'value' => candidate.params[1].value }
+ ],
+ 'tags' => [
+ { 'key' => 'metadata_1', 'value' => 'value1' },
+ { 'key' => 'metadata_2', 'value' => 'value2' }
]
})
end
@@ -454,6 +527,31 @@ RSpec.describe API::Ml::Mlflow do
end
end
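+ # Run tags mirror experiment tags: they are stored as metadata records on the candidate.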
+ describe 'POST /projects/:id/ml/mlflow/api/2.0/mlflow/runs/set-tag' do
+ let(:route) { "/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/runs/set-tag" }
+ let(:default_params) { { run_id: candidate.iid.to_s, key: 'some_key', value: 'value' } }
+ let(:params) { default_params }
+ let(:request) { post api(route), params: params, headers: headers }
+
+ it 'logs the tag', :aggregate_failures do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to be_empty
+ expect(candidate.reload.metadata.map(&:name)).to include('some_key')
+ end
+
+ describe 'Error Cases' do
+ context 'when tag was already logged' do
+ let(:params) { default_params.tap { |p| p[:key] = candidate.metadata[0].name } }
+
+ it_behaves_like 'Bad Request'
+ end
+
+ it_behaves_like 'shared error cases'
+ it_behaves_like 'Requires api scope'
+ it_behaves_like 'run_id param error cases'
+ it_behaves_like 'Bad Request on missing required', [:key, :value]
+ end
+ end
+
describe 'POST /projects/:id/ml/mlflow/api/2.0/mlflow/runs/log-batch' do
let(:candidate2) do
create(:ml_candidates, user: experiment.user, start_time: 1234, experiment: experiment)
@@ -467,7 +565,8 @@ RSpec.describe API::Ml::Mlflow do
{ key: 'mae', value: 2.5, timestamp: 1552550804 },
{ key: 'rmse', value: 2.7, timestamp: 1552550804 }
],
- params: [{ key: 'model_class', value: 'LogisticRegression' }]
+ params: [{ key: 'model_class', value: 'LogisticRegression' }],
+ tags: [{ key: 'tag1', value: 'tag.value.1' }]
}
end
@@ -477,6 +576,7 @@ RSpec.describe API::Ml::Mlflow do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_empty
expect(candidate2.params.size).to eq(1)
+ expect(candidate2.metadata.size).to eq(1)
expect(candidate2.metrics.size).to eq(2)
end
@@ -493,6 +593,19 @@ RSpec.describe API::Ml::Mlflow do
end
end
+ context 'when tag was already logged' do
+ let(:params) do
+ default_params.tap { |p| p[:tags] = [{ key: 'tag1', value: 'a' }, { key: 'tag1', value: 'b' }] }
+ end
+
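+ # Only the first value for the duplicated 'tag1' key is persisted by log-batch.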
+ it 'logs only one tag', :aggregate_failures do
+ candidate2.metadata.reload
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(candidate2.metadata.size).to eq(1)
+ end
+ end
+
describe 'Error Cases' do
context 'when required metric key is missing' do
let(:params) { default_params.tap { |p| p[:metrics] = [p[:metrics][0].delete(:key)] } }
diff --git a/spec/services/ml/experiment_tracking/candidate_repository_spec.rb b/spec/services/ml/experiment_tracking/candidate_repository_spec.rb
index 8002b2ebc86..ff3b295d185 100644
--- a/spec/services/ml/experiment_tracking/candidate_repository_spec.rb
+++ b/spec/services/ml/experiment_tracking/candidate_repository_spec.rb
@@ -31,13 +31,17 @@ RSpec.describe ::Ml::ExperimentTracking::CandidateRepository do
end
describe '#create!' do
- subject { repository.create!(experiment, 1234) }
+ subject { repository.create!(experiment, 1234, [{ key: 'hello', value: 'world' }]) }
it 'creates the candidate' do
expect(subject.start_time).to eq(1234)
expect(subject.iid).not_to be_nil
expect(subject.end_time).to be_nil
end
+
+ it 'creates the candidate with the tag' do
+ expect(subject.metadata.length).to eq(1)
+ end
end
describe '#update' do
@@ -118,6 +122,32 @@ RSpec.describe ::Ml::ExperimentTracking::CandidateRepository do
end
end
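+ # Re-adding a tag name that already exists on the candidate raises ActiveRecord::RecordInvalid.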
+ describe '#add_tag!' do
+ let(:props) { { name: 'abc', value: 'def' } }
+
+ subject { repository.add_tag!(candidate, props[:name], props[:value]) }
+
+ it 'adds a new tag' do
+ expect { subject }.to change { candidate.reload.metadata.size }.by(1)
+ end
+
+ context 'when name is missing' do
+ let(:props) { { value: 1234 } }
+
+ it 'raises RecordInvalid' do
+ expect { subject }.to raise_error(ActiveRecord::RecordInvalid)
+ end
+ end
+
+ context 'when tag was already added' do
+ it 'raises RecordInvalid' do
+ repository.add_tag!(candidate, 'new', props[:value])
+
+ expect { repository.add_tag!(candidate, 'new', props[:value]) }.to raise_error(ActiveRecord::RecordInvalid)
+ end
+ end
+ end
+
describe "#add_params" do
let(:params) do
[{ key: 'model_class', value: 'LogisticRegression' }, { 'key': 'pythonEnv', value: '3.10' }]
@@ -196,4 +226,50 @@ RSpec.describe ::Ml::ExperimentTracking::CandidateRepository do
end
end
end
+
+ describe "#add_tags" do
+ let(:tags) do
+ [{ key: 'gitlab.tag1', value: 'hello' }, { key: 'gitlab.tag2', value: 'world' }]
+ end
+
+ subject { repository.add_tags(candidate, tags) }
+
+ it 'adds the tags' do
+ expect { subject }.to change { candidate.reload.metadata.size }.by(2)
+ end
+
+ context 'when a tag is missing a key' do
+ let(:tags) { [{ value: 'hello' }] }
+
+ it 'raises an error and does not add any tags' do
+ expect { subject }.to raise_error(ActiveRecord::ActiveRecordError)
+ end
+ end
+
+ context 'when a tag is missing a value' do
+ let(:tags) { [{ key: 'gitlab.tag1' }] }
+
+ it 'raises an error and does not add any tags' do
+ expect { subject }.to raise_error(ActiveRecord::ActiveRecordError)
+ end
+ end
+
+ context 'when a tag is repeated' do
+ let(:tags) do
+ [
+ { key: 'gitlab.tag1', value: 'hello' },
+ { key: 'gitlab.tag2', value: 'world' },
+ { key: 'gitlab.tag1', value: 'gitlab' }
+ ]
+ end
+
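+ # 'gitlab.tag2' already exists, so add_tags only persists the first 'gitlab.tag1' entry.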
+ before do
+ repository.add_tag!(candidate, 'gitlab.tag2', '0')
+ end
+
+ it 'does not raise and adds only the first occurrence of each key' do
+ expect { subject }.to change { candidate.reload.metadata.size }.by(1)
+ end
+ end
+ end
end
diff --git a/spec/services/ml/experiment_tracking/experiment_repository_spec.rb b/spec/services/ml/experiment_tracking/experiment_repository_spec.rb
index 80e1fa025d1..c3c716b831a 100644
--- a/spec/services/ml/experiment_tracking/experiment_repository_spec.rb
+++ b/spec/services/ml/experiment_tracking/experiment_repository_spec.rb
@@ -59,10 +59,11 @@ RSpec.describe ::Ml::ExperimentTracking::ExperimentRepository do
describe '#create!' do
let(:name) { 'hello' }
+ let(:tags) { nil }
- subject { repository.create!(name) }
+ subject { repository.create!(name, tags) }
- it 'creates the candidate' do
+ it 'creates the experiment' do
expect { subject }.to change { repository.all.size }.by(1)
end
@@ -74,6 +75,14 @@ RSpec.describe ::Ml::ExperimentTracking::ExperimentRepository do
end
end
+ context 'when tags are given' do
+ let(:tags) { [{ key: 'hello', value: 'world' }] }
+
+ it 'creates the experiment with the tag' do
+ expect(subject.metadata.length).to eq(1)
+ end
+ end
+
context 'when name is missing' do
let(:name) { nil }
@@ -82,4 +91,30 @@ RSpec.describe ::Ml::ExperimentTracking::ExperimentRepository do
end
end
end
+
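+ # Mirrors CandidateRepository#add_tag!: re-adding an existing tag name on the experiment raises RecordInvalid.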
+ describe '#add_tag!' do
+ let(:props) { { name: 'abc', value: 'def' } }
+
+ subject { repository.add_tag!(experiment, props[:name], props[:value]) }
+
+ it 'adds a new tag' do
+ expect { subject }.to change { experiment.reload.metadata.size }.by(1)
+ end
+
+ context 'when name is missing' do
+ let(:props) { { value: 1234 } }
+
+ it 'raises RecordInvalid' do
+ expect { subject }.to raise_error(ActiveRecord::RecordInvalid)
+ end
+ end
+
+ context 'when tag was already added' do
+ it 'raises RecordInvalid' do
+ repository.add_tag!(experiment, 'new', props[:value])
+
+ expect { repository.add_tag!(experiment, 'new', props[:value]) }.to raise_error(ActiveRecord::RecordInvalid)
+ end
+ end
+ end
end
diff --git a/spec/support/shared_examples/models/label_note_shared_examples.rb b/spec/support/shared_examples/models/label_note_shared_examples.rb
index f61007f57fd..3facd533d7a 100644
--- a/spec/support/shared_examples/models/label_note_shared_examples.rb
+++ b/spec/support/shared_examples/models/label_note_shared_examples.rb
@@ -30,6 +30,8 @@ RSpec.shared_examples 'label note created from events' do
expect(note.noteable).to eq event.issuable
expect(note.note).to be_present
expect(note.note_html).to be_present
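+ # Synthetic label notes take their timestamps from the originating resource event.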
+ expect(note.created_at).to eq create_event.created_at
+ expect(note.updated_at).to eq create_event.created_at
end
it 'updates markdown cache if reference is not set yet' do