Welcome to mirror list, hosted at ThFree Co, Russian Federation.

gitlab.com/gitlab-org/gitlab-foss.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
path: root/spec
diff options
context:
space:
mode:
authorGitLab Bot <gitlab-bot@gitlab.com>2020-04-21 18:21:10 +0300
committerGitLab Bot <gitlab-bot@gitlab.com>2020-04-21 18:21:10 +0300
commite33f87ac0fabaab468ce4b457996cc0f1b1bb648 (patch)
tree8bf0de72a9acac014cfdaddab7d463b208294af2 /spec
parent5baf990db20a75078684702782c24399ef9eb0fa (diff)
Add latest changes from gitlab-org/gitlab@master
Diffstat (limited to 'spec')
-rw-r--r--spec/config/smime_signature_settings_spec.rb9
-rw-r--r--spec/controllers/admin/integrations_controller_spec.rb9
-rw-r--r--spec/controllers/admin/runners_controller_spec.rb24
-rw-r--r--spec/controllers/admin/users_controller_spec.rb11
-rw-r--r--spec/controllers/dashboard/projects_controller_spec.rb51
-rw-r--r--spec/controllers/groups/settings/ci_cd_controller_spec.rb84
-rw-r--r--spec/controllers/groups/settings/integrations_controller_spec.rb7
-rw-r--r--spec/controllers/groups/settings/repository_controller_spec.rb98
-rw-r--r--spec/controllers/projects/import/jira_controller_spec.rb12
-rw-r--r--spec/controllers/projects/issues_controller_spec.rb93
-rw-r--r--spec/controllers/projects/merge_requests_controller_spec.rb176
-rw-r--r--spec/controllers/projects/pipelines_controller_spec.rb27
-rw-r--r--spec/controllers/repositories/git_http_controller_spec.rb38
-rw-r--r--spec/factories/ci/bridge.rb16
-rw-r--r--spec/factories/ci/builds.rb12
-rw-r--r--spec/factories/ci/job_artifacts.rb20
-rw-r--r--spec/factories/ci/pipelines.rb16
-rw-r--r--spec/factories/import_failures.rb23
-rw-r--r--spec/factories/merge_requests.rb12
-rw-r--r--spec/factories/projects.rb4
-rw-r--r--spec/factories/terraform/state.rb4
-rw-r--r--spec/factories/users.rb9
-rw-r--r--spec/features/admin/admin_mode/workers_spec.rb8
-rw-r--r--spec/features/cycle_analytics_spec.rb12
-rw-r--r--spec/features/dashboard/snippets_spec.rb41
-rw-r--r--spec/features/groups/settings/ci_cd_spec.rb13
-rw-r--r--spec/features/groups/settings/repository_spec.rb28
-rw-r--r--spec/features/issues/csv_spec.rb100
-rw-r--r--spec/features/issues/spam_issues_spec.rb121
-rw-r--r--spec/features/merge_request/user_resolves_wip_mr_spec.rb48
-rw-r--r--spec/features/projects/files/user_browses_files_spec.rb28
-rw-r--r--spec/features/projects/settings/ci_cd_settings_spec.rb2
-rw-r--r--spec/features/projects/snippets/create_snippet_spec.rb1
-rw-r--r--spec/features/projects/snippets/user_updates_snippet_spec.rb2
-rw-r--r--spec/features/snippets/spam_snippets_spec.rb52
-rw-r--r--spec/features/snippets/user_creates_snippet_spec.rb1
-rw-r--r--spec/features/snippets/user_edits_snippet_spec.rb2
-rw-r--r--spec/finders/autocomplete/move_to_project_finder_spec.rb13
-rw-r--r--spec/finders/members_finder_spec.rb4
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/snippets.json3
-rw-r--r--spec/fixtures/lib/elasticsearch/query.json2
-rw-r--r--spec/fixtures/lib/elasticsearch/query_with_container.json2
-rw-r--r--spec/fixtures/lib/elasticsearch/query_with_cursor.json2
-rw-r--r--spec/fixtures/lib/elasticsearch/query_with_end_time.json2
-rw-r--r--spec/fixtures/lib/elasticsearch/query_with_filebeat_6.json40
-rw-r--r--spec/fixtures/lib/elasticsearch/query_with_search.json2
-rw-r--r--spec/fixtures/lib/elasticsearch/query_with_start_time.json2
-rw-r--r--spec/fixtures/lib/elasticsearch/query_with_times.json2
-rw-r--r--spec/fixtures/lib/gitlab/metrics/dashboard/development_metrics.yml20
-rw-r--r--spec/fixtures/terraform/tfplan.json1
-rw-r--r--spec/fixtures/terraform/tfplan_with_corrupted_data.json1
-rw-r--r--spec/fixtures/x509/ZZZZZZA6.crlbin0 -> 205280 bytes
-rw-r--r--spec/frontend/alert_management/components/alert_management_list_spec.js38
-rw-r--r--spec/frontend/blob/components/blob_edit_content_spec.js2
-rw-r--r--spec/frontend/ci_variable_list/components/ci_key_field_spec.js244
-rw-r--r--spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js153
-rw-r--r--spec/frontend/clusters/components/applications_spec.js33
-rw-r--r--spec/frontend/clusters/components/fluentd_output_settings_spec.js158
-rw-r--r--spec/frontend/clusters/services/mock_data.js1
-rw-r--r--spec/frontend/clusters/stores/clusters_store_spec.js16
-rw-r--r--spec/frontend/diffs/components/diff_table_cell_spec.js15
-rw-r--r--spec/frontend/diffs/store/getters_versions_dropdowns_spec.js99
-rw-r--r--spec/frontend/diffs/store/mutations_spec.js67
-rw-r--r--spec/frontend/helpers/dom_events_helper.js10
-rw-r--r--spec/frontend/jira_import/components/jira_import_app_spec.js207
-rw-r--r--spec/frontend/jira_import/components/jira_import_form_spec.js134
-rw-r--r--spec/frontend/jira_import/components/jira_import_progress_spec.js70
-rw-r--r--spec/frontend/jira_import/components/jira_import_setup_spec.js17
-rw-r--r--spec/frontend/jira_import/utils_spec.js27
-rw-r--r--spec/frontend/monitoring/__snapshots__/alert_widget_spec.js.snap43
-rw-r--r--spec/frontend/monitoring/alert_widget_spec.js422
-rw-r--r--spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap169
-rw-r--r--spec/frontend/monitoring/components/alert_widget_form_spec.js188
-rw-r--r--spec/frontend/monitoring/components/charts/annotations_spec.js11
-rw-r--r--spec/frontend/monitoring/components/charts/options_spec.js29
-rw-r--r--spec/frontend/monitoring/components/charts/single_stat_spec.js14
-rw-r--r--spec/frontend/monitoring/components/charts/time_series_spec.js40
-rw-r--r--spec/frontend/monitoring/components/dashboard_spec.js78
-rw-r--r--spec/frontend/monitoring/components/embeds/metric_embed_spec.js2
-rw-r--r--spec/frontend/monitoring/components/panel_type_spec.js164
-rw-r--r--spec/frontend/monitoring/components/panel_type_with_alerts_spec.js73
-rw-r--r--spec/frontend/monitoring/fixture_data.js24
-rw-r--r--spec/frontend/monitoring/mock_data.js99
-rw-r--r--spec/frontend/monitoring/store/actions_spec.js33
-rw-r--r--spec/frontend/monitoring/store/utils_spec.js31
-rw-r--r--spec/frontend/monitoring/stubs/modal_stub.js11
-rw-r--r--spec/frontend/monitoring/utils_spec.js13
-rw-r--r--spec/frontend/monitoring/validators_spec.js80
-rw-r--r--spec/frontend/notes/components/note_header_spec.js67
-rw-r--r--spec/frontend/notes/mock_data.js1
-rw-r--r--spec/frontend/notes/stores/actions_spec.js29
-rw-r--r--spec/frontend/notes/stores/mutation_spec.js36
-rw-r--r--spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js31
-rw-r--r--spec/frontend/pipelines/graph/action_component_spec.js9
-rw-r--r--spec/frontend/pipelines/graph/graph_component_spec.js305
-rw-r--r--spec/frontend/pipelines/graph/job_group_dropdown_spec.js (renamed from spec/javascripts/pipelines/graph/job_group_dropdown_spec.js)19
-rw-r--r--spec/frontend/pipelines/graph/job_item_spec.js8
-rw-r--r--spec/frontend/pipelines/graph/job_name_component_spec.js36
-rw-r--r--spec/frontend/pipelines/graph/linked_pipeline_spec.js24
-rw-r--r--spec/frontend/pipelines/graph/linked_pipelines_column_spec.js38
-rw-r--r--spec/frontend/pipelines/graph/linked_pipelines_mock_data.js4084
-rw-r--r--spec/frontend/pipelines/graph/mock_data.js (renamed from spec/javascripts/pipelines/graph/mock_data.js)0
-rw-r--r--spec/frontend/pipelines/graph/stage_column_component_spec.js136
-rw-r--r--spec/frontend/pipelines/test_reports/test_summary_table_spec.js36
-rw-r--r--spec/frontend/registry/explorer/pages/list_spec.js49
-rw-r--r--spec/frontend/registry/explorer/stores/actions_spec.js29
-rw-r--r--spec/frontend/registry/explorer/stores/mutations_spec.js22
-rw-r--r--spec/frontend/releases/components/release_block_spec.js13
-rw-r--r--spec/frontend/reports/accessibility_report/mock_data.js86
-rw-r--r--spec/frontend/reports/accessibility_report/store/actions_spec.js111
-rw-r--r--spec/frontend/reports/accessibility_report/store/mutations_spec.js61
-rw-r--r--spec/frontend/reports/accessibility_report/store/utils_spec.js35
-rw-r--r--spec/frontend/snippet/snippet_edit_spec.js45
-rw-r--r--spec/frontend/snippets/components/__snapshots__/snippet_description_edit_spec.js.snap1
-rw-r--r--spec/frontend/snippets/components/edit_spec.js295
-rw-r--r--spec/frontend/static_site_editor/components/saved_changes_message_spec.js21
-rw-r--r--spec/frontend/static_site_editor/components/static_site_editor_spec.js26
-rw-r--r--spec/frontend/static_site_editor/mock_data.js4
-rw-r--r--spec/frontend/vue_shared/components/__snapshots__/awards_list_spec.js.snap287
-rw-r--r--spec/frontend/vue_shared/components/awards_list_spec.js213
-rw-r--r--spec/frontend/vue_shared/components/form/__snapshots__/title_spec.js.snap4
-rw-r--r--spec/frontend/vue_shared/components/user_popover/user_popover_spec.js73
-rw-r--r--spec/graphql/resolvers/board_lists_resolver_spec.rb82
-rw-r--r--spec/graphql/resolvers/metrics/dashboards/annotation_resolver_spec.rb60
-rw-r--r--spec/graphql/resolvers/projects/jira_imports_resolver_spec.rb8
-rw-r--r--spec/graphql/types/board_list_type_spec.rb13
-rw-r--r--spec/graphql/types/metrics/dashboard_type_spec.rb11
-rw-r--r--spec/graphql/types/metrics/dashboards/annotation_type_spec.rb17
-rw-r--r--spec/graphql/types/user_type_spec.rb2
-rw-r--r--spec/haml_lint/linter/no_plain_nodes_spec.rb38
-rw-r--r--spec/helpers/environments_helper_spec.rb18
-rw-r--r--spec/helpers/projects/alert_management_helper_spec.rb27
-rw-r--r--spec/helpers/snippets_helper_spec.rb31
-rw-r--r--spec/initializers/lograge_spec.rb4
-rw-r--r--spec/javascripts/pipelines/graph/graph_component_spec.js274
-rw-r--r--spec/javascripts/pipelines/graph/job_name_component_spec.js27
-rw-r--r--spec/javascripts/pipelines/graph/linked_pipelines_column_spec.js43
-rw-r--r--spec/javascripts/pipelines/graph/linked_pipelines_mock_data.js3
-rw-r--r--spec/javascripts/pipelines/graph/stage_column_component_spec.js122
-rw-r--r--spec/javascripts/reports/components/grouped_test_reports_app_spec.js21
-rw-r--r--spec/javascripts/reports/mock_data/mock_data.js18
-rw-r--r--spec/lib/api/entities/project_import_failed_relation_spec.rb23
-rw-r--r--spec/lib/api/entities/project_import_status_spec.rb49
-rw-r--r--spec/lib/api/entities/snippet_spec.rb118
-rw-r--r--spec/lib/api/entities/user_spec.rb26
-rw-r--r--spec/lib/api/validations/validators/limit_spec.rb25
-rw-r--r--spec/lib/banzai/pipeline_spec.rb64
-rw-r--r--spec/lib/csv_builder_spec.rb109
-rw-r--r--spec/lib/gitlab/background_migration/migrate_issue_trackers_sensitive_data_spec.rb12
-rw-r--r--spec/lib/gitlab/background_migration/user_mentions/create_resource_user_mention_spec.rb2
-rw-r--r--spec/lib/gitlab/chat/responder/mattermost_spec.rb117
-rw-r--r--spec/lib/gitlab/ci/config/entry/artifacts_spec.rb22
-rw-r--r--spec/lib/gitlab/ci/jwt_spec.rb124
-rw-r--r--spec/lib/gitlab/ci/parsers/terraform/tfplan_spec.rb51
-rw-r--r--spec/lib/gitlab/ci/parsers/test/junit_spec.rb60
-rw-r--r--spec/lib/gitlab/ci/parsers_spec.rb8
-rw-r--r--spec/lib/gitlab/ci/reports/terraform_reports_spec.rb34
-rw-r--r--spec/lib/gitlab/ci/reports/test_reports_spec.rb23
-rw-r--r--spec/lib/gitlab/ci/reports/test_suite_spec.rb25
-rw-r--r--spec/lib/gitlab/ci/status/bridge/factory_spec.rb72
-rw-r--r--spec/lib/gitlab/cycle_analytics/group_stage_summary_spec.rb24
-rw-r--r--spec/lib/gitlab/cycle_analytics/stage_summary_spec.rb90
-rw-r--r--spec/lib/gitlab/database/migration_helpers_spec.rb43
-rw-r--r--spec/lib/gitlab/diff/highlight_cache_spec.rb52
-rw-r--r--spec/lib/gitlab/elasticsearch/logs/lines_spec.rb8
-rw-r--r--spec/lib/gitlab/email/hook/smime_signature_interceptor_spec.rb23
-rw-r--r--spec/lib/gitlab/email/smime/certificate_spec.rb55
-rw-r--r--spec/lib/gitlab/email/smime/signer_spec.rb35
-rw-r--r--spec/lib/gitlab/experimentation_spec.rb53
-rw-r--r--spec/lib/gitlab/file_hook_spec.rb2
-rw-r--r--spec/lib/gitlab/gitaly_client_spec.rb9
-rw-r--r--spec/lib/gitlab/grape_logging/loggers/perf_logger_spec.rb2
-rw-r--r--spec/lib/gitlab/grape_logging/loggers/queue_duration_logger_spec.rb4
-rw-r--r--spec/lib/gitlab/import_export/all_models.yml1
-rw-r--r--spec/lib/gitlab/import_export/project/import_task_spec.rb2
-rw-r--r--spec/lib/gitlab/import_export/project/tree_restorer_spec.rb2
-rw-r--r--spec/lib/gitlab/import_export/relation_tree_restorer_spec.rb2
-rw-r--r--spec/lib/gitlab/import_export/safe_model_attributes.yml3
-rw-r--r--spec/lib/gitlab/instrumentation_helper_spec.rb10
-rw-r--r--spec/lib/gitlab/jira_import/labels_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/json_spec.rb91
-rw-r--r--spec/lib/gitlab/kubernetes/helm/base_command_spec.rb52
-rw-r--r--spec/lib/gitlab/kubernetes/helm/init_command_spec.rb52
-rw-r--r--spec/lib/gitlab/kubernetes/helm/install_command_spec.rb16
-rw-r--r--spec/lib/gitlab/kubernetes/helm/patch_command_spec.rb16
-rw-r--r--spec/lib/gitlab/project_template_spec.rb1
-rw-r--r--spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb18
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/server_spec.rb13
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/worker_context/server_spec.rb13
-rw-r--r--spec/lib/gitlab/sidekiq_middleware_spec.rb47
-rw-r--r--spec/lib/gitlab/slash_commands/presenters/issue_show_spec.rb4
-rw-r--r--spec/lib/gitlab/utils_spec.rb18
-rw-r--r--spec/lib/gitlab/workhorse_spec.rb86
-rw-r--r--spec/lib/marginalia_spec.rb30
-rw-r--r--spec/lib/rspec_flaky/flaky_example_spec.rb2
-rw-r--r--spec/mailers/emails/issues_spec.rb49
-rw-r--r--spec/migrations/cleanup_empty_commit_user_mentions_spec.rb2
-rw-r--r--spec/migrations/cleanup_optimistic_locking_nulls_pt2_spec.rb44
-rw-r--r--spec/migrations/cleanup_optimistic_locking_nulls_spec.rb9
-rw-r--r--spec/migrations/migrate_commit_notes_mentions_to_db_spec.rb2
-rw-r--r--spec/migrations/schedule_backfill_push_rules_id_in_projects_spec.rb14
-rw-r--r--spec/models/ci/build_spec.rb68
-rw-r--r--spec/models/ci/job_artifact_spec.rb16
-rw-r--r--spec/models/ci/pipeline_spec.rb10
-rw-r--r--spec/models/ci/processable_spec.rb161
-rw-r--r--spec/models/ci/runner_spec.rb30
-rw-r--r--spec/models/clusters/applications/elastic_stack_spec.rb17
-rw-r--r--spec/models/clusters/applications/ingress_spec.rb6
-rw-r--r--spec/models/concerns/issuable_spec.rb34
-rw-r--r--spec/models/concerns/spammable_spec.rb91
-rw-r--r--spec/models/cycle_analytics/group_level_spec.rb2
-rw-r--r--spec/models/import_failure_spec.rb23
-rw-r--r--spec/models/jira_import_state_spec.rb20
-rw-r--r--spec/models/merge_request_diff_spec.rb39
-rw-r--r--spec/models/merge_request_spec.rb22
-rw-r--r--spec/models/namespace/root_storage_size_spec.rb85
-rw-r--r--spec/models/project_feature_spec.rb4
-rw-r--r--spec/models/project_import_state_spec.rb21
-rw-r--r--spec/models/project_services/jira_service_spec.rb20
-rw-r--r--spec/models/project_services/mattermost_slash_commands_service_spec.rb7
-rw-r--r--spec/models/project_spec.rb12
-rw-r--r--spec/models/resource_milestone_event_spec.rb26
-rw-r--r--spec/models/terraform/state_spec.rb25
-rw-r--r--spec/models/user_spec.rb44
-rw-r--r--spec/models/user_type_enums_spec.rb13
-rw-r--r--spec/policies/global_policy_spec.rb33
-rw-r--r--spec/requests/api/deploy_tokens_spec.rb7
-rw-r--r--spec/requests/api/graphql/boards/board_lists_query_spec.rb137
-rw-r--r--spec/requests/api/graphql/metrics/dashboard/annotations_spec.rb109
-rw-r--r--spec/requests/api/graphql/mutations/jira_import/start_spec.rb14
-rw-r--r--spec/requests/api/graphql/project/jira_import_spec.rb2
-rw-r--r--spec/requests/api/internal/base_spec.rb118
-rw-r--r--spec/requests/api/issues/post_projects_issues_spec.rb2
-rw-r--r--spec/requests/api/issues/put_projects_issues_spec.rb9
-rw-r--r--spec/requests/api/markdown_spec.rb2
-rw-r--r--spec/requests/api/merge_requests_spec.rb16
-rw-r--r--spec/requests/api/project_milestones_spec.rb8
-rw-r--r--spec/requests/api/project_snippets_spec.rb11
-rw-r--r--spec/requests/api/project_statistics_spec.rb8
-rw-r--r--spec/requests/api/projects_spec.rb4
-rw-r--r--spec/requests/api/runners_spec.rb67
-rw-r--r--spec/requests/api/snippets_spec.rb37
-rw-r--r--spec/requests/api/terraform/state_spec.rb238
-rw-r--r--spec/routing/openid_connect_spec.rb5
-rw-r--r--spec/rubocop/cop/migration/add_limit_to_string_columns_spec.rb268
-rw-r--r--spec/rubocop/cop/migration/add_limit_to_text_columns_spec.rb160
-rw-r--r--spec/rubocop/cop/migration/prevent_strings_spec.rb143
-rw-r--r--spec/rubocop/cop/rspec/modify_sidekiq_middleware_spec.rb39
-rw-r--r--spec/rubocop/cop/static_translation_definition_spec.rb109
-rw-r--r--spec/serializers/analytics_summary_serializer_spec.rb5
-rw-r--r--spec/serializers/merge_request_poll_widget_entity_spec.rb26
-rw-r--r--spec/serializers/merge_request_serializer_spec.rb16
-rw-r--r--spec/serializers/test_suite_entity_spec.rb50
-rw-r--r--spec/services/auto_merge/merge_when_pipeline_succeeds_service_spec.rb22
-rw-r--r--spec/services/auto_merge_service_spec.rb69
-rw-r--r--spec/services/ci/compare_test_reports_service_spec.rb7
-rw-r--r--spec/services/ci/create_cross_project_pipeline_service_spec.rb40
-rw-r--r--spec/services/ci/generate_terraform_reports_service_spec.rb71
-rw-r--r--spec/services/ci/update_runner_service_spec.rb13
-rw-r--r--spec/services/emails/destroy_service_spec.rb5
-rw-r--r--spec/services/git/process_ref_changes_service_spec.rb43
-rw-r--r--spec/services/issues/create_service_spec.rb55
-rw-r--r--spec/services/issues/export_csv_service_spec.rb170
-rw-r--r--spec/services/jira_import/start_import_service_spec.rb45
-rw-r--r--spec/services/merge_requests/merge_orchestration_service_spec.rb116
-rw-r--r--spec/services/merge_requests/pushed_branches_service_spec.rb42
-rw-r--r--spec/services/merge_requests/update_service_spec.rb14
-rw-r--r--spec/services/metrics/dashboard/transient_embed_service_spec.rb50
-rw-r--r--spec/services/namespaces/check_storage_size_service_spec.rb101
-rw-r--r--spec/services/personal_access_tokens/create_service_spec.rb24
-rw-r--r--spec/services/pod_logs/elasticsearch_service_spec.rb2
-rw-r--r--spec/services/quick_actions/interpret_service_spec.rb29
-rw-r--r--spec/services/resources/create_access_token_service_spec.rb163
-rw-r--r--spec/services/snippets/create_service_spec.rb37
-rw-r--r--spec/services/snippets/update_service_spec.rb31
-rw-r--r--spec/services/spam/spam_action_service_spec.rb (renamed from spec/services/spam/spam_check_service_spec.rb)37
-rw-r--r--spec/services/spam/spam_verdict_service_spec.rb65
-rw-r--r--spec/services/terraform/remote_state_handler_spec.rb143
-rw-r--r--spec/services/users/build_service_spec.rb20
-rw-r--r--spec/services/x509_certificate_revoke_service_spec.rb2
-rw-r--r--spec/spec_helper.rb17
-rw-r--r--spec/support/helpers/api_helpers.rb11
-rw-r--r--spec/support/helpers/smime_helper.rb14
-rw-r--r--spec/support/import_export/configuration_helper.rb4
-rw-r--r--spec/support/matchers/exclude_matcher.rb3
-rw-r--r--spec/support/shared_contexts/navbar_structure_context.rb2
-rw-r--r--spec/support/shared_contexts/spam_constants.rb7
-rw-r--r--spec/support/shared_examples/controllers/deploy_token_shared_examples.rb14
-rw-r--r--spec/support/shared_examples/features/deploy_token_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/graphql/jira_import/jira_import_resolver_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/quick_actions/issuable/issuable_quick_actions_shared_examples.rb22
-rw-r--r--spec/support/shared_examples/quick_actions/merge_request/merge_quick_action_shared_examples.rb19
-rw-r--r--spec/support/shared_examples/requests/snippet_shared_examples.rb58
-rw-r--r--spec/support/shared_examples/services/boards/lists_list_service_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/workers/gitlab/jira_import/jira_import_workers_shared_examples.rb2
-rw-r--r--spec/support/sidekiq_middleware.rb16
-rw-r--r--spec/uploaders/terraform/state_uploader_spec.rb6
-rw-r--r--spec/views/shared/projects/_project.html.haml_spec.rb2
-rw-r--r--spec/workers/create_commit_signature_worker_spec.rb59
-rw-r--r--spec/workers/expire_pipeline_cache_worker_spec.rb8
-rw-r--r--spec/workers/export_csv_worker_spec.rb34
-rw-r--r--spec/workers/gitlab/jira_import/stage/finish_import_worker_spec.rb22
-rw-r--r--spec/workers/gitlab/jira_import/stage/import_attachments_worker_spec.rb4
-rw-r--r--spec/workers/gitlab/jira_import/stage/import_issues_worker_spec.rb4
-rw-r--r--spec/workers/gitlab/jira_import/stage/import_labels_worker_spec.rb4
-rw-r--r--spec/workers/gitlab/jira_import/stage/import_notes_worker_spec.rb4
-rw-r--r--spec/workers/post_receive_spec.rb3
-rw-r--r--spec/workers/stage_update_worker_spec.rb9
-rw-r--r--spec/workers/x509_issuer_crl_check_worker_spec.rb90
309 files changed, 14610 insertions, 2836 deletions
diff --git a/spec/config/smime_signature_settings_spec.rb b/spec/config/smime_signature_settings_spec.rb
index 4f076a92b16..7e7b42b129a 100644
--- a/spec/config/smime_signature_settings_spec.rb
+++ b/spec/config/smime_signature_settings_spec.rb
@@ -6,6 +6,7 @@ describe SmimeSignatureSettings do
describe '.parse' do
let(:default_smime_key) { Rails.root.join('.gitlab_smime_key') }
let(:default_smime_cert) { Rails.root.join('.gitlab_smime_cert') }
+ let(:default_smime_ca_certs) { nil }
it 'sets correct default values to disabled' do
parsed_settings = described_class.parse(nil)
@@ -13,6 +14,7 @@ describe SmimeSignatureSettings do
expect(parsed_settings['enabled']).to be(false)
expect(parsed_settings['key_file']).to eq(default_smime_key)
expect(parsed_settings['cert_file']).to eq(default_smime_cert)
+ expect(parsed_settings['ca_certs_file']).to eq(default_smime_ca_certs)
end
context 'when providing custom values' do
@@ -24,6 +26,7 @@ describe SmimeSignatureSettings do
expect(parsed_settings['enabled']).to be(false)
expect(parsed_settings['key_file']).to eq(default_smime_key)
expect(parsed_settings['cert_file']).to eq(default_smime_cert)
+ expect(parsed_settings['ca_certs_file']).to eq(default_smime_ca_certs)
end
it 'enables smime with default key and cert' do
@@ -36,15 +39,18 @@ describe SmimeSignatureSettings do
expect(parsed_settings['enabled']).to be(true)
expect(parsed_settings['key_file']).to eq(default_smime_key)
expect(parsed_settings['cert_file']).to eq(default_smime_cert)
+ expect(parsed_settings['ca_certs_file']).to eq(default_smime_ca_certs)
end
it 'enables smime with custom key and cert' do
custom_key = '/custom/key'
custom_cert = '/custom/cert'
+ custom_ca_certs = '/custom/ca_certs'
custom_settings = Settingslogic.new({
'enabled' => true,
'key_file' => custom_key,
- 'cert_file' => custom_cert
+ 'cert_file' => custom_cert,
+ 'ca_certs_file' => custom_ca_certs
})
parsed_settings = described_class.parse(custom_settings)
@@ -52,6 +58,7 @@ describe SmimeSignatureSettings do
expect(parsed_settings['enabled']).to be(true)
expect(parsed_settings['key_file']).to eq(custom_key)
expect(parsed_settings['cert_file']).to eq(custom_cert)
+ expect(parsed_settings['ca_certs_file']).to eq(custom_ca_certs)
end
end
end
diff --git a/spec/controllers/admin/integrations_controller_spec.rb b/spec/controllers/admin/integrations_controller_spec.rb
index 8e48ecddd0f..817223bd91a 100644
--- a/spec/controllers/admin/integrations_controller_spec.rb
+++ b/spec/controllers/admin/integrations_controller_spec.rb
@@ -49,11 +49,12 @@ describe Admin::IntegrationsController do
end
context 'invalid params' do
- let(:url) { 'https://jira.localhost' }
+ let(:url) { 'invalid' }
- it 'updates the integration' do
- expect(response).to have_gitlab_http_status(:found)
- expect(integration.reload.url).to eq(url)
+ it 'does not update the integration' do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template(:edit)
+ expect(integration.reload.url).not_to eq(url)
end
end
end
diff --git a/spec/controllers/admin/runners_controller_spec.rb b/spec/controllers/admin/runners_controller_spec.rb
index 7582006df36..803fcf90135 100644
--- a/spec/controllers/admin/runners_controller_spec.rb
+++ b/spec/controllers/admin/runners_controller_spec.rb
@@ -72,6 +72,30 @@ describe Admin::RunnersController do
expect(response).to have_gitlab_http_status(:ok)
end
+
+ describe 'Cost factors values' do
+ context 'when it is Gitlab.com' do
+ before do
+ expect(Gitlab).to receive(:com?).at_least(:once) { true }
+ end
+
+ it 'renders cost factors fields' do
+ get :show, params: { id: runner.id }
+
+ expect(response.body).to match /Private projects Minutes cost factor/
+ expect(response.body).to match /Public projects Minutes cost factor/
+ end
+ end
+
+ context 'when it is not Gitlab.com' do
+ it 'does not show cost factor fields' do
+ get :show, params: { id: runner.id }
+
+ expect(response.body).not_to match /Private projects Minutes cost factor/
+ expect(response.body).not_to match /Public projects Minutes cost factor/
+ end
+ end
+ end
end
describe '#update' do
diff --git a/spec/controllers/admin/users_controller_spec.rb b/spec/controllers/admin/users_controller_spec.rb
index a4ce510b413..387fc0407b6 100644
--- a/spec/controllers/admin/users_controller_spec.rb
+++ b/spec/controllers/admin/users_controller_spec.rb
@@ -340,6 +340,17 @@ describe Admin::UsersController do
end
end
+ describe "DELETE #remove_email" do
+ it 'deletes the email' do
+ email = create(:email, user: user)
+
+ delete :remove_email, params: { id: user.username, email_id: email.id }
+
+ expect(user.reload.emails).not_to include(email)
+ expect(flash[:notice]).to eq('Successfully removed email.')
+ end
+ end
+
describe "POST impersonate" do
context "when the user is blocked" do
before do
diff --git a/spec/controllers/dashboard/projects_controller_spec.rb b/spec/controllers/dashboard/projects_controller_spec.rb
index a13b56deb23..eeac696c3f2 100644
--- a/spec/controllers/dashboard/projects_controller_spec.rb
+++ b/spec/controllers/dashboard/projects_controller_spec.rb
@@ -86,11 +86,58 @@ describe Dashboard::ProjectsController do
end
describe 'GET /starred.json' do
+ subject { get :starred, format: :json }
+
+ let(:projects) { create_list(:project, 2, creator: user) }
+
before do
- get :starred, format: :json
+ allow(Kaminari.config).to receive(:default_per_page).and_return(1)
+
+ projects.each do |project|
+ project.add_developer(user)
+ create(:users_star_project, project_id: project.id, user_id: user.id)
+ end
end
- it { is_expected.to respond_with(:success) }
+ it 'returns success' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ it 'paginates the records' do
+ subject
+
+ expect(assigns(:projects).count).to eq(1)
+ end
+ end
+ end
+
+ context 'atom requests' do
+ let(:user) { create(:user) }
+
+ before do
+ sign_in(user)
+ end
+
+ describe '#index' do
+ context 'project pagination' do
+ let(:projects) { create_list(:project, 2, creator: user) }
+
+ before do
+ allow(Kaminari.config).to receive(:default_per_page).and_return(1)
+
+ projects.each do |project|
+ project.add_developer(user)
+ end
+ end
+
+ it 'does not paginate projects, even if normally restricted by pagination' do
+ get :index, format: :atom
+
+ expect(assigns(:events).count).to eq(2)
+ end
+ end
end
end
end
diff --git a/spec/controllers/groups/settings/ci_cd_controller_spec.rb b/spec/controllers/groups/settings/ci_cd_controller_spec.rb
index b2ae16e0ee6..45d62a7e6cf 100644
--- a/spec/controllers/groups/settings/ci_cd_controller_spec.rb
+++ b/spec/controllers/groups/settings/ci_cd_controller_spec.rb
@@ -216,88 +216,4 @@ describe Groups::Settings::CiCdController do
end
end
end
-
- describe 'POST create_deploy_token' do
- context 'when ajax_new_deploy_token feature flag is disabled for the project' do
- before do
- stub_feature_flags(ajax_new_deploy_token: { enabled: false, thing: group })
- entity.add_owner(user)
- end
-
- it_behaves_like 'a created deploy token' do
- let(:entity) { group }
- let(:create_entity_params) { { group_id: group } }
- let(:deploy_token_type) { DeployToken.deploy_token_types[:group_type] }
- end
- end
-
- context 'when ajax_new_deploy_token feature flag is enabled for the project' do
- let(:good_deploy_token_params) do
- {
- name: 'name',
- expires_at: 1.day.from_now.to_s,
- username: 'deployer',
- read_repository: '1',
- deploy_token_type: DeployToken.deploy_token_types[:group_type]
- }
- end
- let(:request_params) do
- {
- group_id: group.to_param,
- deploy_token: deploy_token_params
- }
- end
-
- before do
- group.add_owner(user)
- end
-
- subject { post :create_deploy_token, params: request_params, format: :json }
-
- context('a good request') do
- let(:deploy_token_params) { good_deploy_token_params }
- let(:expected_response) do
- {
- 'id' => be_a(Integer),
- 'name' => deploy_token_params[:name],
- 'username' => deploy_token_params[:username],
- 'expires_at' => Time.parse(deploy_token_params[:expires_at]),
- 'token' => be_a(String),
- 'scopes' => deploy_token_params.inject([]) do |scopes, kv|
- key, value = kv
- key.to_s.start_with?('read_') && !value.to_i.zero? ? scopes << key.to_s : scopes
- end
- }
- end
-
- it 'creates the deploy token' do
- subject
-
- expect(response).to have_gitlab_http_status(:created)
- expect(response).to match_response_schema('public_api/v4/deploy_token')
- expect(json_response).to match(expected_response)
- end
- end
-
- context('a bad request') do
- let(:deploy_token_params) { good_deploy_token_params.except(:read_repository) }
- let(:expected_response) { { 'message' => "Scopes can't be blank" } }
-
- it 'does not create the deploy token' do
- subject
-
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response).to match(expected_response)
- end
- end
-
- context('an invalid request') do
- let(:deploy_token_params) { good_deploy_token_params.except(:name) }
-
- it 'raises a validation error' do
- expect { subject }.to raise_error(ActiveRecord::StatementInvalid)
- end
- end
- end
- end
end
diff --git a/spec/controllers/groups/settings/integrations_controller_spec.rb b/spec/controllers/groups/settings/integrations_controller_spec.rb
index 6df1ad8a383..76cd74de183 100644
--- a/spec/controllers/groups/settings/integrations_controller_spec.rb
+++ b/spec/controllers/groups/settings/integrations_controller_spec.rb
@@ -100,11 +100,12 @@ describe Groups::Settings::IntegrationsController do
end
context 'invalid params' do
- let(:url) { 'https://jira.localhost' }
+ let(:url) { 'invalid' }
it 'does not update the integration' do
- expect(response).to have_gitlab_http_status(:found)
- expect(integration.reload.url).to eq(url)
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template(:edit)
+ expect(integration.reload.url).not_to eq(url)
end
end
end
diff --git a/spec/controllers/groups/settings/repository_controller_spec.rb b/spec/controllers/groups/settings/repository_controller_spec.rb
new file mode 100644
index 00000000000..20070fb17a0
--- /dev/null
+++ b/spec/controllers/groups/settings/repository_controller_spec.rb
@@ -0,0 +1,98 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Groups::Settings::RepositoryController do
+ include ExternalAuthorizationServiceHelpers
+
+ let(:group) { create(:group) }
+ let(:user) { create(:user) }
+
+ before do
+ sign_in(user)
+ end
+
+ describe 'POST create_deploy_token' do
+ context 'when ajax_new_deploy_token feature flag is disabled for the project' do
+ before do
+ stub_feature_flags(ajax_new_deploy_token: { enabled: false, thing: group })
+ entity.add_owner(user)
+ end
+
+ it_behaves_like 'a created deploy token' do
+ let(:entity) { group }
+ let(:create_entity_params) { { group_id: group } }
+ let(:deploy_token_type) { DeployToken.deploy_token_types[:group_type] }
+ end
+ end
+
+ context 'when ajax_new_deploy_token feature flag is enabled for the project' do
+ let(:good_deploy_token_params) do
+ {
+ name: 'name',
+ expires_at: 1.day.from_now.to_s,
+ username: 'deployer',
+ read_repository: '1',
+ deploy_token_type: DeployToken.deploy_token_types[:group_type]
+ }
+ end
+ let(:request_params) do
+ {
+ group_id: group.to_param,
+ deploy_token: deploy_token_params
+ }
+ end
+
+ before do
+ group.add_owner(user)
+ end
+
+ subject { post :create_deploy_token, params: request_params, format: :json }
+
+ context('a good request') do
+ let(:deploy_token_params) { good_deploy_token_params }
+ let(:expected_response) do
+ {
+ 'id' => be_a(Integer),
+ 'name' => deploy_token_params[:name],
+ 'username' => deploy_token_params[:username],
+ 'expires_at' => Time.parse(deploy_token_params[:expires_at]),
+ 'token' => be_a(String),
+ 'scopes' => deploy_token_params.inject([]) do |scopes, kv|
+ key, value = kv
+ key.to_s.start_with?('read_') && !value.to_i.zero? ? scopes << key.to_s : scopes
+ end
+ }
+ end
+
+ it 'creates the deploy token' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(response).to match_response_schema('public_api/v4/deploy_token')
+ expect(json_response).to match(expected_response)
+ end
+ end
+
+ context('a bad request') do
+ let(:deploy_token_params) { good_deploy_token_params.except(:read_repository) }
+ let(:expected_response) { { 'message' => "Scopes can't be blank" } }
+
+ it 'does not create the deploy token' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response).to match(expected_response)
+ end
+ end
+
+ context('an invalid request') do
+ let(:deploy_token_params) { good_deploy_token_params.except(:name) }
+
+ it 'raises a validation error' do
+ expect { subject }.to raise_error(ActiveRecord::StatementInvalid)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/controllers/projects/import/jira_controller_spec.rb b/spec/controllers/projects/import/jira_controller_spec.rb
index 4629aab65dd..c3bf0c09fba 100644
--- a/spec/controllers/projects/import/jira_controller_spec.rb
+++ b/spec/controllers/projects/import/jira_controller_spec.rb
@@ -63,7 +63,7 @@ describe Projects::Import::JiraController do
stub_feature_flags(jira_issue_import_vue: false)
end
- context 'when jira service is enabled for the project' do
+ context 'when Jira service is enabled for the project' do
let_it_be(:jira_service) { create(:jira_service, project: project) }
context 'when user is developer' do
@@ -79,7 +79,7 @@ describe Projects::Import::JiraController do
get :show, params: { namespace_id: project.namespace.to_param, project_id: project }
end
- it 'does not query jira service' do
+ it 'does not query Jira service' do
expect(project).not_to receive(:jira_service)
end
@@ -118,7 +118,7 @@ describe Projects::Import::JiraController do
end
end
- context 'when running jira import first time' do
+ context 'when running Jira import first time' do
context 'get show' do
before do
allow(JIRA::Resource::Project).to receive(:all).and_return(jira_projects)
@@ -147,12 +147,12 @@ describe Projects::Import::JiraController do
end
context 'post import' do
- context 'when jira project key is empty' do
+ context 'when Jira project key is empty' do
it 'redirects back to show with an error' do
post :import, params: { namespace_id: project.namespace, project_id: project, jira_project_key: '' }
expect(response).to redirect_to(project_import_jira_path(project))
- expect(flash[:alert]).to eq('No jira project key has been provided.')
+ expect(flash[:alert]).to eq('No Jira project key has been provided.')
end
end
@@ -197,7 +197,7 @@ describe Projects::Import::JiraController do
end
end
- context 'when jira import ran before' do
+ context 'when Jira import ran before' do
let_it_be(:jira_import_state) { create(:jira_import_state, :finished, project: project, jira_project_key: jira_project_key) }
context 'get show' do
diff --git a/spec/controllers/projects/issues_controller_spec.rb b/spec/controllers/projects/issues_controller_spec.rb
index 9526e14a748..123e022beca 100644
--- a/spec/controllers/projects/issues_controller_spec.rb
+++ b/spec/controllers/projects/issues_controller_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
describe Projects::IssuesController do
include ProjectForksHelper
+ include_context 'includes Spam constants'
let(:project) { create(:project) }
let(:user) { create(:user) }
@@ -419,11 +420,11 @@ describe Projects::IssuesController do
expect(issue.reload.title).to eq('New title')
end
- context 'when Akismet is enabled and the issue is identified as spam' do
+ context 'when the SpamVerdictService disallows' do
before do
stub_application_setting(recaptcha_enabled: true)
- expect_next_instance_of(Spam::AkismetService) do |akismet_service|
- expect(akismet_service).to receive_messages(spam?: true)
+ expect_next_instance_of(Spam::SpamVerdictService) do |verdict_service|
+ expect(verdict_service).to receive(:execute).and_return(REQUIRE_RECAPTCHA)
end
end
@@ -716,16 +717,16 @@ describe Projects::IssuesController do
end
end
- context 'Akismet is enabled' do
+ context 'Recaptcha is enabled' do
before do
project.update!(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
stub_application_setting(recaptcha_enabled: true)
end
- context 'when an issue is not identified as spam' do
+ context 'when SpamVerdictService allows the issue' do
before do
- expect_next_instance_of(Spam::AkismetService) do |akismet_service|
- expect(akismet_service).to receive_messages(spam?: false)
+ expect_next_instance_of(Spam::SpamVerdictService) do |verdict_service|
+ expect(verdict_service).to receive(:execute).and_return(ALLOW)
end
end
@@ -735,10 +736,10 @@ describe Projects::IssuesController do
end
context 'when an issue is identified as spam' do
- context 'when captcha is not verified' do
+ context 'when recaptcha is not verified' do
before do
- expect_next_instance_of(Spam::AkismetService) do |akismet_service|
- expect(akismet_service).to receive_messages(spam?: true)
+ expect_next_instance_of(Spam::SpamVerdictService) do |verdict_service|
+ expect(verdict_service).to receive(:execute).and_return(REQUIRE_RECAPTCHA)
end
end
@@ -796,7 +797,7 @@ describe Projects::IssuesController do
end
end
- context 'when captcha is verified' do
+ context 'when recaptcha is verified' do
let(:spammy_title) { 'Whatever' }
let!(:spam_logs) { create_list(:spam_log, 2, user: user, title: spammy_title) }
@@ -967,17 +968,17 @@ describe Projects::IssuesController do
end
end
- context 'Akismet is enabled' do
+ context 'Recaptcha is enabled' do
before do
stub_application_setting(recaptcha_enabled: true)
end
- context 'when an issue is not identified as spam' do
+ context 'when SpamVerdictService allows the issue' do
before do
stub_feature_flags(allow_possible_spam: false)
- expect_next_instance_of(Spam::AkismetService) do |akismet_service|
- expect(akismet_service).to receive_messages(spam?: false)
+ expect_next_instance_of(Spam::SpamVerdictService) do |verdict_service|
+ expect(verdict_service).to receive(:execute).and_return(ALLOW)
end
end
@@ -986,18 +987,18 @@ describe Projects::IssuesController do
end
end
- context 'when an issue is identified as spam' do
+ context 'when SpamVerdictService requires recaptcha' do
context 'when captcha is not verified' do
- def post_spam_issue
- post_new_issue(title: 'Spam Title', description: 'Spam lives here')
- end
-
before do
- expect_next_instance_of(Spam::AkismetService) do |akismet_service|
- expect(akismet_service).to receive_messages(spam?: true)
+ expect_next_instance_of(Spam::SpamVerdictService) do |verdict_service|
+ expect(verdict_service).to receive(:execute).and_return(REQUIRE_RECAPTCHA)
end
end
+ def post_spam_issue
+ post_new_issue(title: 'Spam Title', description: 'Spam lives here')
+ end
+
context 'when allow_possible_spam feature flag is false' do
before do
stub_feature_flags(allow_possible_spam: false)
@@ -1039,11 +1040,12 @@ describe Projects::IssuesController do
end
end
- context 'when captcha is verified' do
+ context 'when Recaptcha is verified' do
let!(:spam_logs) { create_list(:spam_log, 2, user: user, title: 'Title') }
+ let!(:last_spam_log) { spam_logs.last }
def post_verified_issue
- post_new_issue({}, { spam_log_id: spam_logs.last.id, recaptcha_verification: true } )
+ post_new_issue({}, { spam_log_id: last_spam_log.id, recaptcha_verification: true } )
end
before do
@@ -1055,14 +1057,14 @@ describe Projects::IssuesController do
end
it 'marks spam log as recaptcha_verified' do
- expect { post_verified_issue }.to change { SpamLog.last.recaptcha_verified }.from(false).to(true)
+ expect { post_verified_issue }.to change { last_spam_log.reload.recaptcha_verified }.from(false).to(true)
end
it 'does not mark spam log as recaptcha_verified when it does not belong to current_user' do
spam_log = create(:spam_log)
expect { post_new_issue({}, { spam_log_id: spam_log.id, recaptcha_verification: true } ) }
- .not_to change { SpamLog.last.recaptcha_verified }
+ .not_to change { last_spam_log.recaptcha_verified }
end
end
end
@@ -1427,6 +1429,45 @@ describe Projects::IssuesController do
end
end
+ describe 'POST export_csv' do
+ let(:viewer) { user }
+ let(:issue) { create(:issue, project: project) }
+
+ before do
+ project.add_developer(user)
+ end
+
+ def request_csv
+ post :export_csv, params: { namespace_id: project.namespace.to_param, project_id: project.to_param }
+ end
+
+ context 'when logged in' do
+ before do
+ sign_in(viewer)
+ end
+
+ it 'allows CSV export' do
+ expect(ExportCsvWorker).to receive(:perform_async).with(viewer.id, project.id, anything)
+
+ request_csv
+
+ expect(response).to redirect_to(project_issues_path(project))
+ expect(response.flash[:notice]).to match(/\AYour CSV export has started/i)
+ end
+ end
+
+ context 'when not logged in' do
+ let(:project) { create(:project_empty_repo, :public) }
+
+ it 'redirects to the sign in page' do
+ request_csv
+
+ expect(ExportCsvWorker).not_to receive(:perform_async)
+ expect(response).to redirect_to(new_user_session_path)
+ end
+ end
+ end
+
describe 'GET #discussions' do
let!(:discussion) { create(:discussion_note_on_issue, noteable: issue, project: issue.project) }
diff --git a/spec/controllers/projects/merge_requests_controller_spec.rb b/spec/controllers/projects/merge_requests_controller_spec.rb
index 5104c83283d..cf3c4977911 100644
--- a/spec/controllers/projects/merge_requests_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests_controller_spec.rb
@@ -935,34 +935,6 @@ describe Projects::MergeRequestsController do
}])
end
end
-
- context 'when feature flag :ci_expose_arbitrary_artifacts_in_mr is disabled' do
- let(:job_options) do
- {
- artifacts: {
- paths: ['ci_artifacts.txt'],
- expose_as: 'Exposed artifact'
- }
- }
- end
- let(:report) { double }
-
- before do
- stub_feature_flags(ci_expose_arbitrary_artifacts_in_mr: false)
- end
-
- it 'does not send polling interval' do
- expect(Gitlab::PollingInterval).not_to receive(:set_header)
-
- subject
- end
-
- it 'returns 204 HTTP status' do
- subject
-
- expect(response).to have_gitlab_http_status(:no_content)
- end
- end
end
context 'when pipeline does not have jobs with exposed artifacts' do
@@ -1114,6 +1086,150 @@ describe Projects::MergeRequestsController do
end
end
+ describe 'GET terraform_reports' do
+ let(:merge_request) do
+ create(:merge_request,
+ :with_merge_request_pipeline,
+ target_project: project,
+ source_project: project)
+ end
+
+ let(:pipeline) do
+ create(:ci_pipeline,
+ :success,
+ :with_terraform_reports,
+ project: merge_request.source_project,
+ ref: merge_request.source_branch,
+ sha: merge_request.diff_head_sha)
+ end
+
+ before do
+ allow_any_instance_of(MergeRequest)
+ .to receive(:find_terraform_reports)
+ .and_return(report)
+
+ allow_any_instance_of(MergeRequest)
+ .to receive(:actual_head_pipeline)
+ .and_return(pipeline)
+ end
+
+ subject do
+ get :terraform_reports, params: {
+ namespace_id: project.namespace.to_param,
+ project_id: project,
+ id: merge_request.iid
+ },
+ format: :json
+ end
+
+ describe 'permissions on a public project with private CI/CD' do
+ let(:project) { create :project, :repository, :public, :builds_private }
+ let(:report) { { status: :parsed, data: [] } }
+
+ context 'while signed out' do
+ before do
+ sign_out(user)
+ end
+
+ it 'responds with a 404' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(response.body).to be_blank
+ end
+ end
+
+ context 'while signed in as an unrelated user' do
+ before do
+ sign_in(create(:user))
+ end
+
+ it 'responds with a 404' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(response.body).to be_blank
+ end
+ end
+ end
+
+ context 'when pipeline has jobs with terraform reports' do
+ before do
+ allow_next_instance_of(MergeRequest) do |merge_request|
+ allow(merge_request).to receive(:has_terraform_reports?).and_return(true)
+ end
+ end
+
+ context 'when processing terraform reports is in progress' do
+ let(:report) { { status: :parsing } }
+
+ it 'sends polling interval' do
+ expect(Gitlab::PollingInterval).to receive(:set_header)
+
+ subject
+ end
+
+ it 'returns 204 HTTP status' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ end
+ end
+
+ context 'when processing terraform reports is completed' do
+ let(:report) { { status: :parsed, data: pipeline.terraform_reports.plans } }
+
+ it 'returns terraform reports' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to match(
+ a_hash_including(
+ 'tfplan.json' => hash_including(
+ 'create' => 0,
+ 'delete' => 0,
+ 'update' => 1
+ )
+ )
+ )
+ end
+ end
+
+ context 'when user created corrupted terraform reports' do
+ let(:report) { { status: :error, status_reason: 'Failed to parse terraform reports' } }
+
+ it 'does not send polling interval' do
+ expect(Gitlab::PollingInterval).not_to receive(:set_header)
+
+ subject
+ end
+
+ it 'returns 400 HTTP status' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response).to eq({ 'status_reason' => 'Failed to parse terraform reports' })
+ end
+ end
+ end
+
+ context 'when pipeline does not have jobs with terraform reports' do
+ before do
+ allow_next_instance_of(MergeRequest) do |merge_request|
+ allow(merge_request).to receive(:has_terraform_reports?).and_return(false)
+ end
+ end
+
+ let(:report) { { status: :error } }
+
+ it 'returns error' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+ end
+
describe 'GET test_reports' do
let(:merge_request) do
create(:merge_request,
@@ -1245,7 +1361,7 @@ describe Projects::MergeRequestsController do
end
it 'renders MergeRequest as JSON' do
- expect(json_response.keys).to include('id', 'iid')
+ expect(json_response.keys).to include('id', 'iid', 'title', 'has_ci', 'merge_status', 'can_be_merged', 'current_user')
end
end
@@ -1279,7 +1395,7 @@ describe Projects::MergeRequestsController do
it 'renders MergeRequest as JSON' do
subject
- expect(json_response.keys).to include('id', 'iid')
+ expect(json_response.keys).to include('id', 'iid', 'title', 'has_ci', 'merge_status', 'can_be_merged', 'current_user')
end
end
diff --git a/spec/controllers/projects/pipelines_controller_spec.rb b/spec/controllers/projects/pipelines_controller_spec.rb
index 9d243bf5a7f..8dc337e3a3d 100644
--- a/spec/controllers/projects/pipelines_controller_spec.rb
+++ b/spec/controllers/projects/pipelines_controller_spec.rb
@@ -752,8 +752,6 @@ describe Projects::PipelinesController do
end
context 'when pipeline does not have a test report' do
- let(:pipeline) { create(:ci_pipeline, project: project) }
-
it 'renders an empty test report' do
get_test_report_json
@@ -763,7 +761,11 @@ describe Projects::PipelinesController do
end
context 'when pipeline has a test report' do
- let(:pipeline) { create(:ci_pipeline, :with_test_reports, project: project) }
+ before do
+ create(:ci_build, name: 'rspec', pipeline: pipeline).tap do |build|
+ create(:ci_job_artifact, :junit, job: build)
+ end
+ end
it 'renders the test report' do
get_test_report_json
@@ -773,19 +775,22 @@ describe Projects::PipelinesController do
end
end
- context 'when pipeline has corrupt test reports' do
- let(:pipeline) { create(:ci_pipeline, project: project) }
-
+ context 'when pipeline has a corrupt test report artifact' do
before do
- job = create(:ci_build, pipeline: pipeline)
- create(:ci_job_artifact, :junit_with_corrupted_data, job: job, project: project)
- end
+ create(:ci_build, name: 'rspec', pipeline: pipeline).tap do |build|
+ create(:ci_job_artifact, :junit_with_corrupted_data, job: build)
+ end
- it 'renders the test reports' do
get_test_report_json
+ end
+ it 'renders the test reports' do
expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['status']).to eq('error_parsing_report')
+ expect(json_response['test_suites'].count).to eq(1)
+ end
+
+ it 'returns a suite_error on the suite with corrupted XML' do
+ expect(json_response['test_suites'].first['suite_error']).to eq('JUnit XML parsing failed: 1:1: FATAL: Document is empty')
end
end
diff --git a/spec/controllers/repositories/git_http_controller_spec.rb b/spec/controllers/repositories/git_http_controller_spec.rb
index e565c757f95..59455d90c25 100644
--- a/spec/controllers/repositories/git_http_controller_spec.rb
+++ b/spec/controllers/repositories/git_http_controller_spec.rb
@@ -95,7 +95,7 @@ describe Repositories::GitHttpController do
allow(controller).to receive(:access_check).and_return(nil)
end
- after do
+ def send_request
post :git_upload_pack, params: params
end
@@ -106,16 +106,46 @@ describe Repositories::GitHttpController do
it 'does not update project statistics' do
expect(ProjectDailyStatisticsWorker).not_to receive(:perform_async)
+
+ send_request
end
end
if expected
- it 'updates project statistics' do
- expect(ProjectDailyStatisticsWorker).to receive(:perform_async)
+ context 'when project_statistics_sync feature flag is disabled' do
+ before do
+ stub_feature_flags(project_statistics_sync: false)
+ end
+
+ it 'updates project statistics async' do
+ expect(ProjectDailyStatisticsWorker).to receive(:perform_async)
+
+ send_request
+ end
+ end
+
+ it 'updates project statistics sync' do
+ expect { send_request }.to change {
+ Projects::DailyStatisticsFinder.new(project).total_fetch_count
+ }.from(0).to(1)
end
else
+ context 'when project_statistics_sync feature flag is disabled' do
+ before do
+ stub_feature_flags(project_statistics_sync: false)
+ end
+
+ it 'does not update project statistics' do
+ expect(ProjectDailyStatisticsWorker).not_to receive(:perform_async)
+
+ send_request
+ end
+ end
+
it 'does not update project statistics' do
- expect(ProjectDailyStatisticsWorker).not_to receive(:perform_async)
+ expect { send_request }.not_to change {
+ Projects::DailyStatisticsFinder.new(project).total_fetch_count
+ }.from(0)
end
end
end
diff --git a/spec/factories/ci/bridge.rb b/spec/factories/ci/bridge.rb
index bacf163896c..4c1d5f07a42 100644
--- a/spec/factories/ci/bridge.rb
+++ b/spec/factories/ci/bridge.rb
@@ -7,7 +7,7 @@ FactoryBot.define do
stage_idx { 0 }
ref { 'master' }
tag { false }
- created_at { 'Di 29. Okt 09:50:00 CET 2013' }
+ created_at { '2013-10-29 09:50:00 CET' }
status { :created }
scheduling_type { 'stage' }
@@ -39,5 +39,19 @@ FactoryBot.define do
)
end
end
+
+ trait :started do
+ started_at { '2013-10-29 09:51:28 CET' }
+ end
+
+ trait :finished do
+ started
+ finished_at { '2013-10-29 09:53:28 CET' }
+ end
+
+ trait :failed do
+ finished
+ status { 'failed' }
+ end
end
end
diff --git a/spec/factories/ci/builds.rb b/spec/factories/ci/builds.rb
index fb3c163dff1..875371d26c9 100644
--- a/spec/factories/ci/builds.rb
+++ b/spec/factories/ci/builds.rb
@@ -314,12 +314,24 @@ FactoryBot.define do
end
end
+ trait :broken_test_reports do
+ after(:build) do |build|
+ build.job_artifacts << create(:ci_job_artifact, :junit_with_corrupted_data, job: build)
+ end
+ end
+
trait :coverage_reports do
after(:build) do |build|
build.job_artifacts << create(:ci_job_artifact, :cobertura, job: build)
end
end
+ trait :terraform_reports do
+ after(:build) do |build|
+ build.job_artifacts << create(:ci_job_artifact, :terraform, job: build)
+ end
+ end
+
trait :expired do
artifacts_expire_at { 1.minute.ago }
end
diff --git a/spec/factories/ci/job_artifacts.rb b/spec/factories/ci/job_artifacts.rb
index 82383cfa2b0..d3f6ef37bf9 100644
--- a/spec/factories/ci/job_artifacts.rb
+++ b/spec/factories/ci/job_artifacts.rb
@@ -149,6 +149,26 @@ FactoryBot.define do
end
end
+ trait :terraform do
+ file_type { :terraform }
+ file_format { :raw }
+
+ after(:build) do |artifact, evaluator|
+ artifact.file = fixture_file_upload(
+ Rails.root.join('spec/fixtures/terraform/tfplan.json'), 'application/json')
+ end
+ end
+
+ trait :terraform_with_corrupted_data do
+ file_type { :terraform }
+ file_format { :raw }
+
+ after(:build) do |artifact, evaluator|
+ artifact.file = fixture_file_upload(
+ Rails.root.join('spec/fixtures/terraform/tfplan_with_corrupted_data.json'), 'application/json')
+ end
+ end
+
trait :coverage_gocov_xml do
file_type { :cobertura }
file_format { :gzip }
diff --git a/spec/factories/ci/pipelines.rb b/spec/factories/ci/pipelines.rb
index 257dd3337ba..f71225eac22 100644
--- a/spec/factories/ci/pipelines.rb
+++ b/spec/factories/ci/pipelines.rb
@@ -75,6 +75,14 @@ FactoryBot.define do
end
end
+ trait :with_broken_test_reports do
+ status { :success }
+
+ after(:build) do |pipeline, _evaluator|
+ pipeline.builds << build(:ci_build, :broken_test_reports, pipeline: pipeline, project: pipeline.project)
+ end
+ end
+
trait :with_coverage_reports do
status { :success }
@@ -83,6 +91,14 @@ FactoryBot.define do
end
end
+ trait :with_terraform_reports do
+ status { :success }
+
+ after(:build) do |pipeline, evaluator|
+ pipeline.builds << build(:ci_build, :terraform_reports, pipeline: pipeline, project: pipeline.project)
+ end
+ end
+
trait :with_exposed_artifacts do
status { :success }
diff --git a/spec/factories/import_failures.rb b/spec/factories/import_failures.rb
new file mode 100644
index 00000000000..376b2ff39e2
--- /dev/null
+++ b/spec/factories/import_failures.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require 'securerandom'
+
+FactoryBot.define do
+ factory :import_failure do
+ association :project, factory: :project
+
+ created_at { Time.parse('2020-01-01T00:00:00Z') }
+ exception_class { 'RuntimeError' }
+ exception_message { 'Something went wrong' }
+ source { 'method_call' }
+ correlation_id_value { SecureRandom.uuid }
+
+ trait :hard_failure do
+ retry_count { 0 }
+ end
+
+ trait :soft_failure do
+ retry_count { 1 }
+ end
+ end
+end
diff --git a/spec/factories/merge_requests.rb b/spec/factories/merge_requests.rb
index abccd775c8a..5916e76dce1 100644
--- a/spec/factories/merge_requests.rb
+++ b/spec/factories/merge_requests.rb
@@ -133,6 +133,18 @@ FactoryBot.define do
end
end
+ trait :with_terraform_reports do
+ after(:build) do |merge_request|
+ merge_request.head_pipeline = build(
+ :ci_pipeline,
+ :success,
+ :with_terraform_reports,
+ project: merge_request.source_project,
+ ref: merge_request.source_branch,
+ sha: merge_request.diff_head_sha)
+ end
+ end
+
trait :with_exposed_artifacts do
after(:build) do |merge_request|
merge_request.head_pipeline = build(
diff --git a/spec/factories/projects.rb b/spec/factories/projects.rb
index 2b468ef92e1..64321c9f319 100644
--- a/spec/factories/projects.rb
+++ b/spec/factories/projects.rb
@@ -37,6 +37,8 @@ FactoryBot.define do
group_runners_enabled { nil }
import_status { nil }
import_jid { nil }
+ import_correlation_id { nil }
+ import_last_error { nil }
forward_deployment_enabled { nil }
end
@@ -78,6 +80,8 @@ FactoryBot.define do
import_state = project.import_state || project.build_import_state
import_state.status = evaluator.import_status
import_state.jid = evaluator.import_jid
+ import_state.correlation_id_value = evaluator.import_correlation_id
+ import_state.last_error = evaluator.import_last_error
import_state.save
end
end
diff --git a/spec/factories/terraform/state.rb b/spec/factories/terraform/state.rb
index 4b83128ff6e..74950ccf93e 100644
--- a/spec/factories/terraform/state.rb
+++ b/spec/factories/terraform/state.rb
@@ -4,8 +4,10 @@ FactoryBot.define do
factory :terraform_state, class: 'Terraform::State' do
project { create(:project) }
+ sequence(:name) { |n| "state-#{n}" }
+
trait :with_file do
- file { fixture_file_upload('spec/fixtures/terraform/terraform.tfstate') }
+ file { fixture_file_upload('spec/fixtures/terraform/terraform.tfstate', 'application/json') }
end
end
end
diff --git a/spec/factories/users.rb b/spec/factories/users.rb
index 0ce567e11fe..f274503f0e7 100644
--- a/spec/factories/users.rb
+++ b/spec/factories/users.rb
@@ -27,6 +27,10 @@ FactoryBot.define do
user_type { :alert_bot }
end
+ trait :project_bot do
+ user_type { :project_bot }
+ end
+
trait :external do
external { true }
end
@@ -83,12 +87,17 @@ FactoryBot.define do
transient do
developer_projects { [] }
+ maintainer_projects { [] }
end
after(:create) do |user, evaluator|
evaluator.developer_projects.each do |project|
project.add_developer(user)
end
+
+ evaluator.maintainer_projects.each do |project|
+ project.add_maintainer(user)
+ end
end
factory :omniauth_user do
diff --git a/spec/features/admin/admin_mode/workers_spec.rb b/spec/features/admin/admin_mode/workers_spec.rb
index e33c9d7e64c..0ca61e6c193 100644
--- a/spec/features/admin/admin_mode/workers_spec.rb
+++ b/spec/features/admin/admin_mode/workers_spec.rb
@@ -8,8 +8,6 @@ describe 'Admin mode for workers', :do_not_mock_admin_mode, :request_store, :cle
let(:user_to_delete) { create(:user) }
before do
- add_sidekiq_middleware
-
sign_in(user)
end
@@ -60,12 +58,6 @@ describe 'Admin mode for workers', :do_not_mock_admin_mode, :request_store, :cle
end
end
- def add_sidekiq_middleware
- Sidekiq::Testing.server_middleware do |chain|
- chain.add Gitlab::SidekiqMiddleware::AdminMode::Server
- end
- end
-
def execute_jobs_signed_out(user)
gitlab_sign_out
diff --git a/spec/features/cycle_analytics_spec.rb b/spec/features/cycle_analytics_spec.rb
index 4a20d1b7d60..50d9cb1c833 100644
--- a/spec/features/cycle_analytics_spec.rb
+++ b/spec/features/cycle_analytics_spec.rb
@@ -30,6 +30,7 @@ describe 'Value Stream Analytics', :js do
expect(new_issues_counter).to have_content('-')
expect(commits_counter).to have_content('-')
expect(deploys_counter).to have_content('-')
+ expect(deployment_frequency_counter).to have_content('-')
end
it 'shows active stage with empty message' do
@@ -53,6 +54,7 @@ describe 'Value Stream Analytics', :js do
expect(new_issues_counter).to have_content('1')
expect(commits_counter).to have_content('2')
expect(deploys_counter).to have_content('1')
+ expect(deployment_frequency_counter).to have_content('0')
end
it 'shows data on each stage', :sidekiq_might_not_need_inline do
@@ -134,7 +136,15 @@ describe 'Value Stream Analytics', :js do
end
def deploys_counter
- find(:xpath, "//p[contains(text(),'Deploy')]/preceding-sibling::h3")
+ find(:xpath, "//p[contains(text(),'Deploy')]/preceding-sibling::h3", match: :first)
+ end
+
+ def deployment_frequency_counter_selector
+ "//p[contains(text(),'Deployment Frequency')]/preceding-sibling::h3"
+ end
+
+ def deployment_frequency_counter
+ find(:xpath, deployment_frequency_counter_selector)
end
def expect_issue_to_be_present
diff --git a/spec/features/dashboard/snippets_spec.rb b/spec/features/dashboard/snippets_spec.rb
index 287af7e7b11..b44deaa3174 100644
--- a/spec/features/dashboard/snippets_spec.rb
+++ b/spec/features/dashboard/snippets_spec.rb
@@ -49,47 +49,6 @@ describe 'Dashboard snippets' do
end
end
- context 'rendering file names' do
- let_it_be(:snippet) { create(:personal_snippet, :public, author: user, file_name: 'foo.txt') }
- let_it_be(:versioned_snippet) { create(:personal_snippet, :repository, :public, author: user, file_name: 'bar.txt') }
-
- before do
- sign_in(user)
- end
-
- context 'when feature flag :version_snippets is disabled' do
- before do
- stub_feature_flags(version_snippets: false)
-
- visit dashboard_snippets_path
- end
-
- it 'contains the snippet file names from the DB' do
- aggregate_failures do
- expect(page).to have_content 'foo.txt'
- expect(page).to have_content('bar.txt')
- expect(page).not_to have_content('.gitattributes')
- end
- end
- end
-
- context 'when feature flag :version_snippets is enabled' do
- before do
- stub_feature_flags(version_snippets: true)
-
- visit dashboard_snippets_path
- end
-
- it 'contains both the versioned and non-versioned filenames' do
- aggregate_failures do
- expect(page).to have_content 'foo.txt'
- expect(page).to have_content('.gitattributes')
- expect(page).not_to have_content('bar.txt')
- end
- end
- end
- end
-
context 'filtering by visibility' do
let_it_be(:snippets) do
[
diff --git a/spec/features/groups/settings/ci_cd_spec.rb b/spec/features/groups/settings/ci_cd_spec.rb
index 3fbc7c7a695..5b1a9512c55 100644
--- a/spec/features/groups/settings/ci_cd_spec.rb
+++ b/spec/features/groups/settings/ci_cd_spec.rb
@@ -37,19 +37,6 @@ describe 'Group CI/CD settings' do
end
end
- context 'Deploy tokens' do
- let!(:deploy_token) { create(:deploy_token, :group, groups: [group]) }
-
- before do
- stub_container_registry_config(enabled: true)
- visit group_settings_ci_cd_path(group)
- end
-
- it_behaves_like 'a deploy token in ci/cd settings' do
- let(:entity_type) { 'group' }
- end
- end
-
describe 'Auto DevOps form' do
before do
stub_application_setting(auto_devops_enabled: true)
diff --git a/spec/features/groups/settings/repository_spec.rb b/spec/features/groups/settings/repository_spec.rb
new file mode 100644
index 00000000000..722fd98ce59
--- /dev/null
+++ b/spec/features/groups/settings/repository_spec.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Group Repository settings' do
+ include WaitForRequests
+
+ let(:user) { create(:user) }
+ let(:group) { create(:group) }
+
+ before do
+ group.add_owner(user)
+ sign_in(user)
+ end
+
+ context 'Deploy tokens' do
+ let!(:deploy_token) { create(:deploy_token, :group, groups: [group]) }
+
+ before do
+ stub_container_registry_config(enabled: true)
+ visit group_settings_repository_path(group)
+ end
+
+ it_behaves_like 'a deploy token in settings' do
+ let(:entity_type) { 'group' }
+ end
+ end
+end
diff --git a/spec/features/issues/csv_spec.rb b/spec/features/issues/csv_spec.rb
new file mode 100644
index 00000000000..193c83d2a40
--- /dev/null
+++ b/spec/features/issues/csv_spec.rb
@@ -0,0 +1,100 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Issues csv' do
+ let(:user) { create(:user) }
+ let(:project) { create(:project, :public) }
+ let(:milestone) { create(:milestone, title: 'v1.0', project: project) }
+ let(:idea_label) { create(:label, project: project, title: 'Idea') }
+ let(:feature_label) { create(:label, project: project, title: 'Feature', priority: 10) }
+ let!(:issue) { create(:issue, project: project, author: user) }
+
+ before do
+ sign_in(user)
+ end
+
+ def request_csv(params = {})
+ visit project_issues_path(project, params)
+ page.within('.nav-controls') do
+ click_on 'Export as CSV'
+ end
+ click_on 'Export issues'
+ end
+
+ def attachment
+ ActionMailer::Base.deliveries.last.attachments.first
+ end
+
+ def csv
+ CSV.parse(attachment.decode_body, headers: true)
+ end
+
+ it 'triggers an email export' do
+ expect(ExportCsvWorker).to receive(:perform_async).with(user.id, project.id, hash_including("project_id" => project.id))
+
+ request_csv
+ end
+
+ it "doesn't send request params to ExportCsvWorker" do
+ expect(ExportCsvWorker).to receive(:perform_async).with(anything, anything, hash_excluding("controller" => anything, "action" => anything))
+
+ request_csv
+ end
+
+ it 'displays flash message' do
+ request_csv
+
+ expect(page).to have_content 'CSV export has started'
+ expect(page).to have_content "emailed to #{user.notification_email}"
+ end
+
+ it 'includes a csv attachment', :sidekiq_might_not_need_inline do
+ request_csv
+
+ expect(attachment.content_type).to include('text/csv')
+ end
+
+ it 'ignores pagination', :sidekiq_might_not_need_inline do
+ create_list(:issue, 30, project: project, author: user)
+
+ request_csv
+
+ expect(csv.count).to eq 31
+ end
+
+ it 'uses filters from issue index', :sidekiq_might_not_need_inline do
+ request_csv(state: :closed)
+
+ expect(csv.count).to eq 0
+ end
+
+ it 'ignores sorting from issue index', :sidekiq_might_not_need_inline do
+ issue2 = create(:labeled_issue, project: project, author: user, labels: [feature_label])
+
+ request_csv(sort: :label_priority)
+
+ expected = [issue.iid.to_s, issue2.iid.to_s]
+ expect(csv.map { |row| row['Issue ID'] }).to eq expected
+ end
+
+ it 'uses array filters, such as label_name', :sidekiq_might_not_need_inline do
+ issue.update!(labels: [idea_label])
+
+ request_csv("label_name[]" => 'Bug')
+
+ expect(csv.count).to eq 0
+ end
+
+ it 'avoids excessive database calls' do
+ control_count = ActiveRecord::QueryRecorder.new { request_csv }.count
+ create_list(:labeled_issue,
+ 10,
+ project: project,
+ assignees: [user],
+ author: user,
+ milestone: milestone,
+ labels: [feature_label, idea_label])
+ expect { request_csv }.not_to exceed_query_limit(control_count + 5)
+ end
+end
diff --git a/spec/features/issues/spam_issues_spec.rb b/spec/features/issues/spam_issues_spec.rb
index b59cd2d632a..b6c936a6767 100644
--- a/spec/features/issues/spam_issues_spec.rb
+++ b/spec/features/issues/spam_issues_spec.rb
@@ -23,9 +23,13 @@ describe 'New issue', :js do
sign_in(user)
end
- context 'when identified as spam' do
+ context 'when SpamVerdictService disallows' do
+ include_context 'includes Spam constants'
+
before do
- WebMock.stub_request(:any, /.*akismet.com.*/).to_return(body: "true", status: 200)
+ allow_next_instance_of(Spam::SpamVerdictService) do |verdict_service|
+ allow(verdict_service).to receive(:execute).and_return(DISALLOW)
+ end
visit new_project_issue_path(project)
end
@@ -33,23 +37,22 @@ describe 'New issue', :js do
context 'when allow_possible_spam feature flag is false' do
before do
stub_feature_flags(allow_possible_spam: false)
- end
- it 'creates an issue after solving reCaptcha' do
fill_in 'issue_title', with: 'issue title'
fill_in 'issue_description', with: 'issue description'
+ end
+ it 'rejects issue creation' do
click_button 'Submit issue'
- # it is impossible to test recaptcha automatically and there is no possibility to fill in recaptcha
- # recaptcha verification is skipped in test environment and it always returns true
+ expect(page).to have_content('discarded')
+ expect(page).not_to have_content('potential spam')
expect(page).not_to have_content('issue title')
- expect(page).to have_css('.recaptcha')
-
- click_button 'Submit issue'
+ end
- expect(page.find('.issue-details h2.title')).to have_content('issue title')
- expect(page.find('.issue-details .description')).to have_content('issue description')
+ it 'creates a spam log record' do
+ expect { click_button 'Submit issue' }
+ .to log_spam(title: 'issue title', description: 'issue description', user_id: user.id, noteable_type: 'Issue')
end
end
@@ -59,10 +62,9 @@ describe 'New issue', :js do
fill_in 'issue_description', with: 'issue description'
end
- it 'creates an issue without a need to solve reCaptcha' do
+ it 'allows issue creation' do
click_button 'Submit issue'
- expect(page).not_to have_css('.recaptcha')
expect(page.find('.issue-details h2.title')).to have_content('issue title')
expect(page.find('.issue-details .description')).to have_content('issue description')
end
@@ -74,9 +76,98 @@ describe 'New issue', :js do
end
end
- context 'when not identified as spam' do
+ context 'when SpamVerdictService requires recaptcha' do
+ include_context 'includes Spam constants'
+
before do
- WebMock.stub_request(:any, /.*akismet.com.*/).to_return(body: 'false', status: 200)
+ allow_next_instance_of(Spam::SpamVerdictService) do |verdict_service|
+ allow(verdict_service).to receive(:execute).and_return(REQUIRE_RECAPTCHA)
+ end
+
+ visit new_project_issue_path(project)
+ end
+
+ context 'when recaptcha is enabled' do
+ before do
+ stub_application_setting(recaptcha_enabled: true)
+ end
+
+ context 'when allow_possible_spam feature flag is false' do
+ before do
+ stub_feature_flags(allow_possible_spam: false)
+ end
+
+ it 'creates an issue after solving reCaptcha' do
+ fill_in 'issue_title', with: 'issue title'
+ fill_in 'issue_description', with: 'issue description'
+
+ click_button 'Submit issue'
+
+ # it is impossible to test recaptcha automatically and there is no possibility to fill in recaptcha
+ # recaptcha verification is skipped in test environment and it always returns true
+ expect(page).not_to have_content('issue title')
+ expect(page).to have_css('.recaptcha')
+
+ click_button 'Submit issue'
+
+ expect(page.find('.issue-details h2.title')).to have_content('issue title')
+ expect(page.find('.issue-details .description')).to have_content('issue description')
+ end
+ end
+
+ context 'when allow_possible_spam feature flag is true' do
+ before do
+ fill_in 'issue_title', with: 'issue title'
+ fill_in 'issue_description', with: 'issue description'
+ end
+
+ it 'creates an issue without a need to solve reCAPTCHA' do
+ click_button 'Submit issue'
+
+ expect(page).not_to have_css('.recaptcha')
+ expect(page.find('.issue-details h2.title')).to have_content('issue title')
+ expect(page.find('.issue-details .description')).to have_content('issue description')
+ end
+
+ it 'creates a spam log record' do
+ expect { click_button 'Submit issue' }
+ .to log_spam(title: 'issue title', description: 'issue description', user_id: user.id, noteable_type: 'Issue')
+ end
+ end
+ end
+
+ context 'when reCAPTCHA is not enabled' do
+ before do
+ stub_application_setting(recaptcha_enabled: false)
+ end
+
+ context 'when allow_possible_spam feature flag is true' do
+ before do
+ fill_in 'issue_title', with: 'issue title'
+ fill_in 'issue_description', with: 'issue description'
+ end
+
+ it 'creates an issue without a need to solve reCaptcha' do
+ click_button 'Submit issue'
+
+ expect(page).not_to have_css('.recaptcha')
+ expect(page.find('.issue-details h2.title')).to have_content('issue title')
+ expect(page.find('.issue-details .description')).to have_content('issue description')
+ end
+
+ it 'creates a spam log record' do
+ expect { click_button 'Submit issue' }
+ .to log_spam(title: 'issue title', description: 'issue description', user_id: user.id, noteable_type: 'Issue')
+ end
+ end
+ end
+ end
+
+ context 'when the SpamVerdictService allows' do
+ before do
+ allow_next_instance_of(Spam::SpamVerdictService) do |verdict_service|
+ allow(verdict_service).to receive(:execute).and_return(ALLOW)
+ end
visit new_project_issue_path(project)
end
diff --git a/spec/features/merge_request/user_resolves_wip_mr_spec.rb b/spec/features/merge_request/user_resolves_wip_mr_spec.rb
new file mode 100644
index 00000000000..93ef0801791
--- /dev/null
+++ b/spec/features/merge_request/user_resolves_wip_mr_spec.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Merge request > User resolves Work in Progress', :js do
+ let(:project) { create(:project, :public, :repository) }
+ let(:user) { project.creator }
+ let(:merge_request) do
+ create(:merge_request_with_diffs, source_project: project,
+ author: user,
+ title: 'WIP: Bug NS-04',
+ merge_params: { force_remove_source_branch: '1' })
+ end
+ let(:pipeline) do
+ create(:ci_pipeline, project: project,
+ sha: merge_request.diff_head_sha,
+ ref: merge_request.source_branch,
+ head_pipeline_of: merge_request)
+ end
+
+ before do
+ project.add_maintainer(user)
+ end
+
+ context 'when there is active pipeline for merge request' do
+ before do
+ create(:ci_build, pipeline: pipeline)
+ sign_in(user)
+ visit project_merge_request_path(project, merge_request)
+ wait_for_requests
+ end
+
+ it 'retains merge request data after clicking Resolve WIP status' do
+ expect(page.find('.ci-widget-content')).to have_content("Pipeline ##{pipeline.id}")
+ expect(page).to have_content "This is a Work in Progress"
+
+ click_button('Resolve WIP status')
+
+ wait_for_requests
+
+ # If we don't disable the wait here, the test will wait until the
+ # merge request widget refreshes, which masks missing elements
+ # that should already be present.
+ expect(page.find('.ci-widget-content', wait: 0)).to have_content("Pipeline ##{pipeline.id}")
+ expect(page).not_to have_content('This is a Work in Progress')
+ end
+ end
+end
diff --git a/spec/features/projects/files/user_browses_files_spec.rb b/spec/features/projects/files/user_browses_files_spec.rb
index 5364bc10b2f..5c52abaeb62 100644
--- a/spec/features/projects/files/user_browses_files_spec.rb
+++ b/spec/features/projects/files/user_browses_files_spec.rb
@@ -180,6 +180,20 @@ describe "User browses files" do
expect(page).to have_content("VERSION")
.and have_content(".gitignore")
.and have_content("LICENSE")
+
+ click_link("files")
+
+ page.within('.repo-breadcrumb') do
+ expect(page).to have_link('files')
+ end
+
+ click_link("html")
+
+ page.within('.repo-breadcrumb') do
+ expect(page).to have_link('html')
+ end
+
+ expect(page).to have_link('500.html')
end
end
@@ -193,6 +207,20 @@ describe "User browses files" do
expect(page).to have_content("VERSION")
.and have_content(".gitignore")
.and have_content("LICENSE")
+
+ click_link("files")
+
+ page.within('.repo-breadcrumb') do
+ expect(page).to have_link('files')
+ end
+
+ click_link("html")
+
+ page.within('.repo-breadcrumb') do
+ expect(page).to have_link('html')
+ end
+
+ expect(page).to have_link('500.html')
end
end
diff --git a/spec/features/projects/settings/ci_cd_settings_spec.rb b/spec/features/projects/settings/ci_cd_settings_spec.rb
index d8208a93bb1..75c890ec2e2 100644
--- a/spec/features/projects/settings/ci_cd_settings_spec.rb
+++ b/spec/features/projects/settings/ci_cd_settings_spec.rb
@@ -18,7 +18,7 @@ describe 'Projects > Settings > CI / CD settings' do
visit project_settings_ci_cd_path(project)
end
- it_behaves_like 'a deploy token in ci/cd settings' do
+ it_behaves_like 'a deploy token in settings' do
let(:entity_type) { 'project' }
end
end
diff --git a/spec/features/projects/snippets/create_snippet_spec.rb b/spec/features/projects/snippets/create_snippet_spec.rb
index b55a42e07a9..d883a1fc39c 100644
--- a/spec/features/projects/snippets/create_snippet_spec.rb
+++ b/spec/features/projects/snippets/create_snippet_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
shared_examples_for 'snippet editor' do
before do
+ stub_feature_flags(snippets_edit_vue: false)
stub_feature_flags(monaco_snippets: flag)
end
diff --git a/spec/features/projects/snippets/user_updates_snippet_spec.rb b/spec/features/projects/snippets/user_updates_snippet_spec.rb
index bad3fde8a4a..743823a545a 100644
--- a/spec/features/projects/snippets/user_updates_snippet_spec.rb
+++ b/spec/features/projects/snippets/user_updates_snippet_spec.rb
@@ -11,6 +11,7 @@ describe 'Projects > Snippets > User updates a snippet', :js do
before do
stub_feature_flags(snippets_vue: false)
+ stub_feature_flags(snippets_edit_vue: false)
stub_feature_flags(version_snippets: version_snippet_enabled)
project.add_maintainer(user)
@@ -60,6 +61,7 @@ describe 'Projects > Snippets > User updates a snippet', :js do
end
fill_in('project_snippet_title', with: 'Snippet new title')
+ fill_in('project_snippet_file_name', with: 'new_file_name')
click_button('Save')
end
diff --git a/spec/features/snippets/spam_snippets_spec.rb b/spec/features/snippets/spam_snippets_spec.rb
index e9534dedcd3..8bba3e45824 100644
--- a/spec/features/snippets/spam_snippets_spec.rb
+++ b/spec/features/snippets/spam_snippets_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
shared_examples_for 'snippet editor' do
+ include_context 'includes Spam constants'
+
def description_field
find('.js-description-input').find('input,textarea')
end
@@ -10,6 +12,7 @@ shared_examples_for 'snippet editor' do
before do
stub_feature_flags(allow_possible_spam: false)
stub_feature_flags(snippets_vue: false)
+ stub_feature_flags(snippets_edit_vue: false)
stub_feature_flags(monaco_snippets: flag)
stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
@@ -52,13 +55,30 @@ shared_examples_for 'snippet editor' do
end
end
- context 'when identified as spam' do
+ shared_examples 'does not allow creation' do
+ it 'rejects creation of the snippet' do
+ click_button('Create snippet')
+ wait_for_requests
+
+ expect(page).to have_content('discarded')
+ expect(page).not_to have_content('My Snippet Title')
+ expect(page).not_to have_css('.recaptcha')
+ end
+ end
+
+ context 'when SpamVerdictService requires recaptcha' do
before do
- WebMock.stub_request(:any, /.*akismet.com.*/).to_return(body: "true", status: 200)
+ expect_next_instance_of(Spam::SpamVerdictService) do |verdict_service|
+ expect(verdict_service).to receive(:execute).and_return(REQUIRE_RECAPTCHA)
+ end
end
context 'when allow_possible_spam feature flag is false' do
- it_behaves_like 'solve recaptcha'
+ before do
+ stub_application_setting(recaptcha_enabled: false)
+ end
+
+ it_behaves_like 'does not allow creation'
end
context 'when allow_possible_spam feature flag is true' do
@@ -66,9 +86,31 @@ shared_examples_for 'snippet editor' do
end
end
- context 'when not identified as spam' do
+ context 'when SpamVerdictService disallows' do
+ before do
+ expect_next_instance_of(Spam::SpamVerdictService) do |verdict_service|
+ expect(verdict_service).to receive(:execute).and_return(DISALLOW)
+ end
+ end
+
+ context 'when allow_possible_spam feature flag is false' do
+ before do
+ stub_application_setting(recaptcha_enabled: false)
+ end
+
+ it_behaves_like 'does not allow creation'
+ end
+
+ context 'when allow_possible_spam feature flag is true' do
+ it_behaves_like 'does not allow creation'
+ end
+ end
+
+ context 'when SpamVerdictService allows' do
before do
- WebMock.stub_request(:any, /.*akismet.com.*/).to_return(body: "false", status: 200)
+ expect_next_instance_of(Spam::SpamVerdictService) do |verdict_service|
+ expect(verdict_service).to receive(:execute).and_return(ALLOW)
+ end
end
it 'creates a snippet' do
diff --git a/spec/features/snippets/user_creates_snippet_spec.rb b/spec/features/snippets/user_creates_snippet_spec.rb
index 93da976dee0..5d3a84dd7bc 100644
--- a/spec/features/snippets/user_creates_snippet_spec.rb
+++ b/spec/features/snippets/user_creates_snippet_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
shared_examples_for 'snippet editor' do
before do
stub_feature_flags(snippets_vue: false)
+ stub_feature_flags(snippets_edit_vue: false)
stub_feature_flags(monaco_snippets: flag)
sign_in(user)
visit new_snippet_path
diff --git a/spec/features/snippets/user_edits_snippet_spec.rb b/spec/features/snippets/user_edits_snippet_spec.rb
index 0bbb92b1f3f..76d658a21c6 100644
--- a/spec/features/snippets/user_edits_snippet_spec.rb
+++ b/spec/features/snippets/user_edits_snippet_spec.rb
@@ -14,6 +14,7 @@ describe 'User edits snippet', :js do
before do
stub_feature_flags(snippets_vue: false)
+ stub_feature_flags(snippets_edit_vue: false)
stub_feature_flags(version_snippets: version_snippet_enabled)
sign_in(user)
@@ -91,6 +92,7 @@ describe 'User edits snippet', :js do
end
fill_in 'personal_snippet_title', with: 'New Snippet Title'
+ fill_in 'personal_snippet_file_name', with: 'new_file_name'
click_button('Save changes')
end
diff --git a/spec/finders/autocomplete/move_to_project_finder_spec.rb b/spec/finders/autocomplete/move_to_project_finder_spec.rb
index 9129a3b65be..f2da82bb9be 100644
--- a/spec/finders/autocomplete/move_to_project_finder_spec.rb
+++ b/spec/finders/autocomplete/move_to_project_finder_spec.rb
@@ -62,19 +62,20 @@ describe Autocomplete::MoveToProjectFinder do
expect(finder.execute.to_a).to eq([other_reporter_project])
end
- it 'returns a page of projects ordered by name' do
+ it 'returns a page of projects ordered by star count' do
stub_const('Autocomplete::MoveToProjectFinder::LIMIT', 2)
- projects = create_list(:project, 3) do |project|
- project.add_developer(user)
- end
+ projects = [
+ create(:project, namespace: user.namespace, star_count: 1),
+ create(:project, namespace: user.namespace, star_count: 5),
+ create(:project, namespace: user.namespace)
+ ]
finder = described_class.new(user, project_id: project.id)
page = finder.execute.to_a
- expected_projects = projects.sort_by(&:name).first(2)
expect(page.length).to eq(2)
- expect(page).to eq(expected_projects)
+ expect(page).to eq([projects[1], projects[0]])
end
end
diff --git a/spec/finders/members_finder_spec.rb b/spec/finders/members_finder_spec.rb
index f6df727f7db..d77548c6fd0 100644
--- a/spec/finders/members_finder_spec.rb
+++ b/spec/finders/members_finder_spec.rb
@@ -110,7 +110,7 @@ describe MembersFinder, '#execute' do
project.add_maintainer(user3)
member3 = project.add_maintainer(user4)
- result = described_class.new(project, user2).execute(params: { search: user4.name })
+ result = described_class.new(project, user2, params: { search: user4.name }).execute
expect(result).to contain_exactly(member3)
end
@@ -120,7 +120,7 @@ describe MembersFinder, '#execute' do
member2 = project.add_maintainer(user3)
member3 = project.add_maintainer(user4)
- result = described_class.new(project, user2).execute(params: { sort: 'id_desc' })
+ result = described_class.new(project, user2, params: { sort: 'id_desc' }).execute
expect(result).to eq([member3, member2, member1])
end
diff --git a/spec/fixtures/api/schemas/public_api/v4/snippets.json b/spec/fixtures/api/schemas/public_api/v4/snippets.json
index d13d703e063..ddddd46f5c4 100644
--- a/spec/fixtures/api/schemas/public_api/v4/snippets.json
+++ b/spec/fixtures/api/schemas/public_api/v4/snippets.json
@@ -10,6 +10,7 @@
"description": { "type": ["string", "null"] },
"visibility": { "type": "string" },
"web_url": { "type": "string" },
+ "raw_url": { "type": "string" },
"created_at": { "type": "date" },
"updated_at": { "type": "date" },
"author": {
@@ -27,7 +28,7 @@
},
"required": [
"id", "title", "file_name", "description", "web_url",
- "created_at", "updated_at", "author"
+ "created_at", "updated_at", "author", "raw_url"
],
"additionalProperties": false
}
diff --git a/spec/fixtures/lib/elasticsearch/query.json b/spec/fixtures/lib/elasticsearch/query.json
index 75164a7439f..86431bac572 100644
--- a/spec/fixtures/lib/elasticsearch/query.json
+++ b/spec/fixtures/lib/elasticsearch/query.json
@@ -26,7 +26,7 @@
}
},
{
- "offset": {
+ "log.offset": {
"order": "desc"
}
}
diff --git a/spec/fixtures/lib/elasticsearch/query_with_container.json b/spec/fixtures/lib/elasticsearch/query_with_container.json
index 11bc653441c..3cbe2e814b1 100644
--- a/spec/fixtures/lib/elasticsearch/query_with_container.json
+++ b/spec/fixtures/lib/elasticsearch/query_with_container.json
@@ -33,7 +33,7 @@
}
},
{
- "offset": {
+ "log.offset": {
"order": "desc"
}
}
diff --git a/spec/fixtures/lib/elasticsearch/query_with_cursor.json b/spec/fixtures/lib/elasticsearch/query_with_cursor.json
index c5b81e97d3c..da697b0b081 100644
--- a/spec/fixtures/lib/elasticsearch/query_with_cursor.json
+++ b/spec/fixtures/lib/elasticsearch/query_with_cursor.json
@@ -26,7 +26,7 @@
}
},
{
- "offset": {
+ "log.offset": {
"order": "desc"
}
}
diff --git a/spec/fixtures/lib/elasticsearch/query_with_end_time.json b/spec/fixtures/lib/elasticsearch/query_with_end_time.json
index 226e0f115e7..dca08382cd8 100644
--- a/spec/fixtures/lib/elasticsearch/query_with_end_time.json
+++ b/spec/fixtures/lib/elasticsearch/query_with_end_time.json
@@ -35,7 +35,7 @@
}
},
{
- "offset": {
+ "log.offset": {
"order": "desc"
}
}
diff --git a/spec/fixtures/lib/elasticsearch/query_with_filebeat_6.json b/spec/fixtures/lib/elasticsearch/query_with_filebeat_6.json
new file mode 100644
index 00000000000..75164a7439f
--- /dev/null
+++ b/spec/fixtures/lib/elasticsearch/query_with_filebeat_6.json
@@ -0,0 +1,40 @@
+{
+ "query": {
+ "bool": {
+ "must": [
+ {
+ "match_phrase": {
+ "kubernetes.pod.name": {
+ "query": "production-6866bc8974-m4sk4"
+ }
+ }
+ },
+ {
+ "match_phrase": {
+ "kubernetes.namespace": {
+ "query": "autodevops-deploy-9-production"
+ }
+ }
+ }
+ ]
+ }
+ },
+ "sort": [
+ {
+ "@timestamp": {
+ "order": "desc"
+ }
+ },
+ {
+ "offset": {
+ "order": "desc"
+ }
+ }
+ ],
+ "_source": [
+ "@timestamp",
+ "message",
+ "kubernetes.pod.name"
+ ],
+ "size": 500
+}
diff --git a/spec/fixtures/lib/elasticsearch/query_with_search.json b/spec/fixtures/lib/elasticsearch/query_with_search.json
index ca63c12f3b8..ab5c0ef13c2 100644
--- a/spec/fixtures/lib/elasticsearch/query_with_search.json
+++ b/spec/fixtures/lib/elasticsearch/query_with_search.json
@@ -35,7 +35,7 @@
}
},
{
- "offset": {
+ "log.offset": {
"order": "desc"
}
}
diff --git a/spec/fixtures/lib/elasticsearch/query_with_start_time.json b/spec/fixtures/lib/elasticsearch/query_with_start_time.json
index cb3e37de8a7..479e4b74cdf 100644
--- a/spec/fixtures/lib/elasticsearch/query_with_start_time.json
+++ b/spec/fixtures/lib/elasticsearch/query_with_start_time.json
@@ -35,7 +35,7 @@
}
},
{
- "offset": {
+ "log.offset": {
"order": "desc"
}
}
diff --git a/spec/fixtures/lib/elasticsearch/query_with_times.json b/spec/fixtures/lib/elasticsearch/query_with_times.json
index 91d28b28842..8bb0109a053 100644
--- a/spec/fixtures/lib/elasticsearch/query_with_times.json
+++ b/spec/fixtures/lib/elasticsearch/query_with_times.json
@@ -36,7 +36,7 @@
}
},
{
- "offset": {
+ "log.offset": {
"order": "desc"
}
}
diff --git a/spec/fixtures/lib/gitlab/metrics/dashboard/development_metrics.yml b/spec/fixtures/lib/gitlab/metrics/dashboard/development_metrics.yml
new file mode 100644
index 00000000000..2a4de35c1ec
--- /dev/null
+++ b/spec/fixtures/lib/gitlab/metrics/dashboard/development_metrics.yml
@@ -0,0 +1,20 @@
+panel_groups:
+ - group: 'Usage Variation'
+ panels:
+ - type: anomaly-chart
+ title: "Memory Usage Rate Anomalies"
+ y_label: "Memory Usage Rate"
+ metrics:
+ - id: container_memory_usage_bytes
+ query_range: avg(sum(rate(container_memory_usage_bytes[15m]))) /1024
+ label: "Memory Usage Rate"
+ unit: "kB"
+ - id: container_memory_usage_bytes_upper
+ query_range: 80000
+ label: "Memory Usage Rate Lower Limit"
+ unit: "kB"
+ - id: container_memory_usage_bytes_lower
+ query_range: 50000
+ label: "Memory Usage Rate Upper Limit"
+ unit: "kB"
+
diff --git a/spec/fixtures/terraform/tfplan.json b/spec/fixtures/terraform/tfplan.json
new file mode 100644
index 00000000000..0ab4891e63a
--- /dev/null
+++ b/spec/fixtures/terraform/tfplan.json
@@ -0,0 +1 @@
+{"create": 0, "update": 1, "delete": 0}
diff --git a/spec/fixtures/terraform/tfplan_with_corrupted_data.json b/spec/fixtures/terraform/tfplan_with_corrupted_data.json
new file mode 100644
index 00000000000..b83f5e172bb
--- /dev/null
+++ b/spec/fixtures/terraform/tfplan_with_corrupted_data.json
@@ -0,0 +1 @@
+Exited code 1
diff --git a/spec/fixtures/x509/ZZZZZZA6.crl b/spec/fixtures/x509/ZZZZZZA6.crl
new file mode 100644
index 00000000000..eb6b9d5d71a
--- /dev/null
+++ b/spec/fixtures/x509/ZZZZZZA6.crl
Binary files differ
diff --git a/spec/frontend/alert_management/components/alert_management_list_spec.js b/spec/frontend/alert_management/components/alert_management_list_spec.js
new file mode 100644
index 00000000000..2bfbf37d7f0
--- /dev/null
+++ b/spec/frontend/alert_management/components/alert_management_list_spec.js
@@ -0,0 +1,38 @@
+import { mount } from '@vue/test-utils';
+import { GlEmptyState } from '@gitlab/ui';
+import stubChildren from 'helpers/stub_children';
+import AlertManagementList from '~/alert_management/components/alert_management_list.vue';
+
+describe('AlertManagementList', () => {
+ let wrapper;
+
+ function mountComponent({ stubs = {} } = {}) {
+ wrapper = mount(AlertManagementList, {
+ propsData: {
+ indexPath: '/path',
+ enableAlertManagementPath: '/link',
+ emptyAlertSvgPath: 'illustration/path',
+ },
+ stubs: {
+ ...stubChildren(AlertManagementList),
+ ...stubs,
+ },
+ });
+ }
+
+ afterEach(() => {
+ if (wrapper) {
+ wrapper.destroy();
+ }
+ });
+
+ describe('alert management feature renders empty state', () => {
+ beforeEach(() => {
+ mountComponent();
+ });
+
+ it('shows empty state', () => {
+ expect(wrapper.find(GlEmptyState).exists()).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/blob/components/blob_edit_content_spec.js b/spec/frontend/blob/components/blob_edit_content_spec.js
index 189d2629efa..971ef72521d 100644
--- a/spec/frontend/blob/components/blob_edit_content_spec.js
+++ b/spec/frontend/blob/components/blob_edit_content_spec.js
@@ -80,7 +80,7 @@ describe('Blob Header Editing', () => {
getValue: jest.fn().mockReturnValue(value),
};
- editorEl.trigger('focusout');
+ editorEl.trigger('keyup');
return nextTick().then(() => {
expect(wrapper.emitted().input[0]).toEqual([value]);
diff --git a/spec/frontend/ci_variable_list/components/ci_key_field_spec.js b/spec/frontend/ci_variable_list/components/ci_key_field_spec.js
new file mode 100644
index 00000000000..bcc29f22dd1
--- /dev/null
+++ b/spec/frontend/ci_variable_list/components/ci_key_field_spec.js
@@ -0,0 +1,244 @@
+import { mount } from '@vue/test-utils';
+import { GlButton, GlFormInput } from '@gitlab/ui';
+import { AWS_ACCESS_KEY_ID, AWS_DEFAULT_REGION } from '~/ci_variable_list/constants';
+import CiKeyField from '~/ci_variable_list/components/ci_key_field.vue';
+
+import {
+ awsTokens,
+ awsTokenList,
+} from '~/ci_variable_list/components/ci_variable_autocomplete_tokens';
+
+const doTimes = (num, fn) => {
+ for (let i = 0; i < num; i += 1) {
+ fn();
+ }
+};
+
+describe('Ci Key field', () => {
+ let wrapper;
+
+ const createComponent = () => {
+ wrapper = mount({
+ data() {
+ return {
+ inputVal: '',
+ tokens: awsTokenList,
+ };
+ },
+ components: { CiKeyField },
+ template: `
+ <div>
+ <ci-key-field
+ v-model="inputVal"
+ :token-list="tokens"
+ />
+ </div>
+ `,
+ });
+ };
+
+ const findDropdown = () => wrapper.find('#ci-variable-dropdown');
+ const findDropdownOptions = () => wrapper.findAll(GlButton).wrappers.map(item => item.text());
+ const findInput = () => wrapper.find(GlFormInput);
+ const findInputValue = () => findInput().element.value;
+ const setInput = val => findInput().setValue(val);
+ const clickDown = () => findInput().trigger('keydown.down');
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('match and filter functionality', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('is closed when the input is empty', () => {
+ expect(findInput().isVisible()).toBe(true);
+ expect(findInputValue()).toBe('');
+ expect(findDropdown().isVisible()).toBe(false);
+ });
+
+ it('is open when the input text matches a token', () => {
+ setInput('AWS');
+ return wrapper.vm.$nextTick().then(() => {
+ expect(findDropdown().isVisible()).toBe(true);
+ });
+ });
+
+ it('shows partial matches at string start', () => {
+ setInput('AWS');
+ return wrapper.vm.$nextTick().then(() => {
+ expect(findDropdown().isVisible()).toBe(true);
+ expect(findDropdownOptions()).toEqual(awsTokenList);
+ });
+ });
+
+ it('shows partial matches mid-string', () => {
+ setInput('D');
+ return wrapper.vm.$nextTick().then(() => {
+ expect(findDropdown().isVisible()).toBe(true);
+ expect(findDropdownOptions()).toEqual([
+ awsTokens[AWS_ACCESS_KEY_ID].name,
+ awsTokens[AWS_DEFAULT_REGION].name,
+ ]);
+ });
+ });
+
+ it('is closed when the text does not match', () => {
+ setInput('elephant');
+ return wrapper.vm.$nextTick().then(() => {
+ expect(findDropdown().isVisible()).toBe(false);
+ });
+ });
+ });
+
+ describe('keyboard navigation in dropdown', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ describe('on down arrow + enter', () => {
+ it('selects the next item in the list and closes the dropdown', () => {
+ setInput('AWS');
+ return wrapper.vm
+ .$nextTick()
+ .then(() => {
+ findInput().trigger('keydown.down');
+ findInput().trigger('keydown.enter');
+ return wrapper.vm.$nextTick();
+ })
+ .then(() => {
+ expect(findInputValue()).toBe(awsTokenList[0]);
+ });
+ });
+
+ it('loops to the top when it reaches the bottom', () => {
+ setInput('AWS');
+ return wrapper.vm
+ .$nextTick()
+ .then(() => {
+ doTimes(findDropdownOptions().length + 1, clickDown);
+ findInput().trigger('keydown.enter');
+ return wrapper.vm.$nextTick();
+ })
+ .then(() => {
+ expect(findInputValue()).toBe(awsTokenList[0]);
+ });
+ });
+ });
+
+ describe('on up arrow + enter', () => {
+ it('selects the previous item in the list and closes the dropdown', () => {
+ setInput('AWS');
+ return wrapper.vm
+ .$nextTick()
+ .then(() => {
+ doTimes(3, clickDown);
+ findInput().trigger('keydown.up');
+ findInput().trigger('keydown.enter');
+ return wrapper.vm.$nextTick();
+ })
+ .then(() => {
+ expect(findInputValue()).toBe(awsTokenList[1]);
+ });
+ });
+
+ it('loops to the bottom when it reaches the top', () => {
+ setInput('AWS');
+ return wrapper.vm
+ .$nextTick()
+ .then(() => {
+ findInput().trigger('keydown.down');
+ findInput().trigger('keydown.up');
+ findInput().trigger('keydown.enter');
+ return wrapper.vm.$nextTick();
+ })
+ .then(() => {
+ expect(findInputValue()).toBe(awsTokenList[awsTokenList.length - 1]);
+ });
+ });
+ });
+
+ describe('on enter with no item highlighted', () => {
+ it('does not select any item and closes the dropdown', () => {
+ setInput('AWS');
+ return wrapper.vm
+ .$nextTick()
+ .then(() => {
+ findInput().trigger('keydown.enter');
+ return wrapper.vm.$nextTick();
+ })
+ .then(() => {
+ expect(findInputValue()).toBe('AWS');
+ });
+ });
+ });
+
+ describe('on click', () => {
+ it('selects the clicked item regardless of arrow highlight', () => {
+ setInput('AWS');
+ return wrapper.vm
+ .$nextTick()
+ .then(() => {
+ wrapper.find(GlButton).trigger('click');
+ return wrapper.vm.$nextTick();
+ })
+ .then(() => {
+ expect(findInputValue()).toBe(awsTokenList[0]);
+ });
+ });
+ });
+
+ describe('on tab', () => {
+ it('selects entered text, closes dropdown', () => {
+ setInput('AWS');
+ return wrapper.vm
+ .$nextTick()
+ .then(() => {
+ findInput().trigger('keydown.tab');
+ doTimes(2, clickDown);
+ return wrapper.vm.$nextTick();
+ })
+ .then(() => {
+ expect(findInputValue()).toBe('AWS');
+ expect(findDropdown().isVisible()).toBe(false);
+ });
+ });
+ });
+
+ describe('on esc', () => {
+ describe('when dropdown is open', () => {
+ it('closes dropdown and does not select anything', () => {
+ setInput('AWS');
+ return wrapper.vm
+ .$nextTick()
+ .then(() => {
+ findInput().trigger('keydown.esc');
+ return wrapper.vm.$nextTick();
+ })
+ .then(() => {
+ expect(findInputValue()).toBe('AWS');
+ expect(findDropdown().isVisible()).toBe(false);
+ });
+ });
+ });
+
+ describe('when dropdown is closed', () => {
+ it('clears the input field', () => {
+ setInput('elephant');
+ return wrapper.vm
+ .$nextTick()
+ .then(() => {
+ expect(findDropdown().isVisible()).toBe(false);
+ findInput().trigger('keydown.esc');
+ return wrapper.vm.$nextTick();
+ })
+ .then(() => {
+ expect(findInputValue()).toBe('');
+ });
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js b/spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js
index 70edd36669b..7b8d69df35e 100644
--- a/spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js
+++ b/spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js
@@ -1,7 +1,10 @@
import Vuex from 'vuex';
-import { createLocalVue, shallowMount } from '@vue/test-utils';
+import { createLocalVue, shallowMount, mount } from '@vue/test-utils';
import { GlDeprecatedButton } from '@gitlab/ui';
+import { AWS_ACCESS_KEY_ID } from '~/ci_variable_list/constants';
import CiVariableModal from '~/ci_variable_list/components/ci_variable_modal.vue';
+import CiKeyField from '~/ci_variable_list/components/ci_key_field.vue';
+import { awsTokens } from '~/ci_variable_list/components/ci_variable_autocomplete_tokens';
import createStore from '~/ci_variable_list/store';
import mockData from '../services/mock_data';
import ModalStub from '../stubs';
@@ -13,14 +16,17 @@ describe('Ci variable modal', () => {
let wrapper;
let store;
- const createComponent = () => {
+ const createComponent = (method, options = {}) => {
store = createStore();
- wrapper = shallowMount(CiVariableModal, {
+ wrapper = method(CiVariableModal, {
+ attachToDocument: true,
+ provide: { glFeatures: { ciKeyAutocomplete: true } },
stubs: {
GlModal: ModalStub,
},
localVue,
store,
+ ...options,
});
};
@@ -34,22 +40,46 @@ describe('Ci variable modal', () => {
.findAll(GlDeprecatedButton)
.at(1);
- beforeEach(() => {
- createComponent();
- jest.spyOn(store, 'dispatch').mockImplementation();
- });
-
afterEach(() => {
wrapper.destroy();
});
- it('button is disabled when no key/value pair are present', () => {
- expect(addOrUpdateButton(1).attributes('disabled')).toBeTruthy();
+ describe('Feature flag', () => {
+ describe('when off', () => {
+ beforeEach(() => {
+ createComponent(shallowMount, { provide: { glFeatures: { ciKeyAutocomplete: false } } });
+ });
+
+ it('does not render the autocomplete dropdown', () => {
+ expect(wrapper.contains(CiKeyField)).toBe(false);
+ });
+ });
+
+ describe('when on', () => {
+ beforeEach(() => {
+ createComponent(shallowMount);
+ });
+ it('renders the autocomplete dropdown', () => {
+ expect(wrapper.find(CiKeyField).exists()).toBe(true);
+ });
+ });
+ });
+
+ describe('Basic interactions', () => {
+ beforeEach(() => {
+ createComponent(shallowMount);
+ });
+
+ it('button is disabled when no key/value pair are present', () => {
+ expect(addOrUpdateButton(1).attributes('disabled')).toBeTruthy();
+ });
});
describe('Adding a new variable', () => {
beforeEach(() => {
const [variable] = mockData.mockVariables;
+ createComponent(shallowMount);
+ jest.spyOn(store, 'dispatch').mockImplementation();
store.state.variable = variable;
});
@@ -71,6 +101,8 @@ describe('Ci variable modal', () => {
describe('Editing a variable', () => {
beforeEach(() => {
const [variable] = mockData.mockVariables;
+ createComponent(shallowMount);
+ jest.spyOn(store, 'dispatch').mockImplementation();
store.state.variableBeingEdited = variable;
});
@@ -96,4 +128,105 @@ describe('Ci variable modal', () => {
expect(store.dispatch).toHaveBeenCalledWith('deleteVariable', mockData.mockVariables[0]);
});
});
+
+ describe('Validations', () => {
+ const maskError = 'This variable can not be masked.';
+
+ describe('when the key state is invalid', () => {
+ beforeEach(() => {
+ const [variable] = mockData.mockVariables;
+ const invalidKeyVariable = {
+ ...variable,
+ key: AWS_ACCESS_KEY_ID,
+ value: 'AKIAIOSFODNN7EXAMPLEjdhy',
+ secret_value: 'AKIAIOSFODNN7EXAMPLEjdhy',
+ };
+ createComponent(mount);
+ store.state.variable = invalidKeyVariable;
+ });
+
+ it('disables the submit button', () => {
+ expect(addOrUpdateButton(1).attributes('disabled')).toBeTruthy();
+ });
+
+ it('shows the correct error text', () => {
+ const errorText = awsTokens[AWS_ACCESS_KEY_ID].invalidMessage;
+ expect(findModal().text()).toContain(errorText);
+ });
+ });
+
+ describe('when the mask state is invalid', () => {
+ beforeEach(() => {
+ const [variable] = mockData.mockVariables;
+ const invalidMaskVariable = {
+ ...variable,
+ key: 'qs',
+ value: 'd:;',
+ secret_value: 'd:;',
+ masked: true,
+ };
+ createComponent(mount);
+ store.state.variable = invalidMaskVariable;
+ });
+
+ it('disables the submit button', () => {
+ expect(addOrUpdateButton(1).attributes('disabled')).toBeTruthy();
+ });
+
+ it('shows the correct error text', () => {
+ expect(findModal().text()).toContain(maskError);
+ });
+ });
+
+ describe('when the mask and key states are invalid', () => {
+ beforeEach(() => {
+ const [variable] = mockData.mockVariables;
+ const invalidMaskandKeyVariable = {
+ ...variable,
+ key: AWS_ACCESS_KEY_ID,
+ value: 'AKIAIOSFODNN7EXAMPLEjdhyd:;',
+ secret_value: 'AKIAIOSFODNN7EXAMPLEjdhyd:;',
+ masked: true,
+ };
+ createComponent(mount);
+ store.state.variable = invalidMaskandKeyVariable;
+ });
+
+ it('disables the submit button', () => {
+ expect(addOrUpdateButton(1).attributes('disabled')).toBeTruthy();
+ });
+
+ it('shows the correct error text', () => {
+ const errorText = awsTokens[AWS_ACCESS_KEY_ID].invalidMessage;
+ expect(findModal().text()).toContain(maskError);
+ expect(findModal().text()).toContain(errorText);
+ });
+ });
+
+ describe('when both states are valid', () => {
+ beforeEach(() => {
+ const [variable] = mockData.mockVariables;
+ const validMaskandKeyVariable = {
+ ...variable,
+ key: AWS_ACCESS_KEY_ID,
+ value: 'AKIAIOSFODNN7EXAMPLE',
+ secret_value: 'AKIAIOSFODNN7EXAMPLE',
+ masked: true,
+ };
+ createComponent(mount);
+ store.state.variable = validMaskandKeyVariable;
+ store.state.maskableRegex = /^[a-zA-Z0-9_+=/@:-]{8,}$/;
+ });
+
+ it('does not disable the submit button', () => {
+ expect(addOrUpdateButton(1).attributes('disabled')).toBeFalsy();
+ });
+
+ it('shows no error text', () => {
+ const errorText = awsTokens[AWS_ACCESS_KEY_ID].invalidMessage;
+ expect(findModal().text()).not.toContain(maskError);
+ expect(findModal().text()).not.toContain(errorText);
+ });
+ });
+ });
});
diff --git a/spec/frontend/clusters/components/applications_spec.js b/spec/frontend/clusters/components/applications_spec.js
index 782e5215ad8..33b30891d5e 100644
--- a/spec/frontend/clusters/components/applications_spec.js
+++ b/spec/frontend/clusters/components/applications_spec.js
@@ -8,6 +8,7 @@ import eventHub from '~/clusters/event_hub';
import KnativeDomainEditor from '~/clusters/components/knative_domain_editor.vue';
import CrossplaneProviderStack from '~/clusters/components/crossplane_provider_stack.vue';
import IngressModsecuritySettings from '~/clusters/components/ingress_modsecurity_settings.vue';
+import FluentdOutputSettings from '~/clusters/components/fluentd_output_settings.vue';
describe('Applications', () => {
let vm;
@@ -67,6 +68,10 @@ describe('Applications', () => {
it('renders a row for Elastic Stack', () => {
expect(vm.$el.querySelector('.js-cluster-application-row-elastic_stack')).not.toBeNull();
});
+
+ it('renders a row for Fluentd', () => {
+ expect(vm.$el.querySelector('.js-cluster-application-row-fluentd')).not.toBeNull();
+ });
});
describe('Group cluster applications', () => {
@@ -112,6 +117,10 @@ describe('Applications', () => {
it('renders a row for Elastic Stack', () => {
expect(vm.$el.querySelector('.js-cluster-application-row-elastic_stack')).not.toBeNull();
});
+
+ it('renders a row for Fluentd', () => {
+ expect(vm.$el.querySelector('.js-cluster-application-row-fluentd')).not.toBeNull();
+ });
});
describe('Instance cluster applications', () => {
@@ -157,6 +166,10 @@ describe('Applications', () => {
it('renders a row for Elastic Stack', () => {
expect(vm.$el.querySelector('.js-cluster-application-row-elastic_stack')).not.toBeNull();
});
+
+ it('renders a row for Fluentd', () => {
+ expect(vm.$el.querySelector('.js-cluster-application-row-fluentd')).not.toBeNull();
+ });
});
describe('Helm application', () => {
@@ -240,6 +253,7 @@ describe('Applications', () => {
jupyter: { title: 'JupyterHub', hostname: '' },
knative: { title: 'Knative', hostname: '' },
elastic_stack: { title: 'Elastic Stack' },
+ fluentd: { title: 'Fluentd' },
},
});
@@ -539,4 +553,23 @@ describe('Applications', () => {
});
});
});
+
+ describe('Fluentd application', () => {
+ const propsData = {
+ applications: {
+ ...APPLICATIONS_MOCK_STATE,
+ },
+ };
+
+ let wrapper;
+ beforeEach(() => {
+ wrapper = shallowMount(Applications, { propsData });
+ });
+ afterEach(() => {
+ wrapper.destroy();
+ });
+ it('renders the correct Component', () => {
+ expect(wrapper.contains(FluentdOutputSettings)).toBe(true);
+ });
+ });
});
diff --git a/spec/frontend/clusters/components/fluentd_output_settings_spec.js b/spec/frontend/clusters/components/fluentd_output_settings_spec.js
new file mode 100644
index 00000000000..360478b36f5
--- /dev/null
+++ b/spec/frontend/clusters/components/fluentd_output_settings_spec.js
@@ -0,0 +1,158 @@
+import { shallowMount } from '@vue/test-utils';
+import FluentdOutputSettings from '~/clusters/components/fluentd_output_settings.vue';
+import { APPLICATION_STATUS, FLUENTD } from '~/clusters/constants';
+import { GlAlert, GlDropdown } from '@gitlab/ui';
+import eventHub from '~/clusters/event_hub';
+
+const { UPDATING } = APPLICATION_STATUS;
+
+describe('FluentdOutputSettings', () => {
+ let wrapper;
+
+ const defaultProps = {
+ status: 'installable',
+ installed: false,
+ updateAvailable: false,
+ protocol: 'tcp',
+ host: '127.0.0.1',
+ port: 514,
+ isEditingSettings: false,
+ };
+
+ const createComponent = (props = {}) => {
+ wrapper = shallowMount(FluentdOutputSettings, {
+ propsData: {
+ fluentd: {
+ ...defaultProps,
+ ...props,
+ },
+ },
+ });
+ };
+
+ const findSaveButton = () => wrapper.find({ ref: 'saveBtn' });
+ const findCancelButton = () => wrapper.find({ ref: 'cancelBtn' });
+ const findProtocolDropdown = () => wrapper.find(GlDropdown);
+
+ describe('when fluentd is installed', () => {
+ beforeEach(() => {
+ createComponent({ installed: true, status: 'installed' });
+ jest.spyOn(eventHub, '$emit');
+ });
+
+ it('does not render save and cancel buttons', () => {
+ expect(findSaveButton().exists()).toBe(false);
+ expect(findCancelButton().exists()).toBe(false);
+ });
+
+ describe('with protocol dropdown changed by the user', () => {
+ beforeEach(() => {
+ findProtocolDropdown().vm.$children[1].$emit('click');
+ wrapper.setProps({
+ fluentd: {
+ ...defaultProps,
+ installed: true,
+ status: 'installed',
+ protocol: 'udp',
+ isEditingSettings: true,
+ },
+ });
+ });
+
+ it('renders save and cancel buttons', () => {
+ expect(findSaveButton().exists()).toBe(true);
+ expect(findCancelButton().exists()).toBe(true);
+ });
+
+ it('enables related toggle and buttons', () => {
+ expect(findSaveButton().attributes().disabled).toBeUndefined();
+ expect(findCancelButton().attributes().disabled).toBeUndefined();
+ });
+
+ it('triggers set event to be propagated with the current value', () => {
+ expect(eventHub.$emit).toHaveBeenCalledWith('setFluentdSettings', {
+ id: FLUENTD,
+ host: '127.0.0.1',
+ port: 514,
+ protocol: 'UDP',
+ });
+ });
+
+ describe('and the save changes button is clicked', () => {
+ beforeEach(() => {
+ findSaveButton().vm.$emit('click');
+ });
+
+ it('triggers save event and pass current values', () => {
+ expect(eventHub.$emit).toHaveBeenCalledWith('updateApplication', {
+ id: FLUENTD,
+ params: {
+ host: '127.0.0.1',
+ port: 514,
+ protocol: 'udp',
+ },
+ });
+ });
+ });
+
+ describe('and the cancel button is clicked', () => {
+ beforeEach(() => {
+ findCancelButton().vm.$emit('click');
+ wrapper.setProps({
+ fluentd: {
+ ...defaultProps,
+ installed: true,
+ status: 'installed',
+ protocol: 'udp',
+ isEditingSettings: false,
+ },
+ });
+ });
+
+ it('triggers reset event and hides both cancel and save changes button', () => {
+ expect(findSaveButton().exists()).toBe(false);
+ expect(findCancelButton().exists()).toBe(false);
+ });
+ });
+ });
+
+ describe(`when fluentd status is ${UPDATING}`, () => {
+ beforeEach(() => {
+ createComponent({ installed: true, status: UPDATING });
+ });
+
+ it('renders loading spinner in save button', () => {
+ expect(findSaveButton().props('loading')).toBe(true);
+ });
+
+ it('renders disabled save button', () => {
+ expect(findSaveButton().props('disabled')).toBe(true);
+ });
+
+ it('renders save button with "Saving" label', () => {
+ expect(findSaveButton().text()).toBe('Saving');
+ });
+ });
+
+ describe('when fluentd fails to update', () => {
+ beforeEach(() => {
+ createComponent({ updateFailed: true });
+ });
+
+ it('displays a error message', () => {
+ expect(wrapper.contains(GlAlert)).toBe(true);
+ });
+ });
+ });
+
+ describe('when fluentd is not installed', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('does not render the save button', () => {
+ expect(findSaveButton().exists()).toBe(false);
+ expect(findCancelButton().exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/clusters/services/mock_data.js b/spec/frontend/clusters/services/mock_data.js
index 52d78ea1176..c5ec3f6e6a8 100644
--- a/spec/frontend/clusters/services/mock_data.js
+++ b/spec/frontend/clusters/services/mock_data.js
@@ -159,6 +159,7 @@ const APPLICATIONS_MOCK_STATE = {
jupyter: { title: 'JupyterHub', status: 'installable', hostname: '' },
knative: { title: 'Knative ', status: 'installable', hostname: '' },
elastic_stack: { title: 'Elastic Stack', status: 'installable' },
+ fluentd: { title: 'Fluentd', status: 'installable' },
};
export { CLUSTERS_MOCK_DATA, DEFAULT_APPLICATION_STATE, APPLICATIONS_MOCK_STATE };
diff --git a/spec/frontend/clusters/stores/clusters_store_spec.js b/spec/frontend/clusters/stores/clusters_store_spec.js
index 9fafc688af9..58e5bfb8007 100644
--- a/spec/frontend/clusters/stores/clusters_store_spec.js
+++ b/spec/frontend/clusters/stores/clusters_store_spec.js
@@ -121,6 +121,22 @@ describe('Clusters Store', () => {
uninstallFailed: false,
validationError: null,
},
+ fluentd: {
+ title: 'Fluentd',
+ status: null,
+ statusReason: null,
+ requestReason: null,
+ port: null,
+ host: null,
+ protocol: null,
+ installed: false,
+ isEditingSettings: false,
+ installFailed: false,
+ uninstallable: false,
+ uninstallSuccessful: false,
+ uninstallFailed: false,
+ validationError: null,
+ },
jupyter: {
title: 'JupyterHub',
status: mockResponseData.applications[4].status,
diff --git a/spec/frontend/diffs/components/diff_table_cell_spec.js b/spec/frontend/diffs/components/diff_table_cell_spec.js
index 1af0746f3bd..e871d86d901 100644
--- a/spec/frontend/diffs/components/diff_table_cell_spec.js
+++ b/spec/frontend/diffs/components/diff_table_cell_spec.js
@@ -85,15 +85,18 @@ describe('DiffTableCell', () => {
describe('comment button', () => {
it.each`
- showCommentButton | userData | query | expectation
- ${true} | ${TEST_USER} | ${'diff_head=false'} | ${true}
- ${true} | ${TEST_USER} | ${'diff_head=true'} | ${false}
- ${false} | ${TEST_USER} | ${'bogus'} | ${false}
- ${true} | ${null} | ${''} | ${false}
+ showCommentButton | userData | query | mergeRefHeadComments | expectation
+ ${true} | ${TEST_USER} | ${'diff_head=false'} | ${false} | ${true}
+ ${true} | ${TEST_USER} | ${'diff_head=true'} | ${true} | ${true}
+ ${true} | ${TEST_USER} | ${'diff_head=true'} | ${false} | ${false}
+ ${false} | ${TEST_USER} | ${'diff_head=true'} | ${true} | ${false}
+ ${false} | ${TEST_USER} | ${'bogus'} | ${true} | ${false}
+ ${true} | ${null} | ${''} | ${true} | ${false}
`(
'exists is $expectation - with showCommentButton ($showCommentButton) userData ($userData) query ($query)',
- ({ showCommentButton, userData, query, expectation }) => {
+ ({ showCommentButton, userData, query, mergeRefHeadComments, expectation }) => {
store.state.notes.userData = userData;
+ gon.features = { mergeRefHeadComments };
setWindowLocation({ href: `${TEST_HOST}?${query}` });
createComponent({ showCommentButton });
diff --git a/spec/frontend/diffs/store/getters_versions_dropdowns_spec.js b/spec/frontend/diffs/store/getters_versions_dropdowns_spec.js
index 3e5ba66d5e4..0343ef75732 100644
--- a/spec/frontend/diffs/store/getters_versions_dropdowns_spec.js
+++ b/spec/frontend/diffs/store/getters_versions_dropdowns_spec.js
@@ -1,6 +1,9 @@
import * as getters from '~/diffs/store/getters';
import state from '~/diffs/store/modules/diff_state';
-import { DIFF_COMPARE_BASE_VERSION_INDEX } from '~/diffs/constants';
+import {
+ DIFF_COMPARE_BASE_VERSION_INDEX,
+ DIFF_COMPARE_HEAD_VERSION_INDEX,
+} from '~/diffs/constants';
import diffsMockData from '../mock_data/merge_request_diffs';
describe('Compare diff version dropdowns', () => {
@@ -37,47 +40,93 @@ describe('Compare diff version dropdowns', () => {
describe('diffCompareDropdownTargetVersions', () => {
// diffCompareDropdownTargetVersions slices the array at the first position
- // and appends a "base" version which is why we use diffsMockData[1] below
- // This is to display "base" at the end of the target dropdown
- const expectedFirstVersion = {
- ...diffsMockData[1],
- href: expect.any(String),
- versionName: expect.any(String),
+ // and appends a "base" and "head" version at the end of the list so that
+ // "base" and "head" appear at the bottom of the dropdown
+ // this is also why we use diffsMockData[1] for the "first" version
+
+ let expectedFirstVersion;
+ let expectedBaseVersion;
+ let expectedHeadVersion;
+ const originalLocation = window.location;
+
+ const setupTest = includeDiffHeadParam => {
+ const diffHeadParam = includeDiffHeadParam ? '?diff_head=true' : '';
+
+ Object.defineProperty(window, 'location', {
+ writable: true,
+ value: { href: `https://example.gitlab.com${diffHeadParam}` },
+ });
+
+ expectedFirstVersion = {
+ ...diffsMockData[1],
+ href: expect.any(String),
+ versionName: expect.any(String),
+ selected: false,
+ };
+
+ expectedBaseVersion = {
+ versionName: 'baseVersion',
+ version_index: DIFF_COMPARE_BASE_VERSION_INDEX,
+ href: 'basePath',
+ isBase: true,
+ selected: false,
+ };
+
+ expectedHeadVersion = {
+ versionName: 'baseVersion',
+ version_index: DIFF_COMPARE_HEAD_VERSION_INDEX,
+ href: 'headPath',
+ isHead: true,
+ selected: false,
+ };
};
- const expectedBaseVersion = {
- versionName: 'baseVersion',
- version_index: DIFF_COMPARE_BASE_VERSION_INDEX,
- href: 'basePath',
- isBase: true,
+ const assertVersions = targetVersions => {
+ // base and head should be the last two versions in that order
+ const targetBaseVersion = targetVersions[targetVersions.length - 2];
+ const targetHeadVersion = targetVersions[targetVersions.length - 1];
+ expect(targetVersions[0]).toEqual(expectedFirstVersion);
+ expect(targetBaseVersion).toEqual(expectedBaseVersion);
+ expect(targetHeadVersion).toEqual(expectedHeadVersion);
};
+ afterEach(() => {
+ window.location = originalLocation;
+ });
+
it('base version selected', () => {
- expectedFirstVersion.selected = false;
+ setupTest();
expectedBaseVersion.selected = true;
- const targetVersions = getters.diffCompareDropdownTargetVersions(localState, {
- selectedTargetIndex: DIFF_COMPARE_BASE_VERSION_INDEX,
- });
+ const targetVersions = getters.diffCompareDropdownTargetVersions(localState, getters);
+ assertVersions(targetVersions);
+ });
- const lastVersion = targetVersions[targetVersions.length - 1];
- expect(targetVersions[0]).toEqual(expectedFirstVersion);
- expect(lastVersion).toEqual(expectedBaseVersion);
+ it('head version selected', () => {
+ setupTest(true);
+
+ expectedHeadVersion.selected = true;
+
+ const targetVersions = getters.diffCompareDropdownTargetVersions(localState, getters);
+ assertVersions(targetVersions);
});
it('first version selected', () => {
- expectedFirstVersion.selected = true;
- expectedBaseVersion.selected = false;
+ // NOTE: It should not be possible to have both "diff_head=true" and
+ // have anything other than the head version selected, but the user could
+ // manually add "?diff_head=true" to the url. In this instance we still
+ // want the actual selected version to display as "selected"
+ // Passing in "true" here asserts that first version is still selected
+ // even if "diff_head" is present in the url
+ setupTest(true);
+ expectedFirstVersion.selected = true;
localState.startVersion = expectedFirstVersion;
const targetVersions = getters.diffCompareDropdownTargetVersions(localState, {
selectedTargetIndex: expectedFirstVersion.version_index,
});
-
- const lastVersion = targetVersions[targetVersions.length - 1];
- expect(targetVersions[0]).toEqual(expectedFirstVersion);
- expect(lastVersion).toEqual(expectedBaseVersion);
+ assertVersions(targetVersions);
});
});
diff --git a/spec/frontend/diffs/store/mutations_spec.js b/spec/frontend/diffs/store/mutations_spec.js
index c44feaf4b63..858ab5be167 100644
--- a/spec/frontend/diffs/store/mutations_spec.js
+++ b/spec/frontend/diffs/store/mutations_spec.js
@@ -615,6 +615,73 @@ describe('DiffsStoreMutations', () => {
expect(state.diffFiles[0].highlighted_diff_lines[0].discussions.length).toEqual(1);
expect(state.diffFiles[0].highlighted_diff_lines[0].discussions[0].id).toEqual(1);
});
+
+ it('should add discussions by line_codes and positions attributes', () => {
+ const diffPosition = {
+ base_sha: 'ed13df29948c41ba367caa757ab3ec4892509910',
+ head_sha: 'b921914f9a834ac47e6fd9420f78db0f83559130',
+ new_line: null,
+ new_path: '500-lines-4.txt',
+ old_line: 5,
+ old_path: '500-lines-4.txt',
+ start_sha: 'ed13df29948c41ba367caa757ab3ec4892509910',
+ };
+
+ const state = {
+ latestDiff: true,
+ diffFiles: [
+ {
+ file_hash: 'ABC',
+ parallel_diff_lines: [
+ {
+ left: {
+ line_code: 'ABC_1',
+ discussions: [],
+ },
+ right: {
+ line_code: 'ABC_1',
+ discussions: [],
+ },
+ },
+ ],
+ highlighted_diff_lines: [
+ {
+ line_code: 'ABC_1',
+ discussions: [],
+ },
+ ],
+ },
+ ],
+ };
+ const discussion = {
+ id: 1,
+ line_code: 'ABC_2',
+ line_codes: ['ABC_1'],
+ diff_discussion: true,
+ resolvable: true,
+ original_position: {},
+ position: {},
+ positions: [diffPosition],
+ diff_file: {
+ file_hash: state.diffFiles[0].file_hash,
+ },
+ };
+
+ const diffPositionByLineCode = {
+ ABC_1: diffPosition,
+ };
+
+ mutations[types.SET_LINE_DISCUSSIONS_FOR_FILE](state, {
+ discussion,
+ diffPositionByLineCode,
+ });
+
+ expect(state.diffFiles[0].parallel_diff_lines[0].left.discussions).toHaveLength(1);
+ expect(state.diffFiles[0].parallel_diff_lines[0].left.discussions[0].id).toBe(1);
+
+ expect(state.diffFiles[0].highlighted_diff_lines[0].discussions).toHaveLength(1);
+ expect(state.diffFiles[0].highlighted_diff_lines[0].discussions[0].id).toBe(1);
+ });
});
describe('REMOVE_LINE_DISCUSSIONS', () => {
diff --git a/spec/frontend/helpers/dom_events_helper.js b/spec/frontend/helpers/dom_events_helper.js
new file mode 100644
index 00000000000..b66c12daf4f
--- /dev/null
+++ b/spec/frontend/helpers/dom_events_helper.js
@@ -0,0 +1,10 @@
+export const triggerDOMEvent = type => {
+ window.document.dispatchEvent(
+ new Event(type, {
+ bubbles: true,
+ cancelable: true,
+ }),
+ );
+};
+
+export default () => {};
diff --git a/spec/frontend/jira_import/components/jira_import_app_spec.js b/spec/frontend/jira_import/components/jira_import_app_spec.js
index fb3ffe1ede3..ce32559d5c9 100644
--- a/spec/frontend/jira_import/components/jira_import_app_spec.js
+++ b/spec/frontend/jira_import/components/jira_import_app_spec.js
@@ -1,38 +1,213 @@
+import { GlAlert, GlLoadingIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
+import Vue from 'vue';
import JiraImportApp from '~/jira_import/components/jira_import_app.vue';
+import JiraImportForm from '~/jira_import/components/jira_import_form.vue';
+import JiraImportProgress from '~/jira_import/components/jira_import_progress.vue';
import JiraImportSetup from '~/jira_import/components/jira_import_setup.vue';
+import initiateJiraImportMutation from '~/jira_import/queries/initiate_jira_import.mutation.graphql';
+import { IMPORT_STATE } from '~/jira_import/utils';
+
+const mountComponent = ({
+ isJiraConfigured = true,
+ errorMessage = '',
+ showAlert = true,
+ status = IMPORT_STATE.NONE,
+ loading = false,
+ mutate = jest.fn(() => Promise.resolve()),
+} = {}) =>
+ shallowMount(JiraImportApp, {
+ propsData: {
+ isJiraConfigured,
+ inProgressIllustration: 'in-progress-illustration.svg',
+ issuesPath: 'gitlab-org/gitlab-test/-/issues',
+ jiraProjects: [
+ ['My Jira Project', 'MJP'],
+ ['My Second Jira Project', 'MSJP'],
+ ['Migrate to GitLab', 'MTG'],
+ ],
+ projectPath: 'gitlab-org/gitlab-test',
+ setupIllustration: 'setup-illustration.svg',
+ },
+ data() {
+ return {
+ errorMessage,
+ showAlert,
+ jiraImportDetails: {
+ status,
+ import: {
+ jiraProjectKey: 'MTG',
+ scheduledAt: '2020-04-08T12:17:25+00:00',
+ scheduledBy: {
+ name: 'Jane Doe',
+ },
+ },
+ },
+ };
+ },
+ mocks: {
+ $apollo: {
+ loading,
+ mutate,
+ },
+ },
+ });
describe('JiraImportApp', () => {
let wrapper;
+ const getFormComponent = () => wrapper.find(JiraImportForm);
+
+ const getProgressComponent = () => wrapper.find(JiraImportProgress);
+
+ const getSetupComponent = () => wrapper.find(JiraImportSetup);
+
+ const getAlert = () => wrapper.find(GlAlert);
+
+ const getLoadingIcon = () => wrapper.find(GlLoadingIcon);
+
afterEach(() => {
wrapper.destroy();
wrapper = null;
});
- describe('set up Jira integration page', () => {
+ describe('when Jira integration is not configured', () => {
+ beforeEach(() => {
+ wrapper = mountComponent({ isJiraConfigured: false });
+ });
+
+ it('shows the "Set up Jira integration" screen', () => {
+ expect(getSetupComponent().exists()).toBe(true);
+ });
+
+ it('does not show loading icon', () => {
+ expect(getLoadingIcon().exists()).toBe(false);
+ });
+
+ it('does not show the "Import in progress" screen', () => {
+ expect(getProgressComponent().exists()).toBe(false);
+ });
+
+ it('does not show the "Import Jira project" form', () => {
+ expect(getFormComponent().exists()).toBe(false);
+ });
+ });
+
+ describe('when Jira integration is configured but data is being fetched', () => {
+ beforeEach(() => {
+ wrapper = mountComponent({ loading: true });
+ });
+
+ it('does not show the "Set up Jira integration" screen', () => {
+ expect(getSetupComponent().exists()).toBe(false);
+ });
+
+ it('shows loading icon', () => {
+ expect(getLoadingIcon().exists()).toBe(true);
+ });
+
+ it('does not show the "Import in progress" screen', () => {
+ expect(getProgressComponent().exists()).toBe(false);
+ });
+
+ it('does not show the "Import Jira project" form', () => {
+ expect(getFormComponent().exists()).toBe(false);
+ });
+ });
+
+ describe('when Jira integration is configured but import is in progress', () => {
+ beforeEach(() => {
+ wrapper = mountComponent({ status: IMPORT_STATE.SCHEDULED });
+ });
+
+ it('does not show the "Set up Jira integration" screen', () => {
+ expect(getSetupComponent().exists()).toBe(false);
+ });
+
+ it('does not show loading icon', () => {
+ expect(getLoadingIcon().exists()).toBe(false);
+ });
+
+ it('shows the "Import in progress" screen', () => {
+ expect(getProgressComponent().exists()).toBe(true);
+ });
+
+ it('does not show the "Import Jira project" form', () => {
+ expect(getFormComponent().exists()).toBe(false);
+ });
+ });
+
+ describe('when Jira integration is configured and there is no import in progress', () => {
beforeEach(() => {
- wrapper = shallowMount(JiraImportApp, {
- propsData: {
- isJiraConfigured: true,
- projectPath: 'gitlab-org/gitlab-test',
- setupIllustration: 'illustration.svg',
+ wrapper = mountComponent();
+ });
+
+ it('does not show the "Set up Jira integration" screen', () => {
+ expect(getSetupComponent().exists()).toBe(false);
+ });
+
+ it('does not show loading icon', () => {
+ expect(getLoadingIcon().exists()).toBe(false);
+ });
+
+ it('does not show the Import in progress" screen', () => {
+ expect(getProgressComponent().exists()).toBe(false);
+ });
+
+ it('shows the "Import Jira project" form', () => {
+ expect(getFormComponent().exists()).toBe(true);
+ });
+ });
+
+ describe('initiating a Jira import', () => {
+ it('calls the mutation with the expected arguments', () => {
+ const mutate = jest.fn(() => Promise.resolve());
+
+ wrapper = mountComponent({ mutate });
+
+ const mutationArguments = {
+ mutation: initiateJiraImportMutation,
+ variables: {
+ input: {
+ jiraProjectKey: 'MTG',
+ projectPath: 'gitlab-org/gitlab-test',
+ },
},
- });
+ };
+
+ getFormComponent().vm.$emit('initiateJiraImport', 'MTG');
+
+ expect(mutate).toHaveBeenCalledWith(expect.objectContaining(mutationArguments));
});
- it('is shown when Jira integration is not configured', () => {
- wrapper.setProps({
- isJiraConfigured: false,
- });
+ it('shows alert message with error message on error', () => {
+ const mutate = jest.fn(() => Promise.reject());
+
+ wrapper = mountComponent({ mutate });
+
+ getFormComponent().vm.$emit('initiateJiraImport', 'MTG');
+
+ // One tick doesn't update the dom to the desired state so we have two ticks here
+ return Vue.nextTick()
+ .then(Vue.nextTick)
+ .then(() => {
+ expect(getAlert().text()).toBe('There was an error importing the Jira project.');
+ });
+ });
+ });
- return wrapper.vm.$nextTick(() => {
- expect(wrapper.find(JiraImportSetup).exists()).toBe(true);
- });
+ it('can dismiss alert message', () => {
+ wrapper = mountComponent({
+ errorMessage: 'There was an error importing the Jira project.',
+ showAlert: true,
});
- it('is not shown when Jira integration is configured', () => {
- expect(wrapper.find(JiraImportSetup).exists()).toBe(false);
+ expect(getAlert().exists()).toBe(true);
+
+ getAlert().vm.$emit('dismiss');
+
+ return Vue.nextTick().then(() => {
+ expect(getAlert().exists()).toBe(false);
});
});
});
diff --git a/spec/frontend/jira_import/components/jira_import_form_spec.js b/spec/frontend/jira_import/components/jira_import_form_spec.js
index 3215ff26bdd..0987eb11693 100644
--- a/spec/frontend/jira_import/components/jira_import_form_spec.js
+++ b/spec/frontend/jira_import/components/jira_import_form_spec.js
@@ -1,62 +1,126 @@
import { GlAvatar, GlButton, GlFormSelect, GlLabel } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
+import { mount, shallowMount } from '@vue/test-utils';
import JiraImportForm from '~/jira_import/components/jira_import_form.vue';
+const mountComponent = ({ mountType } = {}) => {
+ const mountFunction = mountType === 'mount' ? mount : shallowMount;
+
+ return mountFunction(JiraImportForm, {
+ propsData: {
+ issuesPath: 'gitlab-org/gitlab-test/-/issues',
+ jiraProjects: [
+ {
+ text: 'My Jira Project',
+ value: 'MJP',
+ },
+ {
+ text: 'My Second Jira Project',
+ value: 'MSJP',
+ },
+ {
+ text: 'Migrate to GitLab',
+ value: 'MTG',
+ },
+ ],
+ },
+ });
+};
+
describe('JiraImportForm', () => {
let wrapper;
- beforeEach(() => {
- wrapper = shallowMount(JiraImportForm);
- });
+ const getCancelButton = () => wrapper.findAll(GlButton).at(1);
afterEach(() => {
wrapper.destroy();
wrapper = null;
});
- it('shows a dropdown to choose the Jira project to import from', () => {
- expect(wrapper.find(GlFormSelect).exists()).toBe(true);
- });
+ describe('select dropdown', () => {
+ it('is shown', () => {
+ wrapper = mountComponent();
- it('shows a label which will be applied to imported Jira projects', () => {
- expect(wrapper.find(GlLabel).attributes('title')).toBe('jira-import::KEY-1');
- });
+ expect(wrapper.find(GlFormSelect).exists()).toBe(true);
+ });
- it('shows information to the user', () => {
- expect(wrapper.find('p').text()).toBe(
- "For each Jira issue successfully imported, we'll create a new GitLab issue with the following data:",
- );
- });
+ it('contains a list of Jira projects to select from', () => {
+ wrapper = mountComponent({ mountType: 'mount' });
- it('shows jira.issue.summary for the Title', () => {
- expect(wrapper.find('[id="jira-project-title"]').text()).toBe('jira.issue.summary');
+ const optionItems = ['My Jira Project', 'My Second Jira Project', 'Migrate to GitLab'];
+
+ wrapper
+ .find(GlFormSelect)
+ .findAll('option')
+ .wrappers.forEach((optionEl, index) => {
+ expect(optionEl.text()).toBe(optionItems[index]);
+ });
+ });
});
- it('shows an avatar for the Reporter', () => {
- expect(wrapper.find(GlAvatar).exists()).toBe(true);
+ describe('form information', () => {
+ beforeEach(() => {
+ wrapper = mountComponent();
+ });
+
+ it('shows a label which will be applied to imported Jira projects', () => {
+ expect(wrapper.find(GlLabel).attributes('title')).toBe('jira-import::KEY-1');
+ });
+
+ it('shows information to the user', () => {
+ expect(wrapper.find('p').text()).toBe(
+ "For each Jira issue successfully imported, we'll create a new GitLab issue with the following data:",
+ );
+ });
+
+ it('shows jira.issue.summary for the Title', () => {
+ expect(wrapper.find('[id="jira-project-title"]').text()).toBe('jira.issue.summary');
+ });
+
+ it('shows an avatar for the Reporter', () => {
+ expect(wrapper.find(GlAvatar).exists()).toBe(true);
+ });
+
+ it('shows jira.issue.description.content for the Description', () => {
+ expect(wrapper.find('[id="jira-project-description"]').text()).toBe(
+ 'jira.issue.description.content',
+ );
+ });
});
- it('shows jira.issue.description.content for the Description', () => {
- expect(wrapper.find('[id="jira-project-description"]').text()).toBe(
- 'jira.issue.description.content',
- );
+ describe('Next button', () => {
+ beforeEach(() => {
+ wrapper = mountComponent();
+ });
+
+ it('is shown', () => {
+ expect(wrapper.find(GlButton).text()).toBe('Next');
+ });
});
- it('shows a Next button', () => {
- const nextButton = wrapper
- .findAll(GlButton)
- .at(0)
- .text();
+ describe('Cancel button', () => {
+ beforeEach(() => {
+ wrapper = mountComponent();
+ });
+
+ it('is shown', () => {
+ expect(getCancelButton().text()).toBe('Cancel');
+ });
- expect(nextButton).toBe('Next');
+ it('links to the Issues page', () => {
+ expect(getCancelButton().attributes('href')).toBe('gitlab-org/gitlab-test/-/issues');
+ });
});
- it('shows a Cancel button', () => {
- const cancelButton = wrapper
- .findAll(GlButton)
- .at(1)
- .text();
+ it('emits an "initiateJiraImport" event with the selected dropdown value when submitted', () => {
+ const selectedOption = 'MTG';
+
+ wrapper = mountComponent();
+ wrapper.setData({
+ selectedOption,
+ });
+
+ wrapper.find('form').trigger('submit');
- expect(cancelButton).toBe('Cancel');
+ expect(wrapper.emitted('initiateJiraImport')[0]).toEqual([selectedOption]);
});
});
diff --git a/spec/frontend/jira_import/components/jira_import_progress_spec.js b/spec/frontend/jira_import/components/jira_import_progress_spec.js
new file mode 100644
index 00000000000..9a6fc3b5925
--- /dev/null
+++ b/spec/frontend/jira_import/components/jira_import_progress_spec.js
@@ -0,0 +1,70 @@
+import { GlEmptyState } from '@gitlab/ui';
+import { mount, shallowMount } from '@vue/test-utils';
+import JiraImportProgress from '~/jira_import/components/jira_import_progress.vue';
+
+describe('JiraImportProgress', () => {
+ let wrapper;
+
+ const getGlEmptyStateAttribute = attribute => wrapper.find(GlEmptyState).attributes(attribute);
+
+ const getParagraphText = () => wrapper.find('p').text();
+
+ const mountComponent = ({ mountType = 'shallowMount' } = {}) => {
+ const mountFunction = mountType === 'shallowMount' ? shallowMount : mount;
+ return mountFunction(JiraImportProgress, {
+ propsData: {
+ illustration: 'illustration.svg',
+ importInitiator: 'Jane Doe',
+ importProject: 'JIRAPROJECT',
+ importTime: '2020-04-08T12:17:25+00:00',
+ issuesPath: 'gitlab-org/gitlab-test/-/issues',
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('empty state', () => {
+ beforeEach(() => {
+ wrapper = mountComponent();
+ });
+
+ it('contains illustration', () => {
+ expect(getGlEmptyStateAttribute('svgpath')).toBe('illustration.svg');
+ });
+
+ it('contains a title', () => {
+ const title = 'Import in progress';
+ expect(getGlEmptyStateAttribute('title')).toBe(title);
+ });
+
+ it('contains button text', () => {
+ expect(getGlEmptyStateAttribute('primarybuttontext')).toBe('View issues');
+ });
+
+ it('contains button url', () => {
+ expect(getGlEmptyStateAttribute('primarybuttonlink')).toBe('gitlab-org/gitlab-test/-/issues');
+ });
+ });
+
+ describe('description', () => {
+ beforeEach(() => {
+ wrapper = mountComponent({ mountType: 'mount' });
+ });
+
+ it('shows who initiated the import', () => {
+ expect(getParagraphText()).toContain('Import started by: Jane Doe');
+ });
+
+ it('shows the time of import', () => {
+ expect(getParagraphText()).toContain('Time of import: Apr 8, 2020 12:17pm GMT+0000');
+ });
+
+ it('shows the project key of the import', () => {
+ expect(getParagraphText()).toContain('Jira project: JIRAPROJECT');
+ });
+ });
+});
diff --git a/spec/frontend/jira_import/components/jira_import_setup_spec.js b/spec/frontend/jira_import/components/jira_import_setup_spec.js
index 27366bd7e8a..834c14b512e 100644
--- a/spec/frontend/jira_import/components/jira_import_setup_spec.js
+++ b/spec/frontend/jira_import/components/jira_import_setup_spec.js
@@ -1,9 +1,12 @@
+import { GlEmptyState } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import JiraImportSetup from '~/jira_import/components/jira_import_setup.vue';
describe('JiraImportSetup', () => {
let wrapper;
+ const getGlEmptyStateAttribute = attribute => wrapper.find(GlEmptyState).attributes(attribute);
+
beforeEach(() => {
wrapper = shallowMount(JiraImportSetup, {
propsData: {
@@ -17,12 +20,16 @@ describe('JiraImportSetup', () => {
wrapper = null;
});
- it('displays a message to the user', () => {
- const message = 'You will first need to set up Jira Integration to use this feature.';
- expect(wrapper.find('p').text()).toBe(message);
+ it('contains illustration', () => {
+ expect(getGlEmptyStateAttribute('svgpath')).toBe('illustration.svg');
+ });
+
+ it('contains a description', () => {
+ const description = 'You will first need to set up Jira Integration to use this feature.';
+ expect(getGlEmptyStateAttribute('description')).toBe(description);
});
- it('contains button to set up Jira integration', () => {
- expect(wrapper.find('a').text()).toBe('Set up Jira Integration');
+ it('contains button text', () => {
+ expect(getGlEmptyStateAttribute('primarybuttontext')).toBe('Set up Jira Integration');
});
});
diff --git a/spec/frontend/jira_import/utils_spec.js b/spec/frontend/jira_import/utils_spec.js
new file mode 100644
index 00000000000..a14db104229
--- /dev/null
+++ b/spec/frontend/jira_import/utils_spec.js
@@ -0,0 +1,27 @@
+import { IMPORT_STATE, isInProgress } from '~/jira_import/utils';
+
+describe('isInProgress', () => {
+ it('returns true when state is IMPORT_STATE.SCHEDULED', () => {
+ expect(isInProgress(IMPORT_STATE.SCHEDULED)).toBe(true);
+ });
+
+ it('returns true when state is IMPORT_STATE.STARTED', () => {
+ expect(isInProgress(IMPORT_STATE.STARTED)).toBe(true);
+ });
+
+ it('returns false when state is IMPORT_STATE.FAILED', () => {
+ expect(isInProgress(IMPORT_STATE.FAILED)).toBe(false);
+ });
+
+ it('returns false when state is IMPORT_STATE.FINISHED', () => {
+ expect(isInProgress(IMPORT_STATE.FINISHED)).toBe(false);
+ });
+
+ it('returns false when state is IMPORT_STATE.NONE', () => {
+ expect(isInProgress(IMPORT_STATE.NONE)).toBe(false);
+ });
+
+ it('returns false when state is undefined', () => {
+ expect(isInProgress()).toBe(false);
+ });
+});
diff --git a/spec/frontend/monitoring/__snapshots__/alert_widget_spec.js.snap b/spec/frontend/monitoring/__snapshots__/alert_widget_spec.js.snap
new file mode 100644
index 00000000000..620ed58bde4
--- /dev/null
+++ b/spec/frontend/monitoring/__snapshots__/alert_widget_spec.js.snap
@@ -0,0 +1,43 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`AlertWidget Alert firing displays a warning icon and matches snapshot 1`] = `
+<gl-badge-stub
+ class="d-flex-center text-truncate"
+ pill=""
+ variant="danger"
+>
+ <gl-icon-stub
+ class="flex-shrink-0"
+ name="warning"
+ size="16"
+ />
+
+ <span
+ class="text-truncate gl-pl-1"
+ >
+ Firing:
+ alert-label &gt; 42
+
+ </span>
+</gl-badge-stub>
+`;
+
+exports[`AlertWidget Alert not firing displays a warning icon and matches snapshot 1`] = `
+<gl-badge-stub
+ class="d-flex-center text-truncate"
+ pill=""
+ variant="secondary"
+>
+ <gl-icon-stub
+ class="flex-shrink-0"
+ name="warning"
+ size="16"
+ />
+
+ <span
+ class="text-truncate gl-pl-1"
+ >
+ alert-label &gt; 42
+ </span>
+</gl-badge-stub>
+`;
diff --git a/spec/frontend/monitoring/alert_widget_spec.js b/spec/frontend/monitoring/alert_widget_spec.js
new file mode 100644
index 00000000000..f0355dfa01b
--- /dev/null
+++ b/spec/frontend/monitoring/alert_widget_spec.js
@@ -0,0 +1,422 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlLoadingIcon, GlTooltip, GlSprintf, GlBadge } from '@gitlab/ui';
+import AlertWidget from '~/monitoring/components/alert_widget.vue';
+import waitForPromises from 'helpers/wait_for_promises';
+import createFlash from '~/flash';
+
+const mockReadAlert = jest.fn();
+const mockCreateAlert = jest.fn();
+const mockUpdateAlert = jest.fn();
+const mockDeleteAlert = jest.fn();
+
+jest.mock('~/flash');
+jest.mock(
+ '~/monitoring/services/alerts_service',
+ () =>
+ function AlertsServiceMock() {
+ return {
+ readAlert: mockReadAlert,
+ createAlert: mockCreateAlert,
+ updateAlert: mockUpdateAlert,
+ deleteAlert: mockDeleteAlert,
+ };
+ },
+);
+
+describe('AlertWidget', () => {
+ let wrapper;
+
+ const nonFiringAlertResult = [
+ {
+ values: [[0, 1], [1, 42], [2, 41]],
+ },
+ ];
+ const firingAlertResult = [
+ {
+ values: [[0, 42], [1, 43], [2, 44]],
+ },
+ ];
+ const metricId = '5';
+ const alertPath = 'my/alert.json';
+
+ const relevantQueries = [
+ {
+ metricId,
+ label: 'alert-label',
+ alert_path: alertPath,
+ result: nonFiringAlertResult,
+ },
+ ];
+
+ const firingRelevantQueries = [
+ {
+ metricId,
+ label: 'alert-label',
+ alert_path: alertPath,
+ result: firingAlertResult,
+ },
+ ];
+
+ const defaultProps = {
+ alertsEndpoint: '',
+ relevantQueries,
+ alertsToManage: {},
+ modalId: 'alert-modal-1',
+ };
+
+ const propsWithAlert = {
+ relevantQueries,
+ };
+
+ const propsWithAlertData = {
+ relevantQueries,
+ alertsToManage: {
+ [alertPath]: { operator: '>', threshold: 42, alert_path: alertPath, metricId },
+ },
+ };
+
+ const createComponent = propsData => {
+ wrapper = shallowMount(AlertWidget, {
+ stubs: { GlTooltip, GlSprintf },
+ propsData: {
+ ...defaultProps,
+ ...propsData,
+ },
+ });
+ };
+ const hasLoadingIcon = () => wrapper.contains(GlLoadingIcon);
+ const findWidgetForm = () => wrapper.find({ ref: 'widgetForm' });
+ const findAlertErrorMessage = () => wrapper.find({ ref: 'alertErrorMessage' });
+ const findCurrentSettingsText = () =>
+ wrapper
+ .find({ ref: 'alertCurrentSetting' })
+ .text()
+ .replace(/\s\s+/g, ' ');
+ const findBadge = () => wrapper.find(GlBadge);
+ const findTooltip = () => wrapper.find(GlTooltip);
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ it('displays a loading spinner and disables form when fetching alerts', () => {
+ let resolveReadAlert;
+ mockReadAlert.mockReturnValue(
+ new Promise(resolve => {
+ resolveReadAlert = resolve;
+ }),
+ );
+ createComponent(defaultProps);
+ return wrapper.vm
+ .$nextTick()
+ .then(() => {
+ expect(hasLoadingIcon()).toBe(true);
+ expect(findWidgetForm().props('disabled')).toBe(true);
+
+ resolveReadAlert({ operator: '==', threshold: 42 });
+ })
+ .then(() => waitForPromises())
+ .then(() => {
+ expect(hasLoadingIcon()).toBe(false);
+ expect(findWidgetForm().props('disabled')).toBe(false);
+ });
+ });
+
+ it('does not render loading spinner if showLoadingState is false', () => {
+ let resolveReadAlert;
+ mockReadAlert.mockReturnValue(
+ new Promise(resolve => {
+ resolveReadAlert = resolve;
+ }),
+ );
+ createComponent({
+ ...defaultProps,
+ showLoadingState: false,
+ });
+ return wrapper.vm
+ .$nextTick()
+ .then(() => {
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
+
+ resolveReadAlert({ operator: '==', threshold: 42 });
+ })
+ .then(() => waitForPromises())
+ .then(() => {
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
+ });
+ });
+
+ it('displays an error message when fetch fails', () => {
+ mockReadAlert.mockRejectedValue();
+ createComponent(propsWithAlert);
+ expect(hasLoadingIcon()).toBe(true);
+
+ return waitForPromises().then(() => {
+ expect(createFlash).toHaveBeenCalled();
+ expect(hasLoadingIcon()).toBe(false);
+ });
+ });
+
+ describe('Alert not firing', () => {
+ it('displays a warning icon and matches snapshot', () => {
+ mockReadAlert.mockResolvedValue({ operator: '>', threshold: 42 });
+ createComponent(propsWithAlertData);
+
+ return waitForPromises().then(() => {
+ expect(findBadge().element).toMatchSnapshot();
+ });
+ });
+
+ it('displays an alert summary when there is a single alert', () => {
+ mockReadAlert.mockResolvedValue({ operator: '>', threshold: 42 });
+ createComponent(propsWithAlertData);
+ return waitForPromises().then(() => {
+ expect(findCurrentSettingsText()).toEqual('alert-label > 42');
+ });
+ });
+
+ it('displays a combined alert summary when there are multiple alerts', () => {
+ mockReadAlert.mockResolvedValue({ operator: '>', threshold: 42 });
+ const propsWithManyAlerts = {
+ relevantQueries: [
+ ...relevantQueries,
+ ...[
+ {
+ metricId: '6',
+ alert_path: 'my/alert2.json',
+ label: 'alert-label2',
+ result: [{ values: [] }],
+ },
+ ],
+ ],
+ alertsToManage: {
+ 'my/alert.json': {
+ operator: '>',
+ threshold: 42,
+ alert_path: alertPath,
+ metricId,
+ },
+ 'my/alert2.json': {
+ operator: '==',
+ threshold: 900,
+ alert_path: 'my/alert2.json',
+ metricId: '6',
+ },
+ },
+ };
+ createComponent(propsWithManyAlerts);
+ return waitForPromises().then(() => {
+ expect(findCurrentSettingsText()).toContain('2 alerts applied');
+ });
+ });
+ });
+
+ describe('Alert firing', () => {
+ it('displays a warning icon and matches snapshot', () => {
+ mockReadAlert.mockResolvedValue({ operator: '>', threshold: 42 });
+ propsWithAlertData.relevantQueries = firingRelevantQueries;
+ createComponent(propsWithAlertData);
+
+ return waitForPromises().then(() => {
+ expect(findBadge().element).toMatchSnapshot();
+ });
+ });
+
+ it('displays an alert summary when there is a single alert', () => {
+ mockReadAlert.mockResolvedValue({ operator: '>', threshold: 42 });
+ propsWithAlertData.relevantQueries = firingRelevantQueries;
+ createComponent(propsWithAlertData);
+ return waitForPromises().then(() => {
+ expect(findCurrentSettingsText()).toEqual('Firing: alert-label > 42');
+ });
+ });
+
+ it('displays a combined alert summary when there are multiple alerts', () => {
+ mockReadAlert.mockResolvedValue({ operator: '>', threshold: 42 });
+ const propsWithManyAlerts = {
+ relevantQueries: [
+ ...firingRelevantQueries,
+ ...[
+ {
+ metricId: '6',
+ alert_path: 'my/alert2.json',
+ label: 'alert-label2',
+ result: [{ values: [] }],
+ },
+ ],
+ ],
+ alertsToManage: {
+ 'my/alert.json': {
+ operator: '>',
+ threshold: 42,
+ alert_path: alertPath,
+ metricId,
+ },
+ 'my/alert2.json': {
+ operator: '==',
+ threshold: 900,
+ alert_path: 'my/alert2.json',
+ metricId: '6',
+ },
+ },
+ };
+ createComponent(propsWithManyAlerts);
+
+ return waitForPromises().then(() => {
+ expect(findCurrentSettingsText()).toContain('2 alerts applied, 1 firing');
+ });
+ });
+
+ it('should display tooltip with thresholds summary', () => {
+ mockReadAlert.mockResolvedValue({ operator: '>', threshold: 42 });
+ const propsWithManyAlerts = {
+ relevantQueries: [
+ ...firingRelevantQueries,
+ ...[
+ {
+ metricId: '6',
+ alert_path: 'my/alert2.json',
+ label: 'alert-label2',
+ result: [{ values: [] }],
+ },
+ ],
+ ],
+ alertsToManage: {
+ 'my/alert.json': {
+ operator: '>',
+ threshold: 42,
+ alert_path: alertPath,
+ metricId,
+ },
+ 'my/alert2.json': {
+ operator: '==',
+ threshold: 900,
+ alert_path: 'my/alert2.json',
+ metricId: '6',
+ },
+ },
+ };
+ createComponent(propsWithManyAlerts);
+
+ return waitForPromises().then(() => {
+ expect(
+ findTooltip()
+ .text()
+ .replace(/\s\s+/g, ' '),
+ ).toEqual('Firing: alert-label > 42');
+ });
+ });
+ });
+
+ it('creates an alert with an appropriate handler', () => {
+ const alertParams = {
+ operator: '<',
+ threshold: 4,
+ prometheus_metric_id: '5',
+ };
+ mockReadAlert.mockResolvedValue({ operator: '>', threshold: 42 });
+ const fakeAlertPath = 'foo/bar';
+ mockCreateAlert.mockResolvedValue({ alert_path: fakeAlertPath, ...alertParams });
+ createComponent({
+ alertsToManage: {
+ [fakeAlertPath]: {
+ alert_path: fakeAlertPath,
+ operator: '<',
+ threshold: 4,
+ prometheus_metric_id: '5',
+ metricId: '5',
+ },
+ },
+ });
+
+ findWidgetForm().vm.$emit('create', alertParams);
+
+ expect(mockCreateAlert).toHaveBeenCalledWith(alertParams);
+ });
+
+ it('updates an alert with an appropriate handler', () => {
+ const alertParams = { operator: '<', threshold: 4, alert_path: alertPath };
+ const newAlertParams = { operator: '==', threshold: 12 };
+ mockReadAlert.mockResolvedValue(alertParams);
+ mockUpdateAlert.mockResolvedValue({ ...alertParams, ...newAlertParams });
+ createComponent({
+ ...propsWithAlertData,
+ alertsToManage: {
+ [alertPath]: {
+ alert_path: alertPath,
+ operator: '==',
+ threshold: 12,
+ metricId: '5',
+ },
+ },
+ });
+
+ findWidgetForm().vm.$emit('update', {
+ alert: alertPath,
+ ...newAlertParams,
+ prometheus_metric_id: '5',
+ });
+
+ expect(mockUpdateAlert).toHaveBeenCalledWith(alertPath, newAlertParams);
+ });
+
+ it('deletes an alert with an appropriate handler', () => {
+ const alertParams = { alert_path: alertPath, operator: '>', threshold: 42 };
+ mockReadAlert.mockResolvedValue(alertParams);
+ mockDeleteAlert.mockResolvedValue({});
+ createComponent({
+ ...propsWithAlert,
+ alertsToManage: {
+ [alertPath]: {
+ alert_path: alertPath,
+ operator: '>',
+ threshold: 42,
+ metricId: '5',
+ },
+ },
+ });
+
+ findWidgetForm().vm.$emit('delete', { alert: alertPath });
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(mockDeleteAlert).toHaveBeenCalledWith(alertPath);
+ expect(findAlertErrorMessage().exists()).toBe(false);
+ });
+ });
+
+ describe('when delete fails', () => {
+ beforeEach(() => {
+ const alertParams = { alert_path: alertPath, operator: '>', threshold: 42 };
+ mockReadAlert.mockResolvedValue(alertParams);
+ mockDeleteAlert.mockRejectedValue();
+
+ createComponent({
+ ...propsWithAlert,
+ alertsToManage: {
+ [alertPath]: {
+ alert_path: alertPath,
+ operator: '>',
+ threshold: 42,
+ metricId: '5',
+ },
+ },
+ });
+
+ findWidgetForm().vm.$emit('delete', { alert: alertPath });
+ return wrapper.vm.$nextTick();
+ });
+
+ it('shows error message', () => {
+ expect(findAlertErrorMessage().text()).toEqual('Error deleting alert');
+ });
+
+ it('dismisses error message on cancel', () => {
+ findWidgetForm().vm.$emit('cancel');
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(findAlertErrorMessage().exists()).toBe(false);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap b/spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap
index d968b042ff1..1906ad7c6ed 100644
--- a/spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap
+++ b/spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap
@@ -6,101 +6,106 @@ exports[`Dashboard template matches the default snapshot 1`] = `
data-qa-selector="prometheus_graphs"
>
<div
- class="prometheus-graphs-header gl-p-3 pb-0 border-bottom bg-gray-light"
+ class="prometheus-graphs-header d-sm-flex flex-sm-wrap pt-2 pr-1 pb-0 pl-2 border-bottom bg-gray-light"
>
<div
- class="row"
+ class="mb-2 pr-2 d-flex d-sm-block"
>
- <gl-form-group-stub
- class="col-sm-12 col-md-6 col-lg-2"
- label="Dashboard"
- label-for="monitor-dashboards-dropdown"
- label-size="sm"
- >
- <dashboards-dropdown-stub
- class="mb-0 d-flex"
- data-qa-selector="dashboards_filter_dropdown"
- defaultbranch="master"
- id="monitor-dashboards-dropdown"
- selecteddashboard="[object Object]"
- toggle-class="dropdown-menu-toggle"
- />
- </gl-form-group-stub>
-
- <gl-form-group-stub
- class="col-sm-6 col-md-6 col-lg-2"
- label="Environment"
- label-for="monitor-environments-dropdown"
- label-size="sm"
+ <dashboards-dropdown-stub
+ class="flex-grow-1"
+ data-qa-selector="dashboards_filter_dropdown"
+ defaultbranch="master"
+ id="monitor-dashboards-dropdown"
+ selecteddashboard="[object Object]"
+ toggle-class="dropdown-menu-toggle"
+ />
+ </div>
+
+ <div
+ class="mb-2 pr-2 d-flex d-sm-block"
+ >
+ <gl-dropdown-stub
+ class="flex-grow-1"
+ data-qa-selector="environments_dropdown"
+ id="monitor-environments-dropdown"
+ menu-class="monitor-environment-dropdown-menu"
+ text="production"
+ toggle-class="dropdown-menu-toggle"
>
- <gl-dropdown-stub
- class="mb-0 d-flex"
- data-qa-selector="environments_dropdown"
- id="monitor-environments-dropdown"
- menu-class="monitor-environment-dropdown-menu"
- text="production"
- toggle-class="dropdown-menu-toggle"
+ <div
+ class="d-flex flex-column overflow-hidden"
>
+ <gl-dropdown-header-stub
+ class="monitor-environment-dropdown-header text-center"
+ >
+
+ Environment
+
+ </gl-dropdown-header-stub>
+
+ <gl-dropdown-divider-stub />
+
+ <gl-search-box-by-type-stub
+ class="m-2"
+ clearbuttontitle="Clear"
+ value=""
+ />
+
+ <div
+ class="flex-fill overflow-auto"
+ />
+
<div
- class="d-flex flex-column overflow-hidden"
+ class="text-secondary no-matches-message"
>
- <gl-dropdown-header-stub
- class="monitor-environment-dropdown-header text-center"
- >
- Environment
- </gl-dropdown-header-stub>
-
- <gl-dropdown-divider-stub />
-
- <gl-search-box-by-type-stub
- class="m-2"
- clearbuttontitle="Clear"
- value=""
- />
-
- <div
- class="flex-fill overflow-auto"
- />
-
- <div
- class="text-secondary no-matches-message"
- >
-
- No matching results
- </div>
+ No matching results
+
</div>
- </gl-dropdown-stub>
- </gl-form-group-stub>
-
- <gl-form-group-stub
- class="col-sm-auto col-md-auto col-lg-auto"
+ </div>
+ </gl-dropdown-stub>
+ </div>
+
+ <div
+ class="mb-2 pr-2 d-flex d-sm-block"
+ >
+ <date-time-picker-stub
+ class="flex-grow-1 show-last-dropdown"
+ customenabled="true"
data-qa-selector="show_last_dropdown"
- label="Show last"
- label-for="monitor-time-window-dropdown"
- label-size="sm"
+ options="[object Object],[object Object],[object Object],[object Object],[object Object],[object Object],[object Object]"
+ value="[object Object]"
+ />
+ </div>
+
+ <div
+ class="mb-2 pr-2 d-flex d-sm-block"
+ >
+ <gl-deprecated-button-stub
+ class="flex-grow-1"
+ size="md"
+ title="Refresh dashboard"
+ variant="default"
>
- <date-time-picker-stub
- customenabled="true"
- options="[object Object],[object Object],[object Object],[object Object],[object Object],[object Object],[object Object]"
- value="[object Object]"
+ <icon-stub
+ name="retry"
+ size="16"
/>
- </gl-form-group-stub>
+ </gl-deprecated-button-stub>
+ </div>
+
+ <div
+ class="flex-grow-1"
+ />
+
+ <div
+ class="d-sm-flex"
+ >
+ <!---->
- <gl-form-group-stub
- class="col-sm-2 col-md-2 col-lg-1 refresh-dashboard-button"
- >
- <gl-deprecated-button-stub
- size="md"
- title="Refresh dashboard"
- variant="default"
- >
- <icon-stub
- name="retry"
- size="16"
- />
- </gl-deprecated-button-stub>
- </gl-form-group-stub>
+ <!---->
+
+ <!---->
<!---->
</div>
diff --git a/spec/frontend/monitoring/components/alert_widget_form_spec.js b/spec/frontend/monitoring/components/alert_widget_form_spec.js
new file mode 100644
index 00000000000..635e4bc1372
--- /dev/null
+++ b/spec/frontend/monitoring/components/alert_widget_form_spec.js
@@ -0,0 +1,188 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlLink } from '@gitlab/ui';
+import AlertWidgetForm from '~/monitoring/components/alert_widget_form.vue';
+import ModalStub from '../stubs/modal_stub';
+
+describe('AlertWidgetForm', () => {
+ let wrapper;
+
+ const metricId = '8';
+ const alertPath = 'alert';
+ const relevantQueries = [{ metricId, alert_path: alertPath, label: 'alert-label' }];
+ const dataTrackingOptions = {
+ create: { action: 'click_button', label: 'create_alert' },
+ delete: { action: 'click_button', label: 'delete_alert' },
+ update: { action: 'click_button', label: 'update_alert' },
+ };
+
+ const defaultProps = {
+ disabled: false,
+ relevantQueries,
+ modalId: 'alert-modal-1',
+ };
+
+ const propsWithAlertData = {
+ ...defaultProps,
+ alertsToManage: {
+ alert: { alert_path: alertPath, operator: '<', threshold: 5, metricId },
+ },
+ configuredAlert: metricId,
+ };
+
+ function createComponent(props = {}) {
+ const propsData = {
+ ...defaultProps,
+ ...props,
+ };
+
+ wrapper = shallowMount(AlertWidgetForm, {
+ propsData,
+ stubs: {
+ GlModal: ModalStub,
+ },
+ });
+ }
+
+ const modal = () => wrapper.find(ModalStub);
+ const modalTitle = () => modal().attributes('title');
+ const submitButton = () => modal().find(GlLink);
+ const submitButtonTrackingOpts = () =>
+ JSON.parse(submitButton().attributes('data-tracking-options'));
+ const e = {
+ preventDefault: jest.fn(),
+ };
+
+ beforeEach(() => {
+ e.preventDefault.mockReset();
+ });
+
+ afterEach(() => {
+ if (wrapper) wrapper.destroy();
+ });
+
+ it('disables the form when disabled prop is set', () => {
+ createComponent({ disabled: true });
+
+ expect(modal().attributes('ok-disabled')).toBe('true');
+ });
+
+ it('disables the form if no query is selected', () => {
+ createComponent();
+
+ expect(modal().attributes('ok-disabled')).toBe('true');
+ });
+
+ it('shows correct title and button text', () => {
+ expect(modalTitle()).toBe('Add alert');
+ expect(submitButton().text()).toBe('Add');
+ });
+
+ it('sets tracking options for create alert', () => {
+ expect(submitButtonTrackingOpts()).toEqual(dataTrackingOptions.create);
+ });
+
+ it('emits a "create" event when form submitted without existing alert', () => {
+ createComponent();
+
+ wrapper.vm.selectQuery('9');
+ wrapper.setData({
+ threshold: 900,
+ });
+
+ wrapper.vm.handleSubmit(e);
+
+ expect(wrapper.emitted().create[0]).toEqual([
+ {
+ alert: undefined,
+ operator: '>',
+ threshold: 900,
+ prometheus_metric_id: '9',
+ },
+ ]);
+ expect(e.preventDefault).toHaveBeenCalledTimes(1);
+ });
+
+ it('resets form when modal is dismissed (hidden)', () => {
+ createComponent();
+
+ wrapper.vm.selectQuery('9');
+ wrapper.vm.selectQuery('>');
+ wrapper.setData({
+ threshold: 800,
+ });
+
+ modal().vm.$emit('hidden');
+
+ expect(wrapper.vm.selectedAlert).toEqual({});
+ expect(wrapper.vm.operator).toBe(null);
+ expect(wrapper.vm.threshold).toBe(null);
+ expect(wrapper.vm.prometheusMetricId).toBe(null);
+ });
+
+ it('sets selectedAlert to the provided configuredAlert on modal show', () => {
+ createComponent(propsWithAlertData);
+
+ modal().vm.$emit('shown');
+
+ expect(wrapper.vm.selectedAlert).toEqual(propsWithAlertData.alertsToManage[alertPath]);
+ });
+
+ describe('with existing alert', () => {
+ beforeEach(() => {
+ createComponent(propsWithAlertData);
+
+ wrapper.vm.selectQuery(metricId);
+ });
+
+ it('sets tracking options for delete alert', () => {
+ expect(submitButtonTrackingOpts()).toEqual(dataTrackingOptions.delete);
+ });
+
+ it('updates button text', () => {
+ expect(modalTitle()).toBe('Edit alert');
+ expect(submitButton().text()).toBe('Delete');
+ });
+
+ it('emits "delete" event when form values unchanged', () => {
+ wrapper.vm.handleSubmit(e);
+
+ expect(wrapper.emitted().delete[0]).toEqual([
+ {
+ alert: 'alert',
+ operator: '<',
+ threshold: 5,
+ prometheus_metric_id: '8',
+ },
+ ]);
+ expect(e.preventDefault).toHaveBeenCalledTimes(1);
+ });
+
+ it('emits "update" event when form changed', () => {
+ wrapper.setData({
+ threshold: 11,
+ });
+
+ wrapper.vm.handleSubmit(e);
+
+ expect(wrapper.emitted().update[0]).toEqual([
+ {
+ alert: 'alert',
+ operator: '<',
+ threshold: 11,
+ prometheus_metric_id: '8',
+ },
+ ]);
+ expect(e.preventDefault).toHaveBeenCalledTimes(1);
+ });
+
+ it('sets tracking options for update alert', () => {
+ wrapper.setData({
+ threshold: 11,
+ });
+
+ return wrapper.vm.$nextTick(() => {
+ expect(submitButtonTrackingOpts()).toEqual(dataTrackingOptions.update);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/monitoring/components/charts/annotations_spec.js b/spec/frontend/monitoring/components/charts/annotations_spec.js
index 69bf1fe4ced..fc90175d307 100644
--- a/spec/frontend/monitoring/components/charts/annotations_spec.js
+++ b/spec/frontend/monitoring/components/charts/annotations_spec.js
@@ -54,6 +54,7 @@ describe('annotations spec', () => {
yAxisIndex: 1,
data: expect.any(Array),
markLine: expect.any(Object),
+ markPoint: expect.any(Object),
}),
);
@@ -61,11 +62,12 @@ describe('annotations spec', () => {
expect(annotation).toEqual(expect.any(Object));
});
- expect(annotations.data).toHaveLength(annotationsData.length);
+ expect(annotations.data).toHaveLength(0);
expect(annotations.markLine.data).toHaveLength(annotationsData.length);
+ expect(annotations.markPoint.data).toHaveLength(annotationsData.length);
});
- it('when deploments and annotations data is passed', () => {
+ it('when deployments and annotations data is passed', () => {
const annotations = generateAnnotationsSeries({
deployments: deploymentData,
annotations: annotationsData,
@@ -77,6 +79,7 @@ describe('annotations spec', () => {
yAxisIndex: 1,
data: expect.any(Array),
markLine: expect.any(Object),
+ markPoint: expect.any(Object),
}),
);
@@ -84,7 +87,9 @@ describe('annotations spec', () => {
expect(annotation).toEqual(expect.any(Object));
});
- expect(annotations.data).toHaveLength(deploymentData.length + annotationsData.length);
+ expect(annotations.data).toHaveLength(deploymentData.length);
+ expect(annotations.markLine.data).toHaveLength(annotationsData.length);
+ expect(annotations.markPoint.data).toHaveLength(annotationsData.length);
});
});
});
diff --git a/spec/frontend/monitoring/components/charts/options_spec.js b/spec/frontend/monitoring/components/charts/options_spec.js
index d219a6627bf..1c8fdc01e3e 100644
--- a/spec/frontend/monitoring/components/charts/options_spec.js
+++ b/spec/frontend/monitoring/components/charts/options_spec.js
@@ -31,7 +31,32 @@ describe('options spec', () => {
});
});
- it('formatter options', () => {
+ it('formatter options defaults to engineering notation', () => {
+ const options = getYAxisOptions();
+
+ expect(options.axisLabel.formatter).toEqual(expect.any(Function));
+ expect(options.axisLabel.formatter(3002.1)).toBe('3k');
+ });
+
+ it('formatter options allows for precision to be set explicitly', () => {
+ const options = getYAxisOptions({
+ precision: 4,
+ });
+
+ expect(options.axisLabel.formatter).toEqual(expect.any(Function));
+ expect(options.axisLabel.formatter(5002.1)).toBe('5.0021k');
+ });
+
+ it('formatter options allows for overrides in milliseconds', () => {
+ const options = getYAxisOptions({
+ format: SUPPORTED_FORMATS.milliseconds,
+ });
+
+ expect(options.axisLabel.formatter).toEqual(expect.any(Function));
+ expect(options.axisLabel.formatter(1.1234)).toBe('1.12ms');
+ });
+
+ it('formatter options allows for overrides in bytes', () => {
const options = getYAxisOptions({
format: SUPPORTED_FORMATS.bytes,
});
@@ -46,7 +71,7 @@ describe('options spec', () => {
const formatter = getTooltipFormatter();
expect(formatter).toEqual(expect.any(Function));
- expect(formatter(1)).toBe('1.000');
+ expect(formatter(0.11111)).toBe('111.1m');
});
it('defined format', () => {
diff --git a/spec/frontend/monitoring/components/charts/single_stat_spec.js b/spec/frontend/monitoring/components/charts/single_stat_spec.js
index fb0682d0338..9cc5970da82 100644
--- a/spec/frontend/monitoring/components/charts/single_stat_spec.js
+++ b/spec/frontend/monitoring/components/charts/single_stat_spec.js
@@ -1,6 +1,6 @@
import { shallowMount } from '@vue/test-utils';
import SingleStatChart from '~/monitoring/components/charts/single_stat.vue';
-import { graphDataPrometheusQuery } from '../../mock_data';
+import { singleStatMetricsResult } from '../../mock_data';
describe('Single Stat Chart component', () => {
let singleStatChart;
@@ -8,7 +8,7 @@ describe('Single Stat Chart component', () => {
beforeEach(() => {
singleStatChart = shallowMount(SingleStatChart, {
propsData: {
- graphData: graphDataPrometheusQuery,
+ graphData: singleStatMetricsResult,
},
});
});
@@ -26,7 +26,7 @@ describe('Single Stat Chart component', () => {
it('should change the value representation to a percentile one', () => {
singleStatChart.setProps({
graphData: {
- ...graphDataPrometheusQuery,
+ ...singleStatMetricsResult,
maxValue: 120,
},
});
@@ -37,7 +37,7 @@ describe('Single Stat Chart component', () => {
it('should display NaN for non numeric maxValue values', () => {
singleStatChart.setProps({
graphData: {
- ...graphDataPrometheusQuery,
+ ...singleStatMetricsResult,
maxValue: 'not a number',
},
});
@@ -48,13 +48,13 @@ describe('Single Stat Chart component', () => {
it('should display NaN for missing query values', () => {
singleStatChart.setProps({
graphData: {
- ...graphDataPrometheusQuery,
+ ...singleStatMetricsResult,
metrics: [
{
- ...graphDataPrometheusQuery.metrics[0],
+ ...singleStatMetricsResult.metrics[0],
result: [
{
- ...graphDataPrometheusQuery.metrics[0].result[0],
+ ...singleStatMetricsResult.metrics[0].result[0],
value: [''],
},
],
diff --git a/spec/frontend/monitoring/components/charts/time_series_spec.js b/spec/frontend/monitoring/components/charts/time_series_spec.js
index 53463aa3358..c05bf1a547d 100644
--- a/spec/frontend/monitoring/components/charts/time_series_spec.js
+++ b/spec/frontend/monitoring/components/charts/time_series_spec.js
@@ -11,9 +11,10 @@ import {
import { cloneDeep } from 'lodash';
import { shallowWrapperContainsSlotText } from 'helpers/vue_test_utils_helper';
import { createStore } from '~/monitoring/stores';
+import { panelTypes } from '~/monitoring/constants';
import TimeSeries from '~/monitoring/components/charts/time_series.vue';
import * as types from '~/monitoring/stores/mutation_types';
-import { deploymentData, mockProjectDir } from '../../mock_data';
+import { deploymentData, mockProjectDir, annotationsData } from '../../mock_data';
import {
metricsDashboardPayload,
metricsDashboardViewModel,
@@ -278,6 +279,33 @@ describe('Time series component', () => {
});
});
+ describe('formatAnnotationsTooltipText', () => {
+ const annotationsMetadata = {
+ name: 'annotations',
+ xAxis: annotationsData[0].from,
+ yAxis: 0,
+ tooltipData: {
+ title: '2020/02/19 10:01:41',
+ content: annotationsData[0].description,
+ },
+ };
+
+ const mockMarkPoint = {
+ componentType: 'markPoint',
+ name: 'annotations',
+ value: undefined,
+ data: annotationsMetadata,
+ };
+
+ it('formats tooltip title and sets tooltip content', () => {
+ const formattedTooltipData = timeSeriesChart.vm.formatAnnotationsTooltipText(
+ mockMarkPoint,
+ );
+ expect(formattedTooltipData.title).toBe('19 Feb 2020, 10:01AM');
+ expect(formattedTooltipData.content).toBe(annotationsMetadata.tooltipData.content);
+ });
+ });
+
describe('setSvg', () => {
const mockSvgName = 'mockSvgName';
@@ -380,6 +408,8 @@ describe('Time series component', () => {
series: [
{
name: mockSeriesName,
+ type: 'line',
+ data: [],
},
],
},
@@ -442,8 +472,8 @@ describe('Time series component', () => {
deploymentFormatter = getChartOptions().yAxis[1].axisLabel.formatter;
});
- it('formats and rounds to 2 decimal places', () => {
- expect(dataFormatter(0.88888)).toBe('0.89');
+ it('formats by default to precision notation', () => {
+ expect(dataFormatter(0.88888)).toBe('889m');
});
it('deployment formatter is set as is required to display a tooltip', () => {
@@ -506,11 +536,11 @@ describe('Time series component', () => {
describe('wrapped components', () => {
const glChartComponents = [
{
- chartType: 'area-chart',
+ chartType: panelTypes.AREA_CHART,
component: GlAreaChart,
},
{
- chartType: 'line-chart',
+ chartType: panelTypes.LINE_CHART,
component: GlLineChart,
},
];
diff --git a/spec/frontend/monitoring/components/dashboard_spec.js b/spec/frontend/monitoring/components/dashboard_spec.js
index 92e0320f516..ad48874014e 100644
--- a/spec/frontend/monitoring/components/dashboard_spec.js
+++ b/spec/frontend/monitoring/components/dashboard_spec.js
@@ -1,16 +1,18 @@
import { shallowMount, mount } from '@vue/test-utils';
-import { GlDropdownItem, GlDeprecatedButton } from '@gitlab/ui';
+import Tracking from '~/tracking';
+import { GlModal, GlDropdownItem, GlDeprecatedButton } from '@gitlab/ui';
import VueDraggable from 'vuedraggable';
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import statusCodes from '~/lib/utils/http_status';
import { metricStates } from '~/monitoring/constants';
-import Dashboard from '~/monitoring/components/dashboard.vue';
+import Dashboard from '~/monitoring/components/dashboard_with_alerts.vue';
import DateTimePicker from '~/vue_shared/components/date_time_picker/date_time_picker.vue';
+import CustomMetricsFormFields from '~/custom_metrics/components/custom_metrics_form_fields.vue';
import DashboardsDropdown from '~/monitoring/components/dashboards_dropdown.vue';
import GroupEmptyState from '~/monitoring/components/group_empty_state.vue';
-import PanelType from 'ee_else_ce/monitoring/components/panel_type.vue';
+import PanelType from '~/monitoring/components/panel_type_with_alerts.vue';
import { createStore } from '~/monitoring/stores';
import * as types from '~/monitoring/stores/mutation_types';
import { setupStoreWithDashboard, setMetricResult, setupStoreWithData } from '../store_utils';
@@ -546,4 +548,74 @@ describe('Dashboard', () => {
});
});
});
+
+ describe('add custom metrics', () => {
+ const findAddMetricButton = () => wrapper.vm.$refs.addMetricBtn;
+ describe('when not available', () => {
+ beforeEach(() => {
+ createShallowWrapper({
+ hasMetrics: true,
+ customMetricsPath: '/endpoint',
+ });
+ });
+ it('does not render add button on the dashboard', () => {
+ expect(findAddMetricButton()).toBeUndefined();
+ });
+ });
+
+ describe('when available', () => {
+ let origPage;
+ beforeEach(done => {
+ jest.spyOn(Tracking, 'event').mockReturnValue();
+ createShallowWrapper({
+ hasMetrics: true,
+ customMetricsPath: '/endpoint',
+ customMetricsAvailable: true,
+ });
+ setupStoreWithData(wrapper.vm.$store);
+
+ origPage = document.body.dataset.page;
+ document.body.dataset.page = 'projects:environments:metrics';
+
+ wrapper.vm.$nextTick(done);
+ });
+ afterEach(() => {
+ document.body.dataset.page = origPage;
+ });
+
+ it('renders add button on the dashboard', () => {
+ expect(findAddMetricButton()).toBeDefined();
+ });
+
+ it('uses modal for custom metrics form', () => {
+ expect(wrapper.find(GlModal).exists()).toBe(true);
+ expect(wrapper.find(GlModal).attributes().modalid).toBe('add-metric');
+ });
+ it('adding new metric is tracked', done => {
+ const submitButton = wrapper.vm.$refs.submitCustomMetricsFormBtn;
+ wrapper.setData({
+ formIsValid: true,
+ });
+ wrapper.vm.$nextTick(() => {
+ submitButton.$el.click();
+ wrapper.vm.$nextTick(() => {
+ expect(Tracking.event).toHaveBeenCalledWith(
+ document.body.dataset.page,
+ 'click_button',
+ {
+ label: 'add_new_metric',
+ property: 'modal',
+ value: undefined,
+ },
+ );
+ done();
+ });
+ });
+ });
+
+ it('renders custom metrics form fields', () => {
+ expect(wrapper.find(CustomMetricsFormFields).exists()).toBe(true);
+ });
+ });
+ });
});
diff --git a/spec/frontend/monitoring/components/embeds/metric_embed_spec.js b/spec/frontend/monitoring/components/embeds/metric_embed_spec.js
index b829cd53479..b6734ede63e 100644
--- a/spec/frontend/monitoring/components/embeds/metric_embed_spec.js
+++ b/spec/frontend/monitoring/components/embeds/metric_embed_spec.js
@@ -1,6 +1,6 @@
import { createLocalVue, shallowMount } from '@vue/test-utils';
import Vuex from 'vuex';
-import PanelType from 'ee_else_ce/monitoring/components/panel_type.vue';
+import PanelType from '~/monitoring/components/panel_type_with_alerts.vue';
import { TEST_HOST } from 'helpers/test_constants';
import MetricEmbed from '~/monitoring/components/embeds/metric_embed.vue';
import { groups, initialState, metricsData, metricsWithData } from './mock_data';
diff --git a/spec/frontend/monitoring/components/panel_type_spec.js b/spec/frontend/monitoring/components/panel_type_spec.js
index 02511ac46ea..753d5caba69 100644
--- a/spec/frontend/monitoring/components/panel_type_spec.js
+++ b/spec/frontend/monitoring/components/panel_type_spec.js
@@ -5,22 +5,33 @@ import invalidUrl from '~/lib/utils/invalid_url';
import axios from '~/lib/utils/axios_utils';
import PanelType from '~/monitoring/components/panel_type.vue';
-import EmptyChart from '~/monitoring/components/charts/empty_chart.vue';
-import TimeSeriesChart from '~/monitoring/components/charts/time_series.vue';
-import AnomalyChart from '~/monitoring/components/charts/anomaly.vue';
import {
anomalyMockGraphData,
- graphDataPrometheusQueryRange,
mockLogsHref,
mockLogsPath,
mockNamespace,
mockNamespacedData,
mockTimeRange,
-} from 'jest/monitoring/mock_data';
+ singleStatMetricsResult,
+ graphDataPrometheusQueryRangeMultiTrack,
+ barMockData,
+} from '../mock_data';
+
+import { panelTypes } from '~/monitoring/constants';
+
+import MonitorEmptyChart from '~/monitoring/components/charts/empty_chart.vue';
+import MonitorTimeSeriesChart from '~/monitoring/components/charts/time_series.vue';
+import MonitorAnomalyChart from '~/monitoring/components/charts/anomaly.vue';
+import MonitorSingleStatChart from '~/monitoring/components/charts/single_stat.vue';
+import MonitorHeatmapChart from '~/monitoring/components/charts/heatmap.vue';
+import MonitorColumnChart from '~/monitoring/components/charts/column.vue';
+import MonitorBarChart from '~/monitoring/components/charts/bar.vue';
+import MonitorStackedColumnChart from '~/monitoring/components/charts/stacked_column.vue';
+
+import { graphData, graphDataEmpty } from '../fixture_data';
import { createStore, monitoringDashboard } from '~/monitoring/stores';
import { createStore as createEmbedGroupStore } from '~/monitoring/stores/embed_group';
-global.IS_EE = true;
global.URL.createObjectURL = jest.fn();
const mocks = {
@@ -39,10 +50,13 @@ describe('Panel Type component', () => {
const findCopyLink = () => wrapper.find({ ref: 'copyChartLink' });
const findTimeChart = () => wrapper.find({ ref: 'timeChart' });
+ const findTitle = () => wrapper.find({ ref: 'graphTitle' });
+ const findContextualMenu = () => wrapper.find({ ref: 'contextualMenu' });
const createWrapper = props => {
wrapper = shallowMount(PanelType, {
propsData: {
+ graphData,
...props,
},
store,
@@ -64,14 +78,9 @@ describe('Panel Type component', () => {
});
describe('When no graphData is available', () => {
- let glEmptyChart;
- // Deep clone object before modifying
- const graphDataNoResult = JSON.parse(JSON.stringify(graphDataPrometheusQueryRange));
- graphDataNoResult.metrics[0].result = [];
-
beforeEach(() => {
createWrapper({
- graphData: graphDataNoResult,
+ graphData: graphDataEmpty,
});
});
@@ -80,12 +89,8 @@ describe('Panel Type component', () => {
});
describe('Empty Chart component', () => {
- beforeEach(() => {
- glEmptyChart = wrapper.find(EmptyChart);
- });
-
it('renders the chart title', () => {
- expect(wrapper.find({ ref: 'graphTitle' }).text()).toBe(graphDataNoResult.title);
+ expect(findTitle().text()).toBe(graphDataEmpty.title);
});
it('renders the no download csv link', () => {
@@ -93,26 +98,19 @@ describe('Panel Type component', () => {
});
it('does not contain graph widgets', () => {
- expect(wrapper.find('.js-graph-widgets').exists()).toBe(false);
+ expect(findContextualMenu().exists()).toBe(false);
});
it('is a Vue instance', () => {
- expect(glEmptyChart.isVueInstance()).toBe(true);
- });
-
- it('it receives a graph title', () => {
- const props = glEmptyChart.props();
-
- expect(props.graphTitle).toBe(wrapper.vm.graphData.title);
+ expect(wrapper.find(MonitorEmptyChart).exists()).toBe(true);
+ expect(wrapper.find(MonitorEmptyChart).isVueInstance()).toBe(true);
});
});
});
describe('when graph data is available', () => {
beforeEach(() => {
- createWrapper({
- graphData: graphDataPrometheusQueryRange,
- });
+ createWrapper();
});
afterEach(() => {
@@ -120,11 +118,11 @@ describe('Panel Type component', () => {
});
it('renders the chart title', () => {
- expect(wrapper.find({ ref: 'graphTitle' }).text()).toBe(graphDataPrometheusQueryRange.title);
+ expect(findTitle().text()).toBe(graphData.title);
});
it('contains graph widgets', () => {
- expect(wrapper.find('.js-graph-widgets').exists()).toBe(true);
+ expect(findContextualMenu().exists()).toBe(true);
expect(wrapper.find({ ref: 'downloadCsvLink' }).exists()).toBe(true);
});
@@ -147,28 +145,44 @@ describe('Panel Type component', () => {
});
});
- describe('Time Series Chart panel type', () => {
- it('is rendered', () => {
- expect(wrapper.find(TimeSeriesChart).isVueInstance()).toBe(true);
- expect(wrapper.find(TimeSeriesChart).exists()).toBe(true);
- });
+ it('includes a default group id', () => {
+ expect(wrapper.vm.groupId).toBe('dashboard-panel');
+ });
+
+ describe('Supports different panel types', () => {
+ const dataWithType = type => {
+ return {
+ ...graphData,
+ type,
+ };
+ };
- it('includes a default group id', () => {
- expect(wrapper.vm.groupId).toBe('panel-type-chart');
+ it('empty chart is rendered for empty results', () => {
+ createWrapper({ graphData: graphDataEmpty });
+ expect(wrapper.find(MonitorEmptyChart).exists()).toBe(true);
+ expect(wrapper.find(MonitorEmptyChart).isVueInstance()).toBe(true);
});
- });
- describe('Anomaly Chart panel type', () => {
- beforeEach(() => {
- wrapper.setProps({
- graphData: anomalyMockGraphData,
- });
- return wrapper.vm.$nextTick();
+ it('area chart is rendered by default', () => {
+ createWrapper();
+ expect(wrapper.find(MonitorTimeSeriesChart).exists()).toBe(true);
+ expect(wrapper.find(MonitorTimeSeriesChart).isVueInstance()).toBe(true);
});
- it('is rendered with an anomaly chart', () => {
- expect(wrapper.find(AnomalyChart).isVueInstance()).toBe(true);
- expect(wrapper.find(AnomalyChart).exists()).toBe(true);
+ it.each`
+ data | component
+ ${dataWithType(panelTypes.AREA_CHART)} | ${MonitorTimeSeriesChart}
+ ${dataWithType(panelTypes.LINE_CHART)} | ${MonitorTimeSeriesChart}
+ ${anomalyMockGraphData} | ${MonitorAnomalyChart}
+ ${dataWithType(panelTypes.COLUMN)} | ${MonitorColumnChart}
+ ${dataWithType(panelTypes.STACKED_COLUMN)} | ${MonitorStackedColumnChart}
+ ${singleStatMetricsResult} | ${MonitorSingleStatChart}
+ ${graphDataPrometheusQueryRangeMultiTrack} | ${MonitorHeatmapChart}
+ ${barMockData} | ${MonitorBarChart}
+ `('type $data.type renders the expected component', ({ data, component }) => {
+ createWrapper({ graphData: data });
+ expect(wrapper.find(component).exists()).toBe(true);
+ expect(wrapper.find(component).isVueInstance()).toBe(true);
});
});
});
@@ -177,11 +191,7 @@ describe('Panel Type component', () => {
const findEditCustomMetricLink = () => wrapper.find({ ref: 'editMetricLink' });
beforeEach(() => {
- createWrapper({
- graphData: {
- ...graphDataPrometheusQueryRange,
- },
- });
+ createWrapper();
return wrapper.vm.$nextTick();
});
@@ -193,10 +203,10 @@ describe('Panel Type component', () => {
it('is present when the panel contains an edit_path property', () => {
wrapper.setProps({
graphData: {
- ...graphDataPrometheusQueryRange,
+ ...graphData,
metrics: [
{
- ...graphDataPrometheusQueryRange.metrics[0],
+ ...graphData.metrics[0],
edit_path: '/root/kubernetes-gke-project/prometheus/metrics/23/edit',
},
],
@@ -205,23 +215,6 @@ describe('Panel Type component', () => {
return wrapper.vm.$nextTick(() => {
expect(findEditCustomMetricLink().exists()).toBe(true);
- });
- });
-
- it('shows an "Edit metric" link for a panel with a single metric', () => {
- wrapper.setProps({
- graphData: {
- ...graphDataPrometheusQueryRange,
- metrics: [
- {
- ...graphDataPrometheusQueryRange.metrics[0],
- edit_path: '/root/kubernetes-gke-project/prometheus/metrics/23/edit',
- },
- ],
- },
- });
-
- return wrapper.vm.$nextTick(() => {
expect(findEditCustomMetricLink().text()).toBe('Edit metric');
});
});
@@ -229,14 +222,14 @@ describe('Panel Type component', () => {
it('shows an "Edit metrics" link for a panel with multiple metrics', () => {
wrapper.setProps({
graphData: {
- ...graphDataPrometheusQueryRange,
+ ...graphData,
metrics: [
{
- ...graphDataPrometheusQueryRange.metrics[0],
+ ...graphData.metrics[0],
edit_path: '/root/kubernetes-gke-project/prometheus/metrics/23/edit',
},
{
- ...graphDataPrometheusQueryRange.metrics[0],
+ ...graphData.metrics[0],
edit_path: '/root/kubernetes-gke-project/prometheus/metrics/23/edit',
},
],
@@ -253,9 +246,7 @@ describe('Panel Type component', () => {
const findViewLogsLink = () => wrapper.find({ ref: 'viewLogsLink' });
beforeEach(() => {
- createWrapper({
- graphData: graphDataPrometheusQueryRange,
- });
+ createWrapper();
return wrapper.vm.$nextTick();
});
@@ -327,7 +318,6 @@ describe('Panel Type component', () => {
beforeEach(() => {
createWrapper({
clipboardText,
- graphData: graphDataPrometheusQueryRange,
});
});
@@ -353,11 +343,13 @@ describe('Panel Type component', () => {
describe('when downloading metrics data as CSV', () => {
beforeEach(() => {
- graphDataPrometheusQueryRange.y_label = 'metric';
wrapper = shallowMount(PanelType, {
propsData: {
clipboardText: exampleText,
- graphData: graphDataPrometheusQueryRange,
+ graphData: {
+ y_label: 'metric',
+ ...graphData,
+ },
},
store,
});
@@ -370,12 +362,12 @@ describe('Panel Type component', () => {
describe('csvText', () => {
it('converts metrics data from json to csv', () => {
- const header = `timestamp,${graphDataPrometheusQueryRange.y_label}`;
- const data = graphDataPrometheusQueryRange.metrics[0].result[0].values;
+ const header = `timestamp,${graphData.y_label}`;
+ const data = graphData.metrics[0].result[0].values;
const firstRow = `${data[0][0]},${data[0][1]}`;
const secondRow = `${data[1][0]},${data[1][1]}`;
- expect(wrapper.vm.csvText).toBe(`${header}\r\n${firstRow}\r\n${secondRow}\r\n`);
+ expect(wrapper.vm.csvText).toMatch(`${header}\r\n${firstRow}\r\n${secondRow}\r\n`);
});
});
@@ -402,7 +394,7 @@ describe('Panel Type component', () => {
wrapper = shallowMount(PanelType, {
propsData: {
- graphData: graphDataPrometheusQueryRange,
+ graphData,
namespace: mockNamespace,
},
store,
@@ -436,8 +428,8 @@ describe('Panel Type component', () => {
});
it('it renders a time series chart with no errors', () => {
- expect(wrapper.find(TimeSeriesChart).isVueInstance()).toBe(true);
- expect(wrapper.find(TimeSeriesChart).exists()).toBe(true);
+ expect(wrapper.find(MonitorTimeSeriesChart).isVueInstance()).toBe(true);
+ expect(wrapper.find(MonitorTimeSeriesChart).exists()).toBe(true);
});
});
});
diff --git a/spec/frontend/monitoring/components/panel_type_with_alerts_spec.js b/spec/frontend/monitoring/components/panel_type_with_alerts_spec.js
new file mode 100644
index 00000000000..3374fe4b55f
--- /dev/null
+++ b/spec/frontend/monitoring/components/panel_type_with_alerts_spec.js
@@ -0,0 +1,73 @@
+import Vuex from 'vuex';
+import { shallowMount } from '@vue/test-utils';
+import { GlDropdownItem } from '@gitlab/ui';
+import { monitoringDashboard } from '~/monitoring/stores';
+import PanelType from '~/monitoring/components/panel_type_with_alerts.vue';
+import AlertWidget from '~/monitoring/components/alert_widget.vue';
+import { graphData } from 'jest/monitoring/fixture_data';
+
+global.URL.createObjectURL = jest.fn();
+
+describe('Panel Type', () => {
+ let store;
+ let wrapper;
+
+ const setMetricsSavedToDb = val =>
+ monitoringDashboard.getters.metricsSavedToDb.mockReturnValue(val);
+ const findAlertsWidget = () => wrapper.find(AlertWidget);
+ const findMenuItemAlert = () =>
+ wrapper.findAll(GlDropdownItem).filter(i => i.text() === 'Alerts');
+
+ const mockPropsData = {
+ graphData,
+ clipboardText: 'example_text',
+ alertsEndpoint: '/endpoint',
+ prometheusAlertsAvailable: true,
+ };
+
+ const createWrapper = propsData => {
+ wrapper = shallowMount(PanelType, {
+ propsData: {
+ ...mockPropsData,
+ ...propsData,
+ },
+ store,
+ });
+ };
+
+ beforeEach(() => {
+ jest.spyOn(monitoringDashboard.getters, 'metricsSavedToDb').mockReturnValue([]);
+
+ store = new Vuex.Store({
+ modules: {
+ monitoringDashboard,
+ },
+ });
+ });
+
+ describe('panel type alerts', () => {
+ describe.each`
+ desc | metricsSavedToDb | propsData | isShown
+ ${'with license and no metrics in db'} | ${[]} | ${{}} | ${false}
+ ${'with license and related metrics in db'} | ${[graphData.metrics[0].metricId]} | ${{}} | ${true}
+ ${'without license and related metrics in db'} | ${[graphData.metrics[0].metricId]} | ${{ prometheusAlertsAvailable: false }} | ${false}
+ ${'with license and unrelated metrics in db'} | ${['another_metric_id']} | ${{}} | ${false}
+ `('$desc', ({ metricsSavedToDb, isShown, propsData }) => {
+ const showsDesc = isShown ? 'shows' : 'does not show';
+
+ beforeEach(() => {
+ setMetricsSavedToDb(metricsSavedToDb);
+ createWrapper(propsData);
+ return wrapper.vm.$nextTick();
+ });
+
+ it(`${showsDesc} alert widget`, () => {
+ expect(findAlertsWidget().exists()).toBe(isShown);
+ });
+
+ it(`${showsDesc} alert configuration`, () => {
+ expect(findMenuItemAlert().exists()).toBe(isShown);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/monitoring/fixture_data.js b/spec/frontend/monitoring/fixture_data.js
index 76045baa632..b7b72a15992 100644
--- a/spec/frontend/monitoring/fixture_data.js
+++ b/spec/frontend/monitoring/fixture_data.js
@@ -1,4 +1,6 @@
import { mapToDashboardViewModel } from '~/monitoring/stores/utils';
+import { metricStates } from '~/monitoring/constants';
+
import { metricsResult } from './mock_data';
// Use globally available `getJSONFixture` so this file can be imported by both karma and jest specs
@@ -23,3 +25,25 @@ export const metricResultEmpty = {
metricId: 'NO_DB_response_metrics_nginx_ingress_16_throughput_status_code',
result: [],
};
+
+// Graph data
+
+const firstPanel = metricsDashboardViewModel.panelGroups[0].panels[0];
+
+export const graphData = {
+ ...firstPanel,
+ metrics: firstPanel.metrics.map(metric => ({
+ ...metric,
+ result: metricsResult,
+ state: metricStates.OK,
+ })),
+};
+
+export const graphDataEmpty = {
+ ...firstPanel,
+ metrics: firstPanel.metrics.map(metric => ({
+ ...metric,
+ result: [],
+ state: metricStates.NO_DATA,
+ })),
+};
diff --git a/spec/frontend/monitoring/mock_data.js b/spec/frontend/monitoring/mock_data.js
index 700fe4086f8..0db69ca7d8d 100644
--- a/spec/frontend/monitoring/mock_data.js
+++ b/spec/frontend/monitoring/mock_data.js
@@ -247,59 +247,27 @@ export const deploymentData = [
export const annotationsData = [
{
id: 'gid://gitlab/Metrics::Dashboard::Annotation/1',
- starting_at: '2020-04-01T12:51:58.373Z',
- ending_at: null,
+ startingAt: '2020-04-12 12:51:53 UTC',
+ endingAt: null,
panelId: null,
description: 'This is a test annotation',
},
{
id: 'gid://gitlab/Metrics::Dashboard::Annotation/2',
description: 'test annotation 2',
- starting_at: '2020-04-02T12:51:58.373Z',
- ending_at: null,
+ startingAt: '2020-04-13 12:51:53 UTC',
+ endingAt: null,
panelId: null,
},
{
id: 'gid://gitlab/Metrics::Dashboard::Annotation/3',
description: 'test annotation 3',
- starting_at: '2020-04-04T12:51:58.373Z',
- ending_at: null,
+ startingAt: '2020-04-16 12:51:53 UTC',
+ endingAt: null,
panelId: null,
},
];
-export const metricsNewGroupsAPIResponse = [
- {
- group: 'System metrics (Kubernetes)',
- priority: 5,
- panels: [
- {
- title: 'Memory Usage (Pod average)',
- type: 'area-chart',
- y_label: 'Memory Used per Pod',
- weight: 2,
- metrics: [
- {
- id: 'system_metrics_kubernetes_container_memory_average',
- query_range:
- 'avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-([^c].*|c([^a]|a([^n]|n([^a]|a([^r]|r[^y])))).*|)-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) / count(avg(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-([^c].*|c([^a]|a([^n]|n([^a]|a([^r]|r[^y])))).*|)-(.*)",namespace="%{kube_namespace}"}) without (job)) /1024/1024',
- label: 'Pod average',
- unit: 'MB',
- metric_id: 17,
- prometheus_endpoint_path:
- '/root/autodevops-deploy/environments/32/prometheus/api/v1/query_range?query=avg%28sum%28container_memory_usage_bytes%7Bcontainer_name%21%3D%22POD%22%2Cpod_name%3D~%22%5E%25%7Bci_environment_slug%7D-%28%5B%5Ec%5D.%2A%7Cc%28%5B%5Ea%5D%7Ca%28%5B%5En%5D%7Cn%28%5B%5Ea%5D%7Ca%28%5B%5Er%5D%7Cr%5B%5Ey%5D%29%29%29%29.%2A%7C%29-%28.%2A%29%22%2Cnamespace%3D%22%25%7Bkube_namespace%7D%22%7D%29+by+%28job%29%29+without+%28job%29+%2F+count%28avg%28container_memory_usage_bytes%7Bcontainer_name%21%3D%22POD%22%2Cpod_name%3D~%22%5E%25%7Bci_environment_slug%7D-%28%5B%5Ec%5D.%2A%7Cc%28%5B%5Ea%5D%7Ca%28%5B%5En%5D%7Cn%28%5B%5Ea%5D%7Ca%28%5B%5Er%5D%7Cr%5B%5Ey%5D%29%29%29%29.%2A%7C%29-%28.%2A%29%22%2Cnamespace%3D%22%25%7Bkube_namespace%7D%22%7D%29+without+%28job%29%29+%2F1024%2F1024',
- appearance: {
- line: {
- width: 2,
- },
- },
- },
- ],
- },
- ],
- },
-];
-
const extraEnvironmentData = new Array(15).fill(null).map((_, idx) => ({
id: `gid://gitlab/Environments/${150 + idx}`,
name: `no-deployment/noop-branch-${idx}`,
@@ -369,39 +337,11 @@ export const metricsResult = [
[1563272125.589, '10.333984375'],
[1563272185.589, '10.333984375'],
[1563272245.589, '10.333984375'],
- [1563272305.589, '10.333984375'],
- [1563272365.589, '10.333984375'],
- [1563272425.589, '10.38671875'],
- [1563272485.589, '10.333984375'],
- [1563272545.589, '10.333984375'],
- [1563272605.589, '10.333984375'],
- [1563272665.589, '10.333984375'],
- [1563272725.589, '10.333984375'],
- [1563272785.589, '10.396484375'],
- [1563272845.589, '10.333984375'],
- [1563272905.589, '10.333984375'],
- [1563272965.589, '10.3984375'],
- [1563273025.589, '10.337890625'],
- [1563273085.589, '10.34765625'],
- [1563273145.589, '10.337890625'],
- [1563273205.589, '10.337890625'],
- [1563273265.589, '10.337890625'],
- [1563273325.589, '10.337890625'],
- [1563273385.589, '10.337890625'],
- [1563273445.589, '10.337890625'],
- [1563273505.589, '10.337890625'],
- [1563273565.589, '10.337890625'],
- [1563273625.589, '10.337890625'],
- [1563273685.589, '10.337890625'],
- [1563273745.589, '10.337890625'],
- [1563273805.589, '10.337890625'],
- [1563273865.589, '10.390625'],
- [1563273925.589, '10.390625'],
],
},
];
-export const graphDataPrometheusQuery = {
+export const singleStatMetricsResult = {
title: 'Super Chart A2',
type: 'single-stat',
weight: 2,
@@ -425,29 +365,6 @@ export const graphDataPrometheusQuery = {
],
};
-export const graphDataPrometheusQueryRange = {
- title: 'Super Chart A1',
- type: 'area-chart',
- weight: 2,
- metrics: [
- {
- metricId: '2_metric_a',
- query_range:
- 'avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) /1024/1024/1024',
- unit: 'MB',
- label: 'Total Consumption',
- prometheus_endpoint_path:
- '/root/kubernetes-gke-project/environments/35/prometheus/api/v1/query?query=max%28go_memstats_alloc_bytes%7Bjob%3D%22prometheus%22%7D%29+by+%28job%29+%2F1024%2F1024',
- result: [
- {
- metric: {},
- values: [[1495700554.925, '8.0390625'], [1495700614.925, '8.0390625']],
- },
- ],
- },
- ],
-};
-
export const graphDataPrometheusQueryRangeMultiTrack = {
title: 'Super Chart A3',
type: 'heatmap',
@@ -572,7 +489,7 @@ export const stackedColumnMockedData = {
export const barMockData = {
title: 'SLA Trends - Primary Services',
- type: 'bar-chart',
+ type: 'bar',
xLabel: 'service',
y_label: 'percentile',
metrics: [
diff --git a/spec/frontend/monitoring/store/actions_spec.js b/spec/frontend/monitoring/store/actions_spec.js
index 4591e110974..f312aa1fd34 100644
--- a/spec/frontend/monitoring/store/actions_spec.js
+++ b/spec/frontend/monitoring/store/actions_spec.js
@@ -23,7 +23,11 @@ import {
setGettingStartedEmptyState,
duplicateSystemDashboard,
} from '~/monitoring/stores/actions';
-import { gqClient, parseEnvironmentsResponse } from '~/monitoring/stores/utils';
+import {
+ gqClient,
+ parseEnvironmentsResponse,
+ parseAnnotationsResponse,
+} from '~/monitoring/stores/utils';
import getEnvironments from '~/monitoring/queries/getEnvironments.query.graphql';
import getAnnotations from '~/monitoring/queries/getAnnotations.query.graphql';
import storeState from '~/monitoring/stores/state';
@@ -224,6 +228,10 @@ describe('Monitoring store actions', () => {
describe('fetchAnnotations', () => {
const { state } = store;
+ state.timeRange = {
+ start: '2020-04-15T12:54:32.137Z',
+ end: '2020-08-15T12:54:32.137Z',
+ };
state.projectPath = 'gitlab-org/gitlab-test';
state.currentEnvironmentName = 'production';
state.currentDashboard = '.gitlab/dashboards/custom_dashboard.yml';
@@ -239,17 +247,25 @@ describe('Monitoring store actions', () => {
variables: {
projectPath: state.projectPath,
environmentName: state.currentEnvironmentName,
- dashboardId: state.currentDashboard,
+ dashboardPath: state.currentDashboard,
+ startingFrom: state.timeRange.start,
},
};
+ const parsedResponse = parseAnnotationsResponse(annotationsData);
mockMutate.mockResolvedValue({
data: {
project: {
- environment: {
- metricDashboard: {
- annotations: annotationsData,
- },
+ environments: {
+ nodes: [
+ {
+ metricsDashboard: {
+ annotations: {
+ nodes: parsedResponse,
+ },
+ },
+ },
+ ],
},
},
},
@@ -260,7 +276,7 @@ describe('Monitoring store actions', () => {
null,
state,
[],
- [{ type: 'receiveAnnotationsSuccess', payload: annotationsData }],
+ [{ type: 'receiveAnnotationsSuccess', payload: parsedResponse }],
() => {
expect(mockMutate).toHaveBeenCalledWith(mutationVariables);
},
@@ -274,7 +290,8 @@ describe('Monitoring store actions', () => {
variables: {
projectPath: state.projectPath,
environmentName: state.currentEnvironmentName,
- dashboardId: state.currentDashboard,
+ dashboardPath: state.currentDashboard,
+ startingFrom: state.timeRange.start,
},
};
diff --git a/spec/frontend/monitoring/store/utils_spec.js b/spec/frontend/monitoring/store/utils_spec.js
index f46409e8e32..7ee2a16b4bd 100644
--- a/spec/frontend/monitoring/store/utils_spec.js
+++ b/spec/frontend/monitoring/store/utils_spec.js
@@ -2,9 +2,11 @@ import { SUPPORTED_FORMATS } from '~/lib/utils/unit_format';
import {
uniqMetricsId,
parseEnvironmentsResponse,
+ parseAnnotationsResponse,
removeLeadingSlash,
mapToDashboardViewModel,
} from '~/monitoring/stores/utils';
+import { annotationsData } from '../mock_data';
import { NOT_IN_DB_PREFIX } from '~/monitoring/constants';
const projectPath = 'gitlab-org/gitlab-test';
@@ -56,7 +58,7 @@ describe('mapToDashboardViewModel', () => {
y_label: 'Y Label A',
yAxis: {
name: 'Y Label A',
- format: 'number',
+ format: 'engineering',
precision: 2,
},
metrics: [],
@@ -138,7 +140,7 @@ describe('mapToDashboardViewModel', () => {
y_label: '',
yAxis: {
name: '',
- format: SUPPORTED_FORMATS.number,
+ format: SUPPORTED_FORMATS.engineering,
precision: 2,
},
metrics: [],
@@ -159,7 +161,7 @@ describe('mapToDashboardViewModel', () => {
},
yAxis: {
name: '',
- format: SUPPORTED_FORMATS.number,
+ format: SUPPORTED_FORMATS.engineering,
precision: 2,
},
metrics: [],
@@ -219,7 +221,7 @@ describe('mapToDashboardViewModel', () => {
},
});
- expect(getMappedPanel().yAxis.format).toBe(SUPPORTED_FORMATS.number);
+ expect(getMappedPanel().yAxis.format).toBe(SUPPORTED_FORMATS.engineering);
});
// This property allows single_stat panels to render percentile values
@@ -376,6 +378,27 @@ describe('parseEnvironmentsResponse', () => {
});
});
+describe('parseAnnotationsResponse', () => {
+ const parsedAnnotationResponse = [
+ {
+ description: 'This is a test annotation',
+ endingAt: null,
+ id: 'gid://gitlab/Metrics::Dashboard::Annotation/1',
+ panelId: null,
+ startingAt: new Date('2020-04-12T12:51:53.000Z'),
+ },
+ ];
+ it.each`
+ case | input | expected
+ ${'Returns empty array for null input'} | ${null} | ${[]}
+ ${'Returns empty array for undefined input'} | ${undefined} | ${[]}
+ ${'Returns empty array for empty input'} | ${[]} | ${[]}
+ ${'Returns parsed responses for annotations data'} | ${[annotationsData[0]]} | ${parsedAnnotationResponse}
+ `('$case', ({ input, expected }) => {
+ expect(parseAnnotationsResponse(input)).toEqual(expected);
+ });
+});
+
describe('removeLeadingSlash', () => {
[
{ input: null, output: '' },
diff --git a/spec/frontend/monitoring/stubs/modal_stub.js b/spec/frontend/monitoring/stubs/modal_stub.js
new file mode 100644
index 00000000000..4cd0362096e
--- /dev/null
+++ b/spec/frontend/monitoring/stubs/modal_stub.js
@@ -0,0 +1,11 @@
+const ModalStub = {
+ name: 'glmodal-stub',
+ template: `
+ <div>
+ <slot></slot>
+ <slot name="modal-ok"></slot>
+ </div>
+ `,
+};
+
+export default ModalStub;
diff --git a/spec/frontend/monitoring/utils_spec.js b/spec/frontend/monitoring/utils_spec.js
index a9010e2bffa..964c462988c 100644
--- a/spec/frontend/monitoring/utils_spec.js
+++ b/spec/frontend/monitoring/utils_spec.js
@@ -3,11 +3,11 @@ import { queryToObject, mergeUrlParams, removeParams } from '~/lib/utils/url_uti
import { TEST_HOST } from 'jest/helpers/test_constants';
import {
mockProjectDir,
- graphDataPrometheusQuery,
- graphDataPrometheusQueryRange,
+ singleStatMetricsResult,
anomalyMockGraphData,
barMockData,
} from './mock_data';
+import { graphData } from './fixture_data';
jest.mock('~/lib/utils/url_utility');
@@ -89,7 +89,7 @@ describe('monitoring/utils', () => {
it('validates data with the query format', () => {
const validGraphData = monitoringUtils.graphDataValidatorForValues(
true,
- graphDataPrometheusQuery,
+ singleStatMetricsResult,
);
expect(validGraphData).toBe(true);
@@ -101,10 +101,7 @@ describe('monitoring/utils', () => {
* the validator will look for the `values` key instead of `value`
*/
it('validates data with the query_range format', () => {
- const validGraphData = monitoringUtils.graphDataValidatorForValues(
- false,
- graphDataPrometheusQueryRange,
- );
+ const validGraphData = monitoringUtils.graphDataValidatorForValues(false, graphData);
expect(validGraphData).toBe(true);
});
@@ -115,7 +112,7 @@ describe('monitoring/utils', () => {
let threeMetrics;
let fourMetrics;
beforeEach(() => {
- oneMetric = graphDataPrometheusQuery;
+ oneMetric = singleStatMetricsResult;
threeMetrics = anomalyMockGraphData;
const metrics = [...threeMetrics.metrics];
diff --git a/spec/frontend/monitoring/validators_spec.js b/spec/frontend/monitoring/validators_spec.js
new file mode 100644
index 00000000000..0c3d77a7d98
--- /dev/null
+++ b/spec/frontend/monitoring/validators_spec.js
@@ -0,0 +1,80 @@
+import { alertsValidator, queriesValidator } from '~/monitoring/validators';
+
+describe('alertsValidator', () => {
+ const validAlert = {
+ alert_path: 'my/alert.json',
+ operator: '<',
+ threshold: 5,
+ metricId: '8',
+ };
+ it('requires all alerts to have an alert path', () => {
+ const { operator, threshold, metricId } = validAlert;
+ const input = {
+ [validAlert.alert_path]: {
+ operator,
+ threshold,
+ metricId,
+ },
+ };
+ expect(alertsValidator(input)).toEqual(false);
+ });
+ it('requires that the object key matches the alert path', () => {
+ const input = {
+ undefined: validAlert,
+ };
+ expect(alertsValidator(input)).toEqual(false);
+ });
+ it('requires all alerts to have a metric id', () => {
+ const input = {
+ [validAlert.alert_path]: { ...validAlert, metricId: undefined },
+ };
+ expect(alertsValidator(input)).toEqual(false);
+ });
+ it('requires the metricId to be a string', () => {
+ const input = {
+ [validAlert.alert_path]: { ...validAlert, metricId: 8 },
+ };
+ expect(alertsValidator(input)).toEqual(false);
+ });
+ it('requires all alerts to have an operator', () => {
+ const input = {
+ [validAlert.alert_path]: { ...validAlert, operator: '' },
+ };
+ expect(alertsValidator(input)).toEqual(false);
+ });
+  it('requires all alerts to have a numeric threshold', () => {
+ const input = {
+ [validAlert.alert_path]: { ...validAlert, threshold: '60' },
+ };
+ expect(alertsValidator(input)).toEqual(false);
+ });
+ it('correctly identifies a valid alerts object', () => {
+ const input = {
+ [validAlert.alert_path]: validAlert,
+ };
+ expect(alertsValidator(input)).toEqual(true);
+ });
+});
+describe('queriesValidator', () => {
+ const validQuery = {
+ metricId: '8',
+ alert_path: 'alert',
+ label: 'alert-label',
+ };
+  it('requires all queries to have a metric id', () => {
+ const input = [{ ...validQuery, metricId: undefined }];
+ expect(queriesValidator(input)).toEqual(false);
+ });
+ it('requires the metricId to be a string', () => {
+ const input = [{ ...validQuery, metricId: 8 }];
+ expect(queriesValidator(input)).toEqual(false);
+ });
+ it('requires all queries to have a label', () => {
+ const input = [{ ...validQuery, label: undefined }];
+ expect(queriesValidator(input)).toEqual(false);
+ });
+ it('correctly identifies a valid queries array', () => {
+ const input = [validQuery];
+ expect(queriesValidator(input)).toEqual(true);
+ });
+});
diff --git a/spec/frontend/notes/components/note_header_spec.js b/spec/frontend/notes/components/note_header_spec.js
index d477de69716..8cb78720c7e 100644
--- a/spec/frontend/notes/components/note_header_spec.js
+++ b/spec/frontend/notes/components/note_header_spec.js
@@ -1,4 +1,5 @@
import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { nextTick } from 'vue';
import Vuex from 'vuex';
import NoteHeader from '~/notes/components/note_header.vue';
import GitlabTeamMemberBadge from '~/vue_shared/components/user_avatar/badges/gitlab_team_member_badge.vue';
@@ -179,4 +180,70 @@ describe('NoteHeader component', () => {
expect(findTimestamp().exists()).toBe(true);
});
});
+
+ describe('author username link', () => {
+ it('proxies `mouseenter` event to author name link', () => {
+ createComponent({ author });
+
+ const dispatchEvent = jest.spyOn(wrapper.vm.$refs.authorNameLink, 'dispatchEvent');
+
+ wrapper.find({ ref: 'authorUsernameLink' }).trigger('mouseenter');
+
+ expect(dispatchEvent).toHaveBeenCalledWith(new Event('mouseenter'));
+ });
+
+ it('proxies `mouseleave` event to author name link', () => {
+ createComponent({ author });
+
+ const dispatchEvent = jest.spyOn(wrapper.vm.$refs.authorNameLink, 'dispatchEvent');
+
+ wrapper.find({ ref: 'authorUsernameLink' }).trigger('mouseleave');
+
+ expect(dispatchEvent).toHaveBeenCalledWith(new Event('mouseleave'));
+ });
+ });
+
+ describe('when author status tooltip is opened', () => {
+ it('removes `title` attribute from emoji to prevent duplicate tooltips', () => {
+ createComponent({
+ author: {
+ ...author,
+ status_tooltip_html:
+ '"<span class="user-status-emoji has-tooltip" title="foo bar" data-html="true" data-placement="top"><gl-emoji title="basketball and hoop" data-name="basketball" data-unicode-version="6.0">🏀</gl-emoji></span>"',
+ },
+ });
+
+ return nextTick().then(() => {
+ const authorStatus = wrapper.find({ ref: 'authorStatus' });
+ authorStatus.trigger('mouseenter');
+
+ expect(authorStatus.find('gl-emoji').attributes('title')).toBeUndefined();
+ });
+ });
+ });
+
+ describe('when author username link is hovered', () => {
+ it('toggles hover specific CSS classes on author name link', done => {
+ createComponent({ author });
+
+ const authorUsernameLink = wrapper.find({ ref: 'authorUsernameLink' });
+ const authorNameLink = wrapper.find({ ref: 'authorNameLink' });
+
+ authorUsernameLink.trigger('mouseenter');
+
+ nextTick(() => {
+ expect(authorNameLink.classes()).toContain('hover');
+ expect(authorNameLink.classes()).toContain('text-underline');
+
+ authorUsernameLink.trigger('mouseleave');
+
+ nextTick(() => {
+ expect(authorNameLink.classes()).not.toContain('hover');
+ expect(authorNameLink.classes()).not.toContain('text-underline');
+
+ done();
+ });
+ });
+ });
+ });
});
diff --git a/spec/frontend/notes/mock_data.js b/spec/frontend/notes/mock_data.js
index 9ed79c61c22..980faac2b04 100644
--- a/spec/frontend/notes/mock_data.js
+++ b/spec/frontend/notes/mock_data.js
@@ -57,6 +57,7 @@ export const noteableDataMock = {
updated_by_id: 1,
web_url: '/gitlab-org/gitlab-foss/issues/26',
noteableType: 'issue',
+ blocked_by_issues: [],
};
export const lastFetchedAt = '1501862675';
diff --git a/spec/frontend/notes/stores/actions_spec.js b/spec/frontend/notes/stores/actions_spec.js
index 544d482e7fc..e0c5441b9d3 100644
--- a/spec/frontend/notes/stores/actions_spec.js
+++ b/spec/frontend/notes/stores/actions_spec.js
@@ -34,6 +34,11 @@ describe('Actions Notes Store', () => {
dispatch = jest.fn();
state = {};
axiosMock = new AxiosMockAdapter(axios);
+
+    // This is necessary because clicking the bottom button queries the Close issue button at the top of the issue page
+ setFixtures(
+ '<div class="detail-page-header-actions"><button class="btn-close btn-grouped"></button></div>',
+ );
});
afterEach(() => {
@@ -242,6 +247,30 @@ describe('Actions Notes Store', () => {
});
});
+ describe('toggleBlockedIssueWarning', () => {
+ it('should set issue warning as true', done => {
+ testAction(
+ actions.toggleBlockedIssueWarning,
+ true,
+ {},
+ [{ type: 'TOGGLE_BLOCKED_ISSUE_WARNING', payload: true }],
+ [],
+ done,
+ );
+ });
+
+ it('should set issue warning as false', done => {
+ testAction(
+ actions.toggleBlockedIssueWarning,
+ false,
+ {},
+ [{ type: 'TOGGLE_BLOCKED_ISSUE_WARNING', payload: false }],
+ [],
+ done,
+ );
+ });
+ });
+
describe('poll', () => {
jest.useFakeTimers();
diff --git a/spec/frontend/notes/stores/mutation_spec.js b/spec/frontend/notes/stores/mutation_spec.js
index 06d2654ceca..67757ad56c5 100644
--- a/spec/frontend/notes/stores/mutation_spec.js
+++ b/spec/frontend/notes/stores/mutation_spec.js
@@ -664,4 +664,40 @@ describe('Notes Store mutations', () => {
expect(state.discussionSortOrder).toBe(DESC);
});
});
+
+ describe('TOGGLE_BLOCKED_ISSUE_WARNING', () => {
+ it('should set isToggleBlockedIssueWarning as true', () => {
+ const state = {
+ discussions: [],
+ targetNoteHash: null,
+ lastFetchedAt: null,
+ isToggleStateButtonLoading: false,
+ isToggleBlockedIssueWarning: false,
+ notesData: {},
+ userData: {},
+ noteableData: {},
+ };
+
+ mutations.TOGGLE_BLOCKED_ISSUE_WARNING(state, true);
+
+ expect(state.isToggleBlockedIssueWarning).toEqual(true);
+ });
+
+ it('should set isToggleBlockedIssueWarning as false', () => {
+ const state = {
+ discussions: [],
+ targetNoteHash: null,
+ lastFetchedAt: null,
+ isToggleStateButtonLoading: false,
+ isToggleBlockedIssueWarning: true,
+ notesData: {},
+ userData: {},
+ noteableData: {},
+ };
+
+ mutations.TOGGLE_BLOCKED_ISSUE_WARNING(state, false);
+
+ expect(state.isToggleBlockedIssueWarning).toEqual(false);
+ });
+ });
});
diff --git a/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js b/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js
index d3932ca09ff..9c292fa0f2b 100644
--- a/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js
+++ b/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js
@@ -55,7 +55,12 @@ describe('Settings Panel', () => {
currentSettings: { ...defaultProps.currentSettings, ...currentSettings },
};
- return mountFn(settingsPanel, { propsData });
+ return mountFn(settingsPanel, {
+ propsData,
+ provide: {
+ glFeatures: { metricsDashboardVisibilitySwitchingAvailable: true },
+ },
+ });
};
const overrideCurrentSettings = (currentSettingsProps, extraProps = {}) => {
@@ -471,4 +476,28 @@ describe('Settings Panel', () => {
});
});
});
+
+ describe('Metrics dashboard', () => {
+ it('should show the metrics dashboard access toggle', () => {
+ return wrapper.vm.$nextTick(() => {
+ expect(wrapper.find({ ref: 'metrics-visibility-settings' }).exists()).toBe(true);
+ });
+ });
+
+ it('should set the visibility level description based upon the selected visibility level', () => {
+ wrapper
+ .find('[name="project[project_feature_attributes][metrics_dashboard_access_level]"]')
+ .setValue(visibilityOptions.PUBLIC);
+
+ expect(wrapper.vm.metricsAccessLevel).toBe(visibilityOptions.PUBLIC);
+ });
+
+ it('should contain help text', () => {
+ wrapper = overrideCurrentSettings({ visibilityLevel: visibilityOptions.PRIVATE });
+
+ expect(wrapper.find({ ref: 'metrics-visibility-settings' }).props().helpText).toEqual(
+ 'With Metrics Dashboard you can visualize this project performance metrics',
+ );
+ });
+ });
});
diff --git a/spec/frontend/pipelines/graph/action_component_spec.js b/spec/frontend/pipelines/graph/action_component_spec.js
index 43da6388efa..3c5938cfa1f 100644
--- a/spec/frontend/pipelines/graph/action_component_spec.js
+++ b/spec/frontend/pipelines/graph/action_component_spec.js
@@ -7,6 +7,7 @@ import ActionComponent from '~/pipelines/components/graph/action_component.vue';
describe('pipeline graph action component', () => {
let wrapper;
let mock;
+ const findButton = () => wrapper.find('button');
beforeEach(() => {
mock = new MockAdapter(axios);
@@ -44,15 +45,15 @@ describe('pipeline graph action component', () => {
});
it('should render an svg', () => {
- expect(wrapper.find('.ci-action-icon-wrapper')).toBeDefined();
- expect(wrapper.find('svg')).toBeDefined();
+ expect(wrapper.find('.ci-action-icon-wrapper').exists()).toBe(true);
+ expect(wrapper.find('svg').exists()).toBe(true);
});
describe('on click', () => {
it('emits `pipelineActionRequestComplete` after a successful request', done => {
jest.spyOn(wrapper.vm, '$emit');
- wrapper.find('button').trigger('click');
+ findButton().trigger('click');
waitForPromises()
.then(() => {
@@ -63,7 +64,7 @@ describe('pipeline graph action component', () => {
});
it('renders a loading icon while waiting for request', done => {
- wrapper.find('button').trigger('click');
+ findButton().trigger('click');
wrapper.vm.$nextTick(() => {
expect(wrapper.find('.js-action-icon-loading').exists()).toBe(true);
diff --git a/spec/frontend/pipelines/graph/graph_component_spec.js b/spec/frontend/pipelines/graph/graph_component_spec.js
new file mode 100644
index 00000000000..a9b06eab3fa
--- /dev/null
+++ b/spec/frontend/pipelines/graph/graph_component_spec.js
@@ -0,0 +1,305 @@
+import Vue from 'vue';
+import { mount } from '@vue/test-utils';
+import PipelineStore from '~/pipelines/stores/pipeline_store';
+import graphComponent from '~/pipelines/components/graph/graph_component.vue';
+import stageColumnComponent from '~/pipelines/components/graph/stage_column_component.vue';
+import linkedPipelinesColumn from '~/pipelines/components/graph/linked_pipelines_column.vue';
+import graphJSON from './mock_data';
+import linkedPipelineJSON from './linked_pipelines_mock_data';
+import PipelinesMediator from '~/pipelines/pipeline_details_mediator';
+
+describe('graph component', () => {
+ const store = new PipelineStore();
+ store.storePipeline(linkedPipelineJSON);
+ const mediator = new PipelinesMediator({ endpoint: '' });
+
+ let wrapper;
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('while is loading', () => {
+ it('should render a loading icon', () => {
+ wrapper = mount(graphComponent, {
+ propsData: {
+ isLoading: true,
+ pipeline: {},
+ mediator,
+ },
+ });
+
+ expect(wrapper.find('.gl-spinner').exists()).toBe(true);
+ });
+ });
+
+ describe('with data', () => {
+ it('should render the graph', () => {
+ wrapper = mount(graphComponent, {
+ propsData: {
+ isLoading: false,
+ pipeline: graphJSON,
+ mediator,
+ },
+ });
+
+ expect(wrapper.find('.js-pipeline-graph').exists()).toBe(true);
+
+ expect(wrapper.find(stageColumnComponent).classes()).toContain('no-margin');
+
+ expect(
+ wrapper
+ .findAll(stageColumnComponent)
+ .at(1)
+ .classes(),
+ ).toContain('left-margin');
+
+ expect(wrapper.find('.stage-column:nth-child(2) .build:nth-child(1)').classes()).toContain(
+ 'left-connector',
+ );
+
+ expect(wrapper.find('.loading-icon').exists()).toBe(false);
+
+ expect(wrapper.find('.stage-column-list').exists()).toBe(true);
+ });
+ });
+
+ describe('when linked pipelines are present', () => {
+ beforeEach(() => {
+ wrapper = mount(graphComponent, {
+ propsData: {
+ isLoading: false,
+ pipeline: store.state.pipeline,
+ mediator,
+ },
+ });
+ });
+
+ describe('rendered output', () => {
+ it('should include the pipelines graph', () => {
+ expect(wrapper.find('.js-pipeline-graph').exists()).toBe(true);
+ });
+
+ it('should not include the loading icon', () => {
+ expect(wrapper.find('.fa-spinner').exists()).toBe(false);
+ });
+
+ it('should include the stage column list', () => {
+ expect(wrapper.find(stageColumnComponent).exists()).toBe(true);
+ });
+
+ it('should include the no-margin class on the first child if there is only one job', () => {
+ const firstStageColumnElement = wrapper.find(stageColumnComponent);
+
+ expect(firstStageColumnElement.classes()).toContain('no-margin');
+ });
+
+ it('should include the has-only-one-job class on the first child', () => {
+ const firstStageColumnElement = wrapper.find('.stage-column-list .stage-column');
+
+ expect(firstStageColumnElement.classes()).toContain('has-only-one-job');
+ });
+
+ it('should include the left-margin class on the second child', () => {
+ const firstStageColumnElement = wrapper.find('.stage-column-list .stage-column:last-child');
+
+ expect(firstStageColumnElement.classes()).toContain('left-margin');
+ });
+
+ it('should include the js-has-linked-pipelines flag', () => {
+ expect(wrapper.find('.js-has-linked-pipelines').exists()).toBe(true);
+ });
+ });
+
+ describe('computeds and methods', () => {
+ describe('capitalizeStageName', () => {
+        it('capitalizes the stage name', () => {
+ expect(
+ wrapper
+ .findAll('.stage-column .stage-name')
+ .at(1)
+ .text(),
+ ).toBe('Prebuild');
+ });
+ });
+
+ describe('stageConnectorClass', () => {
+        it('returns left-margin when there is a triggerer', () => {
+ expect(
+ wrapper
+ .findAll(stageColumnComponent)
+ .at(1)
+ .classes(),
+ ).toContain('left-margin');
+ });
+ });
+ });
+
+ describe('linked pipelines components', () => {
+ beforeEach(() => {
+ wrapper = mount(graphComponent, {
+ propsData: {
+ isLoading: false,
+ pipeline: store.state.pipeline,
+ mediator,
+ },
+ });
+ });
+
+ it('should render an upstream pipelines column at first position', () => {
+ expect(wrapper.find(linkedPipelinesColumn).exists()).toBe(true);
+ expect(wrapper.find('.stage-column .stage-name').text()).toBe('Upstream');
+ });
+
+ it('should render a downstream pipelines column at last position', () => {
+ const stageColumnNames = wrapper.findAll('.stage-column .stage-name');
+
+ expect(wrapper.find(linkedPipelinesColumn).exists()).toBe(true);
+ expect(stageColumnNames.at(stageColumnNames.length - 1).text()).toBe('Downstream');
+ });
+
+ describe('triggered by', () => {
+ describe('on click', () => {
+ it('should emit `onClickTriggeredBy` when triggered by linked pipeline is clicked', () => {
+ const btnWrapper = wrapper.find('.linked-pipeline-content');
+
+ btnWrapper.trigger('click');
+
+ btnWrapper.vm.$nextTick(() => {
+ expect(wrapper.emitted().onClickTriggeredBy).toEqual([
+ store.state.pipeline.triggered_by,
+ ]);
+ });
+ });
+ });
+
+ describe('with expanded pipeline', () => {
+ it('should render expanded pipeline', done => {
+ // expand the pipeline
+ store.state.pipeline.triggered_by[0].isExpanded = true;
+
+ wrapper = mount(graphComponent, {
+ propsData: {
+ isLoading: false,
+ pipeline: store.state.pipeline,
+ mediator,
+ },
+ });
+
+ Vue.nextTick()
+ .then(() => {
+ expect(wrapper.find('.js-upstream-pipeline-12').exists()).toBe(true);
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
+ });
+
+ describe('triggered', () => {
+ describe('on click', () => {
+ it('should emit `onClickTriggered`', () => {
+          // We have to mock this method since we do both a style change and
+          // emit an event; not mocking returns an error.
+ wrapper.setMethods({
+ handleClickedDownstream: jest.fn(() =>
+ wrapper.vm.$emit('onClickTriggered', ...store.state.pipeline.triggered),
+ ),
+ });
+
+ const btnWrappers = wrapper.findAll('.linked-pipeline-content');
+ const downstreamBtnWrapper = btnWrappers.at(btnWrappers.length - 1);
+
+ downstreamBtnWrapper.trigger('click');
+
+ downstreamBtnWrapper.vm.$nextTick(() => {
+ expect(wrapper.emitted().onClickTriggered).toEqual([store.state.pipeline.triggered]);
+ });
+ });
+ });
+
+ describe('with expanded pipeline', () => {
+ it('should render expanded pipeline', done => {
+ // expand the pipeline
+ store.state.pipeline.triggered[0].isExpanded = true;
+
+ wrapper = mount(graphComponent, {
+ propsData: {
+ isLoading: false,
+ pipeline: store.state.pipeline,
+ mediator,
+ },
+ });
+
+ Vue.nextTick()
+ .then(() => {
+ expect(wrapper.find('.js-downstream-pipeline-34993051')).not.toBeNull();
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
+ });
+ });
+ });
+
+ describe('when linked pipelines are not present', () => {
+ beforeEach(() => {
+ const pipeline = Object.assign(linkedPipelineJSON, { triggered: null, triggered_by: null });
+ wrapper = mount(graphComponent, {
+ propsData: {
+ isLoading: false,
+ pipeline,
+ mediator,
+ },
+ });
+ });
+
+ describe('rendered output', () => {
+ it('should include the first column with a no margin', () => {
+ const firstColumn = wrapper.find('.stage-column');
+
+ expect(firstColumn.classes()).toContain('no-margin');
+ });
+
+ it('should not render a linked pipelines column', () => {
+ expect(wrapper.find('.linked-pipelines-column').exists()).toBe(false);
+ });
+ });
+
+ describe('stageConnectorClass', () => {
+      it('returns no-margin when no triggerer and there is one job', () => {
+ expect(wrapper.find(stageColumnComponent).classes()).toContain('no-margin');
+ });
+
+      it('returns left-margin when no triggerer and not the first stage', () => {
+ expect(
+ wrapper
+ .findAll(stageColumnComponent)
+ .at(1)
+ .classes(),
+ ).toContain('left-margin');
+ });
+ });
+ });
+
+ describe('capitalizeStageName', () => {
+ it('capitalizes and escapes stage name', () => {
+ wrapper = mount(graphComponent, {
+ propsData: {
+ isLoading: false,
+ pipeline: graphJSON,
+ mediator,
+ },
+ });
+
+ expect(
+ wrapper
+ .find('.stage-column:nth-child(2) .stage-name')
+ .text()
+ .trim(),
+ ).toEqual('Deploy &lt;img src=x onerror=alert(document.domain)&gt;');
+ });
+ });
+});
diff --git a/spec/javascripts/pipelines/graph/job_group_dropdown_spec.js b/spec/frontend/pipelines/graph/job_group_dropdown_spec.js
index a3957f94caa..b323e1d8a06 100644
--- a/spec/javascripts/pipelines/graph/job_group_dropdown_spec.js
+++ b/spec/frontend/pipelines/graph/job_group_dropdown_spec.js
@@ -1,11 +1,7 @@
-import Vue from 'vue';
-import mountComponent from 'spec/helpers/vue_mount_component_helper';
+import { shallowMount } from '@vue/test-utils';
import JobGroupDropdown from '~/pipelines/components/graph/job_group_dropdown.vue';
describe('job group dropdown component', () => {
- const Component = Vue.extend(JobGroupDropdown);
- let vm;
-
const group = {
jobs: [
{
@@ -66,20 +62,23 @@ describe('job group dropdown component', () => {
},
};
+ let wrapper;
+ const findButton = () => wrapper.find('button');
+
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
beforeEach(() => {
- vm = mountComponent(Component, { group });
+ wrapper = shallowMount(JobGroupDropdown, { propsData: { group } });
});
it('renders button with group name and size', () => {
- expect(vm.$el.querySelector('button').textContent).toContain(group.name);
- expect(vm.$el.querySelector('button').textContent).toContain(group.size);
+ expect(findButton().text()).toContain(group.name);
+ expect(findButton().text()).toContain(group.size);
});
it('renders dropdown with jobs', () => {
- expect(vm.$el.querySelectorAll('.scrollable-menu>ul>li').length).toEqual(group.jobs.length);
+ expect(wrapper.findAll('.scrollable-menu>ul>li').length).toBe(group.jobs.length);
});
});
diff --git a/spec/frontend/pipelines/graph/job_item_spec.js b/spec/frontend/pipelines/graph/job_item_spec.js
index 0c64d5c9fa8..da777466e3e 100644
--- a/spec/frontend/pipelines/graph/job_item_spec.js
+++ b/spec/frontend/pipelines/graph/job_item_spec.js
@@ -47,7 +47,7 @@ describe('pipeline graph job item', () => {
expect(link.attributes('title')).toEqual(`${mockJob.name} - ${mockJob.status.label}`);
- expect(wrapper.find('.js-status-icon-success')).toBeDefined();
+ expect(wrapper.find('.ci-status-icon-success').exists()).toBe(true);
expect(trimText(wrapper.find('.ci-status-text').text())).toBe(mockJob.name);
@@ -73,7 +73,7 @@ describe('pipeline graph job item', () => {
},
});
- expect(wrapper.find('.js-status-icon-success')).toBeDefined();
+ expect(wrapper.find('.ci-status-icon-success').exists()).toBe(true);
expect(wrapper.find('a').exists()).toBe(false);
expect(trimText(wrapper.find('.ci-status-text').text())).toEqual(mockJob.name);
@@ -84,8 +84,8 @@ describe('pipeline graph job item', () => {
it('it should render the action icon', () => {
createWrapper({ job: mockJob });
- expect(wrapper.find('a.ci-action-icon-container')).toBeDefined();
- expect(wrapper.find('i.ci-action-icon-wrapper')).toBeDefined();
+ expect(wrapper.find('.ci-action-icon-container').exists()).toBe(true);
+ expect(wrapper.find('.ci-action-icon-wrapper').exists()).toBe(true);
});
});
diff --git a/spec/frontend/pipelines/graph/job_name_component_spec.js b/spec/frontend/pipelines/graph/job_name_component_spec.js
new file mode 100644
index 00000000000..3574b66403e
--- /dev/null
+++ b/spec/frontend/pipelines/graph/job_name_component_spec.js
@@ -0,0 +1,36 @@
+import { mount } from '@vue/test-utils';
+import ciIcon from '~/vue_shared/components/ci_icon.vue';
+
+import jobNameComponent from '~/pipelines/components/graph/job_name_component.vue';
+
+describe('job name component', () => {
+ let wrapper;
+
+ const propsData = {
+ name: 'foo',
+ status: {
+ icon: 'status_success',
+ group: 'success',
+ },
+ };
+
+ beforeEach(() => {
+ wrapper = mount(jobNameComponent, {
+ propsData,
+ });
+ });
+
+ it('should render the provided name', () => {
+ expect(
+ wrapper
+ .find('.ci-status-text')
+ .text()
+ .trim(),
+ ).toBe(propsData.name);
+ });
+
+ it('should render an icon with the provided status', () => {
+ expect(wrapper.find(ciIcon).exists()).toBe(true);
+ expect(wrapper.find('.ci-status-icon-success').exists()).toBe(true);
+ });
+});
diff --git a/spec/frontend/pipelines/graph/linked_pipeline_spec.js b/spec/frontend/pipelines/graph/linked_pipeline_spec.js
index 7f49b21100d..cf78aa3ef71 100644
--- a/spec/frontend/pipelines/graph/linked_pipeline_spec.js
+++ b/spec/frontend/pipelines/graph/linked_pipeline_spec.js
@@ -1,12 +1,17 @@
import { mount } from '@vue/test-utils';
import LinkedPipelineComponent from '~/pipelines/components/graph/linked_pipeline.vue';
+import CiStatus from '~/vue_shared/components/ci_icon.vue';
import mockData from './linked_pipelines_mock_data';
const mockPipeline = mockData.triggered[0];
+const validTriggeredPipelineId = mockPipeline.project.id;
+const invalidTriggeredPipelineId = mockPipeline.project.id + 5;
+
describe('Linked pipeline', () => {
let wrapper;
+ const findButton = () => wrapper.find('button');
const createWrapper = propsData => {
wrapper = mount(LinkedPipelineComponent, {
@@ -21,7 +26,7 @@ describe('Linked pipeline', () => {
describe('rendered output', () => {
const props = {
pipeline: mockPipeline,
- projectId: 20,
+ projectId: invalidTriggeredPipelineId,
columnTitle: 'Downstream',
};
@@ -44,14 +49,13 @@ describe('Linked pipeline', () => {
});
it('should render an svg within the status container', () => {
- const pipelineStatusElement = wrapper.find('.js-linked-pipeline-status');
+ const pipelineStatusElement = wrapper.find(CiStatus);
expect(pipelineStatusElement.find('svg').exists()).toBe(true);
});
it('should render the pipeline status icon svg', () => {
- expect(wrapper.find('.js-ci-status-icon-running').exists()).toBe(true);
- expect(wrapper.find('.js-ci-status-icon-running').html()).toContain('<svg');
+ expect(wrapper.find('.ci-status-icon-failed svg').exists()).toBe(true);
});
it('should have a ci-status child component', () => {
@@ -88,7 +92,7 @@ describe('Linked pipeline', () => {
describe('parent/child', () => {
const downstreamProps = {
pipeline: mockPipeline,
- projectId: 19,
+ projectId: validTriggeredPipelineId,
columnTitle: 'Downstream',
};
@@ -116,7 +120,7 @@ describe('Linked pipeline', () => {
describe('when isLoading is true', () => {
const props = {
pipeline: { ...mockPipeline, isLoading: true },
- projectId: 19,
+ projectId: invalidTriggeredPipelineId,
columnTitle: 'Downstream',
};
@@ -132,7 +136,7 @@ describe('Linked pipeline', () => {
describe('on click', () => {
const props = {
pipeline: mockPipeline,
- projectId: 19,
+ projectId: validTriggeredPipelineId,
columnTitle: 'Downstream',
};
@@ -142,18 +146,18 @@ describe('Linked pipeline', () => {
it('emits `pipelineClicked` event', () => {
jest.spyOn(wrapper.vm, '$emit');
- wrapper.find('button').trigger('click');
+ findButton().trigger('click');
expect(wrapper.emitted().pipelineClicked).toBeTruthy();
});
it('should emit `bv::hide::tooltip` to close the tooltip', () => {
jest.spyOn(wrapper.vm.$root, '$emit');
- wrapper.find('button').trigger('click');
+ findButton().trigger('click');
expect(wrapper.vm.$root.$emit.mock.calls[0]).toEqual([
'bv::hide::tooltip',
- 'js-linked-pipeline-132',
+ 'js-linked-pipeline-34993051',
]);
});
});
diff --git a/spec/frontend/pipelines/graph/linked_pipelines_column_spec.js b/spec/frontend/pipelines/graph/linked_pipelines_column_spec.js
new file mode 100644
index 00000000000..82eaa553d0c
--- /dev/null
+++ b/spec/frontend/pipelines/graph/linked_pipelines_column_spec.js
@@ -0,0 +1,38 @@
+import { shallowMount } from '@vue/test-utils';
+import LinkedPipelinesColumn from '~/pipelines/components/graph/linked_pipelines_column.vue';
+import LinkedPipeline from '~/pipelines/components/graph/linked_pipeline.vue';
+import mockData from './linked_pipelines_mock_data';
+
+describe('Linked Pipelines Column', () => {
+ const propsData = {
+ columnTitle: 'Upstream',
+ linkedPipelines: mockData.triggered,
+ graphPosition: 'right',
+ projectId: 19,
+ };
+ let wrapper;
+
+ beforeEach(() => {
+ wrapper = shallowMount(LinkedPipelinesColumn, { propsData });
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders the pipeline orientation', () => {
+ const titleElement = wrapper.find('.linked-pipelines-column-title');
+
+ expect(titleElement.text()).toBe(propsData.columnTitle);
+ });
+
+ it('renders the correct number of linked pipelines', () => {
+ const linkedPipelineElements = wrapper.findAll(LinkedPipeline);
+
+ expect(linkedPipelineElements.length).toBe(propsData.linkedPipelines.length);
+ });
+
+ it('renders cross project triangle when column is upstream', () => {
+ expect(wrapper.find('.cross-project-triangle').exists()).toBe(true);
+ });
+});
diff --git a/spec/frontend/pipelines/graph/linked_pipelines_mock_data.js b/spec/frontend/pipelines/graph/linked_pipelines_mock_data.js
index c9a94b3101f..3e9c0814403 100644
--- a/spec/frontend/pipelines/graph/linked_pipelines_mock_data.js
+++ b/spec/frontend/pipelines/graph/linked_pipelines_mock_data.js
@@ -1,411 +1,3779 @@
export default {
- project: {
- id: 19,
+ id: 23211253,
+ user: {
+ id: 3585,
+ name: 'Achilleas Pipinellis',
+ username: 'axil',
+ state: 'active',
+ avatar_url: 'https://assets.gitlab-static.net/uploads/-/system/user/avatar/3585/avatar.png',
+ web_url: 'https://gitlab.com/axil',
+ status_tooltip_html:
+ '\u003cspan class="user-status-emoji has-tooltip" title="I like pizza" data-html="true" data-placement="top"\u003e\u003cgl-emoji title="slice of pizza" data-name="pizza" data-unicode-version="6.0"\u003e🍕\u003c/gl-emoji\u003e\u003c/span\u003e',
+ path: '/axil',
},
- triggered_by: {
- id: 129,
- active: true,
- path: '/gitlab-org/gitlab-foss/pipelines/129',
- project: {
- name: 'GitLabCE',
- },
- details: {
- status: {
- icon: 'status_running',
- text: 'running',
- label: 'running',
- group: 'running',
- has_details: true,
- details_path: '/gitlab-org/gitlab-foss/pipelines/129',
- favicon:
- '/assets/ci_favicons/dev/favicon_status_running-c3ad2fc53ea6079c174e5b6c1351ff349e99ec3af5a5622fb77b0fe53ea279c1.ico',
- },
- },
- flags: {
- latest: false,
- triggered: false,
- stuck: false,
- yaml_errors: false,
- retryable: true,
- cancelable: true,
- },
- ref: {
- name: '7-5-stable',
- path: '/gitlab-org/gitlab-foss/commits/7-5-stable',
- tag: false,
- branch: true,
- },
- commit: {
- id: '23433d4d8b20d7e45c103d0b6048faad38a130ab',
- short_id: '23433d4d',
- title: 'Version 7.5.0.rc1',
- created_at: '2014-11-17T15:44:14.000+01:00',
- parent_ids: ['30ac909f30f58d319b42ed1537664483894b18cd'],
- message: 'Version 7.5.0.rc1\n',
- author_name: 'Jacob Vosmaer',
- author_email: 'contact@jacobvosmaer.nl',
- authored_date: '2014-11-17T15:44:14.000+01:00',
- committer_name: 'Jacob Vosmaer',
- committer_email: 'contact@jacobvosmaer.nl',
- committed_date: '2014-11-17T15:44:14.000+01:00',
- author_gravatar_url:
- 'http://www.gravatar.com/avatar/e66d11c0eedf8c07b3b18fca46599807?s=80&d=identicon',
- commit_url:
- 'http://localhost:3000/gitlab-org/gitlab-foss/commit/23433d4d8b20d7e45c103d0b6048faad38a130ab',
- commit_path: '/gitlab-org/gitlab-foss/commit/23433d4d8b20d7e45c103d0b6048faad38a130ab',
- },
- retry_path: '/gitlab-org/gitlab-foss/pipelines/129/retry',
- cancel_path: '/gitlab-org/gitlab-foss/pipelines/129/cancel',
- created_at: '2017-05-24T14:46:20.090Z',
- updated_at: '2017-05-24T14:46:29.906Z',
+ active: false,
+ coverage: null,
+ source: 'push',
+ created_at: '2018-06-05T11:31:30.452Z',
+ updated_at: '2018-10-31T16:35:31.305Z',
+ path: '/gitlab-org/gitlab-runner/pipelines/23211253',
+ flags: {
+ latest: false,
+ stuck: false,
+ auto_devops: false,
+ merge_request: false,
+ yaml_errors: false,
+ retryable: false,
+ cancelable: false,
+ failure_reason: false,
},
- triggered: [
- {
- id: 132,
- active: true,
- path: '/gitlab-org/gitlab-foss/pipelines/132',
- project: {
- name: 'GitLabCE',
- id: 19,
- },
- details: {
+ details: {
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/gitlab-org/gitlab-runner/pipelines/23211253',
+ illustration: null,
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ },
+ duration: 53,
+ finished_at: '2018-10-31T16:35:31.299Z',
+ stages: [
+ {
+ name: 'prebuild',
+ title: 'prebuild: passed',
+ groups: [
+ {
+ name: 'review-docs-deploy',
+ size: 1,
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'manual play action',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/gitlab-org/gitlab-runner/-/jobs/72469032',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/gitlab-org/gitlab-runner/-/jobs/72469032/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ jobs: [
+ {
+ id: 72469032,
+ name: 'review-docs-deploy',
+ started: '2018-10-31T16:34:58.778Z',
+ archived: false,
+ build_path: '/gitlab-org/gitlab-runner/-/jobs/72469032',
+ retry_path: '/gitlab-org/gitlab-runner/-/jobs/72469032/retry',
+ play_path: '/gitlab-org/gitlab-runner/-/jobs/72469032/play',
+ playable: true,
+ scheduled: false,
+ created_at: '2018-06-05T11:31:30.495Z',
+ updated_at: '2018-10-31T16:35:31.251Z',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'manual play action',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/gitlab-org/gitlab-runner/-/jobs/72469032',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/gitlab-org/gitlab-runner/-/jobs/72469032/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ },
+ ],
+ },
+ ],
status: {
- icon: 'status_running',
- text: 'running',
- label: 'running',
- group: 'running',
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
has_details: true,
- details_path: '/gitlab-org/gitlab-foss/pipelines/132',
+ details_path: '/gitlab-org/gitlab-runner/pipelines/23211253#prebuild',
+ illustration: null,
favicon:
- '/assets/ci_favicons/dev/favicon_status_running-c3ad2fc53ea6079c174e5b6c1351ff349e99ec3af5a5622fb77b0fe53ea279c1.ico',
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
},
+ path: '/gitlab-org/gitlab-runner/pipelines/23211253#prebuild',
+ dropdown_path: '/gitlab-org/gitlab-runner/pipelines/23211253/stage.json?stage=prebuild',
},
- flags: {
- latest: false,
- triggered: false,
- stuck: false,
- yaml_errors: false,
- retryable: true,
- cancelable: true,
- },
- ref: {
- name: 'crowd',
- path: '/gitlab-org/gitlab-foss/commits/crowd',
- tag: false,
- branch: true,
- },
- commit: {
- id: 'b9d58c4cecd06be74c3cc32ccfb522b31544ab2e',
- short_id: 'b9d58c4c',
- title: 'getting user keys publically through http without any authentication, the github…',
- created_at: '2013-10-03T12:50:33.000+05:30',
- parent_ids: ['e219cf7246c6a0495e4507deaffeba11e79f13b8'],
- message:
- 'getting user keys publically through http without any authentication, the github way. E.g: http://github.com/devaroop.keys\n\nchangelog updated to include ssh key retrieval feature update\n',
- author_name: 'devaroop',
- author_email: 'devaroop123@yahoo.co.in',
- authored_date: '2013-10-02T20:39:29.000+05:30',
- committer_name: 'devaroop',
- committer_email: 'devaroop123@yahoo.co.in',
- committed_date: '2013-10-03T12:50:33.000+05:30',
- author_gravatar_url:
- 'http://www.gravatar.com/avatar/35df4b155ec66a3127d53459941cf8a2?s=80&d=identicon',
- commit_url:
- 'http://localhost:3000/gitlab-org/gitlab-foss/commit/b9d58c4cecd06be74c3cc32ccfb522b31544ab2e',
- commit_path: '/gitlab-org/gitlab-foss/commit/b9d58c4cecd06be74c3cc32ccfb522b31544ab2e',
- },
- retry_path: '/gitlab-org/gitlab-foss/pipelines/132/retry',
- cancel_path: '/gitlab-org/gitlab-foss/pipelines/132/cancel',
- created_at: '2017-05-24T14:46:24.644Z',
- updated_at: '2017-05-24T14:48:55.226Z',
- },
- {
- id: 133,
- active: true,
- path: '/gitlab-org/gitlab-foss/pipelines/133',
- project: {
- name: 'GitLabCE',
- },
- details: {
+ {
+ name: 'test',
+ title: 'test: passed',
+ groups: [
+ {
+ name: 'docs check links',
+ size: 1,
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/gitlab-org/gitlab-runner/-/jobs/72469033',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/gitlab-org/gitlab-runner/-/jobs/72469033/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ jobs: [
+ {
+ id: 72469033,
+ name: 'docs check links',
+ started: '2018-06-05T11:31:33.240Z',
+ archived: false,
+ build_path: '/gitlab-org/gitlab-runner/-/jobs/72469033',
+ retry_path: '/gitlab-org/gitlab-runner/-/jobs/72469033/retry',
+ playable: false,
+ scheduled: false,
+ created_at: '2018-06-05T11:31:30.627Z',
+ updated_at: '2018-06-05T11:31:54.363Z',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/gitlab-org/gitlab-runner/-/jobs/72469033',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/gitlab-org/gitlab-runner/-/jobs/72469033/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ },
+ ],
+ },
+ ],
status: {
- icon: 'status_running',
- text: 'running',
- label: 'running',
- group: 'running',
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
has_details: true,
- details_path: '/gitlab-org/gitlab-foss/pipelines/133',
+ details_path: '/gitlab-org/gitlab-runner/pipelines/23211253#test',
+ illustration: null,
favicon:
- '/assets/ci_favicons/dev/favicon_status_running-c3ad2fc53ea6079c174e5b6c1351ff349e99ec3af5a5622fb77b0fe53ea279c1.ico',
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
},
+ path: '/gitlab-org/gitlab-runner/pipelines/23211253#test',
+ dropdown_path: '/gitlab-org/gitlab-runner/pipelines/23211253/stage.json?stage=test',
},
- flags: {
- latest: false,
- triggered: false,
- stuck: false,
- yaml_errors: false,
- retryable: true,
- cancelable: true,
- },
- ref: {
- name: 'crowd',
- path: '/gitlab-org/gitlab-foss/commits/crowd',
- tag: false,
- branch: true,
- },
- commit: {
- id: 'b6bd4856a33df3d144be66c4ed1f1396009bb08b',
- short_id: 'b6bd4856',
- title: 'getting user keys publically through http without any authentication, the github…',
- created_at: '2013-10-02T20:39:29.000+05:30',
- parent_ids: ['e219cf7246c6a0495e4507deaffeba11e79f13b8'],
- message:
- 'getting user keys publically through http without any authentication, the github way. E.g: http://github.com/devaroop.keys\n',
- author_name: 'devaroop',
- author_email: 'devaroop123@yahoo.co.in',
- authored_date: '2013-10-02T20:39:29.000+05:30',
- committer_name: 'devaroop',
- committer_email: 'devaroop123@yahoo.co.in',
- committed_date: '2013-10-02T20:39:29.000+05:30',
- author_gravatar_url:
- 'http://www.gravatar.com/avatar/35df4b155ec66a3127d53459941cf8a2?s=80&d=identicon',
- commit_url:
- 'http://localhost:3000/gitlab-org/gitlab-foss/commit/b6bd4856a33df3d144be66c4ed1f1396009bb08b',
- commit_path: '/gitlab-org/gitlab-foss/commit/b6bd4856a33df3d144be66c4ed1f1396009bb08b',
- },
- retry_path: '/gitlab-org/gitlab-foss/pipelines/133/retry',
- cancel_path: '/gitlab-org/gitlab-foss/pipelines/133/cancel',
- created_at: '2017-05-24T14:46:24.648Z',
- updated_at: '2017-05-24T14:48:59.673Z',
- },
- {
- id: 130,
- active: true,
- path: '/gitlab-org/gitlab-foss/pipelines/130',
- project: {
- name: 'GitLabCE',
- },
- details: {
+ {
+ name: 'cleanup',
+ title: 'cleanup: skipped',
+ groups: [
+ {
+ name: 'review-docs-cleanup',
+ size: 1,
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual stop action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-org/gitlab-runner/-/jobs/72469034',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'stop',
+ title: 'Stop',
+ path: '/gitlab-org/gitlab-runner/-/jobs/72469034/play',
+ method: 'post',
+ button_title: 'Stop this environment',
+ },
+ },
+ jobs: [
+ {
+ id: 72469034,
+ name: 'review-docs-cleanup',
+ started: null,
+ archived: false,
+ build_path: '/gitlab-org/gitlab-runner/-/jobs/72469034',
+ play_path: '/gitlab-org/gitlab-runner/-/jobs/72469034/play',
+ playable: true,
+ scheduled: false,
+ created_at: '2018-06-05T11:31:30.760Z',
+ updated_at: '2018-06-05T11:31:56.037Z',
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual stop action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-org/gitlab-runner/-/jobs/72469034',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'stop',
+ title: 'Stop',
+ path: '/gitlab-org/gitlab-runner/-/jobs/72469034/play',
+ method: 'post',
+ button_title: 'Stop this environment',
+ },
+ },
+ },
+ ],
+ },
+ ],
status: {
- icon: 'status_running',
- text: 'running',
- label: 'running',
- group: 'running',
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
has_details: true,
- details_path: '/gitlab-org/gitlab-foss/pipelines/130',
+ details_path: '/gitlab-org/gitlab-runner/pipelines/23211253#cleanup',
+ illustration: null,
favicon:
- '/assets/ci_favicons/dev/favicon_status_running-c3ad2fc53ea6079c174e5b6c1351ff349e99ec3af5a5622fb77b0fe53ea279c1.ico',
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
},
+ path: '/gitlab-org/gitlab-runner/pipelines/23211253#cleanup',
+ dropdown_path: '/gitlab-org/gitlab-runner/pipelines/23211253/stage.json?stage=cleanup',
},
- flags: {
- latest: false,
- triggered: false,
- stuck: false,
- yaml_errors: false,
- retryable: true,
- cancelable: true,
+ ],
+ artifacts: [],
+ manual_actions: [
+ {
+ name: 'review-docs-cleanup',
+ path: '/gitlab-org/gitlab-runner/-/jobs/72469034/play',
+ playable: true,
+ scheduled: false,
},
- ref: {
- name: 'crowd',
- path: '/gitlab-org/gitlab-foss/commits/crowd',
- tag: false,
- branch: true,
+ {
+ name: 'review-docs-deploy',
+ path: '/gitlab-org/gitlab-runner/-/jobs/72469032/play',
+ playable: true,
+ scheduled: false,
},
- commit: {
- id: '6d7ced4a2311eeff037c5575cca1868a6d3f586f',
- short_id: '6d7ced4a',
- title: 'Whitespace fixes to patch',
- created_at: '2013-10-08T13:53:22.000-05:00',
- parent_ids: ['1875141a963a4238bda29011d8f7105839485253'],
- message: 'Whitespace fixes to patch\n',
- author_name: 'Dale Hamel',
- author_email: 'dale.hamel@srvthe.net',
- authored_date: '2013-10-08T13:53:22.000-05:00',
- committer_name: 'Dale Hamel',
- committer_email: 'dale.hamel@invenia.ca',
- committed_date: '2013-10-08T13:53:22.000-05:00',
- author_gravatar_url:
- 'http://www.gravatar.com/avatar/cd08930e69fa5ad1a669206e7bafe476?s=80&d=identicon',
- commit_url:
- 'http://localhost:3000/gitlab-org/gitlab-foss/commit/6d7ced4a2311eeff037c5575cca1868a6d3f586f',
- commit_path: '/gitlab-org/gitlab-foss/commit/6d7ced4a2311eeff037c5575cca1868a6d3f586f',
+ ],
+ scheduled_actions: [],
+ },
+ ref: {
+ name: 'docs/add-development-guide-to-readme',
+ path: '/gitlab-org/gitlab-runner/commits/docs/add-development-guide-to-readme',
+ tag: false,
+ branch: true,
+ merge_request: false,
+ },
+ commit: {
+ id: '8083eb0a920572214d0dccedd7981f05d535ad46',
+ short_id: '8083eb0a',
+ title: 'Add link to development guide in readme',
+ created_at: '2018-06-05T11:30:48.000Z',
+ parent_ids: ['1d7cf79b5a1a2121b9474ac20d61c1b8f621289d'],
+ message:
+ 'Add link to development guide in readme\n\nCloses https://gitlab.com/gitlab-org/gitlab-runner/issues/3122\n',
+ author_name: 'Achilleas Pipinellis',
+ author_email: 'axil@gitlab.com',
+ authored_date: '2018-06-05T11:30:48.000Z',
+ committer_name: 'Achilleas Pipinellis',
+ committer_email: 'axil@gitlab.com',
+ committed_date: '2018-06-05T11:30:48.000Z',
+ author: {
+ id: 3585,
+ name: 'Achilleas Pipinellis',
+ username: 'axil',
+ state: 'active',
+ avatar_url: 'https://assets.gitlab-static.net/uploads/-/system/user/avatar/3585/avatar.png',
+ web_url: 'https://gitlab.com/axil',
+ status_tooltip_html: null,
+ path: '/axil',
+ },
+ author_gravatar_url:
+ 'https://secure.gravatar.com/avatar/1d37af00eec153a8333a4ce18e9aea41?s=80\u0026d=identicon',
+ commit_url:
+ 'https://gitlab.com/gitlab-org/gitlab-runner/commit/8083eb0a920572214d0dccedd7981f05d535ad46',
+ commit_path: '/gitlab-org/gitlab-runner/commit/8083eb0a920572214d0dccedd7981f05d535ad46',
+ },
+ project: { id: 20 },
+ triggered_by: {
+ id: 12,
+ user: {
+ id: 376774,
+ name: 'Alessio Caiazza',
+ username: 'nolith',
+ state: 'active',
+ avatar_url: 'https://assets.gitlab-static.net/uploads/-/system/user/avatar/376774/avatar.png',
+ web_url: 'https://gitlab.com/nolith',
+ status_tooltip_html: null,
+ path: '/nolith',
+ },
+ active: false,
+ coverage: null,
+ source: 'pipeline',
+ path: '/gitlab-com/gitlab-docs/pipelines/34993051',
+ details: {
+ status: {
+ icon: 'status_failed',
+ text: 'failed',
+ label: 'failed',
+ group: 'failed',
+ tooltip: 'failed',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/pipelines/34993051',
+ illustration: null,
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_failed-41304d7f7e3828808b0c26771f0309e55296819a9beea3ea9fbf6689d9857c12.png',
},
- retry_path: '/gitlab-org/gitlab-foss/pipelines/130/retry',
- cancel_path: '/gitlab-org/gitlab-foss/pipelines/130/cancel',
- created_at: '2017-05-24T14:46:24.630Z',
- updated_at: '2017-05-24T14:49:45.091Z',
+ duration: 118,
+ finished_at: '2018-10-31T16:41:40.615Z',
+ stages: [
+ {
+ name: 'build-images',
+ title: 'build-images: skipped',
+ groups: [
+ {
+ name: 'image:bootstrap',
+ size: 1,
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual play action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982853',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982853/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ jobs: [
+ {
+ id: 11421321982853,
+ name: 'image:bootstrap',
+ started: null,
+ archived: false,
+ build_path: '/gitlab-com/gitlab-docs/-/jobs/114982853',
+ play_path: '/gitlab-com/gitlab-docs/-/jobs/114982853/play',
+ playable: true,
+ scheduled: false,
+ created_at: '2018-10-31T16:35:23.704Z',
+ updated_at: '2018-10-31T16:35:24.118Z',
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual play action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982853',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982853/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ },
+ ],
+ },
+ {
+ name: 'image:builder-onbuild',
+ size: 1,
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual play action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982854',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982854/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ jobs: [
+ {
+ id: 1149822131854,
+ name: 'image:builder-onbuild',
+ started: null,
+ archived: false,
+ build_path: '/gitlab-com/gitlab-docs/-/jobs/114982854',
+ play_path: '/gitlab-com/gitlab-docs/-/jobs/114982854/play',
+ playable: true,
+ scheduled: false,
+ created_at: '2018-10-31T16:35:23.728Z',
+ updated_at: '2018-10-31T16:35:24.070Z',
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual play action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982854',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982854/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ },
+ ],
+ },
+ {
+ name: 'image:nginx-onbuild',
+ size: 1,
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual play action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982855',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982855/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ jobs: [
+ {
+ id: 11498285523424,
+ name: 'image:nginx-onbuild',
+ started: null,
+ archived: false,
+ build_path: '/gitlab-com/gitlab-docs/-/jobs/114982855',
+ play_path: '/gitlab-com/gitlab-docs/-/jobs/114982855/play',
+ playable: true,
+ scheduled: false,
+ created_at: '2018-10-31T16:35:23.753Z',
+ updated_at: '2018-10-31T16:35:24.033Z',
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual play action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982855',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982855/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ },
+ ],
+ },
+ ],
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/pipelines/34993051#build-images',
+ illustration: null,
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ path: '/gitlab-com/gitlab-docs/pipelines/34993051#build-images',
+ dropdown_path: '/gitlab-com/gitlab-docs/pipelines/34993051/stage.json?stage=build-images',
+ },
+ {
+ name: 'build',
+ title: 'build: failed',
+ groups: [
+ {
+ name: 'compile_dev',
+ size: 1,
+ status: {
+ icon: 'status_failed',
+ text: 'failed',
+ label: 'failed',
+ group: 'failed',
+ tooltip: 'failed - (script failure)',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114984694',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_failed-41304d7f7e3828808b0c26771f0309e55296819a9beea3ea9fbf6689d9857c12.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/528/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ jobs: [
+ {
+ id: 1149846949786,
+ name: 'compile_dev',
+ started: '2018-10-31T16:39:41.598Z',
+ archived: false,
+ build_path: '/gitlab-com/gitlab-docs/-/jobs/114984694',
+ retry_path: '/gitlab-com/gitlab-docs/-/jobs/114984694/retry',
+ playable: false,
+ scheduled: false,
+ created_at: '2018-10-31T16:39:41.138Z',
+ updated_at: '2018-10-31T16:41:40.072Z',
+ status: {
+ icon: 'status_failed',
+ text: 'failed',
+ label: 'failed',
+ group: 'failed',
+ tooltip: 'failed - (script failure)',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114984694',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_failed-41304d7f7e3828808b0c26771f0309e55296819a9beea3ea9fbf6689d9857c12.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/528/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ recoverable: false,
+ },
+ ],
+ },
+ ],
+ status: {
+ icon: 'status_failed',
+ text: 'failed',
+ label: 'failed',
+ group: 'failed',
+ tooltip: 'failed',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/pipelines/34993051#build',
+ illustration: null,
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_failed-41304d7f7e3828808b0c26771f0309e55296819a9beea3ea9fbf6689d9857c12.png',
+ },
+ path: '/gitlab-com/gitlab-docs/pipelines/34993051#build',
+ dropdown_path: '/gitlab-com/gitlab-docs/pipelines/34993051/stage.json?stage=build',
+ },
+ {
+ name: 'deploy',
+ title: 'deploy: skipped',
+ groups: [
+ {
+ name: 'review',
+ size: 1,
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982857',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job has been skipped',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ jobs: [
+ {
+ id: 11498282342357,
+ name: 'review',
+ started: null,
+ archived: false,
+ build_path: '/gitlab-com/gitlab-docs/-/jobs/114982857',
+ playable: false,
+ scheduled: false,
+ created_at: '2018-10-31T16:35:23.805Z',
+ updated_at: '2018-10-31T16:41:40.569Z',
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982857',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job has been skipped',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ },
+ ],
+ },
+ {
+ name: 'review_stop',
+ size: 1,
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982858',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job has been skipped',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ jobs: [
+ {
+ id: 114982858,
+ name: 'review_stop',
+ started: null,
+ archived: false,
+ build_path: '/gitlab-com/gitlab-docs/-/jobs/114982858',
+ playable: false,
+ scheduled: false,
+ created_at: '2018-10-31T16:35:23.840Z',
+ updated_at: '2018-10-31T16:41:40.480Z',
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982858',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job has been skipped',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ },
+ ],
+ },
+ ],
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/pipelines/34993051#deploy',
+ illustration: null,
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ path: '/gitlab-com/gitlab-docs/pipelines/34993051#deploy',
+ dropdown_path: '/gitlab-com/gitlab-docs/pipelines/34993051/stage.json?stage=deploy',
+ },
+ ],
+ artifacts: [],
+ manual_actions: [
+ {
+ name: 'image:bootstrap',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982853/play',
+ playable: true,
+ scheduled: false,
+ },
+ {
+ name: 'image:builder-onbuild',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982854/play',
+ playable: true,
+ scheduled: false,
+ },
+ {
+ name: 'image:nginx-onbuild',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982855/play',
+ playable: true,
+ scheduled: false,
+ },
+ {
+ name: 'review_stop',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982858/play',
+ playable: false,
+ scheduled: false,
+ },
+ ],
+ scheduled_actions: [],
},
- {
- id: 131,
- active: true,
- path: '/gitlab-org/gitlab-foss/pipelines/132',
- project: {
- name: 'GitLabCE',
+ project: {
+ id: 20,
+ name: 'Test',
+ full_path: '/gitlab-com/gitlab-docs',
+ full_name: 'GitLab.com / GitLab Docs',
+ },
+ triggered_by: {
+ id: 349932310342451,
+ user: {
+ id: 376774,
+ name: 'Alessio Caiazza',
+ username: 'nolith',
+ state: 'active',
+ avatar_url:
+ 'https://assets.gitlab-static.net/uploads/-/system/user/avatar/376774/avatar.png',
+ web_url: 'https://gitlab.com/nolith',
+ status_tooltip_html: null,
+ path: '/nolith',
},
+ active: false,
+ coverage: null,
+ source: 'pipeline',
+ path: '/gitlab-com/gitlab-docs/pipelines/34993051',
details: {
status: {
- icon: 'status_running',
- text: 'running',
- label: 'running',
- group: 'running',
+ icon: 'status_failed',
+ text: 'failed',
+ label: 'failed',
+ group: 'failed',
+ tooltip: 'failed',
has_details: true,
- details_path: '/gitlab-org/gitlab-foss/pipelines/132',
+ details_path: '/gitlab-com/gitlab-docs/pipelines/34993051',
+ illustration: null,
favicon:
- '/assets/ci_favicons/dev/favicon_status_running-c3ad2fc53ea6079c174e5b6c1351ff349e99ec3af5a5622fb77b0fe53ea279c1.ico',
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_failed-41304d7f7e3828808b0c26771f0309e55296819a9beea3ea9fbf6689d9857c12.png',
},
+ duration: 118,
+ finished_at: '2018-10-31T16:41:40.615Z',
+ stages: [
+ {
+ name: 'build-images',
+ title: 'build-images: skipped',
+ groups: [
+ {
+ name: 'image:bootstrap',
+ size: 1,
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual play action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982853',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982853/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ jobs: [
+ {
+ id: 11421321982853,
+ name: 'image:bootstrap',
+ started: null,
+ archived: false,
+ build_path: '/gitlab-com/gitlab-docs/-/jobs/114982853',
+ play_path: '/gitlab-com/gitlab-docs/-/jobs/114982853/play',
+ playable: true,
+ scheduled: false,
+ created_at: '2018-10-31T16:35:23.704Z',
+ updated_at: '2018-10-31T16:35:24.118Z',
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual play action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982853',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982853/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ },
+ ],
+ },
+ {
+ name: 'image:builder-onbuild',
+ size: 1,
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual play action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982854',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982854/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ jobs: [
+ {
+ id: 1149822131854,
+ name: 'image:builder-onbuild',
+ started: null,
+ archived: false,
+ build_path: '/gitlab-com/gitlab-docs/-/jobs/114982854',
+ play_path: '/gitlab-com/gitlab-docs/-/jobs/114982854/play',
+ playable: true,
+ scheduled: false,
+ created_at: '2018-10-31T16:35:23.728Z',
+ updated_at: '2018-10-31T16:35:24.070Z',
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual play action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982854',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982854/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ },
+ ],
+ },
+ {
+ name: 'image:nginx-onbuild',
+ size: 1,
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual play action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982855',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982855/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ jobs: [
+ {
+ id: 11498285523424,
+ name: 'image:nginx-onbuild',
+ started: null,
+ archived: false,
+ build_path: '/gitlab-com/gitlab-docs/-/jobs/114982855',
+ play_path: '/gitlab-com/gitlab-docs/-/jobs/114982855/play',
+ playable: true,
+ scheduled: false,
+ created_at: '2018-10-31T16:35:23.753Z',
+ updated_at: '2018-10-31T16:35:24.033Z',
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual play action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982855',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982855/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ },
+ ],
+ },
+ ],
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/pipelines/34993051#build-images',
+ illustration: null,
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ path: '/gitlab-com/gitlab-docs/pipelines/34993051#build-images',
+ dropdown_path:
+ '/gitlab-com/gitlab-docs/pipelines/34993051/stage.json?stage=build-images',
+ },
+ {
+ name: 'build',
+ title: 'build: failed',
+ groups: [
+ {
+ name: 'compile_dev',
+ size: 1,
+ status: {
+ icon: 'status_failed',
+ text: 'failed',
+ label: 'failed',
+ group: 'failed',
+ tooltip: 'failed - (script failure)',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114984694',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_failed-41304d7f7e3828808b0c26771f0309e55296819a9beea3ea9fbf6689d9857c12.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114984694/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ jobs: [
+ {
+ id: 1149846949786,
+ name: 'compile_dev',
+ started: '2018-10-31T16:39:41.598Z',
+ archived: false,
+ build_path: '/gitlab-com/gitlab-docs/-/jobs/114984694',
+ retry_path: '/gitlab-com/gitlab-docs/-/jobs/114984694/retry',
+ playable: false,
+ scheduled: false,
+ created_at: '2018-10-31T16:39:41.138Z',
+ updated_at: '2018-10-31T16:41:40.072Z',
+ status: {
+ icon: 'status_failed',
+ text: 'failed',
+ label: 'failed',
+ group: 'failed',
+ tooltip: 'failed - (script failure)',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114984694',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_failed-41304d7f7e3828808b0c26771f0309e55296819a9beea3ea9fbf6689d9857c12.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114984694/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ recoverable: false,
+ },
+ ],
+ },
+ ],
+ status: {
+ icon: 'status_failed',
+ text: 'failed',
+ label: 'failed',
+ group: 'failed',
+ tooltip: 'failed',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/pipelines/34993051#build',
+ illustration: null,
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_failed-41304d7f7e3828808b0c26771f0309e55296819a9beea3ea9fbf6689d9857c12.png',
+ },
+ path: '/gitlab-com/gitlab-docs/pipelines/34993051#build',
+ dropdown_path: '/gitlab-com/gitlab-docs/pipelines/34993051/stage.json?stage=build',
+ },
+ {
+ name: 'deploy',
+ title: 'deploy: skipped',
+ groups: [
+ {
+ name: 'review',
+ size: 1,
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982857',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job has been skipped',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ jobs: [
+ {
+ id: 11498282342357,
+ name: 'review',
+ started: null,
+ archived: false,
+ build_path: '/gitlab-com/gitlab-docs/-/jobs/114982857',
+ playable: false,
+ scheduled: false,
+ created_at: '2018-10-31T16:35:23.805Z',
+ updated_at: '2018-10-31T16:41:40.569Z',
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982857',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job has been skipped',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ },
+ ],
+ },
+ {
+ name: 'review_stop',
+ size: 1,
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982858',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job has been skipped',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ jobs: [
+ {
+ id: 114982858,
+ name: 'review_stop',
+ started: null,
+ archived: false,
+ build_path: '/gitlab-com/gitlab-docs/-/jobs/114982858',
+ playable: false,
+ scheduled: false,
+ created_at: '2018-10-31T16:35:23.840Z',
+ updated_at: '2018-10-31T16:41:40.480Z',
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982858',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job has been skipped',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ },
+ ],
+ },
+ ],
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/pipelines/34993051#deploy',
+ illustration: null,
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ path: '/gitlab-com/gitlab-docs/pipelines/34993051#deploy',
+ dropdown_path: '/gitlab-com/gitlab-docs/pipelines/34993051/stage.json?stage=deploy',
+ },
+ ],
+ artifacts: [],
+ manual_actions: [
+ {
+ name: 'image:bootstrap',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982853/play',
+ playable: true,
+ scheduled: false,
+ },
+ {
+ name: 'image:builder-onbuild',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982854/play',
+ playable: true,
+ scheduled: false,
+ },
+ {
+ name: 'image:nginx-onbuild',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982855/play',
+ playable: true,
+ scheduled: false,
+ },
+ {
+ name: 'review_stop',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982858/play',
+ playable: false,
+ scheduled: false,
+ },
+ ],
+ scheduled_actions: [],
},
- flags: {
- latest: false,
- triggered: false,
- stuck: false,
- yaml_errors: false,
- retryable: true,
- cancelable: true,
- },
- ref: {
- name: 'crowd',
- path: '/gitlab-org/gitlab-foss/commits/crowd',
- tag: false,
- branch: true,
- },
- commit: {
- id: 'b9d58c4cecd06be74c3cc32ccfb522b31544ab2e',
- short_id: 'b9d58c4c',
- title: 'getting user keys publically through http without any authentication, the github…',
- created_at: '2013-10-03T12:50:33.000+05:30',
- parent_ids: ['e219cf7246c6a0495e4507deaffeba11e79f13b8'],
- message:
- 'getting user keys publically through http without any authentication, the github way. E.g: http://github.com/devaroop.keys\n\nchangelog updated to include ssh key retrieval feature update\n',
- author_name: 'devaroop',
- author_email: 'devaroop123@yahoo.co.in',
- authored_date: '2013-10-02T20:39:29.000+05:30',
- committer_name: 'devaroop',
- committer_email: 'devaroop123@yahoo.co.in',
- committed_date: '2013-10-03T12:50:33.000+05:30',
- author_gravatar_url:
- 'http://www.gravatar.com/avatar/35df4b155ec66a3127d53459941cf8a2?s=80&d=identicon',
- commit_url:
- 'http://localhost:3000/gitlab-org/gitlab-foss/commit/b9d58c4cecd06be74c3cc32ccfb522b31544ab2e',
- commit_path: '/gitlab-org/gitlab-foss/commit/b9d58c4cecd06be74c3cc32ccfb522b31544ab2e',
+ project: {
+ id: 20,
+ name: 'GitLab Docs',
+ full_path: '/gitlab-com/gitlab-docs',
+ full_name: 'GitLab.com / GitLab Docs',
},
- retry_path: '/gitlab-org/gitlab-foss/pipelines/132/retry',
- cancel_path: '/gitlab-org/gitlab-foss/pipelines/132/cancel',
- created_at: '2017-05-24T14:46:24.644Z',
- updated_at: '2017-05-24T14:48:55.226Z',
},
+ triggered: [],
+ },
+ triggered: [
{
- id: 134,
- active: true,
- path: '/gitlab-org/gitlab-foss/pipelines/133',
- project: {
- name: 'GitLabCE',
+ id: 34993051,
+ user: {
+ id: 376774,
+ name: 'Alessio Caiazza',
+ username: 'nolith',
+ state: 'active',
+ avatar_url:
+ 'https://assets.gitlab-static.net/uploads/-/system/user/avatar/376774/avatar.png',
+ web_url: 'https://gitlab.com/nolith',
+ status_tooltip_html: null,
+ path: '/nolith',
},
+ active: false,
+ coverage: null,
+ source: 'pipeline',
+ path: '/gitlab-com/gitlab-docs/pipelines/34993051',
details: {
status: {
- icon: 'status_running',
- text: 'running',
- label: 'running',
- group: 'running',
+ icon: 'status_failed',
+ text: 'failed',
+ label: 'failed',
+ group: 'failed',
+ tooltip: 'failed',
has_details: true,
- details_path: '/gitlab-org/gitlab-foss/pipelines/133',
+ details_path: '/gitlab-com/gitlab-docs/pipelines/34993051',
+ illustration: null,
favicon:
- '/assets/ci_favicons/dev/favicon_status_running-c3ad2fc53ea6079c174e5b6c1351ff349e99ec3af5a5622fb77b0fe53ea279c1.ico',
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_failed-41304d7f7e3828808b0c26771f0309e55296819a9beea3ea9fbf6689d9857c12.png',
},
+ duration: 118,
+ finished_at: '2018-10-31T16:41:40.615Z',
+ stages: [
+ {
+ name: 'build-images',
+ title: 'build-images: skipped',
+ groups: [
+ {
+ name: 'image:bootstrap',
+ size: 1,
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual play action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982853',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982853/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ jobs: [
+ {
+ id: 114982853,
+ name: 'image:bootstrap',
+ started: null,
+ archived: false,
+ build_path: '/gitlab-com/gitlab-docs/-/jobs/114982853',
+ play_path: '/gitlab-com/gitlab-docs/-/jobs/114982853/play',
+ playable: true,
+ scheduled: false,
+ created_at: '2018-10-31T16:35:23.704Z',
+ updated_at: '2018-10-31T16:35:24.118Z',
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual play action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982853',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982853/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ },
+ ],
+ },
+ {
+ name: 'image:builder-onbuild',
+ size: 1,
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual play action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982854',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982854/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ jobs: [
+ {
+ id: 114982854,
+ name: 'image:builder-onbuild',
+ started: null,
+ archived: false,
+ build_path: '/gitlab-com/gitlab-docs/-/jobs/114982854',
+ play_path: '/gitlab-com/gitlab-docs/-/jobs/114982854/play',
+ playable: true,
+ scheduled: false,
+ created_at: '2018-10-31T16:35:23.728Z',
+ updated_at: '2018-10-31T16:35:24.070Z',
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual play action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982854',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982854/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ },
+ ],
+ },
+ {
+ name: 'image:nginx-onbuild',
+ size: 1,
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual play action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982855',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982855/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ jobs: [
+ {
+ id: 114982855,
+ name: 'image:nginx-onbuild',
+ started: null,
+ archived: false,
+ build_path: '/gitlab-com/gitlab-docs/-/jobs/114982855',
+ play_path: '/gitlab-com/gitlab-docs/-/jobs/114982855/play',
+ playable: true,
+ scheduled: false,
+ created_at: '2018-10-31T16:35:23.753Z',
+ updated_at: '2018-10-31T16:35:24.033Z',
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual play action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982855',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982855/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ },
+ ],
+ },
+ ],
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/pipelines/34993051#build-images',
+ illustration: null,
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ path: '/gitlab-com/gitlab-docs/pipelines/34993051#build-images',
+ dropdown_path:
+ '/gitlab-com/gitlab-docs/pipelines/34993051/stage.json?stage=build-images',
+ },
+ {
+ name: 'build',
+ title: 'build: failed',
+ groups: [
+ {
+ name: 'compile_dev',
+ size: 1,
+ status: {
+ icon: 'status_failed',
+ text: 'failed',
+ label: 'failed',
+ group: 'failed',
+ tooltip: 'failed - (script failure)',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114984694',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_failed-41304d7f7e3828808b0c26771f0309e55296819a9beea3ea9fbf6689d9857c12.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/528/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ jobs: [
+ {
+ id: 114984694,
+ name: 'compile_dev',
+ started: '2018-10-31T16:39:41.598Z',
+ archived: false,
+ build_path: '/gitlab-com/gitlab-docs/-/jobs/114984694',
+ retry_path: '/gitlab-com/gitlab-docs/-/jobs/114984694/retry',
+ playable: false,
+ scheduled: false,
+ created_at: '2018-10-31T16:39:41.138Z',
+ updated_at: '2018-10-31T16:41:40.072Z',
+ status: {
+ icon: 'status_failed',
+ text: 'failed',
+ label: 'failed',
+ group: 'failed',
+ tooltip: 'failed - (script failure)',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114984694',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_failed-41304d7f7e3828808b0c26771f0309e55296819a9beea3ea9fbf6689d9857c12.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/528/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ recoverable: false,
+ },
+ ],
+ },
+ ],
+ status: {
+ icon: 'status_failed',
+ text: 'failed',
+ label: 'failed',
+ group: 'failed',
+ tooltip: 'failed',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/pipelines/34993051#build',
+ illustration: null,
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_failed-41304d7f7e3828808b0c26771f0309e55296819a9beea3ea9fbf6689d9857c12.png',
+ },
+ path: '/gitlab-com/gitlab-docs/pipelines/34993051#build',
+ dropdown_path: '/gitlab-com/gitlab-docs/pipelines/34993051/stage.json?stage=build',
+ },
+ {
+ name: 'deploy',
+ title: 'deploy: skipped',
+ groups: [
+ {
+ name: 'review',
+ size: 1,
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982857',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job has been skipped',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ jobs: [
+ {
+ id: 114982857,
+ name: 'review',
+ started: null,
+ archived: false,
+ build_path: '/gitlab-com/gitlab-docs/-/jobs/114982857',
+ playable: false,
+ scheduled: false,
+ created_at: '2018-10-31T16:35:23.805Z',
+ updated_at: '2018-10-31T16:41:40.569Z',
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982857',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job has been skipped',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ },
+ ],
+ },
+ {
+ name: 'review_stop',
+ size: 1,
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982858',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job has been skipped',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ jobs: [
+ {
+ id: 114982858,
+ name: 'review_stop',
+ started: null,
+ archived: false,
+ build_path: '/gitlab-com/gitlab-docs/-/jobs/114982858',
+ playable: false,
+ scheduled: false,
+ created_at: '2018-10-31T16:35:23.840Z',
+ updated_at: '2018-10-31T16:41:40.480Z',
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982858',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job has been skipped',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ },
+ ],
+ },
+ ],
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/pipelines/34993051#deploy',
+ illustration: null,
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ path: '/gitlab-com/gitlab-docs/pipelines/34993051#deploy',
+ dropdown_path: '/gitlab-com/gitlab-docs/pipelines/34993051/stage.json?stage=deploy',
+ },
+ ],
+ artifacts: [],
+ manual_actions: [
+ {
+ name: 'image:bootstrap',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982853/play',
+ playable: true,
+ scheduled: false,
+ },
+ {
+ name: 'image:builder-onbuild',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982854/play',
+ playable: true,
+ scheduled: false,
+ },
+ {
+ name: 'image:nginx-onbuild',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982855/play',
+ playable: true,
+ scheduled: false,
+ },
+ {
+ name: 'review_stop',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982858/play',
+ playable: false,
+ scheduled: false,
+ },
+ ],
+ scheduled_actions: [],
},
- flags: {
- latest: false,
- triggered: false,
- stuck: false,
- yaml_errors: false,
- retryable: true,
- cancelable: true,
- },
- ref: {
- name: 'crowd',
- path: '/gitlab-org/gitlab-foss/commits/crowd',
- tag: false,
- branch: true,
- },
- commit: {
- id: 'b6bd4856a33df3d144be66c4ed1f1396009bb08b',
- short_id: 'b6bd4856',
- title: 'getting user keys publically through http without any authentication, the github…',
- created_at: '2013-10-02T20:39:29.000+05:30',
- parent_ids: ['e219cf7246c6a0495e4507deaffeba11e79f13b8'],
- message:
- 'getting user keys publically through http without any authentication, the github way. E.g: http://github.com/devaroop.keys\n',
- author_name: 'devaroop',
- author_email: 'devaroop123@yahoo.co.in',
- authored_date: '2013-10-02T20:39:29.000+05:30',
- committer_name: 'devaroop',
- committer_email: 'devaroop123@yahoo.co.in',
- committed_date: '2013-10-02T20:39:29.000+05:30',
- author_gravatar_url:
- 'http://www.gravatar.com/avatar/35df4b155ec66a3127d53459941cf8a2?s=80&d=identicon',
- commit_url:
- 'http://localhost:3000/gitlab-org/gitlab-foss/commit/b6bd4856a33df3d144be66c4ed1f1396009bb08b',
- commit_path: '/gitlab-org/gitlab-foss/commit/b6bd4856a33df3d144be66c4ed1f1396009bb08b',
+ project: {
+ id: 20,
+ name: 'GitLab Docs',
+ full_path: '/gitlab-com/gitlab-docs',
+ full_name: 'GitLab.com / GitLab Docs',
},
- retry_path: '/gitlab-org/gitlab-foss/pipelines/133/retry',
- cancel_path: '/gitlab-org/gitlab-foss/pipelines/133/cancel',
- created_at: '2017-05-24T14:46:24.648Z',
- updated_at: '2017-05-24T14:48:59.673Z',
},
{
- id: 135,
- active: true,
- path: '/gitlab-org/gitlab-foss/pipelines/130',
- project: {
- name: 'GitLabCE',
+ id: 34993052,
+ user: {
+ id: 376774,
+ name: 'Alessio Caiazza',
+ username: 'nolith',
+ state: 'active',
+ avatar_url:
+ 'https://assets.gitlab-static.net/uploads/-/system/user/avatar/376774/avatar.png',
+ web_url: 'https://gitlab.com/nolith',
+ status_tooltip_html: null,
+ path: '/nolith',
},
+ active: false,
+ coverage: null,
+ source: 'pipeline',
+ path: '/gitlab-com/gitlab-docs/pipelines/34993051',
details: {
status: {
- icon: 'status_running',
- text: 'running',
- label: 'running',
- group: 'running',
+ icon: 'status_failed',
+ text: 'failed',
+ label: 'failed',
+ group: 'failed',
+ tooltip: 'failed',
has_details: true,
- details_path: '/gitlab-org/gitlab-foss/pipelines/130',
+ details_path: '/gitlab-com/gitlab-docs/pipelines/34993051',
+ illustration: null,
favicon:
- '/assets/ci_favicons/dev/favicon_status_running-c3ad2fc53ea6079c174e5b6c1351ff349e99ec3af5a5622fb77b0fe53ea279c1.ico',
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_failed-41304d7f7e3828808b0c26771f0309e55296819a9beea3ea9fbf6689d9857c12.png',
},
+ duration: 118,
+ finished_at: '2018-10-31T16:41:40.615Z',
+ stages: [
+ {
+ name: 'build-images',
+ title: 'build-images: skipped',
+ groups: [
+ {
+ name: 'image:bootstrap',
+ size: 1,
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual play action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982853',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982853/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ jobs: [
+ {
+ id: 114982853,
+ name: 'image:bootstrap',
+ started: null,
+ archived: false,
+ build_path: '/gitlab-com/gitlab-docs/-/jobs/114982853',
+ play_path: '/gitlab-com/gitlab-docs/-/jobs/114982853/play',
+ playable: true,
+ scheduled: false,
+ created_at: '2018-10-31T16:35:23.704Z',
+ updated_at: '2018-10-31T16:35:24.118Z',
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual play action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982853',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982853/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ },
+ ],
+ },
+ {
+ name: 'image:builder-onbuild',
+ size: 1,
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual play action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982854',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982854/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ jobs: [
+ {
+ id: 114982854,
+ name: 'image:builder-onbuild',
+ started: null,
+ archived: false,
+ build_path: '/gitlab-com/gitlab-docs/-/jobs/114982854',
+ play_path: '/gitlab-com/gitlab-docs/-/jobs/114982854/play',
+ playable: true,
+ scheduled: false,
+ created_at: '2018-10-31T16:35:23.728Z',
+ updated_at: '2018-10-31T16:35:24.070Z',
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual play action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982854',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982854/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ },
+ ],
+ },
+ {
+ name: 'image:nginx-onbuild',
+ size: 1,
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual play action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982855',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982855/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ jobs: [
+ {
+ id: 1224982855,
+ name: 'image:nginx-onbuild',
+ started: null,
+ archived: false,
+ build_path: '/gitlab-com/gitlab-docs/-/jobs/114982855',
+ play_path: '/gitlab-com/gitlab-docs/-/jobs/114982855/play',
+ playable: true,
+ scheduled: false,
+ created_at: '2018-10-31T16:35:23.753Z',
+ updated_at: '2018-10-31T16:35:24.033Z',
+ status: {
+ icon: 'status_manual',
+ text: 'manual',
+ label: 'manual play action',
+ group: 'manual',
+ tooltip: 'manual action',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982855',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_manual-829a0804612cef47d9efc1618dba38325483657c847dba0546c3b9f0295bb36c.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982855/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ },
+ ],
+ },
+ ],
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/pipelines/34993051#build-images',
+ illustration: null,
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ path: '/gitlab-com/gitlab-docs/pipelines/34993051#build-images',
+ dropdown_path:
+ '/gitlab-com/gitlab-docs/pipelines/34993051/stage.json?stage=build-images',
+ },
+ {
+ name: 'build',
+ title: 'build: failed',
+ groups: [
+ {
+ name: 'compile_dev',
+ size: 1,
+ status: {
+ icon: 'status_failed',
+ text: 'failed',
+ label: 'failed',
+ group: 'failed',
+ tooltip: 'failed - (script failure)',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114984694',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_failed-41304d7f7e3828808b0c26771f0309e55296819a9beea3ea9fbf6689d9857c12.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114984694/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ jobs: [
+ {
+ id: 1123984694,
+ name: 'compile_dev',
+ started: '2018-10-31T16:39:41.598Z',
+ archived: false,
+ build_path: '/gitlab-com/gitlab-docs/-/jobs/114984694',
+ retry_path: '/gitlab-com/gitlab-docs/-/jobs/114984694/retry',
+ playable: false,
+ scheduled: false,
+ created_at: '2018-10-31T16:39:41.138Z',
+ updated_at: '2018-10-31T16:41:40.072Z',
+ status: {
+ icon: 'status_failed',
+ text: 'failed',
+ label: 'failed',
+ group: 'failed',
+ tooltip: 'failed - (script failure)',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114984694',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_failed-41304d7f7e3828808b0c26771f0309e55296819a9beea3ea9fbf6689d9857c12.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114984694/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ recoverable: false,
+ },
+ ],
+ },
+ ],
+ status: {
+ icon: 'status_failed',
+ text: 'failed',
+ label: 'failed',
+ group: 'failed',
+ tooltip: 'failed',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/pipelines/34993051#build',
+ illustration: null,
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_failed-41304d7f7e3828808b0c26771f0309e55296819a9beea3ea9fbf6689d9857c12.png',
+ },
+ path: '/gitlab-com/gitlab-docs/pipelines/34993051#build',
+ dropdown_path: '/gitlab-com/gitlab-docs/pipelines/34993051/stage.json?stage=build',
+ },
+ {
+ name: 'deploy',
+ title: 'deploy: skipped',
+ groups: [
+ {
+ name: 'review',
+ size: 1,
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982857',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job has been skipped',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ jobs: [
+ {
+ id: 1143232982857,
+ name: 'review',
+ started: null,
+ archived: false,
+ build_path: '/gitlab-com/gitlab-docs/-/jobs/114982857',
+ playable: false,
+ scheduled: false,
+ created_at: '2018-10-31T16:35:23.805Z',
+ updated_at: '2018-10-31T16:41:40.569Z',
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982857',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job has been skipped',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ },
+ ],
+ },
+ {
+ name: 'review_stop',
+ size: 1,
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982858',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job has been skipped',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ jobs: [
+ {
+ id: 114921313182858,
+ name: 'review_stop',
+ started: null,
+ archived: false,
+ build_path: '/gitlab-com/gitlab-docs/-/jobs/114982858',
+ playable: false,
+ scheduled: false,
+ created_at: '2018-10-31T16:35:23.840Z',
+ updated_at: '2018-10-31T16:41:40.480Z',
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/-/jobs/114982858',
+ illustration: {
+ image:
+ 'https://assets.gitlab-static.net/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job has been skipped',
+ },
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ },
+ ],
+ },
+ ],
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/gitlab-com/gitlab-docs/pipelines/34993051#deploy',
+ illustration: null,
+ favicon:
+ 'https://gitlab.com/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ path: '/gitlab-com/gitlab-docs/pipelines/34993051#deploy',
+ dropdown_path: '/gitlab-com/gitlab-docs/pipelines/34993051/stage.json?stage=deploy',
+ },
+ ],
+ artifacts: [],
+ manual_actions: [
+ {
+ name: 'image:bootstrap',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982853/play',
+ playable: true,
+ scheduled: false,
+ },
+ {
+ name: 'image:builder-onbuild',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982854/play',
+ playable: true,
+ scheduled: false,
+ },
+ {
+ name: 'image:nginx-onbuild',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982855/play',
+ playable: true,
+ scheduled: false,
+ },
+ {
+ name: 'review_stop',
+ path: '/gitlab-com/gitlab-docs/-/jobs/114982858/play',
+ playable: false,
+ scheduled: false,
+ },
+ ],
+ scheduled_actions: [],
},
- flags: {
- latest: false,
- triggered: false,
- stuck: false,
- yaml_errors: false,
- retryable: true,
- cancelable: true,
- },
- ref: {
- name: 'crowd',
- path: '/gitlab-org/gitlab-foss/commits/crowd',
- tag: false,
- branch: true,
- },
- commit: {
- id: '6d7ced4a2311eeff037c5575cca1868a6d3f586f',
- short_id: '6d7ced4a',
- title: 'Whitespace fixes to patch',
- created_at: '2013-10-08T13:53:22.000-05:00',
- parent_ids: ['1875141a963a4238bda29011d8f7105839485253'],
- message: 'Whitespace fixes to patch\n',
- author_name: 'Dale Hamel',
- author_email: 'dale.hamel@srvthe.net',
- authored_date: '2013-10-08T13:53:22.000-05:00',
- committer_name: 'Dale Hamel',
- committer_email: 'dale.hamel@invenia.ca',
- committed_date: '2013-10-08T13:53:22.000-05:00',
- author_gravatar_url:
- 'http://www.gravatar.com/avatar/cd08930e69fa5ad1a669206e7bafe476?s=80&d=identicon',
- commit_url:
- 'http://localhost:3000/gitlab-org/gitlab-foss/commit/6d7ced4a2311eeff037c5575cca1868a6d3f586f',
- commit_path: '/gitlab-org/gitlab-foss/commit/6d7ced4a2311eeff037c5575cca1868a6d3f586f',
+ project: {
+ id: 20,
+ name: 'GitLab Docs',
+ full_path: '/gitlab-com/gitlab-docs',
+ full_name: 'GitLab.com / GitLab Docs',
},
- retry_path: '/gitlab-org/gitlab-foss/pipelines/130/retry',
- cancel_path: '/gitlab-org/gitlab-foss/pipelines/130/cancel',
- created_at: '2017-05-24T14:46:24.630Z',
- updated_at: '2017-05-24T14:49:45.091Z',
+ triggered: [
+ {
+ id: 26,
+ user: null,
+ active: false,
+ coverage: null,
+ source: 'push',
+ created_at: '2019-01-06T17:48:37.599Z',
+ updated_at: '2019-01-06T17:48:38.371Z',
+ path: '/h5bp/html5-boilerplate/pipelines/26',
+ flags: {
+ latest: true,
+ stuck: false,
+ auto_devops: false,
+ merge_request: false,
+ yaml_errors: false,
+ retryable: true,
+ cancelable: false,
+ failure_reason: false,
+ },
+ details: {
+ status: {
+ icon: 'status_warning',
+ text: 'passed',
+ label: 'passed with warnings',
+ group: 'success-with-warnings',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/pipelines/26',
+ illustration: null,
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ },
+ duration: null,
+ finished_at: '2019-01-06T17:48:38.370Z',
+ stages: [
+ {
+ name: 'build',
+ title: 'build: passed',
+ groups: [
+ {
+ name: 'build:linux',
+ size: 1,
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/526',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/526/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ jobs: [
+ {
+ id: 526,
+ name: 'build:linux',
+ started: '2019-01-06T08:48:20.236Z',
+ archived: false,
+ build_path: '/h5bp/html5-boilerplate/-/jobs/526',
+ retry_path: '/h5bp/html5-boilerplate/-/jobs/526/retry',
+ playable: false,
+ scheduled: false,
+ created_at: '2019-01-06T17:48:37.806Z',
+ updated_at: '2019-01-06T17:48:37.806Z',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/526',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/526/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ },
+ ],
+ },
+ {
+ name: 'build:osx',
+ size: 1,
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/527',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/527/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ jobs: [
+ {
+ id: 527,
+ name: 'build:osx',
+ started: '2019-01-06T07:48:20.237Z',
+ archived: false,
+ build_path: '/h5bp/html5-boilerplate/-/jobs/527',
+ retry_path: '/h5bp/html5-boilerplate/-/jobs/527/retry',
+ playable: false,
+ scheduled: false,
+ created_at: '2019-01-06T17:48:37.846Z',
+ updated_at: '2019-01-06T17:48:37.846Z',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/527',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/527/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ },
+ ],
+ },
+ ],
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/pipelines/26#build',
+ illustration: null,
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ },
+ path: '/h5bp/html5-boilerplate/pipelines/26#build',
+ dropdown_path: '/h5bp/html5-boilerplate/pipelines/26/stage.json?stage=build',
+ },
+ {
+ name: 'test',
+ title: 'test: passed with warnings',
+ groups: [
+ {
+ name: 'jenkins',
+ size: 1,
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: null,
+ group: 'success',
+ tooltip: null,
+ has_details: false,
+ details_path: null,
+ illustration: null,
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ },
+ jobs: [
+ {
+ id: 546,
+ name: 'jenkins',
+ started: '2019-01-06T11:48:20.237Z',
+ archived: false,
+ build_path: '/h5bp/html5-boilerplate/-/jobs/546',
+ playable: false,
+ scheduled: false,
+ created_at: '2019-01-06T17:48:38.359Z',
+ updated_at: '2019-01-06T17:48:38.359Z',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: null,
+ group: 'success',
+ tooltip: null,
+ has_details: false,
+ details_path: null,
+ illustration: null,
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ },
+ },
+ ],
+ },
+ {
+ name: 'rspec:linux',
+ size: 3,
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: false,
+ details_path: null,
+ illustration: null,
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ },
+ jobs: [
+ {
+ id: 528,
+ name: 'rspec:linux 0 3',
+ started: '2019-01-06T09:48:20.237Z',
+ archived: false,
+ build_path: '/h5bp/html5-boilerplate/-/jobs/528',
+ retry_path: '/h5bp/html5-boilerplate/-/jobs/528/retry',
+ playable: false,
+ scheduled: false,
+ created_at: '2019-01-06T17:48:37.885Z',
+ updated_at: '2019-01-06T17:48:37.885Z',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/528',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/528/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ },
+ {
+ id: 529,
+ name: 'rspec:linux 1 3',
+ started: '2019-01-06T09:48:20.237Z',
+ archived: false,
+ build_path: '/h5bp/html5-boilerplate/-/jobs/529',
+ retry_path: '/h5bp/html5-boilerplate/-/jobs/529/retry',
+ playable: false,
+ scheduled: false,
+ created_at: '2019-01-06T17:48:37.907Z',
+ updated_at: '2019-01-06T17:48:37.907Z',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/529',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/529/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ },
+ {
+ id: 530,
+ name: 'rspec:linux 2 3',
+ started: '2019-01-06T09:48:20.237Z',
+ archived: false,
+ build_path: '/h5bp/html5-boilerplate/-/jobs/530',
+ retry_path: '/h5bp/html5-boilerplate/-/jobs/530/retry',
+ playable: false,
+ scheduled: false,
+ created_at: '2019-01-06T17:48:37.927Z',
+ updated_at: '2019-01-06T17:48:37.927Z',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/530',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/530/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ },
+ ],
+ },
+ {
+ name: 'rspec:osx',
+ size: 1,
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/535',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/535/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ jobs: [
+ {
+ id: 535,
+ name: 'rspec:osx',
+ started: '2019-01-06T09:48:20.237Z',
+ archived: false,
+ build_path: '/h5bp/html5-boilerplate/-/jobs/535',
+ retry_path: '/h5bp/html5-boilerplate/-/jobs/535/retry',
+ playable: false,
+ scheduled: false,
+ created_at: '2019-01-06T17:48:38.018Z',
+ updated_at: '2019-01-06T17:48:38.018Z',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/535',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/535/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ },
+ ],
+ },
+ {
+ name: 'rspec:windows',
+ size: 3,
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: false,
+ details_path: null,
+ illustration: null,
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ },
+ jobs: [
+ {
+ id: 531,
+ name: 'rspec:windows 0 3',
+ started: '2019-01-06T09:48:20.237Z',
+ archived: false,
+ build_path: '/h5bp/html5-boilerplate/-/jobs/531',
+ retry_path: '/h5bp/html5-boilerplate/-/jobs/531/retry',
+ playable: false,
+ scheduled: false,
+ created_at: '2019-01-06T17:48:37.944Z',
+ updated_at: '2019-01-06T17:48:37.944Z',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/531',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/531/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ },
+ {
+ id: 532,
+ name: 'rspec:windows 1 3',
+ started: '2019-01-06T09:48:20.237Z',
+ archived: false,
+ build_path: '/h5bp/html5-boilerplate/-/jobs/532',
+ retry_path: '/h5bp/html5-boilerplate/-/jobs/532/retry',
+ playable: false,
+ scheduled: false,
+ created_at: '2019-01-06T17:48:37.962Z',
+ updated_at: '2019-01-06T17:48:37.962Z',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/532',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/532/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ },
+ {
+ id: 534,
+ name: 'rspec:windows 2 3',
+ started: '2019-01-06T09:48:20.237Z',
+ archived: false,
+ build_path: '/h5bp/html5-boilerplate/-/jobs/534',
+ retry_path: '/h5bp/html5-boilerplate/-/jobs/534/retry',
+ playable: false,
+ scheduled: false,
+ created_at: '2019-01-06T17:48:37.999Z',
+ updated_at: '2019-01-06T17:48:37.999Z',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/534',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/534/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ },
+ ],
+ },
+ {
+ name: 'spinach:linux',
+ size: 1,
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/536',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/536/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ jobs: [
+ {
+ id: 536,
+ name: 'spinach:linux',
+ started: '2019-01-06T09:48:20.237Z',
+ archived: false,
+ build_path: '/h5bp/html5-boilerplate/-/jobs/536',
+ retry_path: '/h5bp/html5-boilerplate/-/jobs/536/retry',
+ playable: false,
+ scheduled: false,
+ created_at: '2019-01-06T17:48:38.050Z',
+ updated_at: '2019-01-06T17:48:38.050Z',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/536',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/536/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ },
+ ],
+ },
+ {
+ name: 'spinach:osx',
+ size: 1,
+ status: {
+ icon: 'status_warning',
+ text: 'failed',
+ label: 'failed (allowed to fail)',
+ group: 'failed-with-warnings',
+ tooltip: 'failed - (unknown failure) (allowed to fail)',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/537',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_failed-41304d7f7e3828808b0c26771f0309e55296819a9beea3ea9fbf6689d9857c12.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/537/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ jobs: [
+ {
+ id: 537,
+ name: 'spinach:osx',
+ started: '2019-01-06T09:48:20.237Z',
+ archived: false,
+ build_path: '/h5bp/html5-boilerplate/-/jobs/537',
+ retry_path: '/h5bp/html5-boilerplate/-/jobs/537/retry',
+ playable: false,
+ scheduled: false,
+ created_at: '2019-01-06T17:48:38.069Z',
+ updated_at: '2019-01-06T17:48:38.069Z',
+ status: {
+ icon: 'status_warning',
+ text: 'failed',
+ label: 'failed (allowed to fail)',
+ group: 'failed-with-warnings',
+ tooltip: 'failed - (unknown failure) (allowed to fail)',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/537',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_failed-41304d7f7e3828808b0c26771f0309e55296819a9beea3ea9fbf6689d9857c12.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/537/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ callout_message: 'There is an unknown failure, please try again',
+ recoverable: true,
+ },
+ ],
+ },
+ ],
+ status: {
+ icon: 'status_warning',
+ text: 'passed',
+ label: 'passed with warnings',
+ group: 'success-with-warnings',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/pipelines/26#test',
+ illustration: null,
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ },
+ path: '/h5bp/html5-boilerplate/pipelines/26#test',
+ dropdown_path: '/h5bp/html5-boilerplate/pipelines/26/stage.json?stage=test',
+ },
+ {
+ name: 'security',
+ title: 'security: passed',
+ groups: [
+ {
+ name: 'container_scanning',
+ size: 1,
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/541',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/541/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ jobs: [
+ {
+ id: 541,
+ name: 'container_scanning',
+ started: '2019-01-06T09:48:20.237Z',
+ archived: false,
+ build_path: '/h5bp/html5-boilerplate/-/jobs/541',
+ retry_path: '/h5bp/html5-boilerplate/-/jobs/541/retry',
+ playable: false,
+ scheduled: false,
+ created_at: '2019-01-06T17:48:38.186Z',
+ updated_at: '2019-01-06T17:48:38.186Z',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/541',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/541/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ },
+ ],
+ },
+ {
+ name: 'dast',
+ size: 1,
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/538',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/538/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ jobs: [
+ {
+ id: 538,
+ name: 'dast',
+ started: '2019-01-06T09:48:20.237Z',
+ archived: false,
+ build_path: '/h5bp/html5-boilerplate/-/jobs/538',
+ retry_path: '/h5bp/html5-boilerplate/-/jobs/538/retry',
+ playable: false,
+ scheduled: false,
+ created_at: '2019-01-06T17:48:38.087Z',
+ updated_at: '2019-01-06T17:48:38.087Z',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/538',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/538/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ },
+ ],
+ },
+ {
+ name: 'dependency_scanning',
+ size: 1,
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/540',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/540/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ jobs: [
+ {
+ id: 540,
+ name: 'dependency_scanning',
+ started: '2019-01-06T09:48:20.237Z',
+ archived: false,
+ build_path: '/h5bp/html5-boilerplate/-/jobs/540',
+ retry_path: '/h5bp/html5-boilerplate/-/jobs/540/retry',
+ playable: false,
+ scheduled: false,
+ created_at: '2019-01-06T17:48:38.153Z',
+ updated_at: '2019-01-06T17:48:38.153Z',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/540',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/540/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ },
+ ],
+ },
+ {
+ name: 'sast',
+ size: 1,
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/539',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/539/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ jobs: [
+ {
+ id: 539,
+ name: 'sast',
+ started: '2019-01-06T09:48:20.237Z',
+ archived: false,
+ build_path: '/h5bp/html5-boilerplate/-/jobs/539',
+ retry_path: '/h5bp/html5-boilerplate/-/jobs/539/retry',
+ playable: false,
+ scheduled: false,
+ created_at: '2019-01-06T17:48:38.121Z',
+ updated_at: '2019-01-06T17:48:38.121Z',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/539',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/539/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ },
+ ],
+ },
+ ],
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/pipelines/26#security',
+ illustration: null,
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ },
+ path: '/h5bp/html5-boilerplate/pipelines/26#security',
+ dropdown_path: '/h5bp/html5-boilerplate/pipelines/26/stage.json?stage=security',
+ },
+ {
+ name: 'deploy',
+ title: 'deploy: passed',
+ groups: [
+ {
+ name: 'production',
+ size: 1,
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/544',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job has been skipped',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ jobs: [
+ {
+ id: 544,
+ name: 'production',
+ started: null,
+ archived: false,
+ build_path: '/h5bp/html5-boilerplate/-/jobs/544',
+ playable: false,
+ scheduled: false,
+ created_at: '2019-01-06T17:48:38.313Z',
+ updated_at: '2019-01-06T17:48:38.313Z',
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/544',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job has been skipped',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ },
+ ],
+ },
+ {
+ name: 'staging',
+ size: 1,
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/542',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/542/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ jobs: [
+ {
+ id: 542,
+ name: 'staging',
+ started: '2019-01-06T11:48:20.237Z',
+ archived: false,
+ build_path: '/h5bp/html5-boilerplate/-/jobs/542',
+ retry_path: '/h5bp/html5-boilerplate/-/jobs/542/retry',
+ playable: false,
+ scheduled: false,
+ created_at: '2019-01-06T17:48:38.219Z',
+ updated_at: '2019-01-06T17:48:38.219Z',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/542',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job does not have a trace.',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/h5bp/html5-boilerplate/-/jobs/542/retry',
+ method: 'post',
+ button_title: 'Retry this job',
+ },
+ },
+ },
+ ],
+ },
+ {
+ name: 'stop staging',
+ size: 1,
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/543',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job has been skipped',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ jobs: [
+ {
+ id: 543,
+ name: 'stop staging',
+ started: null,
+ archived: false,
+ build_path: '/h5bp/html5-boilerplate/-/jobs/543',
+ playable: false,
+ scheduled: false,
+ created_at: '2019-01-06T17:48:38.283Z',
+ updated_at: '2019-01-06T17:48:38.283Z',
+ status: {
+ icon: 'status_skipped',
+ text: 'skipped',
+ label: 'skipped',
+ group: 'skipped',
+ tooltip: 'skipped',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/543',
+ illustration: {
+ image:
+ '/assets/illustrations/skipped-job_empty-8b877955fbf175e42ae65b6cb95346e15282c6fc5b682756c329af3a0055225e.svg',
+ size: 'svg-430',
+ title: 'This job has been skipped',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_skipped-0b9c5e543588945e8c4ca57786bbf2d0c56631959c9f853300392d0315be829b.png',
+ },
+ },
+ ],
+ },
+ ],
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/pipelines/26#deploy',
+ illustration: null,
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ },
+ path: '/h5bp/html5-boilerplate/pipelines/26#deploy',
+ dropdown_path: '/h5bp/html5-boilerplate/pipelines/26/stage.json?stage=deploy',
+ },
+ {
+ name: 'notify',
+ title: 'notify: passed',
+ groups: [
+ {
+ name: 'slack',
+ size: 1,
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'manual play action',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/545',
+ illustration: {
+ image:
+ '/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/h5bp/html5-boilerplate/-/jobs/545/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ jobs: [
+ {
+ id: 545,
+ name: 'slack',
+ started: null,
+ archived: false,
+ build_path: '/h5bp/html5-boilerplate/-/jobs/545',
+ retry_path: '/h5bp/html5-boilerplate/-/jobs/545/retry',
+ play_path: '/h5bp/html5-boilerplate/-/jobs/545/play',
+ playable: true,
+ scheduled: false,
+ created_at: '2019-01-06T17:48:38.341Z',
+ updated_at: '2019-01-06T17:48:38.341Z',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'manual play action',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/-/jobs/545',
+ illustration: {
+ image:
+ '/assets/illustrations/manual_action-2b4ca0d1bcfd92aebf33d484e36cbf7a102d007f76b5a0cfea636033a629d601.svg',
+ size: 'svg-394',
+ title: 'This job requires a manual action',
+ content:
+ 'This job depends on a user to trigger its process. Often they are used to deploy code to production environments',
+ },
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ action: {
+ icon: 'play',
+ title: 'Play',
+ path: '/h5bp/html5-boilerplate/-/jobs/545/play',
+ method: 'post',
+ button_title: 'Trigger this manual action',
+ },
+ },
+ },
+ ],
+ },
+ ],
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/h5bp/html5-boilerplate/pipelines/26#notify',
+ illustration: null,
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ },
+ path: '/h5bp/html5-boilerplate/pipelines/26#notify',
+ dropdown_path: '/h5bp/html5-boilerplate/pipelines/26/stage.json?stage=notify',
+ },
+ ],
+ artifacts: [
+ {
+ name: 'build:linux',
+ expired: null,
+ expire_at: null,
+ path: '/h5bp/html5-boilerplate/-/jobs/526/artifacts/download',
+ browse_path: '/h5bp/html5-boilerplate/-/jobs/526/artifacts/browse',
+ },
+ {
+ name: 'build:osx',
+ expired: null,
+ expire_at: null,
+ path: '/h5bp/html5-boilerplate/-/jobs/527/artifacts/download',
+ browse_path: '/h5bp/html5-boilerplate/-/jobs/527/artifacts/browse',
+ },
+ ],
+ manual_actions: [
+ {
+ name: 'stop staging',
+ path: '/h5bp/html5-boilerplate/-/jobs/543/play',
+ playable: false,
+ scheduled: false,
+ },
+ {
+ name: 'production',
+ path: '/h5bp/html5-boilerplate/-/jobs/544/play',
+ playable: false,
+ scheduled: false,
+ },
+ {
+ name: 'slack',
+ path: '/h5bp/html5-boilerplate/-/jobs/545/play',
+ playable: true,
+ scheduled: false,
+ },
+ ],
+ scheduled_actions: [],
+ },
+ ref: {
+ name: 'master',
+ path: '/h5bp/html5-boilerplate/commits/master',
+ tag: false,
+ branch: true,
+ merge_request: false,
+ },
+ commit: {
+ id: 'bad98c453eab56d20057f3929989251d45cd1a8b',
+ short_id: 'bad98c45',
+ title: 'remove instances of shrink-to-fit=no (#2103)',
+ created_at: '2018-12-17T20:52:18.000Z',
+ parent_ids: ['49130f6cfe9ff1f749015d735649a2bc6f66cf3a'],
+ message:
+ 'remove instances of shrink-to-fit=no (#2103)\n\ncloses #2102\r\n\r\nPer my findings, the need for it as a default was rectified with the release of iOS 9.3, where the viewport no longer shrunk to accommodate overflow, as was introduced in iOS 9.',
+ author_name: "Scott O'Hara",
+ author_email: 'scottaohara@users.noreply.github.com',
+ authored_date: '2018-12-17T20:52:18.000Z',
+ committer_name: 'Rob Larsen',
+ committer_email: 'rob@drunkenfist.com',
+ committed_date: '2018-12-17T20:52:18.000Z',
+ author: null,
+ author_gravatar_url:
+ 'https://www.gravatar.com/avatar/6d597df7cf998d16cbe00ccac063b31e?s=80\u0026d=identicon',
+ commit_url:
+ 'http://localhost:3001/h5bp/html5-boilerplate/commit/bad98c453eab56d20057f3929989251d45cd1a8b',
+ commit_path: '/h5bp/html5-boilerplate/commit/bad98c453eab56d20057f3929989251d45cd1a8b',
+ },
+ retry_path: '/h5bp/html5-boilerplate/pipelines/26/retry',
+ triggered_by: {
+ id: 4,
+ user: null,
+ active: false,
+ coverage: null,
+ source: 'push',
+ path: '/gitlab-org/gitlab-test/pipelines/4',
+ details: {
+ status: {
+ icon: 'status_warning',
+ text: 'passed',
+ label: 'passed with warnings',
+ group: 'success-with-warnings',
+ tooltip: 'passed',
+ has_details: true,
+ details_path: '/gitlab-org/gitlab-test/pipelines/4',
+ illustration: null,
+ favicon:
+ '/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png',
+ },
+ },
+ project: {
+ id: 1,
+ name: 'Gitlab Test',
+ full_path: '/gitlab-org/gitlab-test',
+ full_name: 'Gitlab Org / Gitlab Test',
+ },
+ },
+ triggered: [],
+ project: {
+ id: 20,
+ name: 'GitLab Docs',
+ full_path: '/gitlab-com/gitlab-docs',
+ full_name: 'GitLab.com / GitLab Docs',
+ },
+ },
+ ],
},
],
};
diff --git a/spec/javascripts/pipelines/graph/mock_data.js b/spec/frontend/pipelines/graph/mock_data.js
index a4a5d78f906..a4a5d78f906 100644
--- a/spec/javascripts/pipelines/graph/mock_data.js
+++ b/spec/frontend/pipelines/graph/mock_data.js
diff --git a/spec/frontend/pipelines/graph/stage_column_component_spec.js b/spec/frontend/pipelines/graph/stage_column_component_spec.js
new file mode 100644
index 00000000000..88e56eee1d6
--- /dev/null
+++ b/spec/frontend/pipelines/graph/stage_column_component_spec.js
@@ -0,0 +1,136 @@
+import { shallowMount } from '@vue/test-utils';
+
+import stageColumnComponent from '~/pipelines/components/graph/stage_column_component.vue';
+
+describe('stage column component', () => {
+ const mockJob = {
+ id: 4250,
+ name: 'test',
+ status: {
+ icon: 'status_success',
+ text: 'passed',
+ label: 'passed',
+ group: 'success',
+ details_path: '/root/ci-mock/builds/4250',
+ action: {
+ icon: 'retry',
+ title: 'Retry',
+ path: '/root/ci-mock/builds/4250/retry',
+ method: 'post',
+ },
+ },
+ };
+
+ let wrapper;
+
+ beforeEach(() => {
+ const mockGroups = [];
+ for (let i = 0; i < 3; i += 1) {
+ const mockedJob = Object.assign({}, mockJob);
+ mockedJob.id += i;
+ mockGroups.push(mockedJob);
+ }
+
+ wrapper = shallowMount(stageColumnComponent, {
+ propsData: {
+ title: 'foo',
+ groups: mockGroups,
+ hasTriggeredBy: false,
+ },
+ });
+ });
+
+ it('should render provided title', () => {
+ expect(
+ wrapper
+ .find('.stage-name')
+ .text()
+ .trim(),
+ ).toBe('foo');
+ });
+
+ it('should render the provided groups', () => {
+ expect(wrapper.findAll('.builds-container > ul > li').length).toBe(
+ wrapper.props('groups').length,
+ );
+ });
+
+ describe('jobId', () => {
+ it('escapes job name', () => {
+ wrapper = shallowMount(stageColumnComponent, {
+ propsData: {
+ groups: [
+ {
+ id: 4259,
+ name: '<img src=x onerror=alert(document.domain)>',
+ status: {
+ icon: 'status_success',
+ label: 'success',
+ tooltip: '<img src=x onerror=alert(document.domain)>',
+ },
+ },
+ ],
+ title: 'test',
+ hasTriggeredBy: false,
+ },
+ });
+
+ expect(wrapper.find('.builds-container li').attributes('id')).toBe(
+ 'ci-badge-&lt;img src=x onerror=alert(document.domain)&gt;',
+ );
+ });
+ });
+
+ describe('with action', () => {
+ it('renders action button', () => {
+ wrapper = shallowMount(stageColumnComponent, {
+ propsData: {
+ groups: [
+ {
+ id: 4259,
+ name: '<img src=x onerror=alert(document.domain)>',
+ status: {
+ icon: 'status_success',
+ label: 'success',
+ tooltip: '<img src=x onerror=alert(document.domain)>',
+ },
+ },
+ ],
+ title: 'test',
+ hasTriggeredBy: false,
+ action: {
+ icon: 'play',
+ title: 'Play all',
+ path: 'action',
+ },
+ },
+ });
+
+ expect(wrapper.find('.js-stage-action').exists()).toBe(true);
+ });
+ });
+
+ describe('without action', () => {
+ it('does not render action button', () => {
+ wrapper = shallowMount(stageColumnComponent, {
+ propsData: {
+ groups: [
+ {
+ id: 4259,
+ name: '<img src=x onerror=alert(document.domain)>',
+ status: {
+ icon: 'status_success',
+ label: 'success',
+ tooltip: '<img src=x onerror=alert(document.domain)>',
+ },
+ },
+ ],
+ title: 'test',
+ hasTriggeredBy: false,
+ },
+ });
+
+ expect(wrapper.find('.js-stage-action').exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/pipelines/test_reports/test_summary_table_spec.js b/spec/frontend/pipelines/test_reports/test_summary_table_spec.js
index 9146f301f66..b585536ae09 100644
--- a/spec/frontend/pipelines/test_reports/test_summary_table_spec.js
+++ b/spec/frontend/pipelines/test_reports/test_summary_table_spec.js
@@ -37,11 +37,47 @@ describe('Test reports summary table', () => {
describe('when test reports are supplied', () => {
beforeEach(() => createComponent());
+ const findErrorIcon = () => wrapper.find({ ref: 'suiteErrorIcon' });
it('renders the correct number of rows', () => {
expect(noSuitesToShow().exists()).toBe(false);
expect(allSuitesRows().length).toBe(testReports.test_suites.length);
});
+
+ describe('when there is a suite error', () => {
+ beforeEach(() => {
+ createComponent({
+ test_suites: [
+ {
+ ...testReports.test_suites[0],
+ suite_error: 'Suite Error',
+ },
+ ],
+ });
+ });
+
+ it('renders error icon', () => {
+ expect(findErrorIcon().exists()).toBe(true);
+ expect(findErrorIcon().attributes('title')).toEqual('Suite Error');
+ });
+ });
+
+ describe('when there is not a suite error', () => {
+ beforeEach(() => {
+ createComponent({
+ test_suites: [
+ {
+ ...testReports.test_suites[0],
+ suite_error: null,
+ },
+ ],
+ });
+ });
+
+ it('does not render error icon', () => {
+ expect(findErrorIcon().exists()).toBe(false);
+ });
+ });
});
describe('when there are no test suites', () => {
diff --git a/spec/frontend/registry/explorer/pages/list_spec.js b/spec/frontend/registry/explorer/pages/list_spec.js
index 3e46a29f776..f69b849521d 100644
--- a/spec/frontend/registry/explorer/pages/list_spec.js
+++ b/spec/frontend/registry/explorer/pages/list_spec.js
@@ -1,11 +1,12 @@
import VueRouter from 'vue-router';
import { shallowMount, createLocalVue } from '@vue/test-utils';
-import { GlPagination, GlSkeletonLoader, GlSprintf } from '@gitlab/ui';
+import { GlPagination, GlSkeletonLoader, GlSprintf, GlAlert } from '@gitlab/ui';
import Tracking from '~/tracking';
import component from '~/registry/explorer/pages/list.vue';
import QuickstartDropdown from '~/registry/explorer/components/quickstart_dropdown.vue';
import GroupEmptyState from '~/registry/explorer/components/group_empty_state.vue';
import ProjectEmptyState from '~/registry/explorer/components/project_empty_state.vue';
+import ProjectPolicyAlert from '~/registry/explorer/components/project_policy_alert.vue';
import store from '~/registry/explorer/stores/';
import { SET_MAIN_LOADING } from '~/registry/explorer/stores/mutation_types/';
import {
@@ -35,6 +36,8 @@ describe('List Page', () => {
const findQuickStartDropdown = () => wrapper.find(QuickstartDropdown);
const findProjectEmptyState = () => wrapper.find(ProjectEmptyState);
const findGroupEmptyState = () => wrapper.find(GroupEmptyState);
+ const findProjectPolicyAlert = () => wrapper.find(ProjectPolicyAlert);
+ const findDeleteAlert = () => wrapper.find(GlAlert);
beforeEach(() => {
wrapper = shallowMount(component, {
@@ -57,6 +60,18 @@ describe('List Page', () => {
wrapper.destroy();
});
+ describe('Expiration policy notification', () => {
+ it('shows up on project page', () => {
+ expect(findProjectPolicyAlert().exists()).toBe(true);
+ });
+    it('does not show up on group page', () => {
+ store.dispatch('setInitialState', { isGroupPage: true });
+ return wrapper.vm.$nextTick().then(() => {
+ expect(findProjectPolicyAlert().exists()).toBe(false);
+ });
+ });
+ });
+
describe('connection error', () => {
const config = {
characterError: true,
@@ -179,32 +194,38 @@ describe('List Page', () => {
it('should call deleteItem when confirming deletion', () => {
dispatchSpy.mockResolvedValue();
- const itemToDelete = wrapper.vm.images[0];
- wrapper.setData({ itemToDelete });
+ findDeleteBtn().vm.$emit('click');
+ expect(wrapper.vm.itemToDelete).not.toEqual({});
findDeleteModal().vm.$emit('ok');
expect(store.dispatch).toHaveBeenCalledWith(
'requestDeleteImage',
- itemToDelete.destroy_path,
+ wrapper.vm.itemToDelete,
);
});
- it('should show a success toast when delete request is successful', () => {
+ it('should show a success alert when delete request is successful', () => {
dispatchSpy.mockResolvedValue();
+ findDeleteBtn().vm.$emit('click');
+ expect(wrapper.vm.itemToDelete).not.toEqual({});
return wrapper.vm.handleDeleteImage().then(() => {
- expect(wrapper.vm.$toast.show).toHaveBeenCalledWith(DELETE_IMAGE_SUCCESS_MESSAGE, {
- type: 'success',
- });
- expect(wrapper.vm.itemToDelete).toEqual({});
+ const alert = findDeleteAlert();
+ expect(alert.exists()).toBe(true);
+ expect(alert.text().replace(/\s\s+/gm, ' ')).toBe(
+ DELETE_IMAGE_SUCCESS_MESSAGE.replace('%{title}', wrapper.vm.itemToDelete.path),
+ );
});
});
- it('should show a error toast when delete request fails', () => {
+ it('should show an error alert when delete request fails', () => {
dispatchSpy.mockRejectedValue();
+ findDeleteBtn().vm.$emit('click');
+ expect(wrapper.vm.itemToDelete).not.toEqual({});
return wrapper.vm.handleDeleteImage().then(() => {
- expect(wrapper.vm.$toast.show).toHaveBeenCalledWith(DELETE_IMAGE_ERROR_MESSAGE, {
- type: 'error',
- });
- expect(wrapper.vm.itemToDelete).toEqual({});
+ const alert = findDeleteAlert();
+ expect(alert.exists()).toBe(true);
+ expect(alert.text().replace(/\s\s+/gm, ' ')).toBe(
+ DELETE_IMAGE_ERROR_MESSAGE.replace('%{title}', wrapper.vm.itemToDelete.path),
+ );
});
});
});
diff --git a/spec/frontend/registry/explorer/stores/actions_spec.js b/spec/frontend/registry/explorer/stores/actions_spec.js
index b39c79dd1ab..58f61a0e8c2 100644
--- a/spec/frontend/registry/explorer/stores/actions_spec.js
+++ b/spec/frontend/registry/explorer/stores/actions_spec.js
@@ -279,39 +279,32 @@ describe('Actions RegistryExplorer Store', () => {
});
describe('request delete single image', () => {
- const deletePath = 'delete/path';
+ const image = {
+ destroy_path: 'delete/path',
+ };
+
it('successfully performs the delete request', done => {
- mock.onDelete(deletePath).replyOnce(200);
+ mock.onDelete(image.destroy_path).replyOnce(200);
testAction(
actions.requestDeleteImage,
- deletePath,
- {
- pagination: {},
- },
+ image,
+ {},
[
{ type: types.SET_MAIN_LOADING, payload: true },
+ { type: types.UPDATE_IMAGE, payload: { ...image, deleting: true } },
{ type: types.SET_MAIN_LOADING, payload: false },
],
- [
- {
- type: 'setShowGarbageCollectionTip',
- payload: true,
- },
- {
- type: 'requestImagesList',
- payload: { pagination: {} },
- },
- ],
+ [],
done,
);
});
it('should turn off loading on error', done => {
- mock.onDelete(deletePath).replyOnce(400);
+ mock.onDelete(image.destroy_path).replyOnce(400);
testAction(
actions.requestDeleteImage,
- deletePath,
+ image,
{},
[
{ type: types.SET_MAIN_LOADING, payload: true },
diff --git a/spec/frontend/registry/explorer/stores/mutations_spec.js b/spec/frontend/registry/explorer/stores/mutations_spec.js
index 029fd23f7ce..43b2ba84218 100644
--- a/spec/frontend/registry/explorer/stores/mutations_spec.js
+++ b/spec/frontend/registry/explorer/stores/mutations_spec.js
@@ -28,14 +28,32 @@ describe('Mutations Registry Explorer Store', () => {
describe('SET_IMAGES_LIST_SUCCESS', () => {
it('should set the images list', () => {
- const images = [1, 2, 3];
- const expectedState = { ...mockState, images };
+ const images = [{ name: 'foo' }, { name: 'bar' }];
+ const defaultStatus = { deleting: false, failedDelete: false };
+ const expectedState = {
+ ...mockState,
+ images: [{ name: 'foo', ...defaultStatus }, { name: 'bar', ...defaultStatus }],
+ };
mutations[types.SET_IMAGES_LIST_SUCCESS](mockState, images);
expect(mockState).toEqual(expectedState);
});
});
+ describe('UPDATE_IMAGE', () => {
+ it('should update an image', () => {
+ mockState.images = [{ id: 1, name: 'foo' }, { id: 2, name: 'bar' }];
+ const payload = { id: 1, name: 'baz' };
+ const expectedState = {
+ ...mockState,
+ images: [payload, { id: 2, name: 'bar' }],
+ };
+ mutations[types.UPDATE_IMAGE](mockState, payload);
+
+ expect(mockState).toEqual(expectedState);
+ });
+ });
+
describe('SET_TAGS_LIST_SUCCESS', () => {
it('should set the tags list', () => {
const tags = [1, 2, 3];
diff --git a/spec/frontend/releases/components/release_block_spec.js b/spec/frontend/releases/components/release_block_spec.js
index 9846fcb65eb..19119d99f3c 100644
--- a/spec/frontend/releases/components/release_block_spec.js
+++ b/spec/frontend/releases/components/release_block_spec.js
@@ -1,6 +1,5 @@
import $ from 'jquery';
import { mount } from '@vue/test-utils';
-import { first } from 'underscore';
import EvidenceBlock from '~/releases/components/evidence_block.vue';
import ReleaseBlock from '~/releases/components/release_block.vue';
import ReleaseBlockFooter from '~/releases/components/release_block_footer.vue';
@@ -80,11 +79,11 @@ describe('Release block', () => {
);
expect(wrapper.find('.js-sources-dropdown li a').attributes().href).toEqual(
- first(release.assets.sources).url,
+ release.assets.sources[0].url,
);
expect(wrapper.find('.js-sources-dropdown li a').text()).toContain(
- first(release.assets.sources).format,
+ release.assets.sources[0].format,
);
});
@@ -92,12 +91,10 @@ describe('Release block', () => {
expect(wrapper.findAll('.js-assets-list li').length).toEqual(release.assets.links.length);
expect(wrapper.find('.js-assets-list li a').attributes().href).toEqual(
- first(release.assets.links).directAssetUrl,
+ release.assets.links[0].directAssetUrl,
);
- expect(wrapper.find('.js-assets-list li a').text()).toContain(
- first(release.assets.links).name,
- );
+ expect(wrapper.find('.js-assets-list li a').text()).toContain(release.assets.links[0].name);
});
it('renders author avatar', () => {
@@ -264,7 +261,7 @@ describe('Release block', () => {
});
it('renders a link to the milestone with a tooltip', () => {
- const milestone = first(release.milestones);
+ const milestone = release.milestones[0];
const milestoneLink = wrapper.find('.js-milestone-link');
expect(milestoneLink.exists()).toBe(true);
diff --git a/spec/frontend/reports/accessibility_report/mock_data.js b/spec/frontend/reports/accessibility_report/mock_data.js
new file mode 100644
index 00000000000..1db2d8db25a
--- /dev/null
+++ b/spec/frontend/reports/accessibility_report/mock_data.js
@@ -0,0 +1,86 @@
+export const baseReport = {
+ results: {
+ 'http://about.gitlab.com/users/sign_in': [
+ {
+ code: 'WCAG2AA.Principle1.Guideline1_4.1_4_3.G18.Fail',
+ type: 'error',
+ typeCode: 1,
+ message:
+ 'This element has insufficient contrast at this conformance level. Expected a contrast ratio of at least 4.5:1, but text in this element has a contrast ratio of 2.82:1. Recommendation: change background to #d1470c.',
+ context:
+ '<a class="btn btn-nav-cta btn-nav-link-cta" href="/free-trial">\nGet free trial\n</a>',
+ selector: '#main-nav > div:nth-child(2) > ul > div:nth-child(8) > a',
+ runner: 'htmlcs',
+ runnerExtras: {},
+ },
+ ],
+ 'https://about.gitlab.com': [
+ {
+ code: 'WCAG2AA.Principle4.Guideline4_1.4_1_2.H91.A.NoContent',
+ type: 'error',
+ typeCode: 1,
+ message:
+ 'Anchor element found with a valid href attribute, but no link content has been supplied.',
+ context: '<a href="/" class="navbar-brand animated"><svg height="36" viewBox="0 0 1...</a>',
+ selector: '#main-nav > div:nth-child(1) > a',
+ runner: 'htmlcs',
+ runnerExtras: {},
+ },
+ ],
+ },
+};
+
+export const parsedBaseReport = [
+ '{"code":"WCAG2AA.Principle1.Guideline1_4.1_4_3.G18.Fail","type":"error","typeCode":1,"message":"This element has insufficient contrast at this conformance level. Expected a contrast ratio of at least 4.5:1, but text in this element has a contrast ratio of 2.82:1. Recommendation: change background to #d1470c.","context":"<a class=\\"btn btn-nav-cta btn-nav-link-cta\\" href=\\"/free-trial\\">\\nGet free trial\\n</a>","selector":"#main-nav > div:nth-child(2) > ul > div:nth-child(8) > a","runner":"htmlcs","runnerExtras":{}}',
+ '{"code":"WCAG2AA.Principle4.Guideline4_1.4_1_2.H91.A.NoContent","type":"error","typeCode":1,"message":"Anchor element found with a valid href attribute, but no link content has been supplied.","context":"<a href=\\"/\\" class=\\"navbar-brand animated\\"><svg height=\\"36\\" viewBox=\\"0 0 1...</a>","selector":"#main-nav > div:nth-child(1) > a","runner":"htmlcs","runnerExtras":{}}',
+];
+
+export const headReport = {
+ results: {
+ 'http://about.gitlab.com/users/sign_in': [
+ {
+ code: 'WCAG2AA.Principle1.Guideline1_4.1_4_3.G18.Fail',
+ type: 'error',
+ typeCode: 1,
+ message:
+ 'This element has insufficient contrast at this conformance level. Expected a contrast ratio of at least 4.5:1, but text in this element has a contrast ratio of 3.84:1. Recommendation: change text colour to #767676.',
+ context: '<a href="/stages-devops-lifecycle/" class="main-nav-link">Product</a>',
+ selector: '#main-nav > div:nth-child(2) > ul > li:nth-child(1) > a',
+ runner: 'htmlcs',
+ runnerExtras: {},
+ },
+ ],
+ 'https://about.gitlab.com': [
+ {
+ code: 'WCAG2AA.Principle4.Guideline4_1.4_1_2.H91.A.NoContent',
+ type: 'error',
+ typeCode: 1,
+ message:
+ 'Anchor element found with a valid href attribute, but no link content has been supplied.',
+ context: '<a href="/" class="navbar-brand animated"><svg height="36" viewBox="0 0 1...</a>',
+ selector: '#main-nav > div:nth-child(1) > a',
+ runner: 'htmlcs',
+ runnerExtras: {},
+ },
+ ],
+ },
+};
+
+export const comparedReportResult = {
+ status: 'failed',
+ summary: {
+ total: 2,
+ notes: 0,
+ errors: 2,
+ warnings: 0,
+ },
+ new_errors: [headReport.results['http://about.gitlab.com/users/sign_in'][0]],
+ new_notes: [],
+ new_warnings: [],
+ resolved_errors: [baseReport.results['http://about.gitlab.com/users/sign_in'][0]],
+ resolved_notes: [],
+ resolved_warnings: [],
+ existing_errors: [headReport.results['https://about.gitlab.com'][0]],
+ existing_notes: [],
+ existing_warnings: [],
+};
diff --git a/spec/frontend/reports/accessibility_report/store/actions_spec.js b/spec/frontend/reports/accessibility_report/store/actions_spec.js
new file mode 100644
index 00000000000..4e156cd6736
--- /dev/null
+++ b/spec/frontend/reports/accessibility_report/store/actions_spec.js
@@ -0,0 +1,111 @@
+import axios from '~/lib/utils/axios_utils';
+import MockAdapter from 'axios-mock-adapter';
+import * as actions from '~/reports/accessibility_report/store/actions';
+import * as types from '~/reports/accessibility_report/store/mutation_types';
+import createStore from '~/reports/accessibility_report/store';
+import { TEST_HOST } from 'spec/test_constants';
+import testAction from 'helpers/vuex_action_helper';
+import { baseReport, headReport, comparedReportResult } from '../mock_data';
+
+describe('Accessibility Reports actions', () => {
+ let localState;
+ let localStore;
+
+ beforeEach(() => {
+ localStore = createStore();
+ localState = localStore.state;
+ });
+
+ describe('fetchReport', () => {
+ let mock;
+
+ beforeEach(() => {
+ localState.baseEndpoint = `${TEST_HOST}/endpoint.json`;
+ localState.headEndpoint = `${TEST_HOST}/endpoint.json`;
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ describe('when no endpoints are given', () => {
+ beforeEach(() => {
+ localState.baseEndpoint = null;
+ localState.headEndpoint = null;
+ });
+
+ it('should commit REQUEST_REPORT and RECEIVE_REPORT_ERROR mutations', done => {
+ testAction(
+ actions.fetchReport,
+ null,
+ localState,
+ [
+ { type: types.REQUEST_REPORT },
+ {
+ type: types.RECEIVE_REPORT_ERROR,
+ payload: 'Accessibility report artifact not found',
+ },
+ ],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('success', () => {
+ it('should commit REQUEST_REPORT mutation and dispatch receiveReportSuccess', done => {
+ const data = { report: { summary: {} } };
+ mock.onGet(`${TEST_HOST}/endpoint.json`).reply(200, data);
+
+ testAction(
+ actions.fetchReport,
+ null,
+ localState,
+ [{ type: types.REQUEST_REPORT }],
+ [
+ {
+ payload: [{ ...data, isHead: false }, { ...data, isHead: true }],
+ type: 'receiveReportSuccess',
+ },
+ ],
+ done,
+ );
+ });
+ });
+
+ describe('error', () => {
+ it('should commit REQUEST_REPORT and RECEIVE_REPORT_ERROR mutations', done => {
+ mock.onGet(`${TEST_HOST}/endpoint.json`).reply(500);
+
+ testAction(
+ actions.fetchReport,
+ null,
+ localState,
+ [
+ { type: types.REQUEST_REPORT },
+ {
+ type: types.RECEIVE_REPORT_ERROR,
+ payload: 'Failed to retrieve accessibility report',
+ },
+ ],
+ [],
+ done,
+ );
+ });
+ });
+ });
+
+ describe('receiveReportSuccess', () => {
+ it('should commit RECEIVE_REPORT_SUCCESS mutation', done => {
+ testAction(
+ actions.receiveReportSuccess,
+ [{ ...baseReport, isHead: false }, { ...headReport, isHead: true }],
+ localState,
+ [{ type: types.RECEIVE_REPORT_SUCCESS, payload: comparedReportResult }],
+ [],
+ done,
+ );
+ });
+ });
+});
diff --git a/spec/frontend/reports/accessibility_report/store/mutations_spec.js b/spec/frontend/reports/accessibility_report/store/mutations_spec.js
new file mode 100644
index 00000000000..88e3d1f7e16
--- /dev/null
+++ b/spec/frontend/reports/accessibility_report/store/mutations_spec.js
@@ -0,0 +1,61 @@
+import mutations from '~/reports/accessibility_report/store/mutations';
+import createStore from '~/reports/accessibility_report/store';
+
+describe('Accessibility Reports mutations', () => {
+ let localState;
+ let localStore;
+
+ beforeEach(() => {
+ localStore = createStore();
+ localState = localStore.state;
+ });
+
+ describe('REQUEST_REPORT', () => {
+ it('sets isLoading to true', () => {
+ mutations.REQUEST_REPORT(localState);
+
+ expect(localState.isLoading).toEqual(true);
+ });
+ });
+
+ describe('RECEIVE_REPORT_SUCCESS', () => {
+ it('sets isLoading to false', () => {
+ mutations.RECEIVE_REPORT_SUCCESS(localState, {});
+
+ expect(localState.isLoading).toEqual(false);
+ });
+
+ it('sets hasError to false', () => {
+ mutations.RECEIVE_REPORT_SUCCESS(localState, {});
+
+ expect(localState.hasError).toEqual(false);
+ });
+
+ it('sets report to response report', () => {
+ const report = { data: 'testing' };
+ mutations.RECEIVE_REPORT_SUCCESS(localState, report);
+
+ expect(localState.report).toEqual(report);
+ });
+ });
+
+ describe('RECEIVE_REPORT_ERROR', () => {
+ it('sets isLoading to false', () => {
+ mutations.RECEIVE_REPORT_ERROR(localState);
+
+ expect(localState.isLoading).toEqual(false);
+ });
+
+ it('sets hasError to true', () => {
+ mutations.RECEIVE_REPORT_ERROR(localState);
+
+ expect(localState.hasError).toEqual(true);
+ });
+
+ it('sets errorMessage to given message', () => {
+ mutations.RECEIVE_REPORT_ERROR(localState, 'message');
+
+ expect(localState.errorMessage).toEqual('message');
+ });
+ });
+});
diff --git a/spec/frontend/reports/accessibility_report/store/utils_spec.js b/spec/frontend/reports/accessibility_report/store/utils_spec.js
new file mode 100644
index 00000000000..a5fa1889503
--- /dev/null
+++ b/spec/frontend/reports/accessibility_report/store/utils_spec.js
@@ -0,0 +1,35 @@
+import * as utils from '~/reports/accessibility_report/store/utils';
+import { baseReport, headReport, parsedBaseReport, comparedReportResult } from '../mock_data';
+
+describe('Accessibility Report store utils', () => {
+ describe('parseAccessibilityReport', () => {
+ it('returns array of stringified issues', () => {
+ const result = utils.parseAccessibilityReport(baseReport);
+
+ expect(result).toEqual(parsedBaseReport);
+ });
+ });
+
+ describe('compareAccessibilityReports', () => {
+ let reports;
+
+ beforeEach(() => {
+ reports = [
+ {
+ isHead: false,
+ issues: utils.parseAccessibilityReport(baseReport),
+ },
+ {
+ isHead: true,
+ issues: utils.parseAccessibilityReport(headReport),
+ },
+ ];
+ });
+
+ it('returns the comparison report with a new, resolved, and existing error', () => {
+ const result = utils.compareAccessibilityReports(reports);
+
+ expect(result).toEqual(comparedReportResult);
+ });
+ });
+});
diff --git a/spec/frontend/snippet/snippet_edit_spec.js b/spec/frontend/snippet/snippet_edit_spec.js
new file mode 100644
index 00000000000..cfe5062c86b
--- /dev/null
+++ b/spec/frontend/snippet/snippet_edit_spec.js
@@ -0,0 +1,45 @@
+import '~/snippet/snippet_edit';
+import { SnippetEditInit } from '~/snippets';
+import initSnippet from '~/snippet/snippet_bundle';
+
+import { triggerDOMEvent } from 'jest/helpers/dom_events_helper';
+
+jest.mock('~/snippet/snippet_bundle');
+jest.mock('~/snippets');
+
+describe('Snippet edit form initialization', () => {
+ const setFF = flag => {
+ gon.features = { snippetsEditVue: flag };
+ };
+ let features;
+
+ beforeEach(() => {
+ features = gon.features;
+ setFixtures('<div class="snippet-form"></div>');
+ });
+
+ afterEach(() => {
+ gon.features = features;
+ });
+
+ it.each`
+ name | flag | isVue
+ ${'Regular'} | ${false} | ${false}
+ ${'Vue'} | ${true} | ${true}
+ `('correctly initializes $name Snippet Edit form', ({ flag, isVue }) => {
+ initSnippet.mockClear();
+ SnippetEditInit.mockClear();
+
+ setFF(flag);
+
+ triggerDOMEvent('DOMContentLoaded');
+
+ if (isVue) {
+ expect(initSnippet).not.toHaveBeenCalled();
+ expect(SnippetEditInit).toHaveBeenCalled();
+ } else {
+ expect(initSnippet).toHaveBeenCalled();
+ expect(SnippetEditInit).not.toHaveBeenCalled();
+ }
+ });
+});
diff --git a/spec/frontend/snippets/components/__snapshots__/snippet_description_edit_spec.js.snap b/spec/frontend/snippets/components/__snapshots__/snippet_description_edit_spec.js.snap
index 3c3f9764f64..334ceaa064f 100644
--- a/spec/frontend/snippets/components/__snapshots__/snippet_description_edit_spec.js.snap
+++ b/spec/frontend/snippets/components/__snapshots__/snippet_description_edit_spec.js.snap
@@ -39,7 +39,6 @@ exports[`Snippet Description Edit component rendering matches the snapshot 1`] =
qa-description-textarea"
data-supports-quick-actions="false"
dir="auto"
- id="snippet-description"
placeholder="Write a comment or drag your files here…"
/>
</markdown-field-stub>
diff --git a/spec/frontend/snippets/components/edit_spec.js b/spec/frontend/snippets/components/edit_spec.js
new file mode 100644
index 00000000000..ba62a0a92ca
--- /dev/null
+++ b/spec/frontend/snippets/components/edit_spec.js
@@ -0,0 +1,295 @@
+import { shallowMount } from '@vue/test-utils';
+import axios from '~/lib/utils/axios_utils';
+
+import { GlLoadingIcon } from '@gitlab/ui';
+import { joinPaths, redirectTo } from '~/lib/utils/url_utility';
+
+import SnippetEditApp from '~/snippets/components/edit.vue';
+import SnippetDescriptionEdit from '~/snippets/components/snippet_description_edit.vue';
+import SnippetVisibilityEdit from '~/snippets/components/snippet_visibility_edit.vue';
+import SnippetBlobEdit from '~/snippets/components/snippet_blob_edit.vue';
+import TitleField from '~/vue_shared/components/form/title.vue';
+import FormFooterActions from '~/vue_shared/components/form/form_footer_actions.vue';
+
+import UpdateSnippetMutation from '~/snippets/mutations/updateSnippet.mutation.graphql';
+import CreateSnippetMutation from '~/snippets/mutations/createSnippet.mutation.graphql';
+
+import AxiosMockAdapter from 'axios-mock-adapter';
+import waitForPromises from 'helpers/wait_for_promises';
+import { ApolloMutation } from 'vue-apollo';
+
+jest.mock('~/lib/utils/url_utility', () => ({
+ getBaseURL: jest.fn().mockReturnValue('foo/'),
+ redirectTo: jest.fn().mockName('redirectTo'),
+ joinPaths: jest
+ .fn()
+ .mockName('joinPaths')
+ .mockReturnValue('contentApiURL'),
+}));
+
+let flashSpy;
+
+const contentMock = 'Foo Bar';
+const rawPathMock = '/foo/bar';
+const rawProjectPathMock = '/project/path';
+const newlyEditedSnippetUrl = 'http://foo.bar';
+const apiError = { message: 'Ufff' };
+
+const defaultProps = {
+ snippetGid: 'gid://gitlab/PersonalSnippet/42',
+ markdownPreviewPath: 'http://preview.foo.bar',
+ markdownDocsPath: 'http://docs.foo.bar',
+};
+
+describe('Snippet Edit app', () => {
+ let wrapper;
+ let axiosMock;
+
+ const resolveMutate = jest.fn().mockResolvedValue({
+ data: {
+ updateSnippet: {
+ errors: [],
+ snippet: {
+ webUrl: newlyEditedSnippetUrl,
+ },
+ },
+ },
+ });
+
+ const rejectMutation = jest.fn().mockRejectedValue(apiError);
+
+ const mutationTypes = {
+ RESOLVE: resolveMutate,
+ REJECT: rejectMutation,
+ };
+
+ function createComponent({
+ props = defaultProps,
+ data = {},
+ loading = false,
+ mutationRes = mutationTypes.RESOLVE,
+ } = {}) {
+ const $apollo = {
+ queries: {
+ snippet: {
+ loading,
+ },
+ },
+ mutate: mutationRes,
+ };
+
+ wrapper = shallowMount(SnippetEditApp, {
+ mocks: { $apollo },
+ stubs: {
+ FormFooterActions,
+ ApolloMutation,
+ },
+ propsData: {
+ ...props,
+ },
+ data() {
+ return data;
+ },
+ });
+
+ flashSpy = jest.spyOn(wrapper.vm, 'flashAPIFailure');
+ }
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const findSubmitButton = () => wrapper.find('[type=submit]');
+ const findCancellButton = () => wrapper.find('[data-testid="snippet-cancel-btn"]');
+
+ describe('rendering', () => {
+ it('renders loader while the query is in flight', () => {
+ createComponent({ loading: true });
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ });
+
+ it('renders all required components', () => {
+ createComponent();
+
+ expect(wrapper.contains(TitleField)).toBe(true);
+ expect(wrapper.contains(SnippetDescriptionEdit)).toBe(true);
+ expect(wrapper.contains(SnippetBlobEdit)).toBe(true);
+ expect(wrapper.contains(SnippetVisibilityEdit)).toBe(true);
+ expect(wrapper.contains(FormFooterActions)).toBe(true);
+ });
+
+ it('does not fail if there is no snippet yet (new snippet creation)', () => {
+ const snippetGid = '';
+ createComponent({
+ props: {
+ ...defaultProps,
+ snippetGid,
+ },
+ });
+
+ expect(wrapper.props('snippetGid')).toBe(snippetGid);
+ });
+
+ it.each`
+ title | content | expectation
+ ${''} | ${''} | ${true}
+ ${'foo'} | ${''} | ${true}
+ ${''} | ${'foo'} | ${true}
+ ${'foo'} | ${'bar'} | ${false}
+ `(
+ 'disables submit button unless both title and content are present',
+ ({ title, content, expectation }) => {
+ createComponent({
+ data: {
+ snippet: { title },
+ content,
+ },
+ });
+ const isBtnDisabled = Boolean(findSubmitButton().attributes('disabled'));
+ expect(isBtnDisabled).toBe(expectation);
+ },
+ );
+
+ it.each`
+ isNew | status | expectation
+ ${true} | ${`new`} | ${`/snippets`}
+ ${false} | ${`existing`} | ${newlyEditedSnippetUrl}
+ `('sets correct href for the cancel button on a $status snippet', ({ isNew, expectation }) => {
+ createComponent({
+ data: {
+ snippet: { webUrl: newlyEditedSnippetUrl },
+ newSnippet: isNew,
+ },
+ });
+
+ expect(findCancellButton().attributes('href')).toBe(expectation);
+ });
+ });
+
+ describe('functionality', () => {
+ describe('handling of the data from GraphQL response', () => {
+ const snippet = {
+ blob: {
+ rawPath: rawPathMock,
+ },
+ };
+ const getResSchema = newSnippet => {
+ return {
+ data: {
+ snippets: {
+ edges: newSnippet ? [] : [snippet],
+ },
+ },
+ };
+ };
+
+ const bootstrapForExistingSnippet = resp => {
+ createComponent({
+ data: {
+ snippet,
+ },
+ });
+
+ if (resp === 500) {
+ axiosMock.onGet('contentApiURL').reply(500);
+ } else {
+ axiosMock.onGet('contentApiURL').reply(200, contentMock);
+ }
+ wrapper.vm.onSnippetFetch(getResSchema());
+ };
+
+ const bootstrapForNewSnippet = () => {
+ createComponent();
+ wrapper.vm.onSnippetFetch(getResSchema(true));
+ };
+
+ beforeEach(() => {
+ axiosMock = new AxiosMockAdapter(axios);
+ });
+
+ afterEach(() => {
+ axiosMock.restore();
+ });
+
+ it('fetches blob content with the additional query', () => {
+ bootstrapForExistingSnippet();
+
+ return waitForPromises().then(() => {
+ expect(joinPaths).toHaveBeenCalledWith('foo/', rawPathMock);
+ expect(wrapper.vm.newSnippet).toBe(false);
+ expect(wrapper.vm.content).toBe(contentMock);
+ });
+ });
+
+ it('flashes the error message if fetching content fails', () => {
+ bootstrapForExistingSnippet(500);
+
+ return waitForPromises().then(() => {
+ expect(flashSpy).toHaveBeenCalled();
+ expect(wrapper.vm.content).toBe('');
+ });
+ });
+
+ it('does not fetch content for new snippet', () => {
+ bootstrapForNewSnippet();
+
+ return waitForPromises().then(() => {
+ // we keep using waitForPromises to ensure assertions run only after all pending promises settle
+ expect(wrapper.vm.newSnippet).toBe(true);
+ expect(wrapper.vm.content).toBe('');
+ expect(joinPaths).not.toHaveBeenCalled();
+ expect(wrapper.vm.snippet).toEqual(wrapper.vm.$options.newSnippetSchema);
+ });
+ });
+ });
+
+ describe('form submission handling', () => {
+ it.each`
+ newSnippet | projectPath | mutation | mutationName
+ ${true} | ${rawProjectPathMock} | ${CreateSnippetMutation} | ${'CreateSnippetMutation with projectPath'}
+ ${true} | ${''} | ${CreateSnippetMutation} | ${'CreateSnippetMutation without projectPath'}
+ ${false} | ${rawProjectPathMock} | ${UpdateSnippetMutation} | ${'UpdateSnippetMutation with projectPath'}
+ ${false} | ${''} | ${UpdateSnippetMutation} | ${'UpdateSnippetMutation without projectPath'}
+ `('should submit $mutationName correctly', ({ newSnippet, projectPath, mutation }) => {
+ createComponent({
+ data: {
+ newSnippet,
+ },
+ props: {
+ ...defaultProps,
+ projectPath,
+ },
+ });
+
+ const mutationPayload = {
+ mutation,
+ variables: {
+ input: newSnippet ? expect.objectContaining({ projectPath }) : expect.any(Object),
+ },
+ };
+
+ wrapper.vm.handleFormSubmit();
+ expect(resolveMutate).toHaveBeenCalledWith(mutationPayload);
+ });
+
+ it('redirects to snippet view on successful mutation', () => {
+ createComponent();
+ wrapper.vm.handleFormSubmit();
+ return waitForPromises().then(() => {
+ expect(redirectTo).toHaveBeenCalledWith(newlyEditedSnippetUrl);
+ });
+ });
+
+ it('flashes an error if mutation failed', () => {
+ createComponent({
+ mutationRes: mutationTypes.REJECT,
+ });
+ wrapper.vm.handleFormSubmit();
+ return waitForPromises().then(() => {
+ expect(redirectTo).not.toHaveBeenCalled();
+ expect(flashSpy).toHaveBeenCalledWith(apiError);
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/static_site_editor/components/saved_changes_message_spec.js b/spec/frontend/static_site_editor/components/saved_changes_message_spec.js
index cac990df40f..659e9be59d2 100644
--- a/spec/frontend/static_site_editor/components/saved_changes_message_spec.js
+++ b/spec/frontend/static_site_editor/components/saved_changes_message_spec.js
@@ -1,22 +1,17 @@
import { shallowMount } from '@vue/test-utils';
+
import SavedChangesMessage from '~/static_site_editor/components/saved_changes_message.vue';
+import { returnUrl, savedContentMeta } from '../mock_data';
+
describe('~/static_site_editor/components/saved_changes_message.vue', () => {
let wrapper;
+ const { branch, commit, mergeRequest } = savedContentMeta;
const props = {
- branch: {
- label: '123-the-branch',
- url: 'https://gitlab.com/gitlab-org/gitlab/-/tree/123-the-branch',
- },
- commit: {
- label: 'a123',
- url: 'https://gitlab.com/gitlab-org/gitlab/-/commit/a123',
- },
- mergeRequest: {
- label: '123',
- url: 'https://gitlab.com/gitlab-org/gitlab/-/merge_requests/123',
- },
- returnUrl: 'https://www.the-static-site.com/post',
+ branch,
+ commit,
+ mergeRequest,
+ returnUrl,
};
const findReturnToSiteButton = () => wrapper.find({ ref: 'returnToSiteButton' });
const findMergeRequestButton = () => wrapper.find({ ref: 'mergeRequestButton' });
diff --git a/spec/frontend/static_site_editor/components/static_site_editor_spec.js b/spec/frontend/static_site_editor/components/static_site_editor_spec.js
index 14e9fe211ce..5d4e3758557 100644
--- a/spec/frontend/static_site_editor/components/static_site_editor_spec.js
+++ b/spec/frontend/static_site_editor/components/static_site_editor_spec.js
@@ -1,6 +1,5 @@
import Vuex from 'vuex';
import { shallowMount, createLocalVue } from '@vue/test-utils';
-
import { GlSkeletonLoader } from '@gitlab/ui';
import createState from '~/static_site_editor/store/state';
@@ -11,8 +10,15 @@ import EditHeader from '~/static_site_editor/components/edit_header.vue';
import InvalidContentMessage from '~/static_site_editor/components/invalid_content_message.vue';
import PublishToolbar from '~/static_site_editor/components/publish_toolbar.vue';
import SubmitChangesError from '~/static_site_editor/components/submit_changes_error.vue';
+import SavedChangesMessage from '~/static_site_editor/components/saved_changes_message.vue';
-import { sourceContent, sourceContentTitle, submitChangesError } from '../mock_data';
+import {
+ returnUrl,
+ sourceContent,
+ sourceContentTitle,
+ savedContentMeta,
+ submitChangesError,
+} from '../mock_data';
const localVue = createLocalVue();
@@ -74,6 +80,7 @@ describe('StaticSiteEditor', () => {
const findPublishToolbar = () => wrapper.find(PublishToolbar);
const findSkeletonLoader = () => wrapper.find(GlSkeletonLoader);
const findSubmitChangesError = () => wrapper.find(SubmitChangesError);
+ const findSavedChangesMessage = () => wrapper.find(SavedChangesMessage);
beforeEach(() => {
buildStore();
@@ -84,6 +91,17 @@ describe('StaticSiteEditor', () => {
wrapper.destroy();
});
+ it('renders the saved changes message when changes are submitted successfully', () => {
+ buildStore({ initialState: { returnUrl, savedContentMeta } });
+ buildWrapper();
+
+ expect(findSavedChangesMessage().exists()).toBe(true);
+ expect(findSavedChangesMessage().props()).toEqual({
+ returnUrl,
+ ...savedContentMeta,
+ });
+ });
+
describe('when content is not loaded', () => {
it('does not render edit area', () => {
expect(findEditArea().exists()).toBe(false);
@@ -96,6 +114,10 @@ describe('StaticSiteEditor', () => {
it('does not render toolbar', () => {
expect(findPublishToolbar().exists()).toBe(false);
});
+
+ it('does not render saved changes message', () => {
+ expect(findSavedChangesMessage().exists()).toBe(false);
+ });
});
describe('when content is loaded', () => {
diff --git a/spec/frontend/static_site_editor/mock_data.js b/spec/frontend/static_site_editor/mock_data.js
index 345ae0ce6f6..962047e6dd2 100644
--- a/spec/frontend/static_site_editor/mock_data.js
+++ b/spec/frontend/static_site_editor/mock_data.js
@@ -21,10 +21,10 @@ export const sourcePath = 'foobar.md.html';
export const savedContentMeta = {
branch: {
label: 'foobar',
- url: 'foobar/-/tree/foorbar',
+ url: 'foobar/-/tree/foobar',
},
commit: {
- label: 'c1461b08 ',
+ label: 'c1461b08',
url: 'foobar/-/c1461b08',
},
mergeRequest: {
diff --git a/spec/frontend/vue_shared/components/__snapshots__/awards_list_spec.js.snap b/spec/frontend/vue_shared/components/__snapshots__/awards_list_spec.js.snap
new file mode 100644
index 00000000000..df4b30f1cb8
--- /dev/null
+++ b/spec/frontend/vue_shared/components/__snapshots__/awards_list_spec.js.snap
@@ -0,0 +1,287 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`vue_shared/components/awards_list default matches snapshot 1`] = `
+<div
+ class="awards js-awards-block"
+>
+ <button
+ class="btn award-control"
+ data-boundary="viewport"
+ data-original-title="Ada, Leonardo, and Marie"
+ data-testid="award-button"
+ title=""
+ type="button"
+ >
+ <span
+ data-testid="award-html"
+ >
+
+
+ <gl-emoji
+ data-fallback-src="/assets/emoji/thumbsup-59ec2457ab33e8897261d01a495f6cf5c668d0004807dc541c3b1be5294b1e61.png"
+ data-name="thumbsup"
+ data-unicode-version="6.0"
+ title="thumbs up sign"
+ >
+
+ 👍
+
+ </gl-emoji>
+
+
+ </span>
+
+ <span
+ class="award-control-text js-counter"
+ >
+ 3
+ </span>
+ </button>
+ <button
+ class="btn award-control active"
+ data-boundary="viewport"
+ data-original-title="You, Ada, and Marie"
+ data-testid="award-button"
+ title=""
+ type="button"
+ >
+ <span
+ data-testid="award-html"
+ >
+
+
+ <gl-emoji
+ data-fallback-src="/assets/emoji/thumbsdown-5954334e2dae5357312b3d629f10a496c728029e02216f8c8b887f9b51561c61.png"
+ data-name="thumbsdown"
+ data-unicode-version="6.0"
+ title="thumbs down sign"
+ >
+
+ 👎
+
+ </gl-emoji>
+
+
+ </span>
+
+ <span
+ class="award-control-text js-counter"
+ >
+ 3
+ </span>
+ </button>
+ <button
+ class="btn award-control"
+ data-boundary="viewport"
+ data-original-title="Ada and Jane"
+ data-testid="award-button"
+ title=""
+ type="button"
+ >
+ <span
+ data-testid="award-html"
+ >
+
+
+ <gl-emoji
+ data-fallback-src="/assets/emoji/smile-14905c372d5bf7719bd727c9efae31a03291acec79801652a23710c6848c5d14.png"
+ data-name="smile"
+ data-unicode-version="6.0"
+ title="smiling face with open mouth and smiling eyes"
+ >
+
+ 😄
+
+ </gl-emoji>
+
+
+ </span>
+
+ <span
+ class="award-control-text js-counter"
+ >
+ 2
+ </span>
+ </button>
+ <button
+ class="btn award-control active"
+ data-boundary="viewport"
+ data-original-title="You, Ada, Jane, and Leonardo"
+ data-testid="award-button"
+ title=""
+ type="button"
+ >
+ <span
+ data-testid="award-html"
+ >
+
+
+ <gl-emoji
+ data-fallback-src="/assets/emoji/ok_hand-d63002dce3cc3655b67b8765b7c28d370edba0e3758b2329b60e0e61c4d8e78d.png"
+ data-name="ok_hand"
+ data-unicode-version="6.0"
+ title="ok hand sign"
+ >
+
+ 👌
+
+ </gl-emoji>
+
+
+ </span>
+
+ <span
+ class="award-control-text js-counter"
+ >
+ 4
+ </span>
+ </button>
+ <button
+ class="btn award-control active"
+ data-boundary="viewport"
+ data-original-title="You"
+ data-testid="award-button"
+ title=""
+ type="button"
+ >
+ <span
+ data-testid="award-html"
+ >
+
+
+ <gl-emoji
+ data-fallback-src="/assets/emoji/cactus-2c5c4c35f26c7046fdc002b337e0d939729b33a26980e675950f9934c91e40fd.png"
+ data-name="cactus"
+ data-unicode-version="6.0"
+ title="cactus"
+ >
+
+ 🌵
+
+ </gl-emoji>
+
+
+ </span>
+
+ <span
+ class="award-control-text js-counter"
+ >
+ 1
+ </span>
+ </button>
+ <button
+ class="btn award-control"
+ data-boundary="viewport"
+ data-original-title="Marie"
+ data-testid="award-button"
+ title=""
+ type="button"
+ >
+ <span
+ data-testid="award-html"
+ >
+
+
+ <gl-emoji
+ data-fallback-src="/assets/emoji/a-bddbb39e8a1d35d42b7c08e7d47f63988cb4d8614b79f74e70b9c67c221896cc.png"
+ data-name="a"
+ data-unicode-version="6.0"
+ title="negative squared latin capital letter a"
+ >
+
+ 🅰
+
+ </gl-emoji>
+
+
+ </span>
+
+ <span
+ class="award-control-text js-counter"
+ >
+ 1
+ </span>
+ </button>
+ <button
+ class="btn award-control active"
+ data-boundary="viewport"
+ data-original-title="You"
+ data-testid="award-button"
+ title=""
+ type="button"
+ >
+ <span
+ data-testid="award-html"
+ >
+
+
+ <gl-emoji
+ data-fallback-src="/assets/emoji/b-722f9db9442e7c0fc0d0ac0f5291fbf47c6a0ac4d8abd42e97957da705fb82bf.png"
+ data-name="b"
+ data-unicode-version="6.0"
+ title="negative squared latin capital letter b"
+ >
+
+ 🅱
+
+ </gl-emoji>
+
+
+ </span>
+
+ <span
+ class="award-control-text js-counter"
+ >
+ 1
+ </span>
+ </button>
+
+ <div
+ class="award-menu-holder"
+ >
+ <button
+ aria-label="Add reaction"
+ class="award-control btn js-add-award js-test-add-button-class"
+ data-boundary="viewport"
+ data-original-title="Add reaction"
+ title=""
+ type="button"
+ >
+ <span
+ class="award-control-icon award-control-icon-neutral"
+ >
+ <gl-icon-stub
+ aria-hidden="true"
+ name="slight-smile"
+ size="16"
+ />
+ </span>
+
+ <span
+ class="award-control-icon award-control-icon-positive"
+ >
+ <gl-icon-stub
+ aria-hidden="true"
+ name="smiley"
+ size="16"
+ />
+ </span>
+
+ <span
+ class="award-control-icon award-control-icon-super-positive"
+ >
+ <gl-icon-stub
+ aria-hidden="true"
+ name="smiley"
+ size="16"
+ />
+ </span>
+
+ <i
+ aria-hidden="true"
+ class="fa fa-spinner fa-spin award-control-icon award-control-icon-loading"
+ />
+ </button>
+ </div>
+</div>
+`;
diff --git a/spec/frontend/vue_shared/components/awards_list_spec.js b/spec/frontend/vue_shared/components/awards_list_spec.js
new file mode 100644
index 00000000000..bb3e60ab9e2
--- /dev/null
+++ b/spec/frontend/vue_shared/components/awards_list_spec.js
@@ -0,0 +1,213 @@
+import { shallowMount } from '@vue/test-utils';
+import AwardsList from '~/vue_shared/components/awards_list.vue';
+
+const createUser = (id, name) => ({ id, name });
+const createAward = (name, user) => ({ name, user });
+
+const USERS = {
+ root: createUser(1, 'Root'),
+ ada: createUser(2, 'Ada'),
+ marie: createUser(3, 'Marie'),
+ jane: createUser(4, 'Jane'),
+ leonardo: createUser(5, 'Leonardo'),
+};
+
+const EMOJI_SMILE = 'smile';
+const EMOJI_OK = 'ok_hand';
+const EMOJI_THUMBSUP = 'thumbsup';
+const EMOJI_THUMBSDOWN = 'thumbsdown';
+const EMOJI_A = 'a';
+const EMOJI_B = 'b';
+const EMOJI_CACTUS = 'cactus';
+const EMOJI_100 = '100';
+
+const TEST_AWARDS = [
+ createAward(EMOJI_SMILE, USERS.ada),
+ createAward(EMOJI_OK, USERS.ada),
+ createAward(EMOJI_THUMBSUP, USERS.ada),
+ createAward(EMOJI_THUMBSDOWN, USERS.ada),
+ createAward(EMOJI_SMILE, USERS.jane),
+ createAward(EMOJI_OK, USERS.jane),
+ createAward(EMOJI_OK, USERS.leonardo),
+ createAward(EMOJI_THUMBSUP, USERS.leonardo),
+ createAward(EMOJI_THUMBSUP, USERS.marie),
+ createAward(EMOJI_THUMBSDOWN, USERS.marie),
+ createAward(EMOJI_THUMBSDOWN, USERS.root),
+ createAward(EMOJI_OK, USERS.root),
+ // Test that emoji list preserves order of occurrence, not alphabetical order
+ createAward(EMOJI_CACTUS, USERS.root),
+ createAward(EMOJI_A, USERS.marie),
+ createAward(EMOJI_B, USERS.root),
+];
+const TEST_ADD_BUTTON_CLASS = 'js-test-add-button-class';
+
+describe('vue_shared/components/awards_list', () => {
+ let wrapper;
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ const createComponent = (props = {}) => {
+ if (wrapper) {
+ throw new Error('There should only be one wrapper created per test');
+ }
+
+ wrapper = shallowMount(AwardsList, { propsData: props });
+ };
+ const matchingEmojiTag = name => expect.stringMatching(`gl-emoji data-name="${name}"`);
+ const findAwardButtons = () => wrapper.findAll('[data-testid="award-button"');
+ const findAwardsData = () =>
+ findAwardButtons().wrappers.map(x => {
+ return {
+ classes: x.classes(),
+ title: x.attributes('data-original-title'),
+ html: x.find('[data-testid="award-html"]').element.innerHTML,
+ count: Number(x.find('.js-counter').text()),
+ };
+ });
+ const findAddAwardButton = () => wrapper.find('.js-add-award');
+
+ describe('default', () => {
+ beforeEach(() => {
+ createComponent({
+ awards: TEST_AWARDS,
+ canAwardEmoji: true,
+ currentUserId: USERS.root.id,
+ addButtonClass: TEST_ADD_BUTTON_CLASS,
+ });
+ });
+
+ it('matches snapshot', () => {
+ expect(wrapper.element).toMatchSnapshot();
+ });
+
+ it('shows awards in correct order', () => {
+ expect(findAwardsData()).toEqual([
+ {
+ classes: ['btn', 'award-control'],
+ count: 3,
+ html: matchingEmojiTag(EMOJI_THUMBSUP),
+ title: 'Ada, Leonardo, and Marie',
+ },
+ {
+ classes: ['btn', 'award-control', 'active'],
+ count: 3,
+ html: matchingEmojiTag(EMOJI_THUMBSDOWN),
+ title: 'You, Ada, and Marie',
+ },
+ {
+ classes: ['btn', 'award-control'],
+ count: 2,
+ html: matchingEmojiTag(EMOJI_SMILE),
+ title: 'Ada and Jane',
+ },
+ {
+ classes: ['btn', 'award-control', 'active'],
+ count: 4,
+ html: matchingEmojiTag(EMOJI_OK),
+ title: 'You, Ada, Jane, and Leonardo',
+ },
+ {
+ classes: ['btn', 'award-control', 'active'],
+ count: 1,
+ html: matchingEmojiTag(EMOJI_CACTUS),
+ title: 'You',
+ },
+ {
+ classes: ['btn', 'award-control'],
+ count: 1,
+ html: matchingEmojiTag(EMOJI_A),
+ title: 'Marie',
+ },
+ {
+ classes: ['btn', 'award-control', 'active'],
+ count: 1,
+ html: matchingEmojiTag(EMOJI_B),
+ title: 'You',
+ },
+ ]);
+ });
+
+ it('with award clicked, it emits award', () => {
+ expect(wrapper.emitted().award).toBeUndefined();
+
+ findAwardButtons()
+ .at(2)
+ .trigger('click');
+
+ expect(wrapper.emitted().award).toEqual([[EMOJI_SMILE]]);
+ });
+
+ it('shows add award button', () => {
+ const btn = findAddAwardButton();
+
+ expect(btn.exists()).toBe(true);
+ expect(btn.classes(TEST_ADD_BUTTON_CLASS)).toBe(true);
+ });
+ });
+
+ describe('with numeric award', () => {
+ beforeEach(() => {
+ createComponent({
+ awards: [createAward(EMOJI_100, USERS.ada)],
+ canAwardEmoji: true,
+ currentUserId: USERS.root.id,
+ });
+ });
+
+ it('when clicked, it emits award as number', () => {
+ expect(wrapper.emitted().award).toBeUndefined();
+
+ findAwardButtons()
+ .at(0)
+ .trigger('click');
+
+ expect(wrapper.emitted().award).toEqual([[Number(EMOJI_100)]]);
+ });
+ });
+
+ describe('with no awards', () => {
+ beforeEach(() => {
+ createComponent({
+ awards: [],
+ canAwardEmoji: true,
+ });
+ });
+
+ it('has no award buttons', () => {
+ expect(findAwardButtons().length).toBe(0);
+ });
+ });
+
+ describe('when cannot award emoji', () => {
+ beforeEach(() => {
+ createComponent({
+ awards: [createAward(EMOJI_CACTUS, USERS.root.id)],
+ canAwardEmoji: false,
+ currentUserId: USERS.marie.id,
+ });
+ });
+
+ it('does not have add button', () => {
+ expect(findAddAwardButton().exists()).toBe(false);
+ });
+ });
+
+ describe('with no user', () => {
+ beforeEach(() => {
+ createComponent({
+ awards: TEST_AWARDS,
+ canAwardEmoji: false,
+ });
+ });
+
+ it('disables award buttons', () => {
+ const buttons = findAwardButtons();
+
+ expect(buttons.length).toBe(7);
+ expect(buttons.wrappers.every(x => x.classes('disabled'))).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/form/__snapshots__/title_spec.js.snap b/spec/frontend/vue_shared/components/form/__snapshots__/title_spec.js.snap
index 980e9b517db..e5035614196 100644
--- a/spec/frontend/vue_shared/components/form/__snapshots__/title_spec.js.snap
+++ b/spec/frontend/vue_shared/components/form/__snapshots__/title_spec.js.snap
@@ -5,8 +5,6 @@ exports[`Title edit field matches the snapshot 1`] = `
label="Title"
label-for="title-field-edit"
>
- <gl-form-input-stub
- id="title-field-edit"
- />
+ <gl-form-input-stub />
</gl-form-group-stub>
`;
diff --git a/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js b/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js
index a2e2d2447d5..2c7fce714f0 100644
--- a/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js
+++ b/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js
@@ -10,8 +10,7 @@ const DEFAULT_PROPS = {
name: 'Administrator',
location: 'Vienna',
bio: null,
- organization: null,
- jobTitle: null,
+ workInformation: null,
status: null,
},
};
@@ -59,8 +58,7 @@ describe('User Popover Component', () => {
username: null,
location: null,
bio: null,
- organization: null,
- jobTitle: null,
+ workInformation: null,
status: null,
},
},
@@ -93,7 +91,7 @@ describe('User Popover Component', () => {
const findWorkInformation = () => wrapper.find({ ref: 'workInformation' });
const findBio = () => wrapper.find({ ref: 'bio' });
- it('should show only bio if organization and job title are not available', () => {
+ it('should show only bio if work information is not available', () => {
const user = { ...DEFAULT_PROPS.user, bio: 'My super interesting bio' };
createWrapper({ user });
@@ -102,27 +100,10 @@ describe('User Popover Component', () => {
expect(findWorkInformation().exists()).toBe(false);
});
- it('should show only organization if job title is not available', () => {
- const user = { ...DEFAULT_PROPS.user, organization: 'GitLab' };
-
- createWrapper({ user });
-
- expect(findWorkInformation().text()).toBe('GitLab');
- });
-
- it('should show only job title if organization is not available', () => {
- const user = { ...DEFAULT_PROPS.user, jobTitle: 'Frontend Engineer' };
-
- createWrapper({ user });
-
- expect(findWorkInformation().text()).toBe('Frontend Engineer');
- });
-
- it('should show organization and job title if they are both available', () => {
+ it('should show work information when it is available', () => {
const user = {
...DEFAULT_PROPS.user,
- organization: 'GitLab',
- jobTitle: 'Frontend Engineer',
+ workInformation: 'Frontend Engineer at GitLab',
};
createWrapper({ user });
@@ -130,17 +111,17 @@ describe('User Popover Component', () => {
expect(findWorkInformation().text()).toBe('Frontend Engineer at GitLab');
});
- it('should display bio and job info in separate lines', () => {
+ it('should display bio and work information in separate lines', () => {
const user = {
...DEFAULT_PROPS.user,
bio: 'My super interesting bio',
- organization: 'GitLab',
+ workInformation: 'Frontend Engineer at GitLab',
};
createWrapper({ user });
expect(findBio().text()).toBe('My super interesting bio');
- expect(findWorkInformation().text()).toBe('GitLab');
+ expect(findWorkInformation().text()).toBe('Frontend Engineer at GitLab');
});
it('should not encode special characters in bio', () => {
@@ -154,40 +135,6 @@ describe('User Popover Component', () => {
expect(findBio().text()).toBe('I like <html> & CSS');
});
- it('should not encode special characters in organization', () => {
- const user = {
- ...DEFAULT_PROPS.user,
- organization: 'Me & my <funky> Company',
- };
-
- createWrapper({ user });
-
- expect(findWorkInformation().text()).toBe('Me & my <funky> Company');
- });
-
- it('should not encode special characters in job title', () => {
- const user = {
- ...DEFAULT_PROPS.user,
- jobTitle: 'Manager & Team Lead',
- };
-
- createWrapper({ user });
-
- expect(findWorkInformation().text()).toBe('Manager & Team Lead');
- });
-
- it('should not encode special characters when both job title and organization are set', () => {
- const user = {
- ...DEFAULT_PROPS.user,
- jobTitle: 'Manager & Team Lead',
- organization: 'Me & my <funky> Company',
- };
-
- createWrapper({ user });
-
- expect(findWorkInformation().text()).toBe('Manager & Team Lead at Me & my <funky> Company');
- });
-
it('shows icon for bio', () => {
const user = {
...DEFAULT_PROPS.user,
@@ -201,10 +148,10 @@ describe('User Popover Component', () => {
);
});
- it('shows icon for organization', () => {
+ it('shows icon for work information', () => {
const user = {
...DEFAULT_PROPS.user,
- organization: 'GitLab',
+ workInformation: 'GitLab',
};
createWrapper({ user });
diff --git a/spec/graphql/resolvers/board_lists_resolver_spec.rb b/spec/graphql/resolvers/board_lists_resolver_spec.rb
new file mode 100644
index 00000000000..5f6c440a8ed
--- /dev/null
+++ b/spec/graphql/resolvers/board_lists_resolver_spec.rb
@@ -0,0 +1,82 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Resolvers::BoardListsResolver do
+ include GraphqlHelpers
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:guest) { create(:user) }
+ let_it_be(:unauth_user) { create(:user) }
+ let_it_be(:project) { create(:project, creator_id: user.id, namespace: user.namespace ) }
+ let_it_be(:group) { create(:group, :private) }
+ let_it_be(:project_label) { create(:label, project: project, name: 'Development') }
+ let_it_be(:group_label) { create(:group_label, group: group, name: 'Development') }
+
+ shared_examples_for 'group and project board lists resolver' do
+ let(:board) { create(:board, resource_parent: board_parent) }
+
+ before do
+ board_parent.add_developer(user)
+ end
+
+ it 'does not create the backlog list' do
+ lists = resolve_board_lists.items
+
+ expect(lists.count).to eq 1
+ expect(lists[0].list_type).to eq 'closed'
+ end
+
+ context 'with unauthorized user' do
+ it 'raises an error' do
+ expect do
+ resolve_board_lists(current_user: unauth_user)
+ end.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
+ end
+
+ context 'when authorized' do
+ let!(:label_list) { create(:list, board: board, label: label) }
+ let!(:backlog_list) { create(:backlog_list, board: board) }
+
+ it 'returns a list of board lists' do
+ lists = resolve_board_lists.items
+
+ expect(lists.count).to eq 3
+ expect(lists.map(&:list_type)).to eq %w(backlog label closed)
+ end
+
+ context 'when another user has list preferences' do
+ before do
+ board.lists.first.update_preferences_for(guest, collapsed: true)
+ end
+
+ it 'returns the complete list of board lists for this user' do
+ lists = resolve_board_lists.items
+
+ expect(lists.count).to eq 3
+ end
+ end
+ end
+ end
+
+ describe '#resolve' do
+ context 'when project boards' do
+ let(:board_parent) { project }
+ let(:label) { project_label }
+
+ it_behaves_like 'group and project board lists resolver'
+ end
+
+ context 'when group boards' do
+ let(:board_parent) { group }
+ let(:label) { group_label }
+
+ it_behaves_like 'group and project board lists resolver'
+ end
+ end
+
+ def resolve_board_lists(args: {}, current_user: user)
+ resolve(described_class, obj: board, args: args, ctx: { current_user: current_user })
+ end
+end
diff --git a/spec/graphql/resolvers/metrics/dashboards/annotation_resolver_spec.rb b/spec/graphql/resolvers/metrics/dashboards/annotation_resolver_spec.rb
new file mode 100644
index 00000000000..c06fbef53b6
--- /dev/null
+++ b/spec/graphql/resolvers/metrics/dashboards/annotation_resolver_spec.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Resolvers::Metrics::Dashboards::AnnotationResolver do
+ include GraphqlHelpers
+
+ describe '#resolve' do
+ context 'user with developer access' do
+ subject(:resolve_annotations) { resolve(described_class, obj: dashboard, args: args, ctx: { current_user: current_user }) }
+
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:environment) { create(:environment) }
+ let_it_be(:path) { 'config/prometheus/common_metrics.yml' }
+ let(:dashboard) { PerformanceMonitoring::PrometheusDashboard.new(path: path, environment: environment) }
+ let(:args) do
+ {
+ from: 10.minutes.ago,
+ to: 5.minutes.ago
+ }
+ end
+
+ before_all do
+ environment.project.add_developer(current_user)
+ end
+
+ context 'with annotation records' do
+ let_it_be(:annotation_1) { create(:metrics_dashboard_annotation, environment: environment, starting_at: 9.minutes.ago, dashboard_path: path) }
+
+ it 'loads annotations with usage of finder class', :aggregate_failures do
+ expect_next_instance_of(::Metrics::Dashboards::AnnotationsFinder, dashboard: dashboard, params: args) do |finder|
+ expect(finder).to receive(:execute).and_return [annotation_1]
+ end
+
+ expect(resolve_annotations).to eql [annotation_1]
+ end
+
+ context 'dashboard is missing' do
+ let(:dashboard) { nil }
+
+ it 'returns empty array', :aggregate_failures do
+ expect(::Metrics::Dashboards::AnnotationsFinder).not_to receive(:new)
+
+ expect(resolve_annotations).to be_empty
+ end
+ end
+
+ context 'there are no annotations records' do
+ it 'returns empty array' do
+ allow_next_instance_of(::Metrics::Dashboards::AnnotationsFinder) do |finder|
+ allow(finder).to receive(:execute).and_return []
+ end
+
+ expect(resolve_annotations).to be_empty
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/graphql/resolvers/projects/jira_imports_resolver_spec.rb b/spec/graphql/resolvers/projects/jira_imports_resolver_spec.rb
index 47889126531..7146bfb441b 100644
--- a/spec/graphql/resolvers/projects/jira_imports_resolver_spec.rb
+++ b/spec/graphql/resolvers/projects/jira_imports_resolver_spec.rb
@@ -16,7 +16,7 @@ describe Resolvers::Projects::JiraImportsResolver do
context 'when anonymous user' do
let(:current_user) { nil }
- it_behaves_like 'no jira import access'
+ it_behaves_like 'no Jira import access'
end
end
@@ -25,7 +25,7 @@ describe Resolvers::Projects::JiraImportsResolver do
project.add_guest(user)
end
- it_behaves_like 'no jira import data present'
+ it_behaves_like 'no Jira import data present'
it 'does not raise access error' do
expect do
@@ -47,14 +47,14 @@ describe Resolvers::Projects::JiraImportsResolver do
stub_feature_flags(jira_issue_import: false)
end
- it_behaves_like 'no jira import access'
+ it_behaves_like 'no Jira import access'
end
context 'when user cannot read Jira imports' do
context 'when anonymous user' do
let(:current_user) { nil }
- it_behaves_like 'no jira import access'
+ it_behaves_like 'no Jira import access'
end
end
diff --git a/spec/graphql/types/board_list_type_spec.rb b/spec/graphql/types/board_list_type_spec.rb
new file mode 100644
index 00000000000..b5c842ae884
--- /dev/null
+++ b/spec/graphql/types/board_list_type_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe GitlabSchema.types['BoardList'] do
+ it { expect(described_class.graphql_name).to eq('BoardList') }
+
+ it 'has specific fields' do
+ expected_fields = %w[id list_type position label]
+
+ expect(described_class).to include_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/metrics/dashboard_type_spec.rb b/spec/graphql/types/metrics/dashboard_type_spec.rb
index 4795fd77537..76f2b4b8935 100644
--- a/spec/graphql/types/metrics/dashboard_type_spec.rb
+++ b/spec/graphql/types/metrics/dashboard_type_spec.rb
@@ -7,9 +7,16 @@ describe GitlabSchema.types['MetricsDashboard'] do
it 'has the expected fields' do
expected_fields = %w[
- path
- ]
+ path annotations
+ ]
expect(described_class).to have_graphql_fields(*expected_fields)
end
+
+ describe 'annotations field' do
+ subject { described_class.fields['annotations'] }
+
+ it { is_expected.to have_graphql_type(Types::Metrics::Dashboards::AnnotationType.connection_type) }
+ it { is_expected.to have_graphql_resolver(Resolvers::Metrics::Dashboards::AnnotationResolver) }
+ end
end
diff --git a/spec/graphql/types/metrics/dashboards/annotation_type_spec.rb b/spec/graphql/types/metrics/dashboards/annotation_type_spec.rb
new file mode 100644
index 00000000000..2956a2512eb
--- /dev/null
+++ b/spec/graphql/types/metrics/dashboards/annotation_type_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe GitlabSchema.types['MetricsDashboardAnnotation'] do
+ it { expect(described_class.graphql_name).to eq('MetricsDashboardAnnotation') }
+
+ it 'has the expected fields' do
+ expected_fields = %w[
+ description id panel_id starting_at ending_at
+ ]
+
+ expect(described_class).to have_graphql_fields(*expected_fields)
+ end
+
+ it { expect(described_class).to require_graphql_authorizations(:read_metrics_dashboard_annotation) }
+end
diff --git a/spec/graphql/types/user_type_spec.rb b/spec/graphql/types/user_type_spec.rb
index 8c76ce43e95..8c9fad86a2d 100644
--- a/spec/graphql/types/user_type_spec.rb
+++ b/spec/graphql/types/user_type_spec.rb
@@ -9,7 +9,7 @@ describe GitlabSchema.types['User'] do
it 'has the expected fields' do
expected_fields = %w[
- user_permissions snippets name username avatarUrl webUrl todos
+ id user_permissions snippets name username avatarUrl webUrl todos
]
expect(described_class).to have_graphql_fields(*expected_fields)
diff --git a/spec/haml_lint/linter/no_plain_nodes_spec.rb b/spec/haml_lint/linter/no_plain_nodes_spec.rb
index 08deb5a4e9e..dc647467db6 100644
--- a/spec/haml_lint/linter/no_plain_nodes_spec.rb
+++ b/spec/haml_lint/linter/no_plain_nodes_spec.rb
@@ -53,4 +53,42 @@ describe HamlLint::Linter::NoPlainNodes do
it { is_expected.to report_lint count: 3 }
end
+
+ context 'does not report when a html entity' do
+ let(:haml) { '%tag &nbsp;' }
+
+ it { is_expected.not_to report_lint }
+ end
+
+ context 'does report when something that looks like a html entity' do
+ let(:haml) { '%tag &some text;' }
+
+ it { is_expected.to report_lint }
+ end
+
+ context 'does not report multiline when one or more html entities' do
+ %w(&nbsp;&gt; &#x000A9; &#187;).each do |elem|
+ let(:haml) { <<-HAML }
+ %tag
+ #{elem}
+ HAML
+
+ it elem do
+ is_expected.not_to report_lint
+ end
+ end
+ end
+
+ context 'does report multiline when one or more html entities amidst plain text' do
+ %w(&nbsp;Test Test&gt; &#x000A9;Hello &nbsp;Hello&#187;).each do |elem|
+ let(:haml) { <<-HAML }
+ %tag
+ #{elem}
+ HAML
+
+ it elem do
+ is_expected.to report_lint
+ end
+ end
+ end
end
diff --git a/spec/helpers/environments_helper_spec.rb b/spec/helpers/environments_helper_spec.rb
index 152e9c84ec5..0756e0162a5 100644
--- a/spec/helpers/environments_helper_spec.rb
+++ b/spec/helpers/environments_helper_spec.rb
@@ -37,10 +37,26 @@ describe EnvironmentsHelper do
'environment-state' => environment.state,
'custom-metrics-path' => project_prometheus_metrics_path(project),
'validate-query-path' => validate_query_project_prometheus_metrics_path(project),
- 'custom-metrics-available' => 'true'
+ 'custom-metrics-available' => 'true',
+ 'alerts-endpoint' => project_prometheus_alerts_path(project, environment_id: environment.id, format: :json),
+ 'prometheus-alerts-available' => 'true'
)
end
+ context 'without read_prometheus_alerts permission' do
+ before do
+ allow(helper).to receive(:can?)
+ .with(user, :read_prometheus_alerts, project)
+ .and_return(false)
+ end
+
+ it 'returns false' do
+ expect(metrics_data).to include(
+ 'prometheus-alerts-available' => 'false'
+ )
+ end
+ end
+
context 'with metrics_setting' do
before do
create(:project_metrics_setting, project: project, external_dashboard_url: 'http://gitlab.com')
diff --git a/spec/helpers/projects/alert_management_helper_spec.rb b/spec/helpers/projects/alert_management_helper_spec.rb
new file mode 100644
index 00000000000..ee180cef692
--- /dev/null
+++ b/spec/helpers/projects/alert_management_helper_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Projects::AlertManagementHelper do
+ include Gitlab::Routing.url_helpers
+
+ let(:project) { create(:project) }
+
+ describe '#alert_management_data' do
+ let(:setting_path) { project_settings_operations_path(project) }
+
+ let(:index_path) do
+ project_alert_management_index_path(project, format: :json)
+ end
+
+ context 'without alert_managements_setting' do
+ it 'returns frontend configuration' do
+ expect(alert_management_data(project)).to eq(
+ 'index-path' => index_path,
+ 'enable-alert-management-path' => setting_path,
+ "empty-alert-svg-path" => "/images/illustrations/alert-management-empty-state.svg"
+ )
+ end
+ end
+ end
+end
diff --git a/spec/helpers/snippets_helper_spec.rb b/spec/helpers/snippets_helper_spec.rb
index b5b431b5818..6fdf4f5cfb4 100644
--- a/spec/helpers/snippets_helper_spec.rb
+++ b/spec/helpers/snippets_helper_spec.rb
@@ -151,35 +151,4 @@ describe SnippetsHelper do
"<input type=\"text\" readonly=\"readonly\" class=\"js-snippet-url-area snippet-embed-input form-control\" data-url=\"#{url}\" value=\"<script src=&quot;#{url}.js&quot;></script>\" autocomplete=\"off\"></input>"
end
end
-
- describe '#snippet_file_name' do
- subject { helper.snippet_file_name(snippet) }
-
- where(:snippet_type, :flag_enabled, :trait, :filename) do
- [
- [:personal_snippet, false, nil, 'foo.txt'],
- [:personal_snippet, true, nil, 'foo.txt'],
- [:personal_snippet, false, :repository, 'foo.txt'],
- [:personal_snippet, true, :repository, '.gitattributes'],
-
- [:project_snippet, false, nil, 'foo.txt'],
- [:project_snippet, true, nil, 'foo.txt'],
- [:project_snippet, false, :repository, 'foo.txt'],
- [:project_snippet, true, :repository, '.gitattributes']
- ]
- end
-
- with_them do
- let(:snippet) { create(snippet_type, trait, file_name: 'foo.txt') }
-
- before do
- allow(helper).to receive(:current_user).and_return(snippet.author)
- stub_feature_flags(version_snippets: flag_enabled)
- end
-
- it 'returns the correct filename' do
- expect(subject).to eq filename
- end
- end
- end
end
diff --git a/spec/initializers/lograge_spec.rb b/spec/initializers/lograge_spec.rb
index 0068b894474..48acdac74ac 100644
--- a/spec/initializers/lograge_spec.rb
+++ b/spec/initializers/lograge_spec.rb
@@ -64,11 +64,11 @@ describe 'lograge', type: :request do
)
expect(Lograge.formatter).to receive(:call)
- .with(a_hash_including(cpu_s: 0.1111115))
+ .with(a_hash_including(cpu_s: 0.11))
.and_call_original
expect(Lograge.logger).to receive(:send)
- .with(anything, include('"cpu_s":0.1111115'))
+ .with(anything, include('"cpu_s":0.11'))
.and_call_original
subject
diff --git a/spec/javascripts/pipelines/graph/graph_component_spec.js b/spec/javascripts/pipelines/graph/graph_component_spec.js
deleted file mode 100644
index d2c10362ba3..00000000000
--- a/spec/javascripts/pipelines/graph/graph_component_spec.js
+++ /dev/null
@@ -1,274 +0,0 @@
-import Vue from 'vue';
-import mountComponent from 'spec/helpers/vue_mount_component_helper';
-import PipelineStore from '~/pipelines/stores/pipeline_store';
-import graphComponent from '~/pipelines/components/graph/graph_component.vue';
-import graphJSON from './mock_data';
-import linkedPipelineJSON from '../linked_pipelines_mock.json';
-import PipelinesMediator from '~/pipelines/pipeline_details_mediator';
-
-describe('graph component', () => {
- const GraphComponent = Vue.extend(graphComponent);
- const store = new PipelineStore();
- store.storePipeline(linkedPipelineJSON);
- const mediator = new PipelinesMediator({ endpoint: '' });
-
- let component;
-
- beforeEach(() => {
- setFixtures(`
- <div class="layout-page"></div>
- `);
- });
-
- afterEach(() => {
- component.$destroy();
- });
-
- describe('while is loading', () => {
- it('should render a loading icon', () => {
- component = mountComponent(GraphComponent, {
- isLoading: true,
- pipeline: {},
- mediator,
- });
-
- expect(component.$el.querySelector('.loading-icon')).toBeDefined();
- });
- });
-
- describe('with data', () => {
- it('should render the graph', () => {
- component = mountComponent(GraphComponent, {
- isLoading: false,
- pipeline: graphJSON,
- mediator,
- });
-
- expect(component.$el.classList.contains('js-pipeline-graph')).toEqual(true);
-
- expect(
- component.$el.querySelector('.stage-column:first-child').classList.contains('no-margin'),
- ).toEqual(true);
-
- expect(
- component.$el.querySelector('.stage-column:nth-child(2)').classList.contains('left-margin'),
- ).toEqual(true);
-
- expect(
- component.$el
- .querySelector('.stage-column:nth-child(2) .build:nth-child(1)')
- .classList.contains('left-connector'),
- ).toEqual(true);
-
- expect(component.$el.querySelector('loading-icon')).toBe(null);
-
- expect(component.$el.querySelector('.stage-column-list')).toBeDefined();
- });
- });
-
- describe('when linked pipelines are present', () => {
- beforeEach(() => {
- component = mountComponent(GraphComponent, {
- isLoading: false,
- pipeline: store.state.pipeline,
- mediator,
- });
- });
-
- describe('rendered output', () => {
- it('should include the pipelines graph', () => {
- expect(component.$el.classList.contains('js-pipeline-graph')).toEqual(true);
- });
-
- it('should not include the loading icon', () => {
- expect(component.$el.querySelector('.fa-spinner')).toBeNull();
- });
-
- it('should include the stage column list', () => {
- expect(component.$el.querySelector('.stage-column-list')).not.toBeNull();
- });
-
- it('should include the no-margin class on the first child', () => {
- const firstStageColumnElement = component.$el.querySelector(
- '.stage-column-list .stage-column',
- );
-
- expect(firstStageColumnElement.classList.contains('no-margin')).toEqual(true);
- });
-
- it('should include the has-only-one-job class on the first child', () => {
- const firstStageColumnElement = component.$el.querySelector(
- '.stage-column-list .stage-column',
- );
-
- expect(firstStageColumnElement.classList.contains('has-only-one-job')).toEqual(true);
- });
-
- it('should include the left-margin class on the second child', () => {
- const firstStageColumnElement = component.$el.querySelector(
- '.stage-column-list .stage-column:last-child',
- );
-
- expect(firstStageColumnElement.classList.contains('left-margin')).toEqual(true);
- });
-
- it('should include the js-has-linked-pipelines flag', () => {
- expect(component.$el.querySelector('.js-has-linked-pipelines')).not.toBeNull();
- });
- });
-
- describe('computeds and methods', () => {
- describe('capitalizeStageName', () => {
- it('it capitalizes the stage name', () => {
- expect(component.capitalizeStageName('mystage')).toBe('Mystage');
- });
- });
-
- describe('stageConnectorClass', () => {
- it('it returns left-margin when there is a triggerer', () => {
- expect(component.stageConnectorClass(0, { groups: ['job'] })).toBe('no-margin');
- });
- });
- });
-
- describe('linked pipelines components', () => {
- beforeEach(() => {
- component = mountComponent(GraphComponent, {
- isLoading: false,
- pipeline: store.state.pipeline,
- mediator,
- });
- });
-
- it('should render an upstream pipelines column', () => {
- expect(component.$el.querySelector('.linked-pipelines-column')).not.toBeNull();
- expect(component.$el.innerHTML).toContain('Upstream');
- });
-
- it('should render a downstream pipelines column', () => {
- expect(component.$el.querySelector('.linked-pipelines-column')).not.toBeNull();
- expect(component.$el.innerHTML).toContain('Downstream');
- });
-
- describe('triggered by', () => {
- describe('on click', () => {
- it('should emit `onClickTriggeredBy` when triggered by linked pipeline is clicked', () => {
- spyOn(component, '$emit');
-
- component.$el.querySelector('#js-linked-pipeline-12').click();
-
- expect(component.$emit).toHaveBeenCalledWith(
- 'onClickTriggeredBy',
- component.pipeline.triggered_by[0],
- );
- });
- });
-
- describe('with expanded pipeline', () => {
- it('should render expanded pipeline', done => {
- // expand the pipeline
- store.state.pipeline.triggered_by[0].isExpanded = true;
-
- component = mountComponent(GraphComponent, {
- isLoading: false,
- pipeline: store.state.pipeline,
- mediator,
- });
-
- Vue.nextTick()
- .then(() => {
- expect(component.$el.querySelector('.js-upstream-pipeline-12')).not.toBeNull();
- })
- .then(done)
- .catch(done.fail);
- });
- });
- });
-
- describe('triggered', () => {
- describe('on click', () => {
- it('should emit `onClickTriggered`', () => {
- spyOn(component, '$emit');
- spyOn(component, 'calculateMarginTop').and.callFake(() => '16px');
-
- component.$el.querySelector('#js-linked-pipeline-34993051').click();
-
- expect(component.$emit).toHaveBeenCalledWith(
- 'onClickTriggered',
- component.pipeline.triggered[0],
- );
- });
- });
-
- describe('with expanded pipeline', () => {
- it('should render expanded pipeline', done => {
- // expand the pipeline
- store.state.pipeline.triggered[0].isExpanded = true;
-
- component = mountComponent(GraphComponent, {
- isLoading: false,
- pipeline: store.state.pipeline,
- mediator,
- });
-
- Vue.nextTick()
- .then(() => {
- expect(
- component.$el.querySelector('.js-downstream-pipeline-34993051'),
- ).not.toBeNull();
- })
- .then(done)
- .catch(done.fail);
- });
- });
- });
- });
- });
-
- describe('when linked pipelines are not present', () => {
- beforeEach(() => {
- const pipeline = Object.assign(linkedPipelineJSON, { triggered: null, triggered_by: null });
- component = mountComponent(GraphComponent, {
- isLoading: false,
- pipeline,
- mediator,
- });
- });
-
- describe('rendered output', () => {
- it('should include the first column with a no margin', () => {
- const firstColumn = component.$el.querySelector('.stage-column:first-child');
-
- expect(firstColumn.classList.contains('no-margin')).toEqual(true);
- });
-
- it('should not render a linked pipelines column', () => {
- expect(component.$el.querySelector('.linked-pipelines-column')).toBeNull();
- });
- });
-
- describe('stageConnectorClass', () => {
- it('it returns left-margin when no triggerer and there is one job', () => {
- expect(component.stageConnectorClass(0, { groups: ['job'] })).toBe('no-margin');
- });
-
- it('it returns left-margin when no triggerer and not the first stage', () => {
- expect(component.stageConnectorClass(99, { groups: ['job'] })).toBe('left-margin');
- });
- });
- });
-
- describe('capitalizeStageName', () => {
- it('capitalizes and escapes stage name', () => {
- component = mountComponent(GraphComponent, {
- isLoading: false,
- pipeline: graphJSON,
- mediator,
- });
-
- expect(
- component.$el.querySelector('.stage-column:nth-child(2) .stage-name').textContent.trim(),
- ).toEqual('Deploy &lt;img src=x onerror=alert(document.domain)&gt;');
- });
- });
-});
diff --git a/spec/javascripts/pipelines/graph/job_name_component_spec.js b/spec/javascripts/pipelines/graph/job_name_component_spec.js
deleted file mode 100644
index c861d452dd0..00000000000
--- a/spec/javascripts/pipelines/graph/job_name_component_spec.js
+++ /dev/null
@@ -1,27 +0,0 @@
-import Vue from 'vue';
-import jobNameComponent from '~/pipelines/components/graph/job_name_component.vue';
-
-describe('job name component', () => {
- let component;
-
- beforeEach(() => {
- const JobNameComponent = Vue.extend(jobNameComponent);
- component = new JobNameComponent({
- propsData: {
- name: 'foo',
- status: {
- icon: 'status_success',
- },
- },
- }).$mount();
- });
-
- it('should render the provided name', () => {
- expect(component.$el.querySelector('.ci-status-text').textContent.trim()).toEqual('foo');
- });
-
- it('should render an icon with the provided status', () => {
- expect(component.$el.querySelector('.ci-status-icon-success')).toBeDefined();
- expect(component.$el.querySelector('.ci-status-icon-success svg')).toBeDefined();
- });
-});
diff --git a/spec/javascripts/pipelines/graph/linked_pipelines_column_spec.js b/spec/javascripts/pipelines/graph/linked_pipelines_column_spec.js
deleted file mode 100644
index 613ab2a906f..00000000000
--- a/spec/javascripts/pipelines/graph/linked_pipelines_column_spec.js
+++ /dev/null
@@ -1,43 +0,0 @@
-import Vue from 'vue';
-import mountComponent from 'spec/helpers/vue_mount_component_helper';
-import LinkedPipelinesColumn from '~/pipelines/components/graph/linked_pipelines_column.vue';
-import mockData from './linked_pipelines_mock_data';
-
-describe('Linked Pipelines Column', () => {
- const Component = Vue.extend(LinkedPipelinesColumn);
- const props = {
- columnTitle: 'Upstream',
- linkedPipelines: mockData.triggered,
- graphPosition: 'right',
- projectId: 19,
- };
- let vm;
-
- beforeEach(() => {
- vm = mountComponent(Component, props);
- });
-
- afterEach(() => {
- vm.$destroy();
- });
-
- it('renders the pipeline orientation', () => {
- const titleElement = vm.$el.querySelector('.linked-pipelines-column-title');
-
- expect(titleElement.innerText).toContain(props.columnTitle);
- });
-
- it('has the correct number of linked pipeline child components', () => {
- expect(vm.$children.length).toBe(props.linkedPipelines.length);
- });
-
- it('renders the correct number of linked pipelines', () => {
- const linkedPipelineElements = vm.$el.querySelectorAll('.linked-pipeline');
-
- expect(linkedPipelineElements.length).toBe(props.linkedPipelines.length);
- });
-
- it('renders cross project triangle when column is upstream', () => {
- expect(vm.$el.querySelector('.cross-project-triangle')).toBeDefined();
- });
-});
diff --git a/spec/javascripts/pipelines/graph/linked_pipelines_mock_data.js b/spec/javascripts/pipelines/graph/linked_pipelines_mock_data.js
deleted file mode 100644
index 3079d5e4e68..00000000000
--- a/spec/javascripts/pipelines/graph/linked_pipelines_mock_data.js
+++ /dev/null
@@ -1,3 +0,0 @@
-import mockData from '../../../frontend/pipelines/graph/linked_pipelines_mock_data';
-
-export default mockData;
diff --git a/spec/javascripts/pipelines/graph/stage_column_component_spec.js b/spec/javascripts/pipelines/graph/stage_column_component_spec.js
deleted file mode 100644
index dbfeeae43fe..00000000000
--- a/spec/javascripts/pipelines/graph/stage_column_component_spec.js
+++ /dev/null
@@ -1,122 +0,0 @@
-import Vue from 'vue';
-import mountComponent from 'spec/helpers/vue_mount_component_helper';
-import stageColumnComponent from '~/pipelines/components/graph/stage_column_component.vue';
-
-describe('stage column component', () => {
- let component;
- const StageColumnComponent = Vue.extend(stageColumnComponent);
-
- const mockJob = {
- id: 4250,
- name: 'test',
- status: {
- icon: 'status_success',
- text: 'passed',
- label: 'passed',
- group: 'success',
- details_path: '/root/ci-mock/builds/4250',
- action: {
- icon: 'retry',
- title: 'Retry',
- path: '/root/ci-mock/builds/4250/retry',
- method: 'post',
- },
- },
- };
-
- beforeEach(() => {
- const mockGroups = [];
- for (let i = 0; i < 3; i += 1) {
- const mockedJob = Object.assign({}, mockJob);
- mockedJob.id += i;
- mockGroups.push(mockedJob);
- }
-
- component = mountComponent(StageColumnComponent, {
- title: 'foo',
- groups: mockGroups,
- hasTriggeredBy: false,
- });
- });
-
- it('should render provided title', () => {
- expect(component.$el.querySelector('.stage-name').textContent.trim()).toEqual('foo');
- });
-
- it('should render the provided groups', () => {
- expect(component.$el.querySelectorAll('.builds-container > ul > li').length).toEqual(3);
- });
-
- describe('jobId', () => {
- it('escapes job name', () => {
- component = mountComponent(StageColumnComponent, {
- groups: [
- {
- id: 4259,
- name: '<img src=x onerror=alert(document.domain)>',
- status: {
- icon: 'status_success',
- label: 'success',
- tooltip: '<img src=x onerror=alert(document.domain)>',
- },
- },
- ],
- title: 'test',
- hasTriggeredBy: false,
- });
-
- expect(component.$el.querySelector('.builds-container li').getAttribute('id')).toEqual(
- 'ci-badge-&lt;img src=x onerror=alert(document.domain)&gt;',
- );
- });
- });
-
- describe('with action', () => {
- it('renders action button', () => {
- component = mountComponent(StageColumnComponent, {
- groups: [
- {
- id: 4259,
- name: '<img src=x onerror=alert(document.domain)>',
- status: {
- icon: 'status_success',
- label: 'success',
- tooltip: '<img src=x onerror=alert(document.domain)>',
- },
- },
- ],
- title: 'test',
- hasTriggeredBy: false,
- action: {
- icon: 'play',
- title: 'Play all',
- path: 'action',
- },
- });
-
- expect(component.$el.querySelector('.js-stage-action')).not.toBeNull();
- });
- });
-
- describe('without action', () => {
- it('does not render action button', () => {
- component = mountComponent(StageColumnComponent, {
- groups: [
- {
- id: 4259,
- name: '<img src=x onerror=alert(document.domain)>',
- status: {
- icon: 'status_success',
- label: 'success',
- tooltip: '<img src=x onerror=alert(document.domain)>',
- },
- },
- ],
- title: 'test',
- hasTriggeredBy: false,
- });
-
- expect(component.$el.querySelector('.js-stage-action')).toBeNull();
- });
- });
-});
diff --git a/spec/javascripts/reports/components/grouped_test_reports_app_spec.js b/spec/javascripts/reports/components/grouped_test_reports_app_spec.js
index bafc47c952a..9d7150d95cd 100644
--- a/spec/javascripts/reports/components/grouped_test_reports_app_spec.js
+++ b/spec/javascripts/reports/components/grouped_test_reports_app_spec.js
@@ -4,6 +4,7 @@ import axios from '~/lib/utils/axios_utils';
import state from '~/reports/store/state';
import component from '~/reports/components/grouped_test_reports_app.vue';
import mountComponent from '../../helpers/vue_mount_component_helper';
+import { failedReport } from '../mock_data/mock_data';
import newFailedTestReports from '../mock_data/new_failures_report.json';
import newErrorsTestReports from '../mock_data/new_errors_report.json';
import successTestReports from '../mock_data/no_failures_report.json';
@@ -199,6 +200,26 @@ describe('Grouped Test Reports App', () => {
});
});
+ describe('with a report that failed to load', () => {
+ beforeEach(() => {
+ mock.onGet('test_results.json').reply(200, failedReport, {});
+ vm = mountComponent(Component, {
+ endpoint: 'test_results.json',
+ });
+ });
+
+ it('renders an error status for the report', done => {
+ setTimeout(() => {
+ const { name } = failedReport.suites[0];
+
+ expect(vm.$el.querySelector('.report-block-list-issue').textContent).toContain(
+ `An error occurred while loading ${name} results`,
+ );
+ done();
+ });
+ });
+ });
+
describe('with error', () => {
beforeEach(() => {
mock.onGet('test_results.json').reply(500, {}, {});
diff --git a/spec/javascripts/reports/mock_data/mock_data.js b/spec/javascripts/reports/mock_data/mock_data.js
index 0d90253bad2..3caaab2fd79 100644
--- a/spec/javascripts/reports/mock_data/mock_data.js
+++ b/spec/javascripts/reports/mock_data/mock_data.js
@@ -1,4 +1,3 @@
-// eslint-disable-next-line import/prefer-default-export
export const issue = {
result: 'failure',
name: 'Test#sum when a is 1 and b is 2 returns summary',
@@ -6,3 +5,20 @@ export const issue = {
system_output:
"Failure/Error: is_expected.to eq(3)\n\n expected: 3\n got: -1\n\n (compared using ==)\n./spec/test_spec.rb:12:in `block (4 levels) in \u003ctop (required)\u003e'",
};
+
+export const failedReport = {
+ summary: { total: 11, resolved: 0, errored: 2, failed: 0 },
+ suites: [
+ {
+ name: 'rspec:pg',
+ status: 'error',
+ summary: { total: 0, resolved: 0, errored: 0, failed: 0 },
+ new_failures: [],
+ resolved_failures: [],
+ existing_failures: [],
+ new_errors: [],
+ resolved_errors: [],
+ existing_errors: [],
+ },
+ ],
+};
diff --git a/spec/lib/api/entities/project_import_failed_relation_spec.rb b/spec/lib/api/entities/project_import_failed_relation_spec.rb
new file mode 100644
index 00000000000..f8330713480
--- /dev/null
+++ b/spec/lib/api/entities/project_import_failed_relation_spec.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe API::Entities::ProjectImportFailedRelation do
+ describe '#as_json' do
+ subject { entity.as_json }
+
+ let(:import_failure) { build(:import_failure) }
+ let(:entity) { described_class.new(import_failure) }
+
+ it 'includes basic fields', :aggregate_failures do
+ expect(subject).to eq(
+ id: import_failure.id,
+ created_at: import_failure.created_at,
+ exception_class: import_failure.exception_class,
+ exception_message: import_failure.exception_message,
+ relation_name: import_failure.relation_key,
+ source: import_failure.source
+ )
+ end
+ end
+end
diff --git a/spec/lib/api/entities/project_import_status_spec.rb b/spec/lib/api/entities/project_import_status_spec.rb
new file mode 100644
index 00000000000..650f9c156a3
--- /dev/null
+++ b/spec/lib/api/entities/project_import_status_spec.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe API::Entities::ProjectImportStatus do
+ describe '#as_json' do
+ subject { entity.as_json }
+
+ let(:correlation_id) { 'cid' }
+
+ context 'when import has not finished yet' do
+ let(:project) { create(:project, :import_scheduled, import_correlation_id: correlation_id) }
+ let(:entity) { described_class.new(project) }
+
+ it 'includes basic fields and no failures', :aggregate_failures do
+ expect(subject[:import_status]).to eq('scheduled')
+ expect(subject[:correlation_id]).to eq(correlation_id)
+ expect(subject[:import_error]).to be_nil
+ expect(subject[:failed_relations]).to eq([])
+ end
+ end
+
+ context 'when import has finished with failed relations' do
+ let(:project) { create(:project, :import_finished, import_correlation_id: correlation_id) }
+ let(:entity) { described_class.new(project) }
+
+ it 'includes basic fields with failed relations', :aggregate_failures do
+ create(:import_failure, :hard_failure, project: project, correlation_id_value: correlation_id)
+
+ expect(subject[:import_status]).to eq('finished')
+ expect(subject[:correlation_id]).to eq(correlation_id)
+ expect(subject[:import_error]).to be_nil
+ expect(subject[:failed_relations]).not_to be_empty
+ end
+ end
+
+ context 'when import has failed' do
+ let(:project) { create(:project, :import_failed, import_correlation_id: correlation_id, import_last_error: 'error') }
+ let(:entity) { described_class.new(project) }
+
+ it 'includes basic fields with import error', :aggregate_failures do
+ expect(subject[:import_status]).to eq('failed')
+ expect(subject[:correlation_id]).to eq(correlation_id)
+ expect(subject[:import_error]).to eq('error')
+ expect(subject[:failed_relations]).to eq([])
+ end
+ end
+ end
+end
diff --git a/spec/lib/api/entities/snippet_spec.rb b/spec/lib/api/entities/snippet_spec.rb
new file mode 100644
index 00000000000..a6cc96838e1
--- /dev/null
+++ b/spec/lib/api/entities/snippet_spec.rb
@@ -0,0 +1,118 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ::API::Entities::Snippet do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:personal_snippet) { create(:personal_snippet, :repository, author: user ) }
+ let_it_be(:project_snippet) { create(:project_snippet, :repository, author: user) }
+
+ let(:entity) { described_class.new(snippet) }
+
+ subject { entity.as_json }
+
+ shared_examples 'common attributes' do
+ it { expect(subject[:id]).to eq snippet.id }
+ it { expect(subject[:title]).to eq snippet.title }
+ it { expect(subject[:description]).to eq snippet.description }
+ it { expect(subject[:updated_at]).to eq snippet.updated_at }
+ it { expect(subject[:created_at]).to eq snippet.created_at }
+ it { expect(subject[:project_id]).to eq snippet.project_id }
+ it { expect(subject[:visibility]).to eq snippet.visibility }
+ it { expect(subject).to include(:author) }
+
+ describe 'file_name' do
+ it 'returns attribute from repository' do
+ expect(subject[:file_name]).to eq snippet.blobs.first.path
+ end
+
+ context 'when feature flag :version_snippets is disabled' do
+ it 'returns attribute from db' do
+ stub_feature_flags(version_snippets: false)
+
+ expect(subject[:file_name]).to eq snippet.file_name
+ end
+ end
+
+ context 'when repository is empty' do
+ it 'returns attribute from db' do
+ allow(snippet.repository).to receive(:empty?).and_return(true)
+
+ expect(subject[:file_name]).to eq snippet.file_name
+ end
+ end
+ end
+
+ describe 'ssh_url_to_repo' do
+ it 'returns attribute' do
+ expect(subject[:ssh_url_to_repo]).to eq snippet.ssh_url_to_repo
+ end
+
+ context 'when feature flag :version_snippets is disabled' do
+ it 'does not include attribute' do
+ stub_feature_flags(version_snippets: false)
+
+ expect(subject).not_to include(:ssh_url_to_repo)
+ end
+ end
+
+ context 'when repository does not exist' do
+ it 'does not include attribute' do
+ allow(snippet).to receive(:repository_exists?).and_return(false)
+
+ expect(subject).not_to include(:ssh_url_to_repo)
+ end
+ end
+ end
+
+ describe 'http_url_to_repo' do
+ it 'returns attribute' do
+ expect(subject[:http_url_to_repo]).to eq snippet.http_url_to_repo
+ end
+
+ context 'when feature flag :version_snippets is disabled' do
+ it 'does not include attribute' do
+ stub_feature_flags(version_snippets: false)
+
+ expect(subject).not_to include(:http_url_to_repo)
+ end
+ end
+
+ context 'when repository does not exist' do
+ it 'does not include attribute' do
+ allow(snippet).to receive(:repository_exists?).and_return(false)
+
+ expect(subject).not_to include(:http_url_to_repo)
+ end
+ end
+ end
+ end
+
+ context 'with PersonalSnippet' do
+ let(:snippet) { personal_snippet }
+
+ it_behaves_like 'common attributes'
+
+ it 'returns snippet web_url attribute' do
+ expect(subject[:web_url]).to match("/snippets/#{snippet.id}")
+ end
+
+ it 'returns snippet raw_url attribute' do
+ expect(subject[:raw_url]).to match("/snippets/#{snippet.id}/raw")
+ end
+ end
+
+ context 'with ProjectSnippet' do
+ let(:snippet) { project_snippet }
+
+ it_behaves_like 'common attributes'
+
+ it 'returns snippet web_url attribute' do
+ expect(subject[:web_url]).to match("#{snippet.project.full_path}/snippets/#{snippet.id}")
+ end
+
+ it 'returns snippet raw_url attribute' do
+ expect(subject[:raw_url]).to match("#{snippet.project.full_path}/snippets/#{snippet.id}/raw")
+ end
+ end
+end
diff --git a/spec/lib/api/entities/user_spec.rb b/spec/lib/api/entities/user_spec.rb
new file mode 100644
index 00000000000..20524b197e0
--- /dev/null
+++ b/spec/lib/api/entities/user_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe API::Entities::User do
+ let(:user) { create(:user) }
+ let(:current_user) { create(:user) }
+
+ subject { described_class.new(user, current_user: current_user).as_json }
+
+ it 'exposes correct attributes' do
+ expect(subject).to include(:bio, :location, :public_email, :skype, :linkedin, :twitter, :website_url, :organization, :job_title, :work_information)
+ end
+
+ it 'exposes created_at if the current user can read the user profile' do
+ allow(Ability).to receive(:allowed?).with(current_user, :read_user_profile, user).and_return(true)
+
+ expect(subject).to include(:created_at)
+ end
+
+ it 'does not expose created_at if the current user cannot read the user profile' do
+ allow(Ability).to receive(:allowed?).with(current_user, :read_user_profile, user).and_return(false)
+
+ expect(subject).not_to include(:created_at)
+ end
+end
diff --git a/spec/lib/api/validations/validators/limit_spec.rb b/spec/lib/api/validations/validators/limit_spec.rb
new file mode 100644
index 00000000000..600f74e1fb2
--- /dev/null
+++ b/spec/lib/api/validations/validators/limit_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe API::Validations::Validators::Limit do
+ include ApiValidatorsHelpers
+
+ subject do
+ described_class.new(['test'], 255, false, scope.new)
+ end
+
+ context 'valid limit param' do
+ it 'does not raise a validation error' do
+ expect_no_validation_error('test' => '123-456')
+ expect_no_validation_error('test' => '00000000-ffff-0000-ffff-000000000000')
+ expect_no_validation_error('test' => "#{'a' * 255}")
+ end
+ end
+
+ context 'longer than limit param' do
+ it 'raises a validation error' do
+ expect_validation_error('test' => "#{'a' * 256}")
+ end
+ end
+end
diff --git a/spec/lib/banzai/pipeline_spec.rb b/spec/lib/banzai/pipeline_spec.rb
new file mode 100644
index 00000000000..eeff7287ff5
--- /dev/null
+++ b/spec/lib/banzai/pipeline_spec.rb
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Banzai::Pipeline do
+ describe '.[]' do
+ subject { described_class[name] }
+
+ shared_examples 'error' do |exception, message|
+ it do
+ expect { subject }.to raise_error(exception, message)
+ end
+ end
+
+ context 'for nil' do
+ let(:name) { nil }
+
+ it { is_expected.to eq(Banzai::Pipeline::FullPipeline) }
+ end
+
+ context 'for symbols' do
+ context 'when known' do
+ let(:name) { :full }
+
+ it { is_expected.to eq(Banzai::Pipeline::FullPipeline) }
+ end
+
+ context 'when unknown' do
+ let(:name) { :unknown }
+
+ it_behaves_like 'error', NameError,
+ 'uninitialized constant Banzai::Pipeline::UnknownPipeline'
+ end
+ end
+
+ context 'for classes' do
+ let(:name) { klass }
+
+ context 'subclassing Banzai::Pipeline::BasePipeline' do
+ let(:klass) { Class.new(Banzai::Pipeline::BasePipeline) }
+
+ it { is_expected.to eq(klass) }
+ end
+
+ context 'subclassing other types' do
+ let(:klass) { Class.new(Banzai::RenderContext) }
+
+ before do
+ stub_const('Foo', klass)
+ end
+
+ it_behaves_like 'error', ArgumentError,
+ 'unsupported pipeline name Foo (Class)'
+ end
+ end
+
+ context 'for other types' do
+ let(:name) { 'label' }
+
+ it_behaves_like 'error', ArgumentError,
+ 'unsupported pipeline name "label" (String)'
+ end
+ end
+end
diff --git a/spec/lib/csv_builder_spec.rb b/spec/lib/csv_builder_spec.rb
new file mode 100644
index 00000000000..0d5e2b81b16
--- /dev/null
+++ b/spec/lib/csv_builder_spec.rb
@@ -0,0 +1,109 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe CsvBuilder do
+ let(:object) { double(question: :answer) }
+ let(:fake_relation) { FakeRelation.new([object]) }
+ let(:subject) { described_class.new(fake_relation, 'Q & A' => :question, 'Reversed' => -> (o) { o.question.to_s.reverse }) }
+ let(:csv_data) { subject.render }
+
+ before do
+ stub_const('FakeRelation', Array)
+
+ FakeRelation.class_eval do
+ def find_each(&block)
+ each(&block)
+ end
+ end
+ end
+
+ it 'generates a csv' do
+ expect(csv_data.scan(/(,|\n)/).join).to include ",\n,"
+ end
+
+ it 'uses a temporary file to reduce memory allocation' do
+ expect(CSV).to receive(:new).with(instance_of(Tempfile)).and_call_original
+
+ subject.render
+ end
+
+ it 'counts the number of rows' do
+ subject.render
+
+ expect(subject.rows_written).to eq 1
+ end
+
+ describe 'rows_expected' do
+ it 'uses rows_written if CSV rendered successfully' do
+ subject.render
+
+ expect(fake_relation).not_to receive(:count)
+ expect(subject.rows_expected).to eq 1
+ end
+
+ it 'falls back to calling .count before rendering begins' do
+ expect(subject.rows_expected).to eq 1
+ end
+ end
+
+ describe 'truncation' do
+ let(:big_object) { double(question: 'Long' * 1024) }
+ let(:row_size) { big_object.question.length * 2 }
+ let(:fake_relation) { FakeRelation.new([big_object, big_object, big_object]) }
+
+ it 'occurs after given number of bytes' do
+ expect(subject.render(row_size * 2).length).to be_between(row_size * 2, row_size * 3)
+ expect(subject).to be_truncated
+ expect(subject.rows_written).to eq 2
+ end
+
+ it 'is ignored by default' do
+ expect(subject.render.length).to be > row_size * 3
+ expect(subject.rows_written).to eq 3
+ end
+
+ it 'causes rows_expected to fall back to .count' do
+ subject.render(0)
+
+ expect(fake_relation).to receive(:count).and_call_original
+ expect(subject.rows_expected).to eq 3
+ end
+ end
+
+ it 'avoids loading all data in a single query' do
+ expect(fake_relation).to receive(:find_each)
+
+ subject.render
+ end
+
+ it 'uses hash keys as headers' do
+ expect(csv_data).to start_with 'Q & A'
+ end
+
+ it 'gets data by calling method provided as hash value' do
+ expect(csv_data).to include 'answer'
+ end
+
+ it 'allows lamdas to look up more complicated data' do
+ expect(csv_data).to include 'rewsna'
+ end
+
+ describe 'excel sanitization' do
+ let(:dangerous_title) { double(title: "=cmd|' /C calc'!A0 title", description: "*safe_desc") }
+ let(:dangerous_desc) { double(title: "*safe_title", description: "=cmd|' /C calc'!A0 desc") }
+ let(:fake_relation) { FakeRelation.new([dangerous_title, dangerous_desc]) }
+ let(:subject) { described_class.new(fake_relation, 'Title' => 'title', 'Description' => 'description') }
+ let(:csv_data) { subject.render }
+
+ it 'sanitizes dangerous characters at the beginning of a column' do
+ expect(csv_data).to include "'=cmd|' /C calc'!A0 title"
+ expect(csv_data).to include "'=cmd|' /C calc'!A0 desc"
+ end
+
+ it 'does not sanitize safe symbols at the beginning of a column' do
+ expect(csv_data).not_to include "'*safe_desc"
+ expect(csv_data).not_to include "'*safe_title"
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/migrate_issue_trackers_sensitive_data_spec.rb b/spec/lib/gitlab/background_migration/migrate_issue_trackers_sensitive_data_spec.rb
index 7dae28f72a5..09a4d4444eb 100644
--- a/spec/lib/gitlab/background_migration/migrate_issue_trackers_sensitive_data_spec.rb
+++ b/spec/lib/gitlab/background_migration/migrate_issue_trackers_sensitive_data_spec.rb
@@ -90,7 +90,7 @@ describe Gitlab::BackgroundMigration::MigrateIssueTrackersSensitiveData, schema:
end
end
- context 'with jira service' do
+ context 'with Jira service' do
let!(:service) do
services.create(id: 10, type: 'JiraService', title: nil, properties: jira_properties.to_json, category: 'issue_tracker')
end
@@ -202,7 +202,7 @@ describe Gitlab::BackgroundMigration::MigrateIssueTrackersSensitiveData, schema:
end
end
- context 'with jira service which has data fields record inconsistent with properties field' do
+ context 'with Jira service which has data fields record inconsistent with properties field' do
let!(:service) do
services.create(id: 16, type: 'CustomIssueTrackerService', description: 'Existing description', properties: jira_properties.to_json, category: 'issue_tracker').tap do |service|
JiraTrackerData.create!(service_id: service.id, url: 'http://other_jira_url')
@@ -241,7 +241,7 @@ describe Gitlab::BackgroundMigration::MigrateIssueTrackersSensitiveData, schema:
end
end
- context 'jira service with empty properties' do
+ context 'Jira service with empty properties' do
let!(:service) do
services.create(id: 18, type: 'JiraService', properties: '', category: 'issue_tracker')
end
@@ -253,7 +253,7 @@ describe Gitlab::BackgroundMigration::MigrateIssueTrackersSensitiveData, schema:
end
end
- context 'jira service with nil properties' do
+ context 'Jira service with nil properties' do
let!(:service) do
services.create(id: 18, type: 'JiraService', properties: nil, category: 'issue_tracker')
end
@@ -265,7 +265,7 @@ describe Gitlab::BackgroundMigration::MigrateIssueTrackersSensitiveData, schema:
end
end
- context 'jira service with invalid properties' do
+ context 'Jira service with invalid properties' do
let!(:service) do
services.create(id: 18, type: 'JiraService', properties: 'invalid data', category: 'issue_tracker')
end
@@ -277,7 +277,7 @@ describe Gitlab::BackgroundMigration::MigrateIssueTrackersSensitiveData, schema:
end
end
- context 'with jira service with invalid properties, valid jira service and valid bugzilla service' do
+ context 'with Jira service with invalid properties, valid Jira service and valid bugzilla service' do
let!(:jira_service_invalid) do
services.create(id: 19, title: 'invalid - title', description: 'invalid - description', type: 'JiraService', properties: 'invalid data', category: 'issue_tracker')
end
diff --git a/spec/lib/gitlab/background_migration/user_mentions/create_resource_user_mention_spec.rb b/spec/lib/gitlab/background_migration/user_mentions/create_resource_user_mention_spec.rb
index ff8b9dd1005..d4f52a11ce7 100644
--- a/spec/lib/gitlab/background_migration/user_mentions/create_resource_user_mention_spec.rb
+++ b/spec/lib/gitlab/background_migration/user_mentions/create_resource_user_mention_spec.rb
@@ -79,7 +79,7 @@ describe Gitlab::BackgroundMigration::UserMentions::CreateResourceUserMention, s
context 'migrate commit mentions' do
let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '', 'group/project') }
- let(:commit) { Commit.new(RepoHelpers.sample_commit, project.becomes(Project)) }
+ let(:commit) { Commit.new(RepoHelpers.sample_commit, project) }
let(:commit_user_mentions) { table(:commit_user_mentions) }
let!(:note1) { notes.create!(commit_id: commit.id, noteable_type: 'Commit', project_id: project.id, author_id: author.id, note: description_mentions) }
diff --git a/spec/lib/gitlab/chat/responder/mattermost_spec.rb b/spec/lib/gitlab/chat/responder/mattermost_spec.rb
new file mode 100644
index 00000000000..f3480dfef06
--- /dev/null
+++ b/spec/lib/gitlab/chat/responder/mattermost_spec.rb
@@ -0,0 +1,117 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Chat::Responder::Mattermost do
+ let(:chat_name) { create(:chat_name, chat_id: 'U123') }
+
+ let(:pipeline) do
+ pipeline = create(:ci_pipeline)
+
+ pipeline.create_chat_data!(
+ response_url: 'http://example.com',
+ chat_name_id: chat_name.id
+ )
+
+ pipeline
+ end
+
+ let(:build) { create(:ci_build, pipeline: pipeline) }
+ let(:responder) { described_class.new(build) }
+
+ describe '#send_response' do
+ it 'sends a response back to Mattermost' do
+ expect(Gitlab::HTTP).to receive(:post).with(
+ 'http://example.com',
+ { headers: { 'Content-Type': 'application/json' }, body: 'hello'.to_json }
+ )
+
+ responder.send_response('hello')
+ end
+ end
+
+ describe '#success' do
+ it 'returns the output for a successful build' do
+ expect(responder)
+ .to receive(:send_response)
+ .with(
+ hash_including(
+ response_type: :in_channel,
+ attachments: array_including(
+ a_hash_including(
+ text: /#{pipeline.chat_data.chat_name.user.name}.*completed successfully/,
+ fields: array_including(
+ a_hash_including(value: /##{build.id}/),
+ a_hash_including(value: build.name),
+ a_hash_including(value: "```shell\nscript output\n```")
+ )
+ )
+ )
+ )
+ )
+
+ responder.success('script output')
+ end
+
+ it 'limits the output to a fixed size' do
+ expect(responder)
+ .to receive(:send_response)
+ .with(
+ hash_including(
+ response_type: :in_channel,
+ attachments: array_including(
+ a_hash_including(
+ fields: array_including(
+ a_hash_including(value: /The output is too large/)
+ )
+ )
+ )
+ )
+ )
+
+ responder.success('a' * 4000)
+ end
+
+ it 'does not send a response if the output is empty' do
+ expect(responder).not_to receive(:send_response)
+
+ responder.success('')
+ end
+ end
+
+ describe '#failure' do
+ it 'returns the output for a failed build' do
+ expect(responder)
+ .to receive(:send_response)
+ .with(
+ hash_including(
+ response_type: :in_channel,
+ attachments: array_including(
+ a_hash_including(
+ text: /#{pipeline.chat_data.chat_name.user.name}.*failed/,
+ fields: array_including(
+ a_hash_including(value: /##{build.id}/),
+ a_hash_including(value: build.name)
+ )
+ )
+ )
+ )
+ )
+
+ responder.failure
+ end
+ end
+
+ describe '#scheduled_output' do
+ it 'returns the output for a scheduled build' do
+ output = responder.scheduled_output
+
+ expect(output).to match(
+ hash_including(
+ response_type: :ephemeral,
+ text: /##{build.id}/
+ )
+ )
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/config/entry/artifacts_spec.rb b/spec/lib/gitlab/ci/config/entry/artifacts_spec.rb
index 513a9b8f2b4..53469159110 100644
--- a/spec/lib/gitlab/ci/config/entry/artifacts_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/artifacts_spec.rb
@@ -123,28 +123,6 @@ describe Gitlab::Ci::Config::Entry::Artifacts do
end
end
end
-
- context 'when feature flag :ci_expose_arbitrary_artifacts_in_mr is disabled' do
- before do
- stub_feature_flags(ci_expose_arbitrary_artifacts_in_mr: false)
- end
-
- context 'when syntax is correct' do
- let(:config) { { expose_as: 'Test results', paths: ['test.txt'] } }
-
- it 'is valid' do
- expect(entry.errors).to be_empty
- end
- end
-
- context 'when syntax for :expose_as is incorrect' do
- let(:config) { { paths: %w[results.txt], expose_as: '' } }
-
- it 'is valid' do
- expect(entry.errors).to be_empty
- end
- end
- end
end
end
end
diff --git a/spec/lib/gitlab/ci/jwt_spec.rb b/spec/lib/gitlab/ci/jwt_spec.rb
new file mode 100644
index 00000000000..f2897708b08
--- /dev/null
+++ b/spec/lib/gitlab/ci/jwt_spec.rb
@@ -0,0 +1,124 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Ci::Jwt do
+ let(:namespace) { build_stubbed(:namespace) }
+ let(:project) { build_stubbed(:project, namespace: namespace) }
+ let(:user) { build_stubbed(:user) }
+ let(:pipeline) { build_stubbed(:ci_pipeline, ref: 'auto-deploy-2020-03-19') }
+ let(:build) do
+ build_stubbed(
+ :ci_build,
+ project: project,
+ user: user,
+ pipeline: pipeline
+ )
+ end
+
+ describe '#payload' do
+ subject(:payload) { described_class.new(build, ttl: 30).payload }
+
+ it 'has correct values for the standard JWT attributes' do
+ Timecop.freeze do
+ now = Time.now.to_i
+
+ aggregate_failures do
+ expect(payload[:iss]).to eq(Settings.gitlab.host)
+ expect(payload[:iat]).to eq(now)
+ expect(payload[:exp]).to eq(now + 30)
+ expect(payload[:sub]).to eq("job_#{build.id}")
+ end
+ end
+ end
+
+ it 'has correct values for the custom attributes' do
+ aggregate_failures do
+ expect(payload[:namespace_id]).to eq(namespace.id.to_s)
+ expect(payload[:namespace_path]).to eq(namespace.full_path)
+ expect(payload[:project_id]).to eq(project.id.to_s)
+ expect(payload[:project_path]).to eq(project.full_path)
+ expect(payload[:user_id]).to eq(user.id.to_s)
+ expect(payload[:user_email]).to eq(user.email)
+ expect(payload[:user_login]).to eq(user.username)
+ expect(payload[:pipeline_id]).to eq(pipeline.id.to_s)
+ expect(payload[:job_id]).to eq(build.id.to_s)
+ expect(payload[:ref]).to eq(pipeline.source_ref)
+ end
+ end
+
+ it 'skips user related custom attributes if build has no user assigned' do
+ allow(build).to receive(:user).and_return(nil)
+
+ expect { payload }.not_to raise_error
+ end
+
+ describe 'ref type' do
+ context 'branches' do
+ it 'is "branch"' do
+ expect(payload[:ref_type]).to eq('branch')
+ end
+ end
+
+ context 'tags' do
+ let(:build) { build_stubbed(:ci_build, :on_tag, project: project) }
+
+ it 'is "tag"' do
+ expect(payload[:ref_type]).to eq('tag')
+ end
+ end
+
+ context 'merge requests' do
+ let(:pipeline) { build_stubbed(:ci_pipeline, :detached_merge_request_pipeline) }
+
+ it 'is "branch"' do
+ expect(payload[:ref_type]).to eq('branch')
+ end
+ end
+ end
+
+ describe 'ref_protected' do
+ it 'is false when ref is not protected' do
+ expect(build).to receive(:protected).and_return(false)
+
+ expect(payload[:ref_protected]).to eq('false')
+ end
+
+ it 'is true when ref is protected' do
+ expect(build).to receive(:protected).and_return(true)
+
+ expect(payload[:ref_protected]).to eq('true')
+ end
+ end
+ end
+
+ describe '.for_build' do
+ let(:rsa_key) { OpenSSL::PKey::RSA.new(Rails.application.secrets.openid_connect_signing_key) }
+
+ subject(:jwt) { described_class.for_build(build) }
+
+ it 'generates JWT with key id' do
+ _payload, headers = JWT.decode(jwt, rsa_key.public_key, true, { algorithm: 'RS256' })
+
+ expect(headers['kid']).to eq(rsa_key.public_key.to_jwk['kid'])
+ end
+
+ it 'generates JWT for the given job with ttl equal to build timeout' do
+ expect(build).to receive(:metadata_timeout).and_return(3_600)
+
+ payload, _headers = JWT.decode(jwt, rsa_key.public_key, true, { algorithm: 'RS256' })
+ ttl = payload["exp"] - payload["iat"]
+
+ expect(ttl).to eq(3_600)
+ end
+
+ it 'generates JWT for the given job with default ttl if build timeout is not set' do
+ expect(build).to receive(:metadata_timeout).and_return(nil)
+
+ payload, _headers = JWT.decode(jwt, rsa_key.public_key, true, { algorithm: 'RS256' })
+ ttl = payload["exp"] - payload["iat"]
+
+ expect(ttl).to eq(5.minutes.to_i)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/parsers/terraform/tfplan_spec.rb b/spec/lib/gitlab/ci/parsers/terraform/tfplan_spec.rb
new file mode 100644
index 00000000000..19cd75e586c
--- /dev/null
+++ b/spec/lib/gitlab/ci/parsers/terraform/tfplan_spec.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Ci::Parsers::Terraform::Tfplan do
+ describe '#parse!' do
+ let_it_be(:artifact) { create(:ci_job_artifact, :terraform) }
+
+ let(:reports) { Gitlab::Ci::Reports::TerraformReports.new }
+
+ context 'when data is tfplan.json' do
+ context 'when there is no data' do
+ it 'raises an error' do
+ plan = '{}'
+
+ expect { subject.parse!(plan, reports, artifact: artifact) }.to raise_error(
+ described_class::TfplanParserError
+ )
+ end
+ end
+
+ context 'when there is data' do
+ it 'parses JSON and returns a report' do
+ plan = '{ "create": 0, "update": 1, "delete": 0 }'
+
+ expect { subject.parse!(plan, reports, artifact: artifact) }.not_to raise_error
+
+ expect(reports.plans).to match(
+ a_hash_including(
+ 'tfplan.json' => a_hash_including(
+ 'create' => 0,
+ 'update' => 1,
+ 'delete' => 0
+ )
+ )
+ )
+ end
+ end
+ end
+
+ context 'when data is not tfplan.json' do
+ it 'raises an error' do
+ plan = { 'create' => 0, 'update' => 1, 'delete' => 0 }.to_s
+
+ expect { subject.parse!(plan, reports, artifact: artifact) }.to raise_error(
+ described_class::TfplanParserError
+ )
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/parsers/test/junit_spec.rb b/spec/lib/gitlab/ci/parsers/test/junit_spec.rb
index b4be5a41cd7..7b7ace02bba 100644
--- a/spec/lib/gitlab/ci/parsers/test/junit_spec.rb
+++ b/spec/lib/gitlab/ci/parsers/test/junit_spec.rb
@@ -215,8 +215,64 @@ describe Gitlab::Ci::Parsers::Test::Junit do
context 'when data is not JUnit style XML' do
let(:junit) { { testsuite: 'abc' }.to_json }
- it 'raises an error' do
- expect { subject }.to raise_error(described_class::JunitParserError)
+ it 'attaches an error to the TestSuite object' do
+ expect { subject }.not_to raise_error
+ expect(test_cases).to be_empty
+ end
+ end
+
+ context 'when data is malformed JUnit XML' do
+ let(:junit) do
+ <<-EOF.strip_heredoc
+ <testsuite>
+ <testcase classname='Calculator' name='sumTest1' time='0.01'></testcase>
+ <testcase classname='Calculator' name='sumTest2' time='0.02'></testcase
+ </testsuite>
+ EOF
+ end
+
+ it 'attaches an error to the TestSuite object' do
+ expect { subject }.not_to raise_error
+ expect(test_suite.suite_error).to eq("JUnit XML parsing failed: 4:1: FATAL: expected '>'")
+ end
+
+ it 'returns 0 tests cases' do
+ subject
+
+ expect(test_cases).to be_empty
+ expect(test_suite.total_count).to eq(0)
+ expect(test_suite.success_count).to eq(0)
+ expect(test_suite.error_count).to eq(0)
+ end
+
+ it 'returns a failure status' do
+ subject
+
+ expect(test_suite.total_status).to eq(Gitlab::Ci::Reports::TestCase::STATUS_ERROR)
+ end
+ end
+
+ context 'when data is not XML' do
+ let(:junit) { double(:random_trash) }
+
+ it 'attaches an error to the TestSuite object' do
+ expect { subject }.not_to raise_error
+ expect(test_suite.suite_error).to eq('JUnit data parsing failed: no implicit conversion of RSpec::Mocks::Double into String')
+ end
+
+ it 'returns 0 tests cases' do
+ subject
+
+ expect(test_cases).to be_empty
+ expect(test_suite.total_count).to eq(0)
+ expect(test_suite.success_count).to eq(0)
+ expect(test_suite.error_count).to eq(0)
+ end
+
+ it 'returns a failure status' do
+ subject
+
+ expect(test_suite.total_status).to eq(Gitlab::Ci::Reports::TestCase::STATUS_ERROR)
end
end
diff --git a/spec/lib/gitlab/ci/parsers_spec.rb b/spec/lib/gitlab/ci/parsers_spec.rb
index 9d6896b3cb4..addb8b8190f 100644
--- a/spec/lib/gitlab/ci/parsers_spec.rb
+++ b/spec/lib/gitlab/ci/parsers_spec.rb
@@ -22,6 +22,14 @@ describe Gitlab::Ci::Parsers do
end
end
+ context 'when file_type is terraform' do
+ let(:file_type) { 'terraform' }
+
+ it 'fabricates the class' do
+ is_expected.to be_a(described_class::Terraform::Tfplan)
+ end
+ end
+
context 'when file_type does not exist' do
let(:file_type) { 'undefined' }
diff --git a/spec/lib/gitlab/ci/reports/terraform_reports_spec.rb b/spec/lib/gitlab/ci/reports/terraform_reports_spec.rb
new file mode 100644
index 00000000000..061029299ac
--- /dev/null
+++ b/spec/lib/gitlab/ci/reports/terraform_reports_spec.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Ci::Reports::TerraformReports do
+ it 'initializes plans with an empty hash' do
+ expect(subject.plans).to eq({})
+ end
+
+ describe '#add_plan' do
+ context 'when providing two unique plans' do
+ it 'returns two plans' do
+ subject.add_plan('a/tfplan.json', { 'create' => 0, 'update' => 1, 'delete' => 0 })
+ subject.add_plan('b/tfplan.json', { 'create' => 0, 'update' => 1, 'delete' => 0 })
+
+ expect(subject.plans).to eq({
+ 'a/tfplan.json' => { 'create' => 0, 'update' => 1, 'delete' => 0 },
+ 'b/tfplan.json' => { 'create' => 0, 'update' => 1, 'delete' => 0 }
+ })
+ end
+ end
+
+ context 'when providing the same plan twice' do
+ it 'returns the last added plan' do
+ subject.add_plan('tfplan.json', { 'create' => 0, 'update' => 0, 'delete' => 0 })
+ subject.add_plan('tfplan.json', { 'create' => 0, 'update' => 1, 'delete' => 0 })
+
+ expect(subject.plans).to eq({
+ 'tfplan.json' => { 'create' => 0, 'update' => 1, 'delete' => 0 }
+ })
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/reports/test_reports_spec.rb b/spec/lib/gitlab/ci/reports/test_reports_spec.rb
index 638acde69eb..6462ac23eac 100644
--- a/spec/lib/gitlab/ci/reports/test_reports_spec.rb
+++ b/spec/lib/gitlab/ci/reports/test_reports_spec.rb
@@ -141,6 +141,29 @@ describe Gitlab::Ci::Reports::TestReports do
end
end
+ describe '#suite_errors' do
+ subject { test_reports.suite_errors }
+
+ context 'when a suite has normal spec errors or failures' do
+ before do
+ test_reports.get_suite('junit').add_test_case(create_test_case_java_success)
+ test_reports.get_suite('junit').add_test_case(create_test_case_java_failed)
+ test_reports.get_suite('junit').add_test_case(create_test_case_java_error)
+ end
+
+ it { is_expected.to be_empty }
+ end
+
+ context 'when there is an error test case' do
+ before do
+ test_reports.get_suite('rspec').add_test_case(create_test_case_rspec_success)
+ test_reports.get_suite('junit').set_suite_error('Existential parsing error')
+ end
+
+ it { is_expected.to eq({ 'junit' => 'Existential parsing error' }) }
+ end
+ end
+
Gitlab::Ci::Reports::TestCase::STATUS_TYPES.each do |status_type|
describe "##{status_type}_count" do
subject { test_reports.public_send("#{status_type}_count") }
diff --git a/spec/lib/gitlab/ci/reports/test_suite_spec.rb b/spec/lib/gitlab/ci/reports/test_suite_spec.rb
index 9d9774afc82..fef27fbf3e8 100644
--- a/spec/lib/gitlab/ci/reports/test_suite_spec.rb
+++ b/spec/lib/gitlab/ci/reports/test_suite_spec.rb
@@ -114,6 +114,31 @@ describe Gitlab::Ci::Reports::TestSuite do
end
end
+ describe '#set_suite_error' do
+ let(:set_suite_error) { test_suite.set_suite_error('message') }
+
+ context 'when @suite_error is nil' do
+ it 'returns message' do
+ expect(set_suite_error).to eq('message')
+ end
+
+ it 'sets the new message' do
+ set_suite_error
+ expect(test_suite.suite_error).to eq('message')
+ end
+ end
+
+ context 'when a suite_error has already been set' do
+ before do
+ test_suite.set_suite_error('old message')
+ end
+
+ it 'overwrites the existing message' do
+ expect { set_suite_error }.to change(test_suite, :suite_error).from('old message').to('message')
+ end
+ end
+ end
+
Gitlab::Ci::Reports::TestCase::STATUS_TYPES.each do |status_type|
describe "##{status_type}" do
subject { test_suite.public_send("#{status_type}") }
diff --git a/spec/lib/gitlab/ci/status/bridge/factory_spec.rb b/spec/lib/gitlab/ci/status/bridge/factory_spec.rb
new file mode 100644
index 00000000000..1f417781988
--- /dev/null
+++ b/spec/lib/gitlab/ci/status/bridge/factory_spec.rb
@@ -0,0 +1,72 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Ci::Status::Bridge::Factory do
+ let(:user) { create(:user) }
+ let(:project) { bridge.project }
+ let(:status) { factory.fabricate! }
+ let(:factory) { described_class.new(bridge, user) }
+
+ before do
+ stub_not_protect_default_branch
+
+ project.add_developer(user)
+ end
+
+ context 'when bridge is created' do
+ let(:bridge) { create(:ci_bridge) }
+
+ it 'matches correct core status' do
+ expect(factory.core_status).to be_a Gitlab::Ci::Status::Created
+ end
+
+ it 'fabricates status with correct details' do
+ expect(status.text).to eq s_('CiStatusText|created')
+ expect(status.icon).to eq 'status_created'
+ expect(status.favicon).to eq 'favicon_status_created'
+ expect(status.label).to be_nil
+ expect(status).not_to have_details
+ expect(status).not_to have_action
+ end
+ end
+
+ context 'when bridge is failed' do
+ let(:bridge) { create(:ci_bridge, :failed) }
+
+ it 'matches correct core status' do
+ expect(factory.core_status).to be_a Gitlab::Ci::Status::Failed
+ end
+
+ it 'matches correct extended statuses' do
+ expect(factory.extended_statuses)
+ .to eq [Gitlab::Ci::Status::Bridge::Failed]
+ end
+
+ it 'fabricates a failed bridge status' do
+ expect(status).to be_a Gitlab::Ci::Status::Bridge::Failed
+ end
+
+ it 'fabricates status with correct details' do
+ expect(status.text).to eq s_('CiStatusText|failed')
+ expect(status.icon).to eq 'status_failed'
+ expect(status.favicon).to eq 'favicon_status_failed'
+ expect(status.label).to be_nil
+ expect(status.status_tooltip).to eq "#{s_('CiStatusText|failed')} - (unknown failure)"
+ expect(status).not_to have_details
+ expect(status).not_to have_action
+ end
+
+ context 'failed with downstream_pipeline_creation_failed' do
+ before do
+ bridge.failure_reason = 'downstream_pipeline_creation_failed'
+ end
+
+ it 'fabricates correct status_tooltip' do
+ expect(status.status_tooltip).to eq(
+ "#{s_('CiStatusText|failed')} - (downstream pipeline can not be created)"
+ )
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/cycle_analytics/group_stage_summary_spec.rb b/spec/lib/gitlab/cycle_analytics/group_stage_summary_spec.rb
index 3de65529e99..2242895f8ea 100644
--- a/spec/lib/gitlab/cycle_analytics/group_stage_summary_spec.rb
+++ b/spec/lib/gitlab/cycle_analytics/group_stage_summary_spec.rb
@@ -20,7 +20,7 @@ describe Gitlab::CycleAnalytics::GroupStageSummary do
end
it "finds the number of issues created after it" do
- expect(subject.first[:value]).to eq(2)
+ expect(subject.first[:value]).to eq('2')
end
context 'with subgroups' do
@@ -29,7 +29,7 @@ describe Gitlab::CycleAnalytics::GroupStageSummary do
end
it "finds issues from them" do
- expect(subject.first[:value]).to eq(3)
+ expect(subject.first[:value]).to eq('3')
end
end
@@ -41,7 +41,7 @@ describe Gitlab::CycleAnalytics::GroupStageSummary do
subject { described_class.new(group, options: { from: Time.now, current_user: user, projects: [project.id, project_2.id] }).data }
it 'finds issues from those projects' do
- expect(subject.first[:value]).to eq(2)
+ expect(subject.first[:value]).to eq('2')
end
end
@@ -49,7 +49,7 @@ describe Gitlab::CycleAnalytics::GroupStageSummary do
subject { described_class.new(group, options: { from: 10.days.ago, to: Time.now, current_user: user }).data }
it 'finds issues from 5 days ago' do
- expect(subject.first[:value]).to eq(2)
+ expect(subject.first[:value]).to eq('2')
end
end
end
@@ -62,7 +62,7 @@ describe Gitlab::CycleAnalytics::GroupStageSummary do
end
it "doesn't find issues from them" do
- expect(subject.first[:value]).to eq(2)
+ expect(subject.first[:value]).to eq('2')
end
end
end
@@ -77,7 +77,7 @@ describe Gitlab::CycleAnalytics::GroupStageSummary do
end
it "finds the number of deploys made created after it" do
- expect(subject.second[:value]).to eq(2)
+ expect(subject.second[:value]).to eq('2')
end
context 'with subgroups' do
@@ -88,7 +88,7 @@ describe Gitlab::CycleAnalytics::GroupStageSummary do
end
it "finds deploys from them" do
- expect(subject.second[:value]).to eq(3)
+ expect(subject.second[:value]).to eq('3')
end
end
@@ -102,7 +102,7 @@ describe Gitlab::CycleAnalytics::GroupStageSummary do
subject { described_class.new(group, options: { from: Time.now, current_user: user, projects: [project.id, project_2.id] }).data }
it 'shows deploys from those projects' do
- expect(subject.second[:value]).to eq(2)
+ expect(subject.second[:value]).to eq('2')
end
end
@@ -110,7 +110,7 @@ describe Gitlab::CycleAnalytics::GroupStageSummary do
subject { described_class.new(group, options: { from: 10.days.ago, to: Time.now, current_user: user }).data }
it 'finds deployments from 5 days ago' do
- expect(subject.second[:value]).to eq(2)
+ expect(subject.second[:value]).to eq('2')
end
end
end
@@ -123,7 +123,7 @@ describe Gitlab::CycleAnalytics::GroupStageSummary do
end
it "doesn't find deploys from them" do
- expect(subject.second[:value]).to eq(0)
+ expect(subject.second[:value]).to eq('-')
end
end
end
@@ -153,7 +153,7 @@ describe Gitlab::CycleAnalytics::GroupStageSummary do
context 'when `to` is nil' do
it 'includes range until now' do
# 1 deployment over 7 days
- expect(subject[:value]).to eq(0.1)
+ expect(subject[:value]).to eq('0.1')
end
end
@@ -169,7 +169,7 @@ describe Gitlab::CycleAnalytics::GroupStageSummary do
it 'returns deployment frequency within `from` and `to` range' do
# 2 deployments over 20 days
- expect(subject[:value]).to eq(0.1)
+ expect(subject[:value]).to eq('0.1')
end
end
end
diff --git a/spec/lib/gitlab/cycle_analytics/stage_summary_spec.rb b/spec/lib/gitlab/cycle_analytics/stage_summary_spec.rb
index 94edef20296..a86278871ff 100644
--- a/spec/lib/gitlab/cycle_analytics/stage_summary_spec.rb
+++ b/spec/lib/gitlab/cycle_analytics/stage_summary_spec.rb
@@ -20,13 +20,13 @@ describe Gitlab::CycleAnalytics::StageSummary do
Timecop.freeze(5.days.ago) { create(:issue, project: project) }
Timecop.freeze(5.days.from_now) { create(:issue, project: project) }
- expect(subject).to eq(1)
+ expect(subject).to eq('1')
end
it "doesn't find issues from other projects" do
Timecop.freeze(5.days.from_now) { create(:issue, project: create(:project)) }
- expect(subject).to eq(0)
+ expect(subject).to eq('-')
end
context 'when `to` parameter is given' do
@@ -38,14 +38,14 @@ describe Gitlab::CycleAnalytics::StageSummary do
it "doesn't find any record" do
options[:to] = Time.now
- expect(subject).to eq(0)
+ expect(subject).to eq('-')
end
it "finds records created between `from` and `to` range" do
options[:from] = 10.days.ago
options[:to] = 10.days.from_now
- expect(subject).to eq(2)
+ expect(subject).to eq('2')
end
end
end
@@ -57,19 +57,19 @@ describe Gitlab::CycleAnalytics::StageSummary do
Timecop.freeze(5.days.ago) { create_commit("Test message", project, user, 'master') }
Timecop.freeze(5.days.from_now) { create_commit("Test message", project, user, 'master') }
- expect(subject).to eq(1)
+ expect(subject).to eq('1')
end
it "doesn't find commits from other projects" do
Timecop.freeze(5.days.from_now) { create_commit("Test message", create(:project, :repository), user, 'master') }
- expect(subject).to eq(0)
+ expect(subject).to eq('-')
end
- it "finds a large (> 100) snumber of commits if present" do
+ it "finds a large (> 100) number of commits if present" do
Timecop.freeze(5.days.from_now) { create_commit("Test message", project, user, 'master', count: 100) }
- expect(subject).to eq(100)
+ expect(subject).to eq('100')
end
context 'when `to` parameter is given' do
@@ -81,14 +81,14 @@ describe Gitlab::CycleAnalytics::StageSummary do
it "doesn't find any record" do
options[:to] = Time.now
- expect(subject).to eq(0)
+ expect(subject).to eq('-')
end
it "finds records created between `from` and `to` range" do
options[:from] = 10.days.ago
options[:to] = 10.days.from_now
- expect(subject).to eq(2)
+ expect(subject).to eq('2')
end
end
@@ -118,7 +118,7 @@ describe Gitlab::CycleAnalytics::StageSummary do
Timecop.freeze(5.days.ago) { create(:deployment, :success, project: project) }
Timecop.freeze(5.days.from_now) { create(:deployment, :success, project: project) }
- expect(subject).to eq(1)
+ expect(subject).to eq('1')
end
it "doesn't find commits from other projects" do
@@ -126,7 +126,7 @@ describe Gitlab::CycleAnalytics::StageSummary do
create(:deployment, :success, project: create(:project, :repository))
end
- expect(subject).to eq(0)
+ expect(subject).to eq('-')
end
context 'when `to` parameter is given' do
@@ -138,14 +138,76 @@ describe Gitlab::CycleAnalytics::StageSummary do
it "doesn't find any record" do
options[:to] = Time.now
- expect(subject).to eq(0)
+ expect(subject).to eq('-')
end
it "finds records created between `from` and `to` range" do
options[:from] = 10.days.ago
options[:to] = 10.days.from_now
- expect(subject).to eq(2)
+ expect(subject).to eq('2')
+ end
+ end
+ end
+
+ describe '#deployment_frequency' do
+ subject { stage_summary.fourth[:value] }
+
+ it 'includes the unit: `per day`' do
+ expect(stage_summary.fourth[:unit]).to eq _('per day')
+ end
+
+ before do
+ Timecop.freeze(5.days.ago) { create(:deployment, :success, project: project) }
+ end
+
+ it 'returns 0.0 when there were deploys but the frequency was too low' do
+ options[:from] = 30.days.ago
+
+ # 1 deployment over 30 days
+ # frequency of 0.03, rounded off to 0.0
+ expect(subject).to eq('0')
+ end
+
+ it 'returns `-` when there were no deploys' do
+ options[:from] = 4.days.ago
+
+ # 0 deployment in the last 4 days
+ expect(subject).to eq('-')
+ end
+
+ context 'when `to` is nil' do
+ it 'includes range until now' do
+ options[:from] = 6.days.ago
+ options[:to] = nil
+
+ # 1 deployment over 7 days
+ expect(subject).to eq('0.1')
+ end
+ end
+
+ context 'when `to` is given' do
+ before do
+ Timecop.freeze(5.days.from_now) { create(:deployment, :success, project: project) }
+ end
+
+ it 'finds records created between `from` and `to` range' do
+ options[:from] = 10.days.ago
+ options[:to] = 10.days.from_now
+
+ # 2 deployments over 20 days
+ expect(subject).to eq('0.1')
+ end
+
+ context 'when `from` and `to` are within a day' do
+ it 'returns the number of deployments made on that day' do
+ Timecop.freeze(Time.now) do
+ create(:deployment, :success, project: project)
+ options[:from] = options[:to] = Time.now
+
+ expect(subject).to eq('1')
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb
index 3db9320c021..da531a0e6fa 100644
--- a/spec/lib/gitlab/database/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers_spec.rb
@@ -215,6 +215,7 @@ describe Gitlab::Database::MigrationHelpers do
context 'ON DELETE statements' do
context 'on_delete: :nullify' do
it 'appends ON DELETE SET NULL statement' do
+ expect(model).to receive(:with_lock_retries).and_call_original
expect(model).to receive(:disable_statement_timeout).and_call_original
expect(model).to receive(:execute).with(/statement_timeout/)
expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/)
@@ -230,6 +231,7 @@ describe Gitlab::Database::MigrationHelpers do
context 'on_delete: :cascade' do
it 'appends ON DELETE CASCADE statement' do
+ expect(model).to receive(:with_lock_retries).and_call_original
expect(model).to receive(:disable_statement_timeout).and_call_original
expect(model).to receive(:execute).with(/statement_timeout/)
expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/)
@@ -245,6 +247,7 @@ describe Gitlab::Database::MigrationHelpers do
context 'on_delete: nil' do
it 'appends no ON DELETE statement' do
+ expect(model).to receive(:with_lock_retries).and_call_original
expect(model).to receive(:disable_statement_timeout).and_call_original
expect(model).to receive(:execute).with(/statement_timeout/)
expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/)
@@ -261,6 +264,7 @@ describe Gitlab::Database::MigrationHelpers do
context 'when no custom key name is supplied' do
it 'creates a concurrent foreign key and validates it' do
+ expect(model).to receive(:with_lock_retries).and_call_original
expect(model).to receive(:disable_statement_timeout).and_call_original
expect(model).to receive(:execute).with(/statement_timeout/)
expect(model).to receive(:execute).ordered.with(/NOT VALID/)
@@ -287,6 +291,7 @@ describe Gitlab::Database::MigrationHelpers do
context 'when a custom key name is supplied' do
context 'for creating a new foreign key for a column that does not presently exist' do
it 'creates a new foreign key' do
+ expect(model).to receive(:with_lock_retries).and_call_original
expect(model).to receive(:disable_statement_timeout).and_call_original
expect(model).to receive(:execute).with(/statement_timeout/)
expect(model).to receive(:execute).ordered.with(/NOT VALID/)
@@ -314,6 +319,7 @@ describe Gitlab::Database::MigrationHelpers do
context 'when the supplied key name is different from the existing foreign key name' do
it 'creates a new foreign key' do
+ expect(model).to receive(:with_lock_retries).and_call_original
expect(model).to receive(:disable_statement_timeout).and_call_original
expect(model).to receive(:execute).with(/statement_timeout/)
expect(model).to receive(:execute).ordered.with(/NOT VALID/)
@@ -1359,6 +1365,22 @@ describe Gitlab::Database::MigrationHelpers do
end
end
+ it 'returns the final expected delay' do
+ Sidekiq::Testing.fake! do
+ final_delay = model.queue_background_migration_jobs_by_range_at_intervals(User, 'FooJob', 10.minutes, batch_size: 2)
+
+ expect(final_delay.to_f).to eq(20.minutes.to_f)
+ end
+ end
+
+ it 'returns zero when nothing gets queued' do
+ Sidekiq::Testing.fake! do
+ final_delay = model.queue_background_migration_jobs_by_range_at_intervals(User.none, 'FooJob', 10.minutes)
+
+ expect(final_delay).to eq(0)
+ end
+ end
+
context 'with batch_size option' do
it 'queues jobs correctly' do
Sidekiq::Testing.fake! do
@@ -1383,12 +1405,25 @@ describe Gitlab::Database::MigrationHelpers do
end
end
- context 'with other_arguments option' do
+ context 'with other_job_arguments option' do
it 'queues jobs correctly' do
- model.queue_background_migration_jobs_by_range_at_intervals(User, 'FooJob', 10.minutes, other_arguments: [1, 2])
+ Sidekiq::Testing.fake! do
+ model.queue_background_migration_jobs_by_range_at_intervals(User, 'FooJob', 10.minutes, other_job_arguments: [1, 2])
- expect(BackgroundMigrationWorker.jobs[0]['args']).to eq(['FooJob', [id1, id3, 1, 2]])
- expect(BackgroundMigrationWorker.jobs[0]['at']).to eq(10.minutes.from_now.to_f)
+ expect(BackgroundMigrationWorker.jobs[0]['args']).to eq(['FooJob', [id1, id3, 1, 2]])
+ expect(BackgroundMigrationWorker.jobs[0]['at']).to eq(10.minutes.from_now.to_f)
+ end
+ end
+ end
+
+ context 'with initial_delay option' do
+ it 'queues jobs correctly' do
+ Sidekiq::Testing.fake! do
+ model.queue_background_migration_jobs_by_range_at_intervals(User, 'FooJob', 10.minutes, other_job_arguments: [1, 2], initial_delay: 10.minutes)
+
+ expect(BackgroundMigrationWorker.jobs[0]['args']).to eq(['FooJob', [id1, id3, 1, 2]])
+ expect(BackgroundMigrationWorker.jobs[0]['at']).to eq(20.minutes.from_now.to_f)
+ end
end
end
end
diff --git a/spec/lib/gitlab/diff/highlight_cache_spec.rb b/spec/lib/gitlab/diff/highlight_cache_spec.rb
index a16e5e185bb..3c128aad976 100644
--- a/spec/lib/gitlab/diff/highlight_cache_spec.rb
+++ b/spec/lib/gitlab/diff/highlight_cache_spec.rb
@@ -113,7 +113,7 @@ describe Gitlab::Diff::HighlightCache, :clean_gitlab_redis_cache do
allow(redis).to receive(:info).and_return({ "redis_version" => "3.0.0" })
expect(described_class.gitlab_redis_diff_caching_memory_usage_bytes)
- .not_to receive(:observe).and_call_original
+ .not_to receive(:observe)
cache.send(:write_to_redis_hash, diff_hash)
end
@@ -163,6 +163,56 @@ describe Gitlab::Diff::HighlightCache, :clean_gitlab_redis_cache do
end
end
+ describe "GZip usage" do
+ let(:diff_file) do
+ diffs = merge_request.diffs
+ raw_diff = diffs.diffable.raw_diffs(diffs.diff_options.merge(paths: ['CHANGELOG'])).first
+ Gitlab::Diff::File.new(raw_diff,
+ repository: diffs.project.repository,
+ diff_refs: diffs.diff_refs,
+ fallback_diff_refs: diffs.fallback_diff_refs)
+ end
+
+ context "feature flag :gzip_diff_cache disabled" do
+ before do
+ stub_feature_flags(gzip_diff_cache: true)
+ end
+
+ it "uses ActiveSupport::Gzip when reading from the cache" do
+ expect(ActiveSupport::Gzip).to receive(:decompress).at_least(:once).and_call_original
+
+ cache.write_if_empty
+ cache.decorate(diff_file)
+ end
+
+ it "uses ActiveSupport::Gzip to compress data when writing to cache" do
+ expect(ActiveSupport::Gzip).to receive(:compress).and_call_original
+
+ cache.send(:write_to_redis_hash, diff_hash)
+ end
+ end
+
+ context "feature flag :gzip_diff_cache disabled" do
+ before do
+ stub_feature_flags(gzip_diff_cache: false)
+ end
+
+ it "doesn't use ActiveSupport::Gzip when reading from the cache" do
+ expect(ActiveSupport::Gzip).not_to receive(:decompress)
+
+ cache.write_if_empty
+ cache.decorate(diff_file)
+ end
+
+ it "doesn't use ActiveSupport::Gzip to compress data when writing to cache" do
+ expect(ActiveSupport::Gzip).not_to receive(:compress)
+
+ expect { cache.send(:write_to_redis_hash, diff_hash) }
+ .to change { Gitlab::Redis::Cache.with { |r| r.hgetall(cache_key) } }
+ end
+ end
+ end
+
describe 'metrics' do
it 'defines :gitlab_redis_diff_caching_memory_usage_bytes histogram' do
expect(described_class).to respond_to(:gitlab_redis_diff_caching_memory_usage_bytes)
diff --git a/spec/lib/gitlab/elasticsearch/logs/lines_spec.rb b/spec/lib/gitlab/elasticsearch/logs/lines_spec.rb
index 8b6a19fa2c5..ccf38b7f688 100644
--- a/spec/lib/gitlab/elasticsearch/logs/lines_spec.rb
+++ b/spec/lib/gitlab/elasticsearch/logs/lines_spec.rb
@@ -29,6 +29,7 @@ describe Gitlab::Elasticsearch::Logs::Lines do
let(:body_with_start_time) { JSON.parse(fixture_file('lib/elasticsearch/query_with_start_time.json')) }
let(:body_with_end_time) { JSON.parse(fixture_file('lib/elasticsearch/query_with_end_time.json')) }
let(:body_with_cursor) { JSON.parse(fixture_file('lib/elasticsearch/query_with_cursor.json')) }
+ let(:body_with_filebeat_6) { JSON.parse(fixture_file('lib/elasticsearch/query_with_filebeat_6.json')) }
RSpec::Matchers.define :a_hash_equal_to_json do |expected|
match do |actual|
@@ -85,5 +86,12 @@ describe Gitlab::Elasticsearch::Logs::Lines do
result = subject.pod_logs(namespace, pod_name: pod_name, cursor: cursor)
expect(result).to eq(logs: [es_message_4, es_message_3, es_message_2, es_message_1], cursor: cursor)
end
+
+ it 'can search on filebeat 6' do
+ expect(client).to receive(:search).with(body: a_hash_equal_to_json(body_with_filebeat_6)).and_return(es_response)
+
+ result = subject.pod_logs(namespace, pod_name: pod_name, filebeat7: false)
+ expect(result).to eq(logs: [es_message_4, es_message_3, es_message_2, es_message_1], cursor: cursor)
+ end
end
end
diff --git a/spec/lib/gitlab/email/hook/smime_signature_interceptor_spec.rb b/spec/lib/gitlab/email/hook/smime_signature_interceptor_spec.rb
index 36954252b6b..31ba48e9df1 100644
--- a/spec/lib/gitlab/email/hook/smime_signature_interceptor_spec.rb
+++ b/spec/lib/gitlab/email/hook/smime_signature_interceptor_spec.rb
@@ -5,19 +5,24 @@ require 'spec_helper'
describe Gitlab::Email::Hook::SmimeSignatureInterceptor do
include SmimeHelper
- # cert generation is an expensive operation and they are used read-only,
+ # certs generation is an expensive operation and they are used read-only,
# so we share them as instance variables in all tests
before :context do
@root_ca = generate_root
- @cert = generate_cert(root_ca: @root_ca)
+ @intermediate_ca = generate_intermediate(signer_ca: @root_ca)
+ @cert = generate_cert(signer_ca: @intermediate_ca)
end
let(:root_certificate) do
Gitlab::Email::Smime::Certificate.new(@root_ca[:key], @root_ca[:cert])
end
+ let(:intermediate_certificate) do
+ Gitlab::Email::Smime::Certificate.new(@intermediate_ca[:key], @intermediate_ca[:cert])
+ end
+
let(:certificate) do
- Gitlab::Email::Smime::Certificate.new(@cert[:key], @cert[:cert])
+ Gitlab::Email::Smime::Certificate.new(@cert[:key], @cert[:cert], [intermediate_certificate.cert])
end
let(:mail_body) { "signed hello with Unicode €áø and\r\n newlines\r\n" }
@@ -48,17 +53,19 @@ describe Gitlab::Email::Hook::SmimeSignatureInterceptor do
# verify signature and obtain pkcs7 encoded content
p7enc = Gitlab::Email::Smime::Signer.verify_signature(
- cert: certificate.cert,
- ca_cert: root_certificate.cert,
+ ca_certs: root_certificate.cert,
signed_data: mail.encoded)
+ expect(p7enc).not_to be_nil
+
# re-verify signature from a new Mail object content
# See https://gitlab.com/gitlab-org/gitlab/issues/197386
- Gitlab::Email::Smime::Signer.verify_signature(
- cert: certificate.cert,
- ca_cert: root_certificate.cert,
+ p7_re_enc = Gitlab::Email::Smime::Signer.verify_signature(
+ ca_certs: root_certificate.cert,
signed_data: Mail.new(mail).encoded)
+ expect(p7_re_enc).not_to be_nil
+
# envelope in a Mail object and obtain the body
decoded_mail = Mail.new(p7enc.data)
diff --git a/spec/lib/gitlab/email/smime/certificate_spec.rb b/spec/lib/gitlab/email/smime/certificate_spec.rb
index 90b27602413..07b8c1e4de1 100644
--- a/spec/lib/gitlab/email/smime/certificate_spec.rb
+++ b/spec/lib/gitlab/email/smime/certificate_spec.rb
@@ -9,7 +9,8 @@ describe Gitlab::Email::Smime::Certificate do
# so we share them as instance variables in all tests
before :context do
@root_ca = generate_root
- @cert = generate_cert(root_ca: @root_ca)
+ @intermediate_ca = generate_intermediate(signer_ca: @root_ca)
+ @cert = generate_cert(signer_ca: @intermediate_ca)
end
describe 'testing environment setup' do
@@ -21,11 +22,23 @@ describe Gitlab::Email::Smime::Certificate do
end
end
+ describe 'generate_intermediate' do
+ subject { @intermediate_ca }
+
+ it 'generates an intermediate CA that expires a long way in the future' do
+ expect(subject[:cert].not_after).to be > 999.years.from_now
+ end
+
+ it 'generates an intermediate CA properly signed by the root CA' do
+ expect(subject[:cert].issuer).to eq(@root_ca[:cert].subject)
+ end
+ end
+
describe 'generate_cert' do
subject { @cert }
- it 'generates a cert properly signed by the root CA' do
- expect(subject[:cert].issuer).to eq(@root_ca[:cert].subject)
+ it 'generates a cert properly signed by the intermediate CA' do
+ expect(subject[:cert].issuer).to eq(@intermediate_ca[:cert].subject)
end
it 'generates a cert that expires soon' do
@@ -37,7 +50,7 @@ describe Gitlab::Email::Smime::Certificate do
end
context 'passing in INFINITE_EXPIRY' do
- subject { generate_cert(root_ca: @root_ca, expires_in: SmimeHelper::INFINITE_EXPIRY) }
+ subject { generate_cert(signer_ca: @intermediate_ca, expires_in: SmimeHelper::INFINITE_EXPIRY) }
it 'generates a cert that expires a long way in the future' do
expect(subject[:cert].not_after).to be > 999.years.from_now
@@ -50,7 +63,7 @@ describe Gitlab::Email::Smime::Certificate do
it 'parses correctly a certificate and key' do
parsed_cert = described_class.from_strings(@cert[:key].to_s, @cert[:cert].to_pem)
- common_cert_tests(parsed_cert, @cert, @root_ca)
+ common_cert_tests(parsed_cert, @cert, @intermediate_ca)
end
end
@@ -61,17 +74,43 @@ describe Gitlab::Email::Smime::Certificate do
parsed_cert = described_class.from_files('a_key', 'a_cert')
- common_cert_tests(parsed_cert, @cert, @root_ca)
+ common_cert_tests(parsed_cert, @cert, @intermediate_ca)
+ end
+
+ context 'with optional ca_certs' do
+ it 'parses correctly certificate, key and ca_certs' do
+ allow(File).to receive(:read).with('a_key').and_return(@cert[:key].to_s)
+ allow(File).to receive(:read).with('a_cert').and_return(@cert[:cert].to_pem)
+ allow(File).to receive(:read).with('a_ca_cert').and_return(@intermediate_ca[:cert].to_pem)
+
+ parsed_cert = described_class.from_files('a_key', 'a_cert', 'a_ca_cert')
+
+ common_cert_tests(parsed_cert, @cert, @intermediate_ca, with_ca_certs: [@intermediate_ca[:cert]])
+ end
+ end
+ end
+
+ context 'with no intermediate CA' do
+ it 'parses correctly a certificate and key' do
+ cert = generate_cert(signer_ca: @root_ca)
+
+ allow(File).to receive(:read).with('a_key').and_return(cert[:key].to_s)
+ allow(File).to receive(:read).with('a_cert').and_return(cert[:cert].to_pem)
+
+ parsed_cert = described_class.from_files('a_key', 'a_cert')
+
+ common_cert_tests(parsed_cert, cert, @root_ca)
end
end
- def common_cert_tests(parsed_cert, cert, root_ca)
+ def common_cert_tests(parsed_cert, cert, signer_ca, with_ca_certs: nil)
expect(parsed_cert.cert).to be_a(OpenSSL::X509::Certificate)
expect(parsed_cert.cert.subject).to eq(cert[:cert].subject)
- expect(parsed_cert.cert.issuer).to eq(root_ca[:cert].subject)
+ expect(parsed_cert.cert.issuer).to eq(signer_ca[:cert].subject)
expect(parsed_cert.cert.not_before).to eq(cert[:cert].not_before)
expect(parsed_cert.cert.not_after).to eq(cert[:cert].not_after)
expect(parsed_cert.cert.extensions).to include(an_object_having_attributes(oid: 'extendedKeyUsage', value: match('E-mail Protection')))
expect(parsed_cert.key).to be_a(OpenSSL::PKey::RSA)
+ expect(parsed_cert.ca_certs).to match_array(Array.wrap(with_ca_certs)) if with_ca_certs
end
end
diff --git a/spec/lib/gitlab/email/smime/signer_spec.rb b/spec/lib/gitlab/email/smime/signer_spec.rb
index 56048b7148c..d891b86da08 100644
--- a/spec/lib/gitlab/email/smime/signer_spec.rb
+++ b/spec/lib/gitlab/email/smime/signer_spec.rb
@@ -5,22 +5,39 @@ require 'spec_helper'
describe Gitlab::Email::Smime::Signer do
include SmimeHelper
- it 'signs data appropriately with SMIME' do
- root_certificate = generate_root
- certificate = generate_cert(root_ca: root_certificate)
+ let_it_be(:root_ca) { generate_root }
+ let_it_be(:intermediate_ca) { generate_intermediate(signer_ca: root_ca) }
+ context 'when using an intermediate CA' do
+ it 'signs data appropriately with SMIME' do
+ cert = generate_cert(signer_ca: intermediate_ca)
+
+ sign_and_verify('signed content', cert[:cert], cert[:key], root_ca[:cert], ca_certs: intermediate_ca[:cert])
+ end
+ end
+
+ context 'when not using an intermediate CA' do
+ it 'signs data appropriately with SMIME' do
+ cert = generate_cert(signer_ca: root_ca)
+
+ sign_and_verify('signed content', cert[:cert], cert[:key], root_ca[:cert])
+ end
+ end
+
+ def sign_and_verify(data, cert, key, root_ca_cert, ca_certs: nil)
signed_content = described_class.sign(
- cert: certificate[:cert],
- key: certificate[:key],
- data: 'signed content')
+ cert: cert,
+ key: key,
+ ca_certs: ca_certs,
+ data: data)
+
expect(signed_content).not_to be_nil
p7enc = described_class.verify_signature(
- cert: certificate[:cert],
- ca_cert: root_certificate[:cert],
+ ca_certs: root_ca_cert,
signed_data: signed_content)
expect(p7enc).not_to be_nil
- expect(p7enc.data).to eq('signed content')
+ expect(p7enc.data).to eq(data)
end
end
diff --git a/spec/lib/gitlab/experimentation_spec.rb b/spec/lib/gitlab/experimentation_spec.rb
index a39c50ab038..99442cb0ca6 100644
--- a/spec/lib/gitlab/experimentation_spec.rb
+++ b/spec/lib/gitlab/experimentation_spec.rb
@@ -6,19 +6,16 @@ describe Gitlab::Experimentation do
before do
stub_const('Gitlab::Experimentation::EXPERIMENTS', {
test_experiment: {
- feature_toggle: feature_toggle,
environment: environment,
- enabled_ratio: enabled_ratio,
tracking_category: 'Team'
}
})
- stub_feature_flags(feature_toggle => true)
+ allow(Feature).to receive(:get).with(:test_experiment_experiment_percentage).and_return double(percentage_of_time_value: enabled_percentage)
end
- let(:feature_toggle) { :test_experiment_toggle }
let(:environment) { Rails.env.test? }
- let(:enabled_ratio) { 0.1 }
+ let(:enabled_percentage) { 10 }
describe Gitlab::Experimentation::ControllerConcern, type: :controller do
controller(ApplicationController) do
@@ -251,44 +248,16 @@ describe Gitlab::Experimentation do
end
end
- describe 'feature toggle' do
- context 'feature toggle is not set' do
- let(:feature_toggle) { nil }
+ describe 'experiment is disabled' do
+ let(:enabled_percentage) { 0 }
- it { is_expected.to be_truthy }
- end
-
- context 'feature toggle is not set, but a feature with the experiment key as name does exist' do
- before do
- stub_feature_flags(test_experiment: false)
- end
-
- let(:feature_toggle) { nil }
-
- it { is_expected.to be_falsey }
- end
-
- context 'feature toggle is disabled' do
- before do
- stub_feature_flags(feature_toggle => false)
- end
-
- it { is_expected.to be_falsey }
- end
+ it { is_expected.to be_falsey }
end
- describe 'environment' do
- context 'environment is not set' do
- let(:environment) { nil }
-
- it { is_expected.to be_truthy }
- end
-
- context 'we are on the wrong environment' do
- let(:environment) { ::Gitlab.com? }
+ describe 'we are on the wrong environment' do
+ let(:environment) { ::Gitlab.com? }
- it { is_expected.to be_falsey }
- end
+ it { is_expected.to be_falsey }
end
end
@@ -312,12 +281,6 @@ describe Gitlab::Experimentation do
it { is_expected.to be_truthy }
- context 'enabled ratio is not set' do
- let(:enabled_ratio) { nil }
-
- it { is_expected.to be_falsey }
- end
-
describe 'experimentation_subject_index' do
context 'experimentation_subject_index is not set' do
let(:experimentation_subject_index) { nil }
diff --git a/spec/lib/gitlab/file_hook_spec.rb b/spec/lib/gitlab/file_hook_spec.rb
index d184eb483d4..fda3583289b 100644
--- a/spec/lib/gitlab/file_hook_spec.rb
+++ b/spec/lib/gitlab/file_hook_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe Gitlab::FileHook do
- let(:file_hook) { Rails.root.join('plugins', 'test.rb') }
+ let(:file_hook) { Rails.root.join('file_hooks', 'test.rb') }
let(:tmp_file) { Tempfile.new('file_hook-dump') }
let(:file_hook_source) do
diff --git a/spec/lib/gitlab/gitaly_client_spec.rb b/spec/lib/gitlab/gitaly_client_spec.rb
index b03c1feb429..2c6aee58326 100644
--- a/spec/lib/gitlab/gitaly_client_spec.rb
+++ b/spec/lib/gitlab/gitaly_client_spec.rb
@@ -19,6 +19,15 @@ describe Gitlab::GitalyClient do
})
end
+ describe '.query_time', :request_store do
+ it 'increments query times' do
+ subject.query_time += 0.451
+ subject.query_time += 0.322
+
+ expect(subject.query_time).to eq(0.773)
+ end
+ end
+
describe '.long_timeout' do
context 'default case' do
it { expect(subject.long_timeout).to eq(6.hours) }
diff --git a/spec/lib/gitlab/grape_logging/loggers/perf_logger_spec.rb b/spec/lib/gitlab/grape_logging/loggers/perf_logger_spec.rb
index 6f20b8877e0..09ba4b89a1a 100644
--- a/spec/lib/gitlab/grape_logging/loggers/perf_logger_spec.rb
+++ b/spec/lib/gitlab/grape_logging/loggers/perf_logger_spec.rb
@@ -21,7 +21,7 @@ describe Gitlab::GrapeLogging::Loggers::PerfLogger do
payload = subject.parameters(mock_request, nil)
expect(payload[:redis_calls]).to eq(1)
- expect(payload[:redis_duration_ms]).to be >= 0
+ expect(payload[:redis_duration_s]).to be >= 0
end
end
end
diff --git a/spec/lib/gitlab/grape_logging/loggers/queue_duration_logger_spec.rb b/spec/lib/gitlab/grape_logging/loggers/queue_duration_logger_spec.rb
index c0762e9892b..17c0659327d 100644
--- a/spec/lib/gitlab/grape_logging/loggers/queue_duration_logger_spec.rb
+++ b/spec/lib/gitlab/grape_logging/loggers/queue_duration_logger_spec.rb
@@ -25,11 +25,11 @@ describe Gitlab::GrapeLogging::Loggers::QueueDurationLogger do
)
end
- it 'returns the correct duration in ms' do
+ it 'returns the correct duration in seconds' do
Timecop.freeze(start_time) do
subject.before
- expect(subject.parameters(mock_request, nil)).to eq( { 'queue_duration': 1.hour.to_f * 1000 })
+ expect(subject.parameters(mock_request, nil)).to eq( { 'queue_duration_s': 1.hour.to_f })
end
end
end
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index 5d5e2fe2a33..c3b0d4fa506 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -443,6 +443,7 @@ project:
- vulnerability_scanners
- operations_feature_flags
- operations_feature_flags_client
+- operations_feature_flags_user_lists
- prometheus_alerts
- prometheus_alert_events
- self_managed_prometheus_alert_events
diff --git a/spec/lib/gitlab/import_export/project/import_task_spec.rb b/spec/lib/gitlab/import_export/project/import_task_spec.rb
index f7b9cbaa095..4f4fcd3ad8a 100644
--- a/spec/lib/gitlab/import_export/project/import_task_spec.rb
+++ b/spec/lib/gitlab/import_export/project/import_task_spec.rb
@@ -2,7 +2,7 @@
require 'rake_helper'
-describe Gitlab::ImportExport::Project::ImportTask do
+describe Gitlab::ImportExport::Project::ImportTask, :request_store do
let(:username) { 'root' }
let(:namespace_path) { username }
let!(:user) { create(:user, username: username) }
diff --git a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
index 80ae9a08257..04e8bd05666 100644
--- a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
@@ -6,7 +6,7 @@ def match_mr1_note(content_regex)
MergeRequest.find_by(title: 'MR1').notes.select { |n| n.note.match(/#{content_regex}/)}.first
end
-describe Gitlab::ImportExport::Project::TreeRestorer, quarantine: { flaky: 'https://gitlab.com/gitlab-org/gitlab/-/issues/213793' } do
+describe Gitlab::ImportExport::Project::TreeRestorer do
include ImportExport::CommonUtil
let(:shared) { project.import_export_shared }
diff --git a/spec/lib/gitlab/import_export/relation_tree_restorer_spec.rb b/spec/lib/gitlab/import_export/relation_tree_restorer_spec.rb
index 0b58a75220d..e9ef990d7b2 100644
--- a/spec/lib/gitlab/import_export/relation_tree_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/relation_tree_restorer_spec.rb
@@ -156,7 +156,7 @@ describe Gitlab::ImportExport::RelationTreeRestorer do
let(:reader) do
Gitlab::ImportExport::Reader.new(
shared: shared,
- config: Gitlab::ImportExport::Config.new(config: Gitlab::ImportExport.group_config_file).to_h
+ config: Gitlab::ImportExport::Config.new(config: Gitlab::ImportExport.legacy_group_config_file).to_h
)
end
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index 55b907fff7c..727bc3de04b 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -480,6 +480,7 @@ Service:
- pipeline_events
- job_events
- comment_on_event_enabled
+- comment_detail
- category
- default
- wiki_page_events
@@ -595,6 +596,7 @@ ProjectFeature:
- builds_access_level
- repository_access_level
- pages_access_level
+- metrics_dashboard_access_level
- created_at
- updated_at
ProtectedBranch::MergeAccessLevel:
@@ -811,6 +813,7 @@ ContainerExpirationPolicy:
- next_run_at
- project_id
- name_regex
+- name_regex_keep
- cadence
- older_than
- keep_n
diff --git a/spec/lib/gitlab/instrumentation_helper_spec.rb b/spec/lib/gitlab/instrumentation_helper_spec.rb
index 9788c9f4a3c..858fa044a52 100644
--- a/spec/lib/gitlab/instrumentation_helper_spec.rb
+++ b/spec/lib/gitlab/instrumentation_helper_spec.rb
@@ -26,7 +26,7 @@ describe Gitlab::InstrumentationHelper do
subject
expect(payload[:gitaly_calls]).to eq(1)
- expect(payload[:gitaly_duration]).to be >= 0
+ expect(payload[:gitaly_duration_s]).to be >= 0
expect(payload[:redis_calls]).to be_nil
expect(payload[:redis_duration_ms]).to be_nil
end
@@ -39,7 +39,7 @@ describe Gitlab::InstrumentationHelper do
subject
expect(payload[:redis_calls]).to eq(1)
- expect(payload[:redis_duration_ms]).to be >= 0
+ expect(payload[:redis_duration_s]).to be >= 0
expect(payload[:gitaly_calls]).to be_nil
expect(payload[:gitaly_duration]).to be_nil
end
@@ -49,12 +49,12 @@ describe Gitlab::InstrumentationHelper do
describe '.queue_duration_for_job' do
where(:enqueued_at, :created_at, :time_now, :expected_duration) do
"2019-06-01T00:00:00.000+0000" | nil | "2019-06-01T02:00:00.000+0000" | 2.hours.to_f
- "2019-06-01T02:00:00.000+0000" | nil | "2019-06-01T02:00:00.001+0000" | 0.001
+ "2019-06-01T02:00:00.000+0000" | nil | "2019-06-01T02:00:00.001+0000" | 0.0
"2019-06-01T02:00:00.000+0000" | "2019-05-01T02:00:00.000+0000" | "2019-06-01T02:00:01.000+0000" | 1
- nil | "2019-06-01T02:00:00.000+0000" | "2019-06-01T02:00:00.001+0000" | 0.001
+ nil | "2019-06-01T02:00:00.000+0000" | "2019-06-01T02:00:00.001+0000" | 0.0
nil | nil | "2019-06-01T02:00:00.001+0000" | nil
"2019-06-01T02:00:00.000+0200" | nil | "2019-06-01T02:00:00.000-0200" | 4.hours.to_f
- 1571825569.998168 | nil | "2019-10-23T12:13:16.000+0200" | 26.001832
+ 1571825569.998168 | nil | "2019-10-23T12:13:16.000+0200" | 26.00
1571825569 | nil | "2019-10-23T12:13:16.000+0200" | 27
"invalid_date" | nil | "2019-10-23T12:13:16.000+0200" | nil
"" | nil | "2019-10-23T12:13:16.000+0200" | nil
diff --git a/spec/lib/gitlab/jira_import/labels_importer_spec.rb b/spec/lib/gitlab/jira_import/labels_importer_spec.rb
index 3eb4666a74f..520bbe4fbe8 100644
--- a/spec/lib/gitlab/jira_import/labels_importer_spec.rb
+++ b/spec/lib/gitlab/jira_import/labels_importer_spec.rb
@@ -18,7 +18,7 @@ describe Gitlab::JiraImport::LabelsImporter do
let_it_be(:no_label_jira_import) { create(:jira_import_state, label: nil, project: project) }
it 'raises error' do
- expect { subject }.to raise_error(Projects::ImportService::Error, 'Failed to find import label for jira import.')
+ expect { subject }.to raise_error(Projects::ImportService::Error, 'Failed to find import label for Jira import.')
end
end
diff --git a/spec/lib/gitlab/json_spec.rb b/spec/lib/gitlab/json_spec.rb
new file mode 100644
index 00000000000..5186ab041da
--- /dev/null
+++ b/spec/lib/gitlab/json_spec.rb
@@ -0,0 +1,91 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+RSpec.describe Gitlab::Json do
+ describe ".parse" do
+ it "parses an object" do
+ expect(subject.parse('{ "foo": "bar" }')).to eq({ "foo" => "bar" })
+ end
+
+ it "parses an array" do
+ expect(subject.parse('[{ "foo": "bar" }]')).to eq([{ "foo" => "bar" }])
+ end
+
+ it "raises an error on a string" do
+ expect { subject.parse('"foo"') }.to raise_error(JSON::ParserError)
+ end
+
+ it "raises an error on a true bool" do
+ expect { subject.parse("true") }.to raise_error(JSON::ParserError)
+ end
+
+ it "raises an error on a false bool" do
+ expect { subject.parse("false") }.to raise_error(JSON::ParserError)
+ end
+ end
+
+ describe ".parse!" do
+ it "parses an object" do
+ expect(subject.parse!('{ "foo": "bar" }')).to eq({ "foo" => "bar" })
+ end
+
+ it "parses an array" do
+ expect(subject.parse!('[{ "foo": "bar" }]')).to eq([{ "foo" => "bar" }])
+ end
+
+ it "raises an error on a string" do
+ expect { subject.parse!('"foo"') }.to raise_error(JSON::ParserError)
+ end
+
+ it "raises an error on a true bool" do
+ expect { subject.parse!("true") }.to raise_error(JSON::ParserError)
+ end
+
+ it "raises an error on a false bool" do
+ expect { subject.parse!("false") }.to raise_error(JSON::ParserError)
+ end
+ end
+
+ describe ".dump" do
+ it "dumps an object" do
+ expect(subject.dump({ "foo" => "bar" })).to eq('{"foo":"bar"}')
+ end
+
+ it "dumps an array" do
+ expect(subject.dump([{ "foo" => "bar" }])).to eq('[{"foo":"bar"}]')
+ end
+
+ it "dumps a string" do
+ expect(subject.dump("foo")).to eq('"foo"')
+ end
+
+ it "dumps a true bool" do
+ expect(subject.dump(true)).to eq("true")
+ end
+
+ it "dumps a false bool" do
+ expect(subject.dump(false)).to eq("false")
+ end
+ end
+
+ describe ".generate" do
+ it "delegates to the adapter" do
+ args = [{ foo: "bar" }]
+
+ expect(JSON).to receive(:generate).with(*args)
+
+ subject.generate(*args)
+ end
+ end
+
+ describe ".pretty_generate" do
+ it "delegates to the adapter" do
+ args = [{ foo: "bar" }]
+
+ expect(JSON).to receive(:pretty_generate).with(*args)
+
+ subject.pretty_generate(*args)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/kubernetes/helm/base_command_spec.rb b/spec/lib/gitlab/kubernetes/helm/base_command_spec.rb
index c59078449b8..a11a9d08503 100644
--- a/spec/lib/gitlab/kubernetes/helm/base_command_spec.rb
+++ b/spec/lib/gitlab/kubernetes/helm/base_command_spec.rb
@@ -61,4 +61,56 @@ describe Gitlab::Kubernetes::Helm::BaseCommand do
it { is_expected.to eq('install-test-class-name') }
end
+
+ describe '#service_account_resource' do
+ let(:resource) do
+ Kubeclient::Resource.new(metadata: { name: 'tiller', namespace: 'gitlab-managed-apps' })
+ end
+
+ subject { base_command.service_account_resource }
+
+ context 'rbac is enabled' do
+ let(:rbac) { true }
+
+ it 'generates a Kubeclient resource for the tiller ServiceAccount' do
+ is_expected.to eq(resource)
+ end
+ end
+
+ context 'rbac is not enabled' do
+ let(:rbac) { false }
+
+ it 'generates nothing' do
+ is_expected.to be_nil
+ end
+ end
+ end
+
+ describe '#cluster_role_binding_resource' do
+ let(:resource) do
+ Kubeclient::Resource.new(
+ metadata: { name: 'tiller-admin' },
+ roleRef: { apiGroup: 'rbac.authorization.k8s.io', kind: 'ClusterRole', name: 'cluster-admin' },
+ subjects: [{ kind: 'ServiceAccount', name: 'tiller', namespace: 'gitlab-managed-apps' }]
+ )
+ end
+
+ subject { base_command.cluster_role_binding_resource }
+
+ context 'rbac is enabled' do
+ let(:rbac) { true }
+
+ it 'generates a Kubeclient resource for the ClusterRoleBinding for tiller' do
+ is_expected.to eq(resource)
+ end
+ end
+
+ context 'rbac is not enabled' do
+ let(:rbac) { false }
+
+ it 'generates nothing' do
+ is_expected.to be_nil
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/kubernetes/helm/init_command_spec.rb b/spec/lib/gitlab/kubernetes/helm/init_command_spec.rb
index f87ceb45766..13021a08f9f 100644
--- a/spec/lib/gitlab/kubernetes/helm/init_command_spec.rb
+++ b/spec/lib/gitlab/kubernetes/helm/init_command_spec.rb
@@ -83,56 +83,4 @@ describe Gitlab::Kubernetes::Helm::InitCommand do
end
end
end
-
- describe '#service_account_resource' do
- let(:resource) do
- Kubeclient::Resource.new(metadata: { name: 'tiller', namespace: 'gitlab-managed-apps' })
- end
-
- subject { init_command.service_account_resource }
-
- context 'rbac is enabled' do
- let(:rbac) { true }
-
- it 'generates a Kubeclient resource for the tiller ServiceAccount' do
- is_expected.to eq(resource)
- end
- end
-
- context 'rbac is not enabled' do
- let(:rbac) { false }
-
- it 'generates nothing' do
- is_expected.to be_nil
- end
- end
- end
-
- describe '#cluster_role_binding_resource' do
- let(:resource) do
- Kubeclient::Resource.new(
- metadata: { name: 'tiller-admin' },
- roleRef: { apiGroup: 'rbac.authorization.k8s.io', kind: 'ClusterRole', name: 'cluster-admin' },
- subjects: [{ kind: 'ServiceAccount', name: 'tiller', namespace: 'gitlab-managed-apps' }]
- )
- end
-
- subject { init_command.cluster_role_binding_resource }
-
- context 'rbac is enabled' do
- let(:rbac) { true }
-
- it 'generates a Kubeclient resource for the ClusterRoleBinding for tiller' do
- is_expected.to eq(resource)
- end
- end
-
- context 'rbac is not enabled' do
- let(:rbac) { false }
-
- it 'generates nothing' do
- is_expected.to be_nil
- end
- end
- end
end
diff --git a/spec/lib/gitlab/kubernetes/helm/install_command_spec.rb b/spec/lib/gitlab/kubernetes/helm/install_command_spec.rb
index f94ceae362a..a5ed8f57bf3 100644
--- a/spec/lib/gitlab/kubernetes/helm/install_command_spec.rb
+++ b/spec/lib/gitlab/kubernetes/helm/install_command_spec.rb
@@ -305,20 +305,4 @@ describe Gitlab::Kubernetes::Helm::InstallCommand do
is_expected.to eq(resource)
end
end
-
- describe '#service_account_resource' do
- subject { install_command.service_account_resource }
-
- it 'returns nothing' do
- is_expected.to be_nil
- end
- end
-
- describe '#cluster_role_binding_resource' do
- subject { install_command.cluster_role_binding_resource }
-
- it 'returns nothing' do
- is_expected.to be_nil
- end
- end
end
diff --git a/spec/lib/gitlab/kubernetes/helm/patch_command_spec.rb b/spec/lib/gitlab/kubernetes/helm/patch_command_spec.rb
index 064efebdb96..e69570f5371 100644
--- a/spec/lib/gitlab/kubernetes/helm/patch_command_spec.rb
+++ b/spec/lib/gitlab/kubernetes/helm/patch_command_spec.rb
@@ -199,20 +199,4 @@ describe Gitlab::Kubernetes::Helm::PatchCommand do
is_expected.to eq(resource)
end
end
-
- describe '#service_account_resource' do
- subject { patch_command.service_account_resource }
-
- it 'returns nothing' do
- is_expected.to be_nil
- end
- end
-
- describe '#cluster_role_binding_resource' do
- subject { patch_command.cluster_role_binding_resource }
-
- it 'returns nothing' do
- is_expected.to be_nil
- end
- end
end
diff --git a/spec/lib/gitlab/project_template_spec.rb b/spec/lib/gitlab/project_template_spec.rb
index ddc41e64147..aa18a1a843c 100644
--- a/spec/lib/gitlab/project_template_spec.rb
+++ b/spec/lib/gitlab/project_template_spec.rb
@@ -19,6 +19,7 @@ describe Gitlab::ProjectTemplate do
described_class.new('plainhtml', 'Pages/Plain HTML', 'Everything you need to get started using a plain HTML Pages site.', 'https://gitlab.com/pages/plain-html'),
described_class.new('gitbook', 'Pages/GitBook', 'Everything you need to get started using a GitBook Pages site.', 'https://gitlab.com/pages/gitbook'),
described_class.new('hexo', 'Pages/Hexo', 'Everything you need to get started using a Hexo Pages site.', 'https://gitlab.com/pages/hexo'),
+ described_class.new('sse_middleman', 'Static Site Editor/Middleman', _('Middleman project with Static Site Editor support'), 'https://gitlab.com/gitlab-org/project-templates/static-site-editor-middleman'),
described_class.new('nfhugo', 'Netlify/Hugo', _('A Hugo site that uses Netlify for CI/CD instead of GitLab, but still with all the other great GitLab features.'), 'https://gitlab.com/pages/nfhugo'),
described_class.new('nfjekyll', 'Netlify/Jekyll', _('A Jekyll site that uses Netlify for CI/CD instead of GitLab, but still with all the other great GitLab features.'), 'https://gitlab.com/pages/nfjekyll'),
described_class.new('nfplainhtml', 'Netlify/Plain HTML', _('A plain HTML site that uses Netlify for CI/CD instead of GitLab, but still with all the other great GitLab features.'), 'https://gitlab.com/pages/nfplain-html'),
diff --git a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
index db7c5f771b7..f4b939c3013 100644
--- a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
+++ b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
@@ -42,11 +42,10 @@ describe Gitlab::SidekiqLogging::StructuredLogger do
start_payload.merge(
'message' => 'TestWorker JID-da883554ee4fe414012f5f42: done: 0.0 sec',
'job_status' => 'done',
- 'duration' => 0.0,
+ 'duration_s' => 0.0,
'completed_at' => timestamp.to_f,
- 'cpu_s' => 1.111112,
- 'db_duration' => 0,
- 'db_duration_s' => 0
+ 'cpu_s' => 1.11,
+ 'db_duration_s' => 0.0
)
end
let(:exception_payload) do
@@ -160,11 +159,11 @@ describe Gitlab::SidekiqLogging::StructuredLogger do
let(:timing_data) do
{
gitaly_calls: 10,
- gitaly_duration: 10000,
+ gitaly_duration_s: 10000,
rugged_calls: 1,
- rugged_duration_ms: 5000,
+ rugged_duration_s: 5000,
redis_calls: 3,
- redis_duration_ms: 1234
+ redis_duration_s: 1234
}
end
@@ -193,12 +192,11 @@ describe Gitlab::SidekiqLogging::StructuredLogger do
let(:expected_start_payload) { start_payload.except('args') }
let(:expected_end_payload) do
- end_payload.except('args').merge('cpu_s' => a_value > 0)
+ end_payload.except('args').merge('cpu_s' => a_value >= 0)
end
let(:expected_end_payload_with_db) do
expected_end_payload.merge(
- 'db_duration' => a_value >= 100,
'db_duration_s' => a_value >= 0.1
)
end
@@ -226,7 +224,7 @@ describe Gitlab::SidekiqLogging::StructuredLogger do
let(:time) { { duration: 0.1231234, cputime: 1.2342345 } }
let(:payload) { { 'class' => 'my-class', 'message' => 'my-message', 'job_status' => 'my-job-status' } }
let(:current_utc_time) { Time.now.utc }
- let(:payload_with_time_keys) { { 'class' => 'my-class', 'message' => 'my-message', 'job_status' => 'my-job-status', 'duration' => 0.123123, 'cpu_s' => 1.234235, 'completed_at' => current_utc_time.to_f } }
+ let(:payload_with_time_keys) { { 'class' => 'my-class', 'message' => 'my-message', 'job_status' => 'my-job-status', 'duration_s' => 0.12, 'cpu_s' => 1.23, 'completed_at' => current_utc_time.to_f } }
subject { described_class.new }
diff --git a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/server_spec.rb b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/server_spec.rb
index 0ea248fbcf1..312ebd30a76 100644
--- a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/server_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/server_spec.rb
@@ -21,18 +21,9 @@ describe Gitlab::SidekiqMiddleware::DuplicateJobs::Server, :clean_gitlab_redis_q
end
around do |example|
- Sidekiq::Testing.inline! { example.run }
- end
-
- before(:context) do
- Sidekiq::Testing.server_middleware do |chain|
+ with_sidekiq_server_middleware do |chain|
chain.add described_class
- end
- end
-
- after(:context) do
- Sidekiq::Testing.server_middleware do |chain|
- chain.remove described_class
+ Sidekiq::Testing.inline! { example.run }
end
end
diff --git a/spec/lib/gitlab/sidekiq_middleware/worker_context/server_spec.rb b/spec/lib/gitlab/sidekiq_middleware/worker_context/server_spec.rb
index f64ebece930..fdf643a8ad1 100644
--- a/spec/lib/gitlab/sidekiq_middleware/worker_context/server_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/worker_context/server_spec.rb
@@ -41,18 +41,9 @@ describe Gitlab::SidekiqMiddleware::WorkerContext::Server do
end
around do |example|
- Sidekiq::Testing.inline! { example.run }
- end
-
- before(:context) do
- Sidekiq::Testing.server_middleware do |chain|
+ with_sidekiq_server_middleware do |chain|
chain.add described_class
- end
- end
-
- after(:context) do
- Sidekiq::Testing.server_middleware do |chain|
- chain.remove described_class
+ Sidekiq::Testing.inline! { example.run }
end
end
diff --git a/spec/lib/gitlab/sidekiq_middleware_spec.rb b/spec/lib/gitlab/sidekiq_middleware_spec.rb
index 32c1807ba6e..752ec6a0a3f 100644
--- a/spec/lib/gitlab/sidekiq_middleware_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware_spec.rb
@@ -28,11 +28,16 @@ describe Gitlab::SidekiqMiddleware do
# 2) yielding exactly once
describe '.server_configurator' do
around do |example|
- original = Sidekiq::Testing.server_middleware.dup
-
- example.run
-
- Sidekiq::Testing.instance_variable_set :@server_chain, original
+ with_sidekiq_server_middleware do |chain|
+ described_class.server_configurator(
+ metrics: metrics,
+ arguments_logger: arguments_logger,
+ memory_killer: memory_killer,
+ request_store: request_store
+ ).call(chain)
+
+ example.run
+ end
end
let(:middleware_expected_args) { [a_kind_of(worker_class), hash_including({ 'args' => job_args }), anything] }
@@ -54,21 +59,17 @@ describe Gitlab::SidekiqMiddleware do
end
let(:enabled_sidekiq_middlewares) { all_sidekiq_middlewares - disabled_sidekiq_middlewares }
- before do
- Sidekiq::Testing.server_middleware.clear
- Sidekiq::Testing.server_middleware(&described_class.server_configurator(
- metrics: metrics,
- arguments_logger: arguments_logger,
- memory_killer: memory_killer,
- request_store: request_store
- ))
-
- enabled_sidekiq_middlewares.each do |middleware|
- expect_any_instance_of(middleware).to receive(:call).with(*middleware_expected_args).once.and_call_original
- end
+ shared_examples "a server middleware chain" do
+ it "passes through the right server middlewares" do
+ enabled_sidekiq_middlewares.each do |middleware|
+ expect_any_instance_of(middleware).to receive(:call).with(*middleware_expected_args).once.and_call_original
+ end
- disabled_sidekiq_middlewares.each do |middleware|
- expect_any_instance_of(Gitlab::SidekiqMiddleware::ArgumentsLogger).not_to receive(:call)
+ disabled_sidekiq_middlewares.each do |middleware|
+ expect_any_instance_of(middleware).not_to receive(:call)
+ end
+
+ worker_class.perform_async(*job_args)
end
end
@@ -86,9 +87,7 @@ describe Gitlab::SidekiqMiddleware do
]
end
- it "passes through server middlewares" do
- worker_class.perform_async(*job_args)
- end
+ it_behaves_like "a server middleware chain"
end
context "all optional middlewares on" do
@@ -98,9 +97,7 @@ describe Gitlab::SidekiqMiddleware do
let(:request_store) { true }
let(:disabled_sidekiq_middlewares) { [] }
- it "passes through server middlewares" do
- worker_class.perform_async(*job_args)
- end
+ it_behaves_like "a server middleware chain"
context "server metrics" do
let(:gitaly_histogram) { double(:gitaly_histogram) }
diff --git a/spec/lib/gitlab/slash_commands/presenters/issue_show_spec.rb b/spec/lib/gitlab/slash_commands/presenters/issue_show_spec.rb
index 56d6bf1c788..47b9a67f54f 100644
--- a/spec/lib/gitlab/slash_commands/presenters/issue_show_spec.rb
+++ b/spec/lib/gitlab/slash_commands/presenters/issue_show_spec.rb
@@ -3,7 +3,8 @@
require 'spec_helper'
describe Gitlab::SlashCommands::Presenters::IssueShow do
- let(:project) { create(:project) }
+ let(:user) { create(:user, :with_avatar) }
+ let(:project) { create(:project, creator: user) }
let(:issue) { create(:issue, project: project) }
let(:attachment) { subject[:attachments].first }
@@ -15,6 +16,7 @@ describe Gitlab::SlashCommands::Presenters::IssueShow do
expect(subject[:response_type]).to be(:in_channel)
expect(subject).to have_key(:attachments)
expect(attachment[:title]).to start_with(issue.title)
+ expect(attachment[:author_icon]).to eq(user.avatar_url(only_path: false))
end
context 'with upvotes' do
diff --git a/spec/lib/gitlab/utils_spec.rb b/spec/lib/gitlab/utils_spec.rb
index d3780d22241..e34367cbbf9 100644
--- a/spec/lib/gitlab/utils_spec.rb
+++ b/spec/lib/gitlab/utils_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
describe Gitlab::Utils do
delegate :to_boolean, :boolean_to_yes_no, :slugify, :random_string, :which,
:ensure_array_from_string, :to_exclusive_sentence, :bytes_to_megabytes,
- :append_path, :check_path_traversal!, to: :described_class
+ :append_path, :check_path_traversal!, :ms_to_round_sec, to: :described_class
describe '.check_path_traversal!' do
it 'detects path traversal at the start of the string' do
@@ -55,6 +55,22 @@ describe Gitlab::Utils do
end
end
+ describe '.ms_to_round_sec' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:original, :expected) do
+ 1999.8999 | 2
+ 12384 | 12.38
+ 333 | 0.33
+ end
+
+ with_them do
+ it "returns rounded seconds" do
+ expect(ms_to_round_sec(original)).to eq(expected)
+ end
+ end
+ end
+
describe '.to_exclusive_sentence' do
it 'calls #to_sentence on the array' do
array = double
diff --git a/spec/lib/gitlab/workhorse_spec.rb b/spec/lib/gitlab/workhorse_spec.rb
index 921ed568b71..d12a2823d0e 100644
--- a/spec/lib/gitlab/workhorse_spec.rb
+++ b/spec/lib/gitlab/workhorse_spec.rb
@@ -24,7 +24,7 @@ describe Gitlab::Workhorse do
let(:ref) { 'master' }
let(:format) { 'zip' }
let(:storage_path) { Gitlab.config.gitlab.repository_downloads_path }
- let(:path) { 'some/path' if Feature.enabled?(:git_archive_path, default_enabled: true) }
+ let(:path) { 'some/path' }
let(:metadata) { repository.archive_metadata(ref, storage_path, format, append_sha: nil, path: path) }
let(:cache_disabled) { false }
@@ -36,70 +36,36 @@ describe Gitlab::Workhorse do
allow(described_class).to receive(:git_archive_cache_disabled?).and_return(cache_disabled)
end
- context 'feature flag disabled' do
- before do
- stub_feature_flags(git_archive_path: false)
- end
-
- it 'sets the header correctly' do
- key, command, params = decode_workhorse_header(subject)
+ it 'sets the header correctly' do
+ key, command, params = decode_workhorse_header(subject)
- expected_params = metadata.merge(
- 'GitalyRepository' => repository.gitaly_repository.to_h,
- 'GitalyServer' => {
- features: { 'gitaly-feature-foobar' => 'true' },
- address: Gitlab::GitalyClient.address(project.repository_storage),
- token: Gitlab::GitalyClient.token(project.repository_storage)
- }
+ expect(key).to eq('Gitlab-Workhorse-Send-Data')
+ expect(command).to eq('git-archive')
+ expect(params).to eq({
+ 'GitalyServer' => {
+ features: { 'gitaly-feature-foobar' => 'true' },
+ address: Gitlab::GitalyClient.address(project.repository_storage),
+ token: Gitlab::GitalyClient.token(project.repository_storage)
+ },
+ 'ArchivePath' => metadata['ArchivePath'],
+ 'GetArchiveRequest' => Base64.encode64(
+ Gitaly::GetArchiveRequest.new(
+ repository: repository.gitaly_repository,
+ commit_id: metadata['CommitId'],
+ prefix: metadata['ArchivePrefix'],
+ format: Gitaly::GetArchiveRequest::Format::ZIP,
+ path: path
+ ).to_proto
)
-
- expect(key).to eq('Gitlab-Workhorse-Send-Data')
- expect(command).to eq('git-archive')
- expect(params).to eq(expected_params.deep_stringify_keys)
- end
-
- context 'when archive caching is disabled' do
- let(:cache_disabled) { true }
-
- it 'tells workhorse not to use the cache' do
- _, _, params = decode_workhorse_header(subject)
- expect(params).to include({ 'DisableCache' => true })
- end
- end
+ }.deep_stringify_keys)
end
- context 'feature flag enabled' do
- it 'sets the header correctly' do
- key, command, params = decode_workhorse_header(subject)
-
- expect(key).to eq('Gitlab-Workhorse-Send-Data')
- expect(command).to eq('git-archive')
- expect(params).to eq({
- 'GitalyServer' => {
- features: { 'gitaly-feature-foobar' => 'true' },
- address: Gitlab::GitalyClient.address(project.repository_storage),
- token: Gitlab::GitalyClient.token(project.repository_storage)
- },
- 'ArchivePath' => metadata['ArchivePath'],
- 'GetArchiveRequest' => Base64.encode64(
- Gitaly::GetArchiveRequest.new(
- repository: repository.gitaly_repository,
- commit_id: metadata['CommitId'],
- prefix: metadata['ArchivePrefix'],
- format: Gitaly::GetArchiveRequest::Format::ZIP,
- path: path
- ).to_proto
- )
- }.deep_stringify_keys)
- end
-
- context 'when archive caching is disabled' do
- let(:cache_disabled) { true }
+ context 'when archive caching is disabled' do
+ let(:cache_disabled) { true }
- it 'tells workhorse not to use the cache' do
- _, _, params = decode_workhorse_header(subject)
- expect(params).to include({ 'DisableCache' => true })
- end
+ it 'tells workhorse not to use the cache' do
+ _, _, params = decode_workhorse_header(subject)
+ expect(params).to include({ 'DisableCache' => true })
end
end
diff --git a/spec/lib/marginalia_spec.rb b/spec/lib/marginalia_spec.rb
index d4b84c5cdc4..2f446694083 100644
--- a/spec/lib/marginalia_spec.rb
+++ b/spec/lib/marginalia_spec.rb
@@ -24,20 +24,6 @@ describe 'Marginalia spec' do
end
end
- def add_sidekiq_middleware
- # Reference: https://github.com/mperham/sidekiq/wiki/Testing#testing-server-middlewaresidekiq
- # Sidekiq test harness fakes worker without its server middlewares, so include instrumentation to 'Sidekiq::Testing' server middleware.
- Sidekiq::Testing.server_middleware do |chain|
- chain.add Marginalia::SidekiqInstrumentation::Middleware
- end
- end
-
- def remove_sidekiq_middleware
- Sidekiq::Testing.server_middleware do |chain|
- chain.remove Marginalia::SidekiqInstrumentation::Middleware
- end
- end
-
def stub_feature(value)
allow(Gitlab::Marginalia).to receive(:cached_feature_enabled?).and_return(value)
end
@@ -88,20 +74,16 @@ describe 'Marginalia spec' do
end
describe 'for Sidekiq worker jobs' do
- before(:all) do
- add_sidekiq_middleware
-
- # Because of faking, 'Sidekiq.server?' does not work so implicitly set application name which is done in config/initializers/0_marginalia.rb
- Marginalia.application_name = "sidekiq"
+ around do |example|
+ with_sidekiq_server_middleware do |chain|
+ chain.add Marginalia::SidekiqInstrumentation::Middleware
+ Marginalia.application_name = "sidekiq"
+ example.run
+ end
end
after(:all) do
MarginaliaTestJob.clear
- remove_sidekiq_middleware
- end
-
- around do |example|
- Sidekiq::Testing.fake! { example.run }
end
before do
diff --git a/spec/lib/rspec_flaky/flaky_example_spec.rb b/spec/lib/rspec_flaky/flaky_example_spec.rb
index 47c88e053e1..d4a1d6c882a 100644
--- a/spec/lib/rspec_flaky/flaky_example_spec.rb
+++ b/spec/lib/rspec_flaky/flaky_example_spec.rb
@@ -77,7 +77,7 @@ describe RspecFlaky::FlakyExample, :aggregate_failures do
it 'updates the first_flaky_at' do
now = Time.now
- expected_first_flaky_at = flaky_example.first_flaky_at ? flaky_example.first_flaky_at : now
+ expected_first_flaky_at = flaky_example.first_flaky_at || now
Timecop.freeze(now) { flaky_example.update_flakiness! }
expect(flaky_example.first_flaky_at).to eq(expected_first_flaky_at)
diff --git a/spec/mailers/emails/issues_spec.rb b/spec/mailers/emails/issues_spec.rb
index 5b5bd6f4308..dfd974aa5f3 100644
--- a/spec/mailers/emails/issues_spec.rb
+++ b/spec/mailers/emails/issues_spec.rb
@@ -6,6 +6,12 @@ require 'email_spec'
describe Emails::Issues do
include EmailSpec::Matchers
+ it 'adds email methods to Notify' do
+ subject.instance_methods.each do |email_method|
+ expect(Notify).to be_respond_to(email_method)
+ end
+ end
+
describe "#import_issues_csv_email" do
let(:user) { create(:user) }
let(:project) { create(:project) }
@@ -39,4 +45,47 @@ describe Emails::Issues do
it_behaves_like 'appearance header and footer not enabled'
end
end
+
+ describe '#issues_csv_email' do
+ let(:user) { create(:user) }
+ let(:empty_project) { create(:project, path: 'myproject') }
+ let(:export_status) { { truncated: false, rows_expected: 3, rows_written: 3 } }
+ let(:attachment) { subject.attachments.first }
+
+ subject { Notify.issues_csv_email(user, empty_project, "dummy content", export_status) }
+
+ include_context 'gitlab email notification'
+
+ it 'attachment has csv mime type' do
+ expect(attachment.mime_type).to eq 'text/csv'
+ end
+
+ it 'generates a useful filename' do
+ expect(attachment.filename).to include(Date.today.year.to_s)
+ expect(attachment.filename).to include('issues')
+ expect(attachment.filename).to include('myproject')
+ expect(attachment.filename).to end_with('.csv')
+ end
+
+ it 'mentions number of issues and project name' do
+ expect(subject).to have_content '3'
+ expect(subject).to have_content empty_project.name
+ end
+
+ it "doesn't need to mention truncation by default" do
+ expect(subject).not_to have_content 'truncated'
+ end
+
+ context 'when truncated' do
+ let(:export_status) { { truncated: true, rows_expected: 12, rows_written: 10 } }
+
+ it 'mentions that the csv has been truncated' do
+ expect(subject).to have_content 'truncated'
+ end
+
+ it 'mentions the number of issues written and expected' do
+ expect(subject).to have_content '10 of 12 issues'
+ end
+ end
+ end
end
diff --git a/spec/migrations/cleanup_empty_commit_user_mentions_spec.rb b/spec/migrations/cleanup_empty_commit_user_mentions_spec.rb
index 7e6afbec520..529fe046e32 100644
--- a/spec/migrations/cleanup_empty_commit_user_mentions_spec.rb
+++ b/spec/migrations/cleanup_empty_commit_user_mentions_spec.rb
@@ -14,7 +14,7 @@ describe CleanupEmptyCommitUserMentions, :migration, :sidekiq do
let(:project) { projects.create!(name: 'gitlab1', path: 'gitlab1', namespace_id: group.id, visibility_level: 0) }
let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '', 'group/project') }
- let(:commit) { Commit.new(RepoHelpers.sample_commit, project.becomes(Project)) }
+ let(:commit) { Commit.new(RepoHelpers.sample_commit, project) }
let(:commit_user_mentions) { table(:commit_user_mentions) }
let!(:resource1) { notes.create!(commit_id: commit.id, noteable_type: 'Commit', project_id: project.id, author_id: user.id, note: 'note1 for @root to check') }
diff --git a/spec/migrations/cleanup_optimistic_locking_nulls_pt2_spec.rb b/spec/migrations/cleanup_optimistic_locking_nulls_pt2_spec.rb
new file mode 100644
index 00000000000..8d7d5ac2d26
--- /dev/null
+++ b/spec/migrations/cleanup_optimistic_locking_nulls_pt2_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20200217210353_cleanup_optimistic_locking_nulls_pt2')
+
+describe CleanupOptimisticLockingNullsPt2, :migration do
+ let(:ci_stages) { table(:ci_stages) }
+ let(:ci_builds) { table(:ci_builds) }
+ let(:ci_pipelines) { table(:ci_pipelines) }
+ let(:tables) { [ci_stages, ci_builds, ci_pipelines] }
+
+ before do
+ # Create necessary rows
+ ci_stages.create!
+ ci_builds.create!
+ ci_pipelines.create!
+
+ # Nullify `lock_version` column for all rows
+ # Needs to be done with a SQL fragment, otherwise Rails will coerce it to 0
+ tables.each do |table|
+ table.update_all('lock_version = NULL')
+ end
+ end
+
+ it 'correctly migrates nullified lock_version column', :sidekiq_might_not_need_inline do
+ tables.each do |table|
+ expect(table.where(lock_version: nil).count).to eq(1)
+ end
+
+ tables.each do |table|
+ expect(table.where(lock_version: 0).count).to eq(0)
+ end
+
+ migrate!
+
+ tables.each do |table|
+ expect(table.where(lock_version: nil).count).to eq(0)
+ end
+
+ tables.each do |table|
+ expect(table.where(lock_version: 0).count).to eq(1)
+ end
+ end
+end
diff --git a/spec/migrations/cleanup_optimistic_locking_nulls_spec.rb b/spec/migrations/cleanup_optimistic_locking_nulls_spec.rb
index d32a374b914..6e541c903ff 100644
--- a/spec/migrations/cleanup_optimistic_locking_nulls_spec.rb
+++ b/spec/migrations/cleanup_optimistic_locking_nulls_spec.rb
@@ -4,11 +4,10 @@ require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20200128210353_cleanup_optimistic_locking_nulls')
describe CleanupOptimisticLockingNulls do
- TABLES = %w(epics merge_requests issues).freeze
- TABLES.each do |table|
- let(table.to_sym) { table(table.to_sym) }
- end
- let(:tables) { TABLES.map { |t| method(t.to_sym).call } }
+ let(:epics) { table(:epics) }
+ let(:merge_requests) { table(:merge_requests) }
+ let(:issues) { table(:issues) }
+ let(:tables) { [epics, merge_requests, issues] }
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
diff --git a/spec/migrations/migrate_commit_notes_mentions_to_db_spec.rb b/spec/migrations/migrate_commit_notes_mentions_to_db_spec.rb
index aa78381ba3a..dc40d0865f2 100644
--- a/spec/migrations/migrate_commit_notes_mentions_to_db_spec.rb
+++ b/spec/migrations/migrate_commit_notes_mentions_to_db_spec.rb
@@ -14,7 +14,7 @@ describe MigrateCommitNotesMentionsToDb, :migration, :sidekiq do
let(:project) { projects.create!(name: 'gitlab1', path: 'gitlab1', namespace_id: group.id, visibility_level: 0) }
let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '', 'group/project') }
- let(:commit) { Commit.new(RepoHelpers.sample_commit, project.becomes(Project)) }
+ let(:commit) { Commit.new(RepoHelpers.sample_commit, project) }
let(:commit_user_mentions) { table(:commit_user_mentions) }
let!(:resource1) { notes.create!(commit_id: commit.id, noteable_type: 'Commit', project_id: project.id, author_id: user.id, note: 'note1 for @root to check') }
diff --git a/spec/migrations/schedule_backfill_push_rules_id_in_projects_spec.rb b/spec/migrations/schedule_backfill_push_rules_id_in_projects_spec.rb
index 77648f5c64a..77721eab77d 100644
--- a/spec/migrations/schedule_backfill_push_rules_id_in_projects_spec.rb
+++ b/spec/migrations/schedule_backfill_push_rules_id_in_projects_spec.rb
@@ -20,6 +20,20 @@ describe ScheduleBackfillPushRulesIdInProjects do
expect(setting.push_rule_id).to eq(sample_rule.id)
end
+ it 'adds global rule association to last application settings when there is more than one record without failing' do
+ application_settings = table(:application_settings)
+ setting_old = application_settings.create!
+ setting = application_settings.create!
+ sample_rule = push_rules.create!(is_sample: true)
+
+ Sidekiq::Testing.fake! do
+ disable_migrations_output { migrate! }
+ end
+
+ expect(setting_old.reload.push_rule_id).to be_nil
+ expect(setting.reload.push_rule_id).to eq(sample_rule.id)
+ end
+
it 'schedules worker to migrate project push rules' do
rule_1 = push_rules.create!
rule_2 = push_rules.create!
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index ff9f26faad3..5e9226989a5 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -2280,6 +2280,7 @@ describe Ci::Build do
{ key: 'CI_REGISTRY_USER', value: 'gitlab-ci-token', public: true, masked: false },
{ key: 'CI_REGISTRY_PASSWORD', value: 'my-token', public: false, masked: true },
{ key: 'CI_REPOSITORY_URL', value: build.repo_url, public: false, masked: false },
+ { key: 'CI_JOB_JWT', value: 'ci.job.jwt', public: false, masked: true },
{ key: 'CI_JOB_NAME', value: 'test', public: true, masked: false },
{ key: 'CI_JOB_STAGE', value: 'test', public: true, masked: false },
{ key: 'CI_NODE_TOTAL', value: '1', public: true, masked: false },
@@ -2332,23 +2333,36 @@ describe Ci::Build do
end
before do
+ allow(Gitlab::Ci::Jwt).to receive(:for_build).with(build).and_return('ci.job.jwt')
build.set_token('my-token')
build.yaml_variables = []
end
it { is_expected.to eq(predefined_variables) }
+ context 'when ci_job_jwt feature flag is disabled' do
+ before do
+ stub_feature_flags(ci_job_jwt: false)
+ end
+
+ it 'CI_JOB_JWT is not included' do
+ expect(subject.pluck(:key)).not_to include('CI_JOB_JWT')
+ end
+ end
+
describe 'variables ordering' do
context 'when variables hierarchy is stubbed' do
let(:build_pre_var) { { key: 'build', value: 'value', public: true, masked: false } }
let(:project_pre_var) { { key: 'project', value: 'value', public: true, masked: false } }
let(:pipeline_pre_var) { { key: 'pipeline', value: 'value', public: true, masked: false } }
let(:build_yaml_var) { { key: 'yaml', value: 'value', public: true, masked: false } }
+ let(:job_jwt_var) { { key: 'CI_JOB_JWT', value: 'ci.job.jwt', public: false, masked: true } }
before do
allow(build).to receive(:predefined_variables) { [build_pre_var] }
allow(build).to receive(:yaml_variables) { [build_yaml_var] }
allow(build).to receive(:persisted_variables) { [] }
+ allow(build).to receive(:job_jwt_variables) { [job_jwt_var] }
allow_any_instance_of(Project)
.to receive(:predefined_variables) { [project_pre_var] }
@@ -2361,7 +2375,8 @@ describe Ci::Build do
it 'returns variables in order depending on resource hierarchy' do
is_expected.to eq(
- [build_pre_var,
+ [job_jwt_var,
+ build_pre_var,
project_pre_var,
pipeline_pre_var,
build_yaml_var,
@@ -3797,8 +3812,13 @@ describe Ci::Build do
create(:ci_job_artifact, :junit_with_corrupted_data, job: build, project: build.project)
end
- it 'raises an error' do
- expect { subject }.to raise_error(Gitlab::Ci::Parsers::Test::Junit::JunitParserError)
+ it 'returns no test data and includes a suite_error message' do
+ expect { subject }.not_to raise_error
+
+ expect(test_reports.get_suite(build.name).total_count).to eq(0)
+ expect(test_reports.get_suite(build.name).success_count).to eq(0)
+ expect(test_reports.get_suite(build.name).failed_count).to eq(0)
+ expect(test_reports.get_suite(build.name).suite_error).to eq('JUnit XML parsing failed: 1:1: FATAL: Document is empty')
end
end
end
@@ -3851,6 +3871,48 @@ describe Ci::Build do
end
end
+ describe '#collect_terraform_reports!' do
+ let(:terraform_reports) { Gitlab::Ci::Reports::TerraformReports.new }
+
+ it 'returns an empty hash' do
+ expect(build.collect_terraform_reports!(terraform_reports).plans).to eq({})
+ end
+
+ context 'when build has a terraform report' do
+ context 'when there is a valid tfplan.json' do
+ before do
+ create(:ci_job_artifact, :terraform, job: build, project: build.project)
+ end
+
+ it 'parses blobs and add the results to the terraform report' do
+ expect { build.collect_terraform_reports!(terraform_reports) }.not_to raise_error
+
+ expect(terraform_reports.plans).to match(
+ a_hash_including(
+ 'tfplan.json' => a_hash_including(
+ 'create' => 0,
+ 'update' => 1,
+ 'delete' => 0
+ )
+ )
+ )
+ end
+ end
+
+ context 'when there is an invalid tfplan.json' do
+ before do
+ create(:ci_job_artifact, :terraform_with_corrupted_data, job: build, project: build.project)
+ end
+
+ it 'raises an error' do
+ expect { build.collect_terraform_reports!(terraform_reports) }.to raise_error(
+ Gitlab::Ci::Parsers::Terraform::Tfplan::TfplanParserError
+ )
+ end
+ end
+ end
+ end
+
describe '#report_artifacts' do
subject { build.report_artifacts }
diff --git a/spec/models/ci/job_artifact_spec.rb b/spec/models/ci/job_artifact_spec.rb
index 6f6ff3704b4..f9e33657ca3 100644
--- a/spec/models/ci/job_artifact_spec.rb
+++ b/spec/models/ci/job_artifact_spec.rb
@@ -86,6 +86,22 @@ describe Ci::JobArtifact do
end
end
+ describe '.terraform_reports' do
+ context 'when there is a terraform report' do
+ it 'returns the job artifact' do
+ artifact = create(:ci_job_artifact, :terraform)
+
+ expect(described_class.terraform_reports).to eq([artifact])
+ end
+ end
+
+ context 'when there are no terraform reports' do
+ it 'returns an empty array' do
+ expect(described_class.terraform_reports).to eq([])
+ end
+ end
+ end
+
describe '.erasable' do
subject { described_class.erasable }
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index 90412136c1d..c9eddd76a24 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -364,6 +364,16 @@ describe Ci::Pipeline, :mailer do
end
end
+ context 'when pipeline has a terraform report' do
+ it 'selects the pipeline' do
+ pipeline_with_report = create(:ci_pipeline, :with_terraform_reports)
+
+ expect(described_class.with_reports(Ci::JobArtifact.terraform_reports)).to eq(
+ [pipeline_with_report]
+ )
+ end
+ end
+
context 'when pipeline does not have metrics reports' do
subject { described_class.with_reports(Ci::JobArtifact.test_reports) }
diff --git a/spec/models/ci/processable_spec.rb b/spec/models/ci/processable_spec.rb
index 4490371bde5..d57b962c972 100644
--- a/spec/models/ci/processable_spec.rb
+++ b/spec/models/ci/processable_spec.rb
@@ -6,16 +6,12 @@ describe Ci::Processable do
let_it_be(:project) { create(:project) }
let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
- let_it_be(:detached_merge_request_pipeline) do
- create(:ci_pipeline, :detached_merge_request_pipeline, :with_job, project: project)
- end
-
- let_it_be(:legacy_detached_merge_request_pipeline) do
- create(:ci_pipeline, :legacy_detached_merge_request_pipeline, :with_job, project: project)
- end
+ describe 'delegations' do
+ subject { Ci::Processable.new }
- let_it_be(:merged_result_pipeline) do
- create(:ci_pipeline, :merged_result_pipeline, :with_job, project: project)
+ it { is_expected.to delegate_method(:merge_request?).to(:pipeline) }
+ it { is_expected.to delegate_method(:merge_request_ref?).to(:pipeline) }
+ it { is_expected.to delegate_method(:legacy_detached_merge_request_pipeline?).to(:pipeline) }
end
describe '#aggregated_needs_names' do
@@ -52,69 +48,34 @@ describe Ci::Processable do
end
describe 'validate presence of scheduling_type' do
- context 'on create' do
- let(:processable) do
- build(
- :ci_build, :created, project: project, pipeline: pipeline,
- importing: importing, scheduling_type: nil
- )
- end
-
- context 'when importing' do
- let(:importing) { true }
-
- context 'when validate_scheduling_type_of_processables is true' do
- before do
- stub_feature_flags(validate_scheduling_type_of_processables: true)
- end
+ using RSpec::Parameterized::TableSyntax
- it 'does not validate' do
- expect(processable).to be_valid
- end
- end
+ subject { build(:ci_build, project: project, pipeline: pipeline, importing: importing) }
- context 'when validate_scheduling_type_of_processables is false' do
- before do
- stub_feature_flags(validate_scheduling_type_of_processables: false)
- end
+ where(:importing, :validate_scheduling_type_flag, :should_validate) do
+ false | true | true
+ false | false | false
+ true | true | false
+ true | false | false
+ end
- it 'does not validate' do
- expect(processable).to be_valid
- end
- end
+ with_them do
+ before do
+ stub_feature_flags(validate_scheduling_type_of_processables: validate_scheduling_type_flag)
end
- context 'when not importing' do
- let(:importing) { false }
-
- context 'when validate_scheduling_type_of_processables is true' do
- before do
- stub_feature_flags(validate_scheduling_type_of_processables: true)
- end
-
- it 'validates' do
- expect(processable).not_to be_valid
- end
- end
-
- context 'when validate_scheduling_type_of_processables is false' do
- before do
- stub_feature_flags(validate_scheduling_type_of_processables: false)
- end
-
- it 'does not validate' do
- expect(processable).to be_valid
+ context 'on create' do
+ it 'validates presence' do
+ if should_validate
+ is_expected.to validate_presence_of(:scheduling_type).on(:create)
+ else
+ is_expected.not_to validate_presence_of(:scheduling_type).on(:create)
end
end
end
- end
- context 'on update' do
- let(:processable) { create(:ci_build, :created, project: project, pipeline: pipeline) }
-
- it 'does not validate' do
- processable.scheduling_type = nil
- expect(processable).to be_valid
+ context 'on update' do
+ it { is_expected.not_to validate_presence_of(:scheduling_type).on(:update) }
end
end
end
@@ -147,6 +108,8 @@ describe Ci::Processable do
describe '#needs_attributes' do
let(:build) { create(:ci_build, :created, project: project, pipeline: pipeline) }
+ subject { build.needs_attributes }
+
context 'with needs' do
before do
create(:ci_build_need, build: build, name: 'test1')
@@ -154,7 +117,7 @@ describe Ci::Processable do
end
it 'returns all needs attributes' do
- expect(build.needs_attributes).to contain_exactly(
+ is_expected.to contain_exactly(
{ 'artifacts' => true, 'name' => 'test1' },
{ 'artifacts' => true, 'name' => 'test2' }
)
@@ -162,75 +125,7 @@ describe Ci::Processable do
end
context 'without needs' do
- it 'returns all needs attributes' do
- expect(build.needs_attributes).to be_empty
- end
- end
- end
-
- describe '#merge_request?' do
- subject { pipeline.processables.first.merge_request? }
-
- context 'in a detached merge request pipeline' do
- let(:pipeline) { detached_merge_request_pipeline }
-
- it { is_expected.to eq(pipeline.merge_request?) }
- end
-
- context 'in a legacy detached merge_request_pipeline' do
- let(:pipeline) { legacy_detached_merge_request_pipeline }
-
- it { is_expected.to eq(pipeline.merge_request?) }
- end
-
- context 'in a pipeline for merged results' do
- let(:pipeline) { merged_result_pipeline }
-
- it { is_expected.to eq(pipeline.merge_request?) }
- end
- end
-
- describe '#merge_request_ref?' do
- subject { pipeline.processables.first.merge_request_ref? }
-
- context 'in a detached merge request pipeline' do
- let(:pipeline) { detached_merge_request_pipeline }
-
- it { is_expected.to eq(pipeline.merge_request_ref?) }
- end
-
- context 'in a legacy detached merge_request_pipeline' do
- let(:pipeline) { legacy_detached_merge_request_pipeline }
-
- it { is_expected.to eq(pipeline.merge_request_ref?) }
- end
-
- context 'in a pipeline for merged results' do
- let(:pipeline) { merged_result_pipeline }
-
- it { is_expected.to eq(pipeline.merge_request_ref?) }
- end
- end
-
- describe '#legacy_detached_merge_request_pipeline?' do
- subject { pipeline.processables.first.legacy_detached_merge_request_pipeline? }
-
- context 'in a detached merge request pipeline' do
- let(:pipeline) { detached_merge_request_pipeline }
-
- it { is_expected.to eq(pipeline.legacy_detached_merge_request_pipeline?) }
- end
-
- context 'in a legacy detached merge_request_pipeline' do
- let(:pipeline) { legacy_detached_merge_request_pipeline }
-
- it { is_expected.to eq(pipeline.legacy_detached_merge_request_pipeline?) }
- end
-
- context 'in a pipeline for merged results' do
- let(:pipeline) { merged_result_pipeline }
-
- it { is_expected.to eq(pipeline.legacy_detached_merge_request_pipeline?) }
+ it { is_expected.to be_empty }
end
end
end
diff --git a/spec/models/ci/runner_spec.rb b/spec/models/ci/runner_spec.rb
index b8034ba5bf2..2dedff7f15b 100644
--- a/spec/models/ci/runner_spec.rb
+++ b/spec/models/ci/runner_spec.rb
@@ -78,6 +78,36 @@ describe Ci::Runner do
.to raise_error(ActiveRecord::RecordInvalid)
end
end
+
+ context 'cost factors validations' do
+ it 'disallows :private_projects_minutes_cost_factor being nil' do
+ runner = build(:ci_runner, private_projects_minutes_cost_factor: nil)
+
+ expect(runner).to be_invalid
+ expect(runner.errors.full_messages).to include('Private projects minutes cost factor needs to be non-negative')
+ end
+
+ it 'disallows :public_projects_minutes_cost_factor being nil' do
+ runner = build(:ci_runner, public_projects_minutes_cost_factor: nil)
+
+ expect(runner).to be_invalid
+ expect(runner.errors.full_messages).to include('Public projects minutes cost factor needs to be non-negative')
+ end
+
+ it 'disallows :private_projects_minutes_cost_factor being negative' do
+ runner = build(:ci_runner, private_projects_minutes_cost_factor: -1.1)
+
+ expect(runner).to be_invalid
+ expect(runner.errors.full_messages).to include('Private projects minutes cost factor needs to be non-negative')
+ end
+
+ it 'disallows :public_projects_minutes_cost_factor being negative' do
+ runner = build(:ci_runner, public_projects_minutes_cost_factor: -2.2)
+
+ expect(runner).to be_invalid
+ expect(runner.errors.full_messages).to include('Public projects minutes cost factor needs to be non-negative')
+ end
+ end
end
describe 'constraints' do
diff --git a/spec/models/clusters/applications/elastic_stack_spec.rb b/spec/models/clusters/applications/elastic_stack_spec.rb
index b0992c43d11..6b8cd37dca2 100644
--- a/spec/models/clusters/applications/elastic_stack_spec.rb
+++ b/spec/models/clusters/applications/elastic_stack_spec.rb
@@ -20,9 +20,10 @@ describe Clusters::Applications::ElasticStack do
it 'is initialized with elastic stack arguments' do
expect(subject.name).to eq('elastic-stack')
expect(subject.chart).to eq('stable/elastic-stack')
- expect(subject.version).to eq('1.9.0')
+ expect(subject.version).to eq('2.0.0')
expect(subject).to be_rbac
expect(subject.files).to eq(elastic_stack.files)
+ expect(subject.preinstall).to be_empty
end
context 'on a non rbac enabled cluster' do
@@ -33,11 +34,23 @@ describe Clusters::Applications::ElasticStack do
it { is_expected.not_to be_rbac }
end
+ context 'on versions older than 2' do
+ before do
+ elastic_stack.status = elastic_stack.status_states[:updating]
+ elastic_stack.version = "1.9.0"
+ end
+
+ it 'includes a preinstall script' do
+ expect(subject.preinstall).not_to be_empty
+ expect(subject.preinstall.first).to include("filebeat.enable")
+ end
+ end
+
context 'application failed to install previously' do
let(:elastic_stack) { create(:clusters_applications_elastic_stack, :errored, version: '0.0.1') }
it 'is initialized with the locked version' do
- expect(subject.version).to eq('1.9.0')
+ expect(subject.version).to eq('2.0.0')
end
end
end
diff --git a/spec/models/clusters/applications/ingress_spec.rb b/spec/models/clusters/applications/ingress_spec.rb
index 64d667f40f6..b070729ccac 100644
--- a/spec/models/clusters/applications/ingress_spec.rb
+++ b/spec/models/clusters/applications/ingress_spec.rb
@@ -219,6 +219,12 @@ describe Clusters::Applications::Ingress do
expect(subject.values).to include('extraContainers')
end
+
+ it 'includes livenessProbe for modsecurity sidecar container' do
+ probe_config = YAML.safe_load(subject.values).dig('controller', 'extraContainers', 0, 'livenessProbe')
+
+ expect(probe_config).to eq('exec' => { 'command' => ['ls', '/var/log/modsec/audit.log'] })
+ end
end
context 'when modsecurity_enabled is disabled' do
diff --git a/spec/models/concerns/issuable_spec.rb b/spec/models/concerns/issuable_spec.rb
index cc1bb164c16..24908785320 100644
--- a/spec/models/concerns/issuable_spec.rb
+++ b/spec/models/concerns/issuable_spec.rb
@@ -496,6 +496,40 @@ describe Issuable do
end
end
+ describe '.labels_hash' do
+ let(:feature_label) { create(:label, title: 'Feature') }
+ let(:second_label) { create(:label, title: 'Second Label') }
+ let!(:issues) { create_list(:labeled_issue, 3, labels: [feature_label, second_label]) }
+ let(:issue_id) { issues.first.id }
+
+ it 'maps issue ids to labels titles' do
+ expect(Issue.labels_hash[issue_id]).to include('Feature')
+ end
+
+ it 'works on relations filtered by multiple labels' do
+ relation = Issue.with_label(['Feature', 'Second Label'])
+
+ expect(relation.labels_hash[issue_id]).to include('Feature', 'Second Label')
+ end
+
+ # This tests the workaround for the lack of a NOT NULL constraint in
+ # label_links.label_id:
+ # https://gitlab.com/gitlab-org/gitlab/issues/197307
+ context 'with a NULL label ID in the link' do
+ let(:issue) { create(:labeled_issue, labels: [feature_label, second_label]) }
+
+ before do
+ label_link = issue.label_links.find_by(label_id: second_label.id)
+ label_link.label_id = nil
+ label_link.save(validate: false)
+ end
+
+ it 'filters out bad labels' do
+ expect(Issue.where(id: issue.id).labels_hash[issue.id]).to match_array(['Feature'])
+ end
+ end
+ end
+
describe '#user_notes_count' do
let(:project) { create(:project) }
let(:issue1) { create(:issue, project: project) }
diff --git a/spec/models/concerns/spammable_spec.rb b/spec/models/concerns/spammable_spec.rb
index b8537dd39f6..a8d27e174b7 100644
--- a/spec/models/concerns/spammable_spec.rb
+++ b/spec/models/concerns/spammable_spec.rb
@@ -39,43 +39,100 @@ describe Spammable do
describe '#invalidate_if_spam' do
using RSpec::Parameterized::TableSyntax
+ before do
+ stub_application_setting(recaptcha_enabled: true)
+ end
+
context 'when the model is spam' do
- where(:recaptcha_enabled, :error) do
- true | /solve the reCAPTCHA to proceed/
- false | /has been discarded/
+ subject { invalidate_if_spam(is_spam: true) }
+
+ it 'has an error related to spam on the model' do
+ expect(subject.errors.messages[:base]).to match_array /has been discarded/
end
+ end
- with_them do
- subject { invalidate_if_spam(true, recaptcha_enabled) }
+ context 'when the model needs recaptcha' do
+ subject { invalidate_if_spam(needs_recaptcha: true) }
- it 'has an error related to spam on the model' do
- expect(subject.errors.messages[:base]).to match_array error
- end
+ it 'has an error related to spam on the model' do
+ expect(subject.errors.messages[:base]).to match_array /solve the reCAPTCHA/
end
end
- context 'when the model is not spam' do
- [true, false].each do |enabled|
- let(:recaptcha_enabled) { enabled }
+ context 'if the model is spam and also needs recaptcha' do
+ subject { invalidate_if_spam(is_spam: true, needs_recaptcha: true) }
+
+ it 'has an error related to spam on the model' do
+ expect(subject.errors.messages[:base]).to match_array /solve the reCAPTCHA/
+ end
+ end
- subject { invalidate_if_spam(false, recaptcha_enabled) }
+ context 'when the model is not spam nor needs recaptcha' do
+ subject { invalidate_if_spam }
- it 'returns no error' do
- expect(subject.errors.messages[:base]).to be_empty
- end
+ it 'returns no error' do
+ expect(subject.errors.messages[:base]).to be_empty
end
end
- def invalidate_if_spam(is_spam, recaptcha_enabled)
- stub_application_setting(recaptcha_enabled: recaptcha_enabled)
+ context 'if recaptcha is not enabled and the model needs recaptcha' do
+ before do
+ stub_application_setting(recaptcha_enabled: false)
+ end
+
+ subject { invalidate_if_spam(needs_recaptcha: true) }
+ it 'has an error related to spam on the model' do
+ expect(subject.errors.messages[:base]).to match_array /has been discarded/
+ end
+ end
+
+ def invalidate_if_spam(is_spam: false, needs_recaptcha: false)
issue.tap do |i|
i.spam = is_spam
+ i.needs_recaptcha = needs_recaptcha
i.invalidate_if_spam
end
end
end
+ describe 'spam flags' do
+ before do
+ issue.spam = false
+ issue.needs_recaptcha = false
+ end
+
+ describe '#spam!' do
+ it 'adds only `spam` flag' do
+ issue.spam!
+
+ expect(issue.spam).to be_truthy
+ expect(issue.needs_recaptcha).to be_falsey
+ end
+ end
+
+ describe '#needs_recaptcha!' do
+ it 'adds `needs_recaptcha` flag' do
+ issue.needs_recaptcha!
+
+ expect(issue.spam).to be_falsey
+ expect(issue.needs_recaptcha).to be_truthy
+ end
+ end
+
+ describe '#clear_spam_flags!' do
+ it 'clears spam and recaptcha flags' do
+ issue.spam = true
+ issue.needs_recaptcha = true
+
+ issue.clear_spam_flags!
+
+ expect(issue).not_to be_spam
+ expect(issue.needs_recaptcha).to be_falsey
+ end
+ end
+ end
+
describe '#submittable_as_spam_by?' do
let(:admin) { build(:admin) }
let(:user) { build(:user) }
diff --git a/spec/models/cycle_analytics/group_level_spec.rb b/spec/models/cycle_analytics/group_level_spec.rb
index 5ba0f078df1..ac169ebc0cf 100644
--- a/spec/models/cycle_analytics/group_level_spec.rb
+++ b/spec/models/cycle_analytics/group_level_spec.rb
@@ -38,7 +38,7 @@ describe CycleAnalytics::GroupLevel do
end
it 'returns medians for each stage for a specific group' do
- expect(subject.summary.map { |summary| summary[:value] }).to contain_exactly(0.1, 1, 1)
+ expect(subject.summary.map { |summary| summary[:value] }).to contain_exactly('0.1', '1', '1')
end
end
end
diff --git a/spec/models/import_failure_spec.rb b/spec/models/import_failure_spec.rb
index d6574791a65..d286a4ad314 100644
--- a/spec/models/import_failure_spec.rb
+++ b/spec/models/import_failure_spec.rb
@@ -3,7 +3,28 @@
require 'spec_helper'
describe ImportFailure do
- describe "Associations" do
+ describe 'Scopes' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:correlation_id) { 'ABC' }
+ let_it_be(:hard_failure) { create(:import_failure, :hard_failure, project: project, correlation_id_value: correlation_id) }
+ let_it_be(:soft_failure) { create(:import_failure, :soft_failure, project: project, correlation_id_value: correlation_id) }
+ let_it_be(:unrelated_failure) { create(:import_failure, project: project) }
+
+ it 'returns hard failures given a correlation ID' do
+ expect(ImportFailure.hard_failures_by_correlation_id(correlation_id)).to eq([hard_failure])
+ end
+
+ it 'orders hard failures by newest first' do
+ older_failure = hard_failure.dup
+ Timecop.freeze(1.day.before(hard_failure.created_at)) do
+ older_failure.save!
+
+ expect(ImportFailure.hard_failures_by_correlation_id(correlation_id)).to eq([hard_failure, older_failure])
+ end
+ end
+ end
+
+ describe 'Associations' do
it { is_expected.to belong_to(:project) }
it { is_expected.to belong_to(:group) }
end
diff --git a/spec/models/jira_import_state_spec.rb b/spec/models/jira_import_state_spec.rb
index f75a17f71b2..4d91bf25b5e 100644
--- a/spec/models/jira_import_state_spec.rb
+++ b/spec/models/jira_import_state_spec.rb
@@ -130,8 +130,10 @@ describe JiraImportState do
context 'after transition to finished' do
let!(:jira_import) { build(:jira_import_state, :started, jid: 'some-other-jid', project: project)}
+ subject { jira_import.finish }
+
it 'triggers the import job' do
- jira_import.finish
+ subject
expect(jira_import.jid).to be_nil
end
@@ -139,11 +141,25 @@ describe JiraImportState do
it 'triggers the import job' do
jira_import.update!(status: :scheduled)
- jira_import.finish
+ subject
expect(jira_import.status).to eq('scheduled')
expect(jira_import.jid).to eq('some-other-jid')
end
+
+ it 'updates the record with imported issues counts' do
+ import_label = create(:label, project: project, title: 'jira-import')
+ create_list(:labeled_issue, 3, project: project, labels: [import_label])
+
+ expect(Gitlab::JiraImport).to receive(:get_import_label_id).and_return(import_label.id)
+ expect(Gitlab::JiraImport).to receive(:issue_failures).and_return(2)
+
+ subject
+
+ expect(jira_import.total_issue_count).to eq(5)
+ expect(jira_import.failed_to_import_count).to eq(2)
+ expect(jira_import.imported_issues_count).to eq(3)
+ end
end
end
end
diff --git a/spec/models/merge_request_diff_spec.rb b/spec/models/merge_request_diff_spec.rb
index 6d2ad3f0475..016af4f269b 100644
--- a/spec/models/merge_request_diff_spec.rb
+++ b/spec/models/merge_request_diff_spec.rb
@@ -566,6 +566,45 @@ describe MergeRequestDiff do
it 'returns affected file paths' do
expect(subject.modified_paths).to eq(%w{foo bar baz})
end
+
+ context "when fallback_on_overflow is true" do
+ let(:merge_request) { create(:merge_request, source_branch: 'feature', target_branch: 'master') }
+ let(:diff) { merge_request.merge_request_diff }
+
+ # before do
+ # # Temporarily unstub diff.modified_paths in favor of original code
+ # #
+ # allow(diff).to receive(:modified_paths).and_call_original
+ # end
+
+ context "when the merge_request_diff is overflowed" do
+ before do
+ expect(diff).to receive(:overflow?).and_return(true)
+ end
+
+ it "returns file paths via project.repository#diff_stats" do
+ expect(diff.project.repository).to receive(:diff_stats).and_call_original
+
+ expect(
+ diff.modified_paths(fallback_on_overflow: true)
+ ).to eq(diff.modified_paths)
+ end
+ end
+
+ context "when the merge_request_diff is not overflowed" do
+ before do
+ expect(subject).to receive(:overflow?).and_return(false)
+ end
+
+ it "returns expected file paths without calling #modified_paths_for_overflowed_mr" do
+ expect(subject.project.repository).not_to receive(:diff_stats)
+
+ expect(
+ subject.modified_paths(fallback_on_overflow: true)
+ ).to eq(subject.modified_paths)
+ end
+ end
+ end
end
describe '#opening_external_diff' do
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index 52cd31ee65f..bf95be86eea 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -1628,6 +1628,26 @@ describe MergeRequest do
end
end
+ describe '#has_terraform_reports?' do
+ let_it_be(:project) { create(:project, :repository) }
+
+ context 'when head pipeline has terraform reports' do
+ it 'returns true' do
+ merge_request = create(:merge_request, :with_terraform_reports, source_project: project)
+
+ expect(merge_request.has_terraform_reports?).to be_truthy
+ end
+ end
+
+ context 'when head pipeline does not have terraform reports' do
+ it 'returns false' do
+ merge_request = create(:merge_request, source_project: project)
+
+ expect(merge_request.has_terraform_reports?).to be_falsey
+ end
+ end
+ end
+
describe '#calculate_reactive_cache' do
let(:project) { create(:project, :repository) }
let(:merge_request) { create(:merge_request, source_project: project) }
@@ -3332,7 +3352,7 @@ describe MergeRequest do
describe 'check_state?' do
it 'indicates whether MR is still checking for mergeability' do
state_machine = described_class.state_machines[:merge_status]
- check_states = [:unchecked, :cannot_be_merged_recheck, :checking]
+ check_states = [:unchecked, :cannot_be_merged_recheck, :cannot_be_merged_rechecking, :checking]
check_states.each do |merge_status|
expect(state_machine.check_state?(merge_status)).to be true
diff --git a/spec/models/namespace/root_storage_size_spec.rb b/spec/models/namespace/root_storage_size_spec.rb
new file mode 100644
index 00000000000..60d33c975be
--- /dev/null
+++ b/spec/models/namespace/root_storage_size_spec.rb
@@ -0,0 +1,85 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Namespace::RootStorageSize, type: :model do
+ let(:namespace) { create(:namespace) }
+ let(:current_size) { 50.megabytes }
+ let(:limit) { 100 }
+ let(:model) { described_class.new(namespace) }
+ let(:create_statistics) { create(:namespace_root_storage_statistics, namespace: namespace, storage_size: current_size)}
+
+ before do
+ create_statistics
+
+ stub_application_setting(namespace_storage_size_limit: limit)
+ end
+
+ describe '#above_size_limit?' do
+ subject { model.above_size_limit? }
+
+ context 'when limit is 0' do
+ let(:limit) { 0 }
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when below limit' do
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when above limit' do
+ let(:current_size) { 101.megabytes }
+
+ it { is_expected.to eq(true) }
+ end
+ end
+
+ describe '#usage_ratio' do
+ subject { model.usage_ratio }
+
+ it { is_expected.to eq(0.5) }
+
+ context 'when limit is 0' do
+ let(:limit) { 0 }
+
+ it { is_expected.to eq(0) }
+ end
+
+ context 'when there are no root_storage_statistics' do
+ let(:create_statistics) { nil }
+
+ it { is_expected.to eq(0) }
+ end
+ end
+
+ describe '#current_size' do
+ subject { model.current_size }
+
+ it { is_expected.to eq(current_size) }
+ end
+
+ describe '#limit' do
+ subject { model.limit }
+
+ it { is_expected.to eq(limit.megabytes) }
+ end
+
+ describe '#show_alert?' do
+ subject { model.show_alert? }
+
+ it { is_expected.to eq(true) }
+
+ context 'when limit is 0' do
+ let(:limit) { 0 }
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when is below threshold' do
+ let(:current_size) { 49.megabytes }
+
+ it { is_expected.to eq(false) }
+ end
+ end
+end
diff --git a/spec/models/project_feature_spec.rb b/spec/models/project_feature_spec.rb
index 6a333898955..38fba5ea071 100644
--- a/spec/models/project_feature_spec.rb
+++ b/spec/models/project_feature_spec.rb
@@ -27,7 +27,7 @@ describe ProjectFeature do
end
describe '#feature_available?' do
- let(:features) { %w(issues wiki builds merge_requests snippets repository pages) }
+ let(:features) { %w(issues wiki builds merge_requests snippets repository pages metrics_dashboard) }
context 'when features are disabled' do
it "returns false" do
@@ -123,7 +123,7 @@ describe ProjectFeature do
end
context 'public features' do
- features = %w(issues wiki builds merge_requests snippets repository)
+ features = %w(issues wiki builds merge_requests snippets repository metrics_dashboard)
features.each do |feature|
it "does not allow public access level for #{feature}" do
diff --git a/spec/models/project_import_state_spec.rb b/spec/models/project_import_state_spec.rb
index 720dc4f435f..cb34d898a6e 100644
--- a/spec/models/project_import_state_spec.rb
+++ b/spec/models/project_import_state_spec.rb
@@ -3,7 +3,10 @@
require 'spec_helper'
describe ProjectImportState, type: :model do
- subject { create(:import_state) }
+ let_it_be(:correlation_id) { 'cid' }
+ let_it_be(:import_state, refind: true) { create(:import_state, correlation_id_value: correlation_id) }
+
+ subject { import_state }
describe 'associations' do
it { is_expected.to belong_to(:project) }
@@ -33,12 +36,24 @@ describe ProjectImportState, type: :model do
end
it 'records job and correlation IDs', :sidekiq_might_not_need_inline do
- allow(Labkit::Correlation::CorrelationId).to receive(:current_or_new_id).and_return('abc')
+ allow(Labkit::Correlation::CorrelationId).to receive(:current_or_new_id).and_return(correlation_id)
import_state.schedule
expect(import_state.jid).to be_an_instance_of(String)
- expect(import_state.correlation_id).to eq('abc')
+ expect(import_state.correlation_id).to eq(correlation_id)
+ end
+ end
+
+ describe '#relation_hard_failures' do
+ let_it_be(:failures) { create_list(:import_failure, 2, :hard_failure, project: import_state.project, correlation_id_value: correlation_id) }
+
+ it 'returns hard relation failures related to this import' do
+ expect(subject.relation_hard_failures(limit: 100)).to match_array(failures)
+ end
+
+ it 'limits returned collection to given maximum' do
+ expect(subject.relation_hard_failures(limit: 1).size).to eq(1)
end
end
diff --git a/spec/models/project_services/jira_service_spec.rb b/spec/models/project_services/jira_service_spec.rb
index 32e6b5afce5..cffd5232a7e 100644
--- a/spec/models/project_services/jira_service_spec.rb
+++ b/spec/models/project_services/jira_service_spec.rb
@@ -69,11 +69,23 @@ describe JiraService do
end
describe '.reference_pattern' do
- it_behaves_like 'allows project key on reference pattern'
+ using RSpec::Parameterized::TableSyntax
- it 'does not allow # on the code' do
- expect(described_class.reference_pattern.match('#123')).to be_nil
- expect(described_class.reference_pattern.match('1#23#12')).to be_nil
+ where(:key, :result) do
+ '#123' | ''
+ '1#23#12' | ''
+ 'JIRA-1234A' | 'JIRA-1234'
+ 'JIRA-1234-some_tag' | 'JIRA-1234'
+ 'JIRA-1234_some_tag' | 'JIRA-1234'
+ 'EXT_EXT-1234' | 'EXT_EXT-1234'
+ 'EXT3_EXT-1234' | 'EXT3_EXT-1234'
+ '3EXT_EXT-1234' | ''
+ end
+
+ with_them do
+ specify do
+ expect(described_class.reference_pattern.match(key).to_s).to eq(result)
+ end
end
end
diff --git a/spec/models/project_services/mattermost_slash_commands_service_spec.rb b/spec/models/project_services/mattermost_slash_commands_service_spec.rb
index 87e482059f2..836181929e3 100644
--- a/spec/models/project_services/mattermost_slash_commands_service_spec.rb
+++ b/spec/models/project_services/mattermost_slash_commands_service_spec.rb
@@ -121,5 +121,12 @@ describe MattermostSlashCommandsService do
end
end
end
+
+ describe '#chat_responder' do
+ it 'returns the responder to use for Mattermost' do
+ expect(described_class.new.chat_responder)
+ .to eq(Gitlab::Chat::Responder::Mattermost)
+ end
+ end
end
end
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index 6d4283ab666..4e75ef4fc87 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -2645,18 +2645,6 @@ describe Project do
end
end
- describe '#daily_statistics_enabled?' do
- it { is_expected.to be_daily_statistics_enabled }
-
- context 'when :project_daily_statistics is disabled for the project' do
- before do
- stub_feature_flags(project_daily_statistics: { thing: subject, enabled: false })
- end
-
- it { is_expected.not_to be_daily_statistics_enabled }
- end
- end
-
describe '#change_head' do
let(:project) { create(:project, :repository) }
diff --git a/spec/models/resource_milestone_event_spec.rb b/spec/models/resource_milestone_event_spec.rb
index 1b0181e3fd2..bf8672f95c9 100644
--- a/spec/models/resource_milestone_event_spec.rb
+++ b/spec/models/resource_milestone_event_spec.rb
@@ -52,4 +52,30 @@ describe ResourceMilestoneEvent, type: :model do
end
end
end
+
+ shared_examples 'a milestone action queryable resource event' do |expected_results_for_actions|
+ [Issue, MergeRequest].each do |klass|
+ expected_results_for_actions.each do |action, expected_result|
+ it "is #{expected_result} for action #{action} on #{klass.name.underscore}" do
+ model = create(klass.name.underscore)
+ key = model.class.name.underscore
+ event = build(described_class.name.underscore.to_sym, key => model, action: action)
+
+ expect(event.send(query_method)).to eq(expected_result)
+ end
+ end
+ end
+ end
+
+ describe '#added?' do
+ it_behaves_like 'a milestone action queryable resource event', { add: true, remove: false } do
+ let(:query_method) { :add? }
+ end
+ end
+
+ describe '#removed?' do
+ it_behaves_like 'a milestone action queryable resource event', { add: false, remove: true } do
+ let(:query_method) { :remove? }
+ end
+ end
end
diff --git a/spec/models/terraform/state_spec.rb b/spec/models/terraform/state_spec.rb
index 1d677e7ece5..3cd15e23ee2 100644
--- a/spec/models/terraform/state_spec.rb
+++ b/spec/models/terraform/state_spec.rb
@@ -5,24 +5,35 @@ require 'spec_helper'
describe Terraform::State do
subject { create(:terraform_state, :with_file) }
+ let(:terraform_state_file) { fixture_file('terraform/terraform.tfstate') }
+
it { is_expected.to belong_to(:project) }
+ it { is_expected.to belong_to(:locked_by_user).class_name('User') }
+
it { is_expected.to validate_presence_of(:project_id) }
before do
stub_terraform_state_object_storage(Terraform::StateUploader)
end
- describe '#file_store' do
- context 'when no value is set' do
- it 'returns the default store of the uploader' do
- [ObjectStorage::Store::LOCAL, ObjectStorage::Store::REMOTE].each do |store|
- expect(Terraform::StateUploader).to receive(:default_store).and_return(store)
- expect(described_class.new.file_store).to eq(store)
- end
+ describe '#file' do
+ context 'when a file exists' do
+ it 'does not use the default file' do
+ expect(subject.file.read).to eq(terraform_state_file)
end
end
+ context 'when no file exists' do
+ subject { create(:terraform_state) }
+
+ it 'creates a default file' do
+ expect(subject.file.read).to eq('{"version":1}')
+ end
+ end
+ end
+
+ describe '#file_store' do
context 'when a value is set' do
it 'returns the value' do
[ObjectStorage::Store::LOCAL, ObjectStorage::Store::REMOTE].each do |store|
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index 5a3e16baa87..8597397c3c6 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -4357,18 +4357,19 @@ describe User, :do_not_mock_admin_mode do
describe 'internal methods' do
let_it_be(:user) { create(:user) }
- let!(:ghost) { described_class.ghost }
- let!(:alert_bot) { described_class.alert_bot }
- let!(:non_internal) { [user] }
- let!(:internal) { [ghost, alert_bot] }
+ let_it_be(:ghost) { described_class.ghost }
+ let_it_be(:alert_bot) { described_class.alert_bot }
+ let_it_be(:project_bot) { create(:user, :project_bot) }
+ let_it_be(:non_internal) { [user, project_bot] }
+ let_it_be(:internal) { [ghost, alert_bot] }
it 'returns internal users' do
- expect(described_class.internal).to eq(internal)
+ expect(described_class.internal).to match_array(internal)
expect(internal.all?(&:internal?)).to eq(true)
end
it 'returns non internal users' do
- expect(described_class.non_internal).to eq(non_internal)
+ expect(described_class.non_internal).to match_array(non_internal)
expect(non_internal.all?(&:internal?)).to eq(false)
end
@@ -4420,9 +4421,12 @@ describe User, :do_not_mock_admin_mode do
it 'returns corresponding users' do
human = create(:user)
bot = create(:user, :bot)
+ project_bot = create(:user, :project_bot)
expect(described_class.humans).to match_array([human])
- expect(described_class.bots).to match_array([bot])
+ expect(described_class.bots).to match_array([bot, project_bot])
+ expect(described_class.bots_without_project_bot).to match_array([bot])
+ expect(described_class.with_project_bots).to match_array([human, project_bot])
end
end
@@ -4655,4 +4659,30 @@ describe User, :do_not_mock_admin_mode do
it { is_expected.to be :locked }
end
end
+
+ describe '#password_required?' do
+ let_it_be(:user) { create(:user) }
+
+ shared_examples 'does not require password to be present' do
+ it { expect(user).not_to validate_presence_of(:password) }
+
+ it { expect(user).not_to validate_presence_of(:password_confirmation) }
+ end
+
+ context 'when user is an internal user' do
+ before do
+ user.update(user_type: 'alert_bot')
+ end
+
+ it_behaves_like 'does not require password to be present'
+ end
+
+ context 'when user is a project bot user' do
+ before do
+ user.update(user_type: 'project_bot')
+ end
+
+ it_behaves_like 'does not require password to be present'
+ end
+ end
end
diff --git a/spec/models/user_type_enums_spec.rb b/spec/models/user_type_enums_spec.rb
new file mode 100644
index 00000000000..4f56e6ea96e
--- /dev/null
+++ b/spec/models/user_type_enums_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe UserTypeEnums do
+ it '.types' do
+ expect(described_class.types.keys).to include('alert_bot', 'project_bot', 'human', 'ghost')
+ end
+
+ it '.bots' do
+ expect(described_class.bots.keys).to include('alert_bot', 'project_bot')
+ end
+end
diff --git a/spec/policies/global_policy_spec.rb b/spec/policies/global_policy_spec.rb
index 2d261241486..5e77b64a408 100644
--- a/spec/policies/global_policy_spec.rb
+++ b/spec/policies/global_policy_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
describe GlobalPolicy do
include TermsHelper
+ let_it_be(:project_bot) { create(:user, :project_bot) }
let(:current_user) { create(:user) }
let(:user) { create(:user) }
@@ -120,6 +121,12 @@ describe GlobalPolicy do
it { is_expected.to be_allowed(:access_api) }
end
+ context 'project bot' do
+ let(:current_user) { project_bot }
+
+ it { is_expected.to be_allowed(:access_api) }
+ end
+
context 'when terms are enforced' do
before do
enforce_terms
@@ -203,6 +210,12 @@ describe GlobalPolicy do
it { is_expected.not_to be_allowed(:receive_notifications) }
end
+
+ context 'project bot' do
+ let(:current_user) { project_bot }
+
+ it { is_expected.not_to be_allowed(:receive_notifications) }
+ end
end
describe 'git access' do
@@ -265,6 +278,12 @@ describe GlobalPolicy do
it { is_expected.to be_allowed(:access_git) }
end
end
+
+ context 'project bot' do
+ let(:current_user) { project_bot }
+
+ it { is_expected.to be_allowed(:access_git) }
+ end
end
describe 'read instance metadata' do
@@ -361,6 +380,12 @@ describe GlobalPolicy do
it { is_expected.not_to be_allowed(:use_slash_commands) }
end
+
+ context 'project bot' do
+ let(:current_user) { project_bot }
+
+ it { is_expected.to be_allowed(:use_slash_commands) }
+ end
end
describe 'create_snippet' do
@@ -380,4 +405,12 @@ describe GlobalPolicy do
it { is_expected.not_to be_allowed(:create_snippet) }
end
end
+
+ describe 'log in' do
+ context 'project bot' do
+ let(:current_user) { project_bot }
+
+ it { is_expected.not_to be_allowed(:log_in) }
+ end
+ end
end
diff --git a/spec/requests/api/deploy_tokens_spec.rb b/spec/requests/api/deploy_tokens_spec.rb
index 5948c3d719f..499c334d491 100644
--- a/spec/requests/api/deploy_tokens_spec.rb
+++ b/spec/requests/api/deploy_tokens_spec.rb
@@ -205,10 +205,11 @@ describe API::DeployTokens do
context 'deploy token creation' do
shared_examples 'creating a deploy token' do |entity, unauthenticated_response|
+ let(:expires_time) { 1.year.from_now }
let(:params) do
{
name: 'Foo',
- expires_at: 1.year.from_now,
+ expires_at: expires_time,
scopes: [
'read_repository'
],
@@ -240,6 +241,10 @@ describe API::DeployTokens do
expect(response).to have_gitlab_http_status(:created)
expect(response).to match_response_schema('public_api/v4/deploy_token')
+ expect(json_response['name']).to eq('Foo')
+ expect(json_response['scopes']).to eq(['read_repository'])
+ expect(json_response['username']).to eq('Bar')
+ expect(json_response['expires_at'].to_time.to_i).to eq(expires_time.to_i)
end
context 'with no optional params given' do
diff --git a/spec/requests/api/graphql/boards/board_lists_query_spec.rb b/spec/requests/api/graphql/boards/board_lists_query_spec.rb
new file mode 100644
index 00000000000..711c20784b0
--- /dev/null
+++ b/spec/requests/api/graphql/boards/board_lists_query_spec.rb
@@ -0,0 +1,137 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'get board lists' do
+ include GraphqlHelpers
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:unauth_user) { create(:user) }
+ let_it_be(:project) { create(:project, creator_id: user.id, namespace: user.namespace) }
+ let_it_be(:group) { create(:group, :private) }
+ let_it_be(:project_label) { create(:label, project: project, name: 'Development') }
+ let_it_be(:project_label2) { create(:label, project: project, name: 'Testing') }
+ let_it_be(:group_label) { create(:group_label, group: group, name: 'Development') }
+ let_it_be(:group_label2) { create(:group_label, group: group, name: 'Testing') }
+
+ let(:params) { '' }
+ let(:board) { }
+ let(:board_parent_type) { board_parent.class.to_s.downcase }
+ let(:board_data) { graphql_data[board_parent_type]['boards']['edges'].first['node'] }
+ let(:lists_data) { board_data['lists']['edges'] }
+ let(:start_cursor) { board_data['lists']['pageInfo']['startCursor'] }
+ let(:end_cursor) { board_data['lists']['pageInfo']['endCursor'] }
+
+ def query(list_params = params)
+ graphql_query_for(
+ board_parent_type,
+ { 'fullPath' => board_parent.full_path },
+ <<~BOARDS
+ boards(first: 1) {
+ edges {
+ node {
+ #{field_with_params('lists', list_params)} {
+ pageInfo {
+ startCursor
+ endCursor
+ }
+ edges {
+ node {
+ #{all_graphql_fields_for('board_lists'.classify)}
+ }
+ }
+ }
+ }
+ }
+ }
+ BOARDS
+ )
+ end
+
+ shared_examples 'group and project board lists query' do
+ let!(:board) { create(:board, resource_parent: board_parent) }
+
+ context 'when the user does not have access to the board' do
+ it 'returns nil' do
+ post_graphql(query, current_user: unauth_user)
+
+ expect(graphql_data[board_parent_type]).to be_nil
+ end
+ end
+
+ context 'when user can read the board' do
+ before do
+ board_parent.add_reporter(user)
+ end
+
+ describe 'sorting and pagination' do
+ context 'when using default sorting' do
+ let!(:label_list) { create(:list, board: board, label: label, position: 10) }
+ let!(:label_list2) { create(:list, board: board, label: label2, position: 2) }
+ let!(:backlog_list) { create(:backlog_list, board: board) }
+ let(:closed_list) { board.lists.find_by(list_type: :closed) }
+
+ before do
+ post_graphql(query, current_user: user)
+ end
+
+ it_behaves_like 'a working graphql query'
+
+ context 'when ascending' do
+ let(:lists) { [backlog_list, label_list2, label_list, closed_list] }
+ let(:expected_list_gids) do
+ lists.map { |list| list.to_global_id.to_s }
+ end
+
+ it 'sorts lists' do
+ expect(grab_ids).to eq expected_list_gids
+ end
+
+ context 'when paginating' do
+ let(:params) { 'first: 2' }
+
+ it 'sorts boards' do
+ expect(grab_ids).to eq expected_list_gids.first(2)
+
+ cursored_query = query("after: \"#{end_cursor}\"")
+ post_graphql(cursored_query, current_user: user)
+
+ response_data = grab_list_data(response.body)
+
+ expect(grab_ids(response_data)).to eq expected_list_gids.drop(2).first(2)
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+
+ describe 'for a project' do
+ let(:board_parent) { project }
+ let(:label) { project_label }
+ let(:label2) { project_label2 }
+
+ it_behaves_like 'group and project board lists query'
+ end
+
+ describe 'for a group' do
+ let(:board_parent) { group }
+ let(:label) { group_label }
+ let(:label2) { group_label2 }
+
+ before do
+ allow(board_parent).to receive(:multiple_issue_boards_available?).and_return(false)
+ end
+
+ it_behaves_like 'group and project board lists query'
+ end
+
+ def grab_ids(data = lists_data)
+ data.map { |list| list.dig('node', 'id') }
+ end
+
+ def grab_list_data(response_body)
+ JSON.parse(response_body)['data'][board_parent_type]['boards']['edges'][0]['node']['lists']['edges']
+ end
+end
diff --git a/spec/requests/api/graphql/metrics/dashboard/annotations_spec.rb b/spec/requests/api/graphql/metrics/dashboard/annotations_spec.rb
new file mode 100644
index 00000000000..f5a5f0a9ec2
--- /dev/null
+++ b/spec/requests/api/graphql/metrics/dashboard/annotations_spec.rb
@@ -0,0 +1,109 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Getting Metrics Dashboard Annotations' do
+ include GraphqlHelpers
+
+ let_it_be(:project) { create(:project) }
+ let_it_be(:environment) { create(:environment, project: project) }
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:path) { 'config/prometheus/common_metrics.yml' }
+ let_it_be(:from) { "2020-04-01T03:29:25Z" }
+ let_it_be(:to) { Time.zone.now.advance(minutes: 5) }
+ let_it_be(:annotation) { create(:metrics_dashboard_annotation, environment: environment, dashboard_path: path) }
+ let_it_be(:annotation_for_different_env) { create(:metrics_dashboard_annotation, dashboard_path: path) }
+ let_it_be(:annotation_for_different_dashboard) { create(:metrics_dashboard_annotation, environment: environment, dashboard_path: ".gitlab/dashboards/test.yml") }
+ let_it_be(:to_old_annotation) do
+ create(:metrics_dashboard_annotation, environment: environment, starting_at: Time.parse(from).advance(minutes: -5), dashboard_path: path)
+ end
+ let_it_be(:to_new_annotation) do
+ create(:metrics_dashboard_annotation, environment: environment, starting_at: to.advance(minutes: 5), dashboard_path: path)
+ end
+
+ let(:fields) do
+ <<~QUERY
+ #{all_graphql_fields_for('MetricsDashboardAnnotation'.classify)}
+ QUERY
+ end
+
+ let(:query) do
+ %(
+ query {
+ project(fullPath:"#{project.full_path}") {
+ environments(name: "#{environment.name}") {
+ nodes {
+ metricsDashboard(path: "#{path}"){
+ annotations(#{args}){
+ nodes {
+ #{fields}
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ )
+ end
+
+ context 'feature flag metrics_dashboard_annotations' do
+ let(:args) { "from: \"#{from}\", to: \"#{to}\"" }
+
+ before do
+ project.add_developer(current_user)
+ end
+
+ context 'is off' do
+ before do
+ stub_feature_flags(metrics_dashboard_annotations: false)
+ post_graphql(query, current_user: current_user)
+ end
+
+ it 'returns empty nodes array' do
+ annotations = graphql_data.dig('project', 'environments', 'nodes')[0].dig('metricsDashboard', 'annotations', 'nodes')
+
+ expect(annotations).to be_empty
+ end
+ end
+
+ context 'is on' do
+ before do
+ stub_feature_flags(metrics_dashboard_annotations: true)
+ post_graphql(query, current_user: current_user)
+ end
+
+ it_behaves_like 'a working graphql query'
+
+ it 'returns annotations' do
+ annotations = graphql_data.dig('project', 'environments', 'nodes')[0].dig('metricsDashboard', 'annotations', 'nodes')
+
+ expect(annotations).to match_array [{
+ "description" => annotation.description,
+ "id" => annotation.to_global_id.to_s,
+ "panelId" => annotation.panel_xid,
+ "startingAt" => annotation.starting_at.to_s,
+ "endingAt" => nil
+ }]
+ end
+
+ context 'arguments' do
+ context 'from is missing' do
+ let(:args) { "to: \"#{from}\"" }
+
+ it 'returns error' do
+ post_graphql(query, current_user: current_user)
+
+ expect(graphql_errors[0]).to include("message" => "Field 'annotations' is missing required arguments: from")
+ end
+ end
+
+ context 'to is missing' do
+ let(:args) { "from: \"#{from}\"" }
+
+ it_behaves_like 'a working graphql query'
+ end
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/jira_import/start_spec.rb b/spec/requests/api/graphql/mutations/jira_import/start_spec.rb
index feca89558e3..c7dcb21ad83 100644
--- a/spec/requests/api/graphql/mutations/jira_import/start_spec.rb
+++ b/spec/requests/api/graphql/mutations/jira_import/start_spec.rb
@@ -99,12 +99,6 @@ describe 'Starting a Jira Import' do
it_behaves_like 'a mutation that returns errors in the response', errors: ['Jira integration not configured.']
end
- context 'when issues feature are disabled' do
- let_it_be(:project, reload: true) { create(:project, :issues_disabled) }
-
- it_behaves_like 'a mutation that returns errors in the response', errors: ['Cannot import because issues are not available in this project.']
- end
-
context 'when when project has Jira service' do
let!(:service) { create(:jira_service, project: project) }
@@ -112,13 +106,19 @@ describe 'Starting a Jira Import' do
project.reload
end
+ context 'when issues feature are disabled' do
+ let_it_be(:project, reload: true) { create(:project, :issues_disabled) }
+
+ it_behaves_like 'a mutation that returns errors in the response', errors: ['Cannot import because issues are not available in this project.']
+ end
+
context 'when jira_project_key not provided' do
let(:jira_project_key) { '' }
it_behaves_like 'a mutation that returns errors in the response', errors: ['Unable to find Jira project to import data from.']
end
- context 'when jira import successfully scheduled' do
+ context 'when Jira import successfully scheduled' do
it 'schedules a Jira import' do
post_graphql_mutation(mutation, current_user: current_user)
diff --git a/spec/requests/api/graphql/project/jira_import_spec.rb b/spec/requests/api/graphql/project/jira_import_spec.rb
index 43e1bb13342..2e631fb56ba 100644
--- a/spec/requests/api/graphql/project/jira_import_spec.rb
+++ b/spec/requests/api/graphql/project/jira_import_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe 'query jira import data' do
+describe 'query Jira import data' do
include GraphqlHelpers
let_it_be(:current_user) { create(:user) }
diff --git a/spec/requests/api/internal/base_spec.rb b/spec/requests/api/internal/base_spec.rb
index dc75fdab639..93c2233e021 100644
--- a/spec/requests/api/internal/base_spec.rb
+++ b/spec/requests/api/internal/base_spec.rb
@@ -766,29 +766,98 @@ describe API::Internal::Base do
end
context 'project does not exist' do
- it 'returns a 200 response with status: false' do
- project.destroy
+ context 'git pull' do
+ it 'returns a 404 response with status: false' do
+ project.destroy
- pull(key, project)
+ pull(key, project)
- expect(response).to have_gitlab_http_status(:not_found)
- expect(json_response["status"]).to be_falsey
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(json_response["status"]).to be_falsey
+ end
+
+ it 'returns a 404 response when using a project path that does not exist' do
+ post(
+ api("/internal/allowed"),
+ params: {
+ key_id: key.id,
+ project: 'project/does-not-exist.git',
+ action: 'git-upload-pack',
+ secret_token: secret_token,
+ protocol: 'ssh'
+ }
+ )
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(json_response["status"]).to be_falsey
+ end
end
- it 'returns a 200 response when using a project path that does not exist' do
- post(
- api("/internal/allowed"),
- params: {
- key_id: key.id,
- project: 'project/does-not-exist.git',
- action: 'git-upload-pack',
- secret_token: secret_token,
- protocol: 'ssh'
- }
- )
+ context 'git push' do
+ before do
+ stub_const('Gitlab::QueryLimiting::Transaction::THRESHOLD', 120)
+ end
- expect(response).to have_gitlab_http_status(:not_found)
- expect(json_response["status"]).to be_falsey
+ subject { push_with_path(key, full_path: path, changes: '_any') }
+
+ context 'from a user/group namespace' do
+ let!(:path) { "#{user.namespace.path}/notexist.git" }
+
+ it 'creates the project' do
+ expect do
+ subject
+ end.to change { Project.count }.by(1)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['status']).to be_truthy
+ end
+ end
+
+ context 'from the personal snippet path' do
+ let!(:path) { 'snippets/notexist.git' }
+
+ it 'does not create snippet' do
+ expect do
+ subject
+ end.not_to change { Snippet.count }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'from a project path' do
+ context 'from a non-existent project path' do
+ let!(:path) { "#{user.namespace.path}/notexist/snippets/notexist.git" }
+
+ it 'does not create project' do
+ expect do
+ subject
+ end.not_to change { Project.count }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ it 'does not create snippet' do
+ expect do
+ subject
+ end.not_to change { Snippet.count }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'from an existent project path' do
+ let!(:path) { "#{project.full_path}/notexist/snippets/notexist.git" }
+
+ it 'does not create snippet' do
+ expect do
+ subject
+ end.not_to change { Snippet.count }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
end
end
@@ -1062,18 +1131,27 @@ describe API::Internal::Base do
end
def push(key, container, protocol = 'ssh', env: nil, changes: nil)
+ push_with_path(key,
+ full_path: full_path_for(container),
+ gl_repository: gl_repository_for(container),
+ protocol: protocol,
+ env: env,
+ changes: changes)
+ end
+
+ def push_with_path(key, full_path:, gl_repository: nil, protocol: 'ssh', env: nil, changes: nil)
changes ||= 'd14d6c0abdd253381df51a723d58691b2ee1ab08 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/master'
params = {
changes: changes,
key_id: key.id,
- project: full_path_for(container),
- gl_repository: gl_repository_for(container),
+ project: full_path,
action: 'git-receive-pack',
secret_token: secret_token,
protocol: protocol,
env: env
}
+ params[:gl_repository] = gl_repository if gl_repository
post(
api("/internal/allowed"),
diff --git a/spec/requests/api/issues/post_projects_issues_spec.rb b/spec/requests/api/issues/post_projects_issues_spec.rb
index 1444f43003f..2e1e5d3204e 100644
--- a/spec/requests/api/issues/post_projects_issues_spec.rb
+++ b/spec/requests/api/issues/post_projects_issues_spec.rb
@@ -403,7 +403,7 @@ describe API::Issues do
end
before do
- expect_next_instance_of(Spam::SpamCheckService) do |spam_service|
+ expect_next_instance_of(Spam::SpamActionService) do |spam_service|
expect(spam_service).to receive_messages(check_for_spam?: true)
end
expect_next_instance_of(Spam::AkismetService) do |akismet_service|
diff --git a/spec/requests/api/issues/put_projects_issues_spec.rb b/spec/requests/api/issues/put_projects_issues_spec.rb
index ffc5e2b1db8..2ab8b9d7877 100644
--- a/spec/requests/api/issues/put_projects_issues_spec.rb
+++ b/spec/requests/api/issues/put_projects_issues_spec.rb
@@ -182,6 +182,8 @@ describe API::Issues do
end
describe 'PUT /projects/:id/issues/:issue_iid with spam filtering' do
+ include_context 'includes Spam constants'
+
def update_issue
put api("/projects/#{project.id}/issues/#{issue.iid}", user), params: params
end
@@ -195,11 +197,12 @@ describe API::Issues do
end
before do
- expect_next_instance_of(Spam::SpamCheckService) do |spam_service|
+ expect_next_instance_of(Spam::SpamActionService) do |spam_service|
expect(spam_service).to receive_messages(check_for_spam?: true)
end
- expect_next_instance_of(Spam::AkismetService) do |akismet_service|
- expect(akismet_service).to receive_messages(spam?: true)
+
+ expect_next_instance_of(Spam::SpamVerdictService) do |verdict_service|
+ expect(verdict_service).to receive(:execute).and_return(DISALLOW)
end
end
diff --git a/spec/requests/api/markdown_spec.rb b/spec/requests/api/markdown_spec.rb
index 9b787e76740..09342b06744 100644
--- a/spec/requests/api/markdown_spec.rb
+++ b/spec/requests/api/markdown_spec.rb
@@ -3,8 +3,6 @@
require "spec_helper"
describe API::Markdown do
- RSpec::Matchers.define_negated_matcher :exclude, :include
-
describe "POST /markdown" do
let(:user) {} # No-op. It gets overwritten in the contexts below.
diff --git a/spec/requests/api/merge_requests_spec.rb b/spec/requests/api/merge_requests_spec.rb
index 40d6f171116..af2ce7f7aef 100644
--- a/spec/requests/api/merge_requests_spec.rb
+++ b/spec/requests/api/merge_requests_spec.rb
@@ -892,6 +892,7 @@ describe API::MergeRequests do
expect(json_response['merge_error']).to eq(merge_request.merge_error)
expect(json_response['user']['can_merge']).to be_truthy
expect(json_response).not_to include('rebase_in_progress')
+ expect(json_response['first_contribution']).to be_falsy
expect(json_response['has_conflicts']).to be_falsy
expect(json_response['blocking_discussions_resolved']).to be_truthy
expect(json_response['references']['short']).to eq("!#{merge_request.iid}")
@@ -915,6 +916,21 @@ describe API::MergeRequests do
expect(json_response).to include('rebase_in_progress')
end
+ context 'when author is not a member without any merged merge requests' do
+ let(:non_member) { create(:user) }
+
+ before do
+ merge_request.update(author: non_member)
+ end
+
+ it 'exposes first_contribution as true' do
+ get api("/projects/#{project.id}/merge_requests/#{merge_request.iid}", user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['first_contribution']).to be_truthy
+ end
+ end
+
context 'merge_request_metrics' do
let(:pipeline) { create(:ci_empty_pipeline) }
diff --git a/spec/requests/api/project_milestones_spec.rb b/spec/requests/api/project_milestones_spec.rb
index a40878fc807..c5911d51706 100644
--- a/spec/requests/api/project_milestones_spec.rb
+++ b/spec/requests/api/project_milestones_spec.rb
@@ -24,13 +24,13 @@ describe API::ProjectMilestones do
project.add_reporter(reporter)
end
- it 'returns 404 response when the project does not exists' do
+ it 'returns 404 response when the project does not exist' do
delete api("/projects/0/milestones/#{milestone.id}", user)
expect(response).to have_gitlab_http_status(:not_found)
end
- it 'returns 404 response when the milestone does not exists' do
+ it 'returns 404 response when the milestone does not exist' do
delete api("/projects/#{project.id}/milestones/0", user)
expect(response).to have_gitlab_http_status(:not_found)
@@ -44,7 +44,7 @@ describe API::ProjectMilestones do
end
describe 'PUT /projects/:id/milestones/:milestone_id to test observer on close' do
- it 'creates an activity event when an milestone is closed' do
+ it 'creates an activity event when a milestone is closed' do
expect(Event).to receive(:create!)
put api("/projects/#{project.id}/milestones/#{milestone.id}", user),
@@ -91,7 +91,7 @@ describe API::ProjectMilestones do
end
end
- context 'when no such resources' do
+ context 'when no such resource' do
before do
group.add_developer(user)
end
diff --git a/spec/requests/api/project_snippets_spec.rb b/spec/requests/api/project_snippets_spec.rb
index 89ade15c1f6..d439107beb1 100644
--- a/spec/requests/api/project_snippets_spec.rb
+++ b/spec/requests/api/project_snippets_spec.rb
@@ -94,7 +94,7 @@ describe API::ProjectSnippets do
expect(json_response['title']).to eq(snippet.title)
expect(json_response['description']).to eq(snippet.description)
- expect(json_response['file_name']).to eq(snippet.file_name)
+ expect(json_response['file_name']).to eq(snippet.file_name_on_repo)
expect(json_response['ssh_url_to_repo']).to eq(snippet.ssh_url_to_repo)
expect(json_response['http_url_to_repo']).to eq(snippet.http_url_to_repo)
end
@@ -460,14 +460,13 @@ describe API::ProjectSnippets do
end
describe 'GET /projects/:project_id/snippets/:id/raw' do
- let(:snippet) { create(:project_snippet, author: admin, project: project) }
+ let_it_be(:snippet) { create(:project_snippet, :repository, author: admin, project: project) }
it 'returns raw text' do
get api("/projects/#{snippet.project.id}/snippets/#{snippet.id}/raw", admin)
expect(response).to have_gitlab_http_status(:ok)
expect(response.content_type).to eq 'text/plain'
- expect(response.body).to eq(snippet.content)
end
it 'returns 404 for invalid snippet id' do
@@ -482,5 +481,11 @@ describe API::ProjectSnippets do
let(:request) { get api("/projects/#{project_no_snippets.id}/snippets/123/raw", admin) }
end
end
+
+ it_behaves_like 'snippet blob content' do
+ let_it_be(:snippet_with_empty_repo) { create(:project_snippet, :empty_repo, author: admin, project: project) }
+
+ subject { get api("/projects/#{snippet.project.id}/snippets/#{snippet.id}/raw", snippet.author) }
+ end
end
end
diff --git a/spec/requests/api/project_statistics_spec.rb b/spec/requests/api/project_statistics_spec.rb
index 5d0b506cc92..1f48c081043 100644
--- a/spec/requests/api/project_statistics_spec.rb
+++ b/spec/requests/api/project_statistics_spec.rb
@@ -50,13 +50,5 @@ describe API::ProjectStatistics do
expect(response).to have_gitlab_http_status(:forbidden)
expect(json_response['message']).to eq('403 Forbidden')
end
-
- it 'responds with 404 when daily_statistics_enabled? is false' do
- stub_feature_flags(project_daily_statistics: { thing: public_project, enabled: false })
-
- get api("/projects/#{public_project.id}/statistics", maintainer)
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
end
end
diff --git a/spec/requests/api/projects_spec.rb b/spec/requests/api/projects_spec.rb
index 190afb9cda5..853155cea7a 100644
--- a/spec/requests/api/projects_spec.rb
+++ b/spec/requests/api/projects_spec.rb
@@ -2414,7 +2414,8 @@ describe API::Projects do
project_param = {
container_expiration_policy_attributes: {
cadence: '1month',
- keep_n: 1
+ keep_n: 1,
+ name_regex_keep: 'foo.*'
}
}
@@ -2424,6 +2425,7 @@ describe API::Projects do
expect(json_response['container_expiration_policy']['cadence']).to eq('1month')
expect(json_response['container_expiration_policy']['keep_n']).to eq(1)
+ expect(json_response['container_expiration_policy']['name_regex_keep']).to eq('foo.*')
end
end
diff --git a/spec/requests/api/runners_spec.rb b/spec/requests/api/runners_spec.rb
index 164be8f0da6..67c258260bf 100644
--- a/spec/requests/api/runners_spec.rb
+++ b/spec/requests/api/runners_spec.rb
@@ -3,35 +3,34 @@
require 'spec_helper'
describe API::Runners do
- let(:admin) { create(:user, :admin) }
- let(:user) { create(:user) }
- let(:user2) { create(:user) }
- let(:group_guest) { create(:user) }
- let(:group_reporter) { create(:user) }
- let(:group_developer) { create(:user) }
- let(:group_maintainer) { create(:user) }
-
- let(:project) { create(:project, creator_id: user.id) }
- let(:project2) { create(:project, creator_id: user.id) }
-
- let(:group) { create(:group).tap { |group| group.add_owner(user) } }
- let(:subgroup) { create(:group, parent: group) }
-
- let!(:shared_runner) { create(:ci_runner, :instance, description: 'Shared runner') }
- let!(:project_runner) { create(:ci_runner, :project, description: 'Project runner', projects: [project]) }
- let!(:two_projects_runner) { create(:ci_runner, :project, description: 'Two projects runner', projects: [project, project2]) }
- let!(:group_runner_a) { create(:ci_runner, :group, description: 'Group runner A', groups: [group]) }
- let!(:group_runner_b) { create(:ci_runner, :group, description: 'Group runner B', groups: [subgroup]) }
-
- before do
- # Set project access for users
- create(:group_member, :guest, user: group_guest, group: group)
- create(:group_member, :reporter, user: group_reporter, group: group)
- create(:group_member, :developer, user: group_developer, group: group)
- create(:group_member, :maintainer, user: group_maintainer, group: group)
- create(:project_member, :maintainer, user: user, project: project)
- create(:project_member, :maintainer, user: user, project: project2)
- create(:project_member, :reporter, user: user2, project: project)
+ let_it_be(:admin) { create(:user, :admin) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:user2) { create(:user) }
+ let_it_be(:group_guest) { create(:user) }
+ let_it_be(:group_reporter) { create(:user) }
+ let_it_be(:group_developer) { create(:user) }
+ let_it_be(:group_maintainer) { create(:user) }
+
+ let_it_be(:project) { create(:project, creator_id: user.id) }
+ let_it_be(:project2) { create(:project, creator_id: user.id) }
+
+ let_it_be(:group) { create(:group).tap { |group| group.add_owner(user) } }
+ let_it_be(:subgroup) { create(:group, parent: group) }
+
+ let_it_be(:shared_runner, reload: true) { create(:ci_runner, :instance, description: 'Shared runner') }
+ let_it_be(:project_runner, reload: true) { create(:ci_runner, :project, description: 'Project runner', projects: [project]) }
+ let_it_be(:two_projects_runner) { create(:ci_runner, :project, description: 'Two projects runner', projects: [project, project2]) }
+ let_it_be(:group_runner_a) { create(:ci_runner, :group, description: 'Group runner A', groups: [group]) }
+ let_it_be(:group_runner_b) { create(:ci_runner, :group, description: 'Group runner B', groups: [subgroup]) }
+
+ before_all do
+ group.add_guest(group_guest)
+ group.add_reporter(group_reporter)
+ group.add_developer(group_developer)
+ group.add_maintainer(group_maintainer)
+ project.add_maintainer(user)
+ project2.add_maintainer(user)
+ project.add_reporter(user2)
end
describe 'GET /runners' do
@@ -603,10 +602,10 @@ describe API::Runners do
describe 'GET /runners/:id/jobs' do
let_it_be(:job_1) { create(:ci_build) }
- let!(:job_2) { create(:ci_build, :running, runner: shared_runner, project: project) }
- let!(:job_3) { create(:ci_build, :failed, runner: shared_runner, project: project) }
- let!(:job_4) { create(:ci_build, :running, runner: project_runner, project: project) }
- let!(:job_5) { create(:ci_build, :failed, runner: project_runner, project: project) }
+ let_it_be(:job_2) { create(:ci_build, :running, runner: shared_runner, project: project) }
+ let_it_be(:job_3) { create(:ci_build, :failed, runner: shared_runner, project: project) }
+ let_it_be(:job_4) { create(:ci_build, :running, runner: project_runner, project: project) }
+ let_it_be(:job_5) { create(:ci_build, :failed, runner: project_runner, project: project) }
context 'admin user' do
context 'when runner exists' do
@@ -952,7 +951,7 @@ describe API::Runners do
describe 'POST /projects/:id/runners' do
context 'authorized user' do
- let(:project_runner2) { create(:ci_runner, :project, projects: [project2]) }
+ let_it_be(:project_runner2) { create(:ci_runner, :project, projects: [project2]) }
it 'enables specific runner' do
expect do
diff --git a/spec/requests/api/snippets_spec.rb b/spec/requests/api/snippets_spec.rb
index 3e30dc537e4..b779de85ce3 100644
--- a/spec/requests/api/snippets_spec.rb
+++ b/spec/requests/api/snippets_spec.rb
@@ -90,7 +90,7 @@ describe API::Snippets do
describe 'GET /snippets/:id/raw' do
let_it_be(:author) { create(:user) }
- let_it_be(:snippet) { create(:personal_snippet, :private, author: author) }
+ let_it_be(:snippet) { create(:personal_snippet, :repository, :private, author: author) }
it 'requires authentication' do
get api("/snippets/#{snippet.id}", nil)
@@ -103,7 +103,6 @@ describe API::Snippets do
expect(response).to have_gitlab_http_status(:ok)
expect(response.content_type).to eq 'text/plain'
- expect(response.body).to eq(snippet.content)
end
it 'forces attachment content disposition' do
@@ -134,6 +133,12 @@ describe API::Snippets do
expect(response).to have_gitlab_http_status(:ok)
end
+
+ it_behaves_like 'snippet blob content' do
+ let_it_be(:snippet_with_empty_repo) { create(:personal_snippet, :empty_repo, :private, author: author) }
+
+ subject { get api("/snippets/#{snippet.id}/raw", snippet.author) }
+ end
end
describe 'GET /snippets/:id' do
@@ -155,7 +160,7 @@ describe API::Snippets do
expect(json_response['title']).to eq(private_snippet.title)
expect(json_response['description']).to eq(private_snippet.description)
- expect(json_response['file_name']).to eq(private_snippet.file_name)
+ expect(json_response['file_name']).to eq(private_snippet.file_name_on_repo)
expect(json_response['visibility']).to eq(private_snippet.visibility)
expect(json_response['ssh_url_to_repo']).to eq(private_snippet.ssh_url_to_repo)
expect(json_response['http_url_to_repo']).to eq(private_snippet.http_url_to_repo)
@@ -424,6 +429,32 @@ describe API::Snippets do
end
end
+ context "when admin" do
+ let(:admin) { create(:admin) }
+ let(:token) { create(:personal_access_token, user: admin, scopes: [:sudo]) }
+
+ subject do
+ put api("/snippets/#{snippet.id}", admin, personal_access_token: token), params: { visibility: 'private', sudo: user.id }
+ end
+
+ context 'when sudo is defined' do
+ it 'returns 200 and updates snippet visibility' do
+ expect(snippet.visibility).not_to eq('private')
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(json_response["visibility"]).to eq 'private'
+ end
+
+ it 'does not commit data' do
+ expect_any_instance_of(SnippetRepository).not_to receive(:multi_files_action)
+
+ subject
+ end
+ end
+ end
+
def update_snippet(snippet_id: snippet.id, params: {}, requester: user)
put api("/snippets/#{snippet_id}", requester), params: params
end
diff --git a/spec/requests/api/terraform/state_spec.rb b/spec/requests/api/terraform/state_spec.rb
index b0a963db684..88c277f4e08 100644
--- a/spec/requests/api/terraform/state_spec.rb
+++ b/spec/requests/api/terraform/state_spec.rb
@@ -3,95 +3,231 @@
require 'spec_helper'
describe API::Terraform::State do
- def auth_header_for(user)
- auth_header = ActionController::HttpAuthentication::Basic.encode_credentials(
- user.username,
- create(:personal_access_token, user: user).token
- )
- { 'HTTP_AUTHORIZATION' => auth_header }
- end
+ let_it_be(:project) { create(:project) }
+ let_it_be(:developer) { create(:user, developer_projects: [project]) }
+ let_it_be(:maintainer) { create(:user, maintainer_projects: [project]) }
+
+ let!(:state) { create(:terraform_state, :with_file, project: project) }
- let!(:project) { create(:project) }
- let(:developer) { create(:user) }
- let(:maintainer) { create(:user) }
- let(:state_name) { 'state' }
+ let(:current_user) { maintainer }
+ let(:auth_header) { basic_auth_header(current_user) }
+ let(:project_id) { project.id }
+ let(:state_name) { state.name }
+ let(:state_path) { "/projects/#{project_id}/terraform/state/#{state_name}" }
before do
- project.add_maintainer(maintainer)
+ stub_terraform_state_object_storage(Terraform::StateUploader)
end
describe 'GET /projects/:id/terraform/state/:name' do
- it 'returns 401 if user is not authenticated' do
- headers = { 'HTTP_AUTHORIZATION' => 'failing_token' }
- get api("/projects/#{project.id}/terraform/state/#{state_name}"), headers: headers
+ subject(:request) { get api(state_path), headers: auth_header }
- expect(response).to have_gitlab_http_status(:unauthorized)
- end
+ context 'without authentication' do
+ let(:auth_header) { basic_auth_header('failing_token') }
- it 'returns terraform state belonging to a project of given state name' do
- get api("/projects/#{project.id}/terraform/state/#{state_name}"), headers: auth_header_for(maintainer)
+ it 'returns 401 if user is not authenticated' do
+ request
- expect(response).to have_gitlab_http_status(:not_implemented)
- expect(response.body).to eq('not implemented')
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
end
- it 'returns not found if the project does not exists' do
- get api("/projects/0000/terraform/state/#{state_name}"), headers: auth_header_for(maintainer)
+ context 'with maintainer permissions' do
+ let(:current_user) { maintainer }
+
+ it 'returns terraform state belonging to a project of given state name' do
+ request
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.body).to eq(state.file.read)
+ end
+
+ context 'for a project that does not exist' do
+ let(:project_id) { '0000' }
+
+ it 'returns not found' do
+ request
- expect(response).to have_gitlab_http_status(:not_found)
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
end
- it 'returns forbidden if the user cannot access the state' do
- project.add_developer(developer)
- get api("/projects/#{project.id}/terraform/state/#{state_name}"), headers: auth_header_for(developer)
+ context 'with developer permissions' do
+ let(:current_user) { developer }
+
+ it 'returns forbidden if the user cannot access the state' do
+ request
- expect(response).to have_gitlab_http_status(:forbidden)
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
end
end
describe 'POST /projects/:id/terraform/state/:name' do
+ let(:params) { { 'instance': 'example-instance' } }
+
+ subject(:request) { post api(state_path), headers: auth_header, as: :json, params: params }
+
context 'when terraform state with a given name is already present' do
- it 'updates the state' do
- post api("/projects/#{project.id}/terraform/state/#{state_name}"),
- params: '{ "instance": "example-instance" }',
- headers: { 'Content-Type' => 'text/plain' }.merge(auth_header_for(maintainer))
+ context 'with maintainer permissions' do
+ let(:current_user) { maintainer }
- expect(response).to have_gitlab_http_status(:not_implemented)
- expect(response.body).to eq('not implemented')
+ it 'updates the state' do
+ expect { request }.to change { Terraform::State.count }.by(0)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
end
- it 'returns forbidden if the user cannot access the state' do
- project.add_developer(developer)
- get api("/projects/#{project.id}/terraform/state/#{state_name}"), headers: auth_header_for(developer)
+ context 'without body' do
+ let(:params) { nil }
- expect(response).to have_gitlab_http_status(:forbidden)
+ it 'returns no content if no body is provided' do
+ request
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ end
+ end
+
+ context 'with developer permissions' do
+ let(:current_user) { developer }
+
+ it 'returns forbidden' do
+ request
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
end
end
context 'when there is no terraform state of a given name' do
- it 'creates a new state' do
- post api("/projects/#{project.id}/terraform/state/example2"),
- headers: auth_header_for(maintainer),
- params: '{ "database": "example-database" }'
+ let(:state_name) { 'example2' }
+
+ context 'with maintainer permissions' do
+ let(:current_user) { maintainer }
+
+ it 'creates a new state' do
+ expect { request }.to change { Terraform::State.count }.by(1)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
+ context 'without body' do
+ let(:params) { nil }
+
+ it 'returns no content if no body is provided' do
+ request
- expect(response).to have_gitlab_http_status(:not_implemented)
- expect(response.body).to eq('not implemented')
+ expect(response).to have_gitlab_http_status(:no_content)
+ end
+ end
+
+ context 'with developer permissions' do
+ let(:current_user) { developer }
+
+ it 'returns forbidden' do
+ request
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
end
end
end
describe 'DELETE /projects/:id/terraform/state/:name' do
- it 'deletes the state' do
- delete api("/projects/#{project.id}/terraform/state/#{state_name}"), headers: auth_header_for(maintainer)
+ subject(:request) { delete api(state_path), headers: auth_header }
+
+ context 'with maintainer permissions' do
+ let(:current_user) { maintainer }
+
+ it 'deletes the state' do
+ expect { request }.to change { Terraform::State.count }.by(-1)
- expect(response).to have_gitlab_http_status(:not_implemented)
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
+ context 'with developer permissions' do
+ let(:current_user) { developer }
+
+ it 'returns forbidden' do
+ expect { request }.to change { Terraform::State.count }.by(0)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+ end
+
+ describe 'PUT /projects/:id/terraform/state/:name/lock' do
+ let(:params) do
+ {
+ ID: '123-456',
+ Version: '0.1',
+ Operation: 'OperationTypePlan',
+ Info: '',
+ Who: "#{current_user.username}",
+ Created: Time.now.utc.iso8601(6),
+ Path: ''
+ }
+ end
+
+ subject(:request) { post api("#{state_path}/lock"), headers: auth_header, params: params }
+
+ it 'locks the terraform state' do
+ request
+
+ expect(response).to have_gitlab_http_status(:ok)
end
+ end
+
+ describe 'DELETE /projects/:id/terraform/state/:name/lock' do
+ before do
+ state.lock_xid = '123-456'
+ state.save!
+ end
+
+ subject(:request) { delete api("#{state_path}/lock"), headers: auth_header, params: params }
- it 'returns forbidden if the user cannot access the state' do
- project.add_developer(developer)
- get api("/projects/#{project.id}/terraform/state/#{state_name}"), headers: auth_header_for(developer)
+ context 'with the correct lock id' do
+ let(:params) { { ID: '123-456' } }
- expect(response).to have_gitlab_http_status(:forbidden)
+ it 'removes the terraform state lock' do
+ request
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
+ context 'with no lock id (force-unlock)' do
+ let(:params) { {} }
+
+ it 'removes the terraform state lock' do
+ request
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
+ context 'with an incorrect lock id' do
+ let(:params) { { ID: '456-789' } }
+
+ it 'returns an error' do
+ request
+
+ expect(response).to have_gitlab_http_status(:conflict)
+ end
+ end
+
+ context 'with a longer than 255 character lock id' do
+ let(:params) { { ID: '0' * 256 } }
+
+ it 'returns an error' do
+ request
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
end
end
end
diff --git a/spec/routing/openid_connect_spec.rb b/spec/routing/openid_connect_spec.rb
index 70470032930..fc170f8986c 100644
--- a/spec/routing/openid_connect_spec.rb
+++ b/spec/routing/openid_connect_spec.rb
@@ -3,6 +3,7 @@
require 'spec_helper'
# oauth_discovery_keys GET /oauth/discovery/keys(.:format) doorkeeper/openid_connect/discovery#keys
+# jwks GET /-/jwks(.:format) doorkeeper/openid_connect/discovery#keys
# oauth_discovery_provider GET /.well-known/openid-configuration(.:format) doorkeeper/openid_connect/discovery#provider
# oauth_discovery_webfinger GET /.well-known/webfinger(.:format) doorkeeper/openid_connect/discovery#webfinger
describe Doorkeeper::OpenidConnect::DiscoveryController, 'routing' do
@@ -17,6 +18,10 @@ describe Doorkeeper::OpenidConnect::DiscoveryController, 'routing' do
it "to #keys" do
expect(get('/oauth/discovery/keys')).to route_to('doorkeeper/openid_connect/discovery#keys')
end
+
+ it "/-/jwks" do
+ expect(get('/-/jwks')).to route_to('doorkeeper/openid_connect/discovery#keys')
+ end
end
# oauth_userinfo GET /oauth/userinfo(.:format) doorkeeper/openid_connect/userinfo#show
diff --git a/spec/rubocop/cop/migration/add_limit_to_string_columns_spec.rb b/spec/rubocop/cop/migration/add_limit_to_string_columns_spec.rb
deleted file mode 100644
index 97a3ae8f2bc..00000000000
--- a/spec/rubocop/cop/migration/add_limit_to_string_columns_spec.rb
+++ /dev/null
@@ -1,268 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require 'rubocop'
-require 'rubocop/rspec/support'
-
-require_relative '../../../../rubocop/cop/migration/add_limit_to_string_columns'
-
-describe RuboCop::Cop::Migration::AddLimitToStringColumns do
- include CopHelper
-
- subject(:cop) { described_class.new }
-
- context 'in migration' do
- before do
- allow(cop).to receive(:in_migration?).and_return(true)
-
- inspect_source(migration)
- end
-
- context 'when creating a table' do
- context 'with string columns and limit' do
- let(:migration) do
- %q(
- class CreateUsers < ActiveRecord::Migration[5.2]
- DOWNTIME = false
-
- def change
- create_table :users do |t|
- t.string :username, null: false, limit: 255
- t.timestamps_with_timezone null: true
- end
- end
- end
- )
- end
-
- it 'register no offense' do
- expect(cop.offenses.size).to eq(0)
- end
-
- context 'with limit in a different position' do
- let(:migration) do
- %q(
- class CreateUsers < ActiveRecord::Migration[5.2]
- DOWNTIME = false
-
- def change
- create_table :users do |t|
- t.string :username, limit: 255, null: false
- t.timestamps_with_timezone null: true
- end
- end
- end
- )
- end
-
- it 'registers an offense' do
- expect(cop.offenses.size).to eq(0)
- end
- end
- end
-
- context 'with string columns and no limit' do
- let(:migration) do
- %q(
- class CreateUsers < ActiveRecord::Migration[5.2]
- DOWNTIME = false
-
- def change
- create_table :users do |t|
- t.string :username, null: false
- t.timestamps_with_timezone null: true
- end
- end
- end
- )
- end
-
- it 'registers an offense' do
- expect(cop.offenses.size).to eq(1)
- expect(cop.offenses.first.message)
- .to eq('String columns should have a limit constraint. 255 is suggested')
- end
- end
-
- context 'with no string columns' do
- let(:migration) do
- %q(
- class CreateMilestoneReleases < ActiveRecord::Migration[5.2]
- DOWNTIME = false
-
- def change
- create_table :milestone_releases do |t|
- t.integer :milestone_id
- t.integer :release_id
- end
- end
- end
- )
- end
-
- it 'register no offense' do
- expect(cop.offenses.size).to eq(0)
- end
- end
- end
-
- context 'when adding columns' do
- context 'with string columns with limit' do
- let(:migration) do
- %q(
- class AddEmailToUsers < ActiveRecord::Migration[5.2]
- DOWNTIME = false
-
- def change
- add_column :users, :email, :string, limit: 255
- end
- end
- )
- end
-
- it 'registers no offense' do
- expect(cop.offenses.size).to eq(0)
- end
-
- context 'with limit in a different position' do
- let(:migration) do
- %q(
- class AddEmailToUsers < ActiveRecord::Migration[5.2]
- DOWNTIME = false
-
- def change
- add_column :users, :email, :string, limit: 255, default: 'example@email.com'
- end
- end
- )
- end
-
- it 'registers no offense' do
- expect(cop.offenses.size).to eq(0)
- end
- end
- end
-
- context 'with string columns with no limit' do
- let(:migration) do
- %q(
- class AddEmailToUsers < ActiveRecord::Migration[5.2]
- DOWNTIME = false
-
- def change
- add_column :users, :email, :string
- end
- end
- )
- end
-
- it 'registers offense' do
- expect(cop.offenses.size).to eq(1)
- expect(cop.offenses.first.message)
- .to eq('String columns should have a limit constraint. 255 is suggested')
- end
- end
-
- context 'with no string columns' do
- let(:migration) do
- %q(
- class AddEmailToUsers < ActiveRecord::Migration[5.2]
- DOWNTIME = false
-
- def change
- add_column :users, :active, :boolean, default: false
- end
- end
- )
- end
-
- it 'registers no offense' do
- expect(cop.offenses.size).to eq(0)
- end
- end
- end
-
- context 'with add_column_with_default' do
- context 'with a limit' do
- let(:migration) do
- %q(
- class AddRuleTypeToApprovalMergeRequestRules < ActiveRecord::Migration[5.2]
- DOWNTIME = false
-
- def change
- add_column_with_default(:approval_merge_request_rules, :rule_type, :string, limit: 2, default: 1)
- end
- end
- )
- end
-
- it 'registers no offense' do
- expect(cop.offenses.size).to eq(0)
- end
- end
-
- context 'without a limit' do
- let(:migration) do
- %q(
- class AddRuleTypeToApprovalMergeRequestRules < ActiveRecord::Migration[5.2]
- DOWNTIME = false
-
- def change
- add_column_with_default(:approval_merge_request_rules, :rule_type, :string, default: 1)
- end
- end
- )
- end
-
- it 'registers an offense' do
- expect(cop.offenses.size).to eq(1)
- end
- end
- end
-
- context 'with methods' do
- let(:migration) do
- %q(
- class AddEmailToUsers < ActiveRecord::Migration[5.2]
- DOWNTIME = false
-
- def change
- add_column_if_table_not_exists :users, :first_name, :string, limit: 255
- search_namespace(user_name)
- end
-
- def add_column_if_not_exists(table, name, *args)
- add_column(table, name, *args) unless column_exists?(table, name)
- end
-
- def search_namespace(username)
- Uniquify.new.string(username) do |str|
- query = "SELECT id FROM namespaces WHERE parent_id IS NULL AND path='#{str}' LIMIT 1"
- connection.exec_query(query)
- end
- end
- end
- )
- end
-
- it 'registers no offense' do
- expect(cop.offenses.size).to eq(0)
- end
- end
- end
-
- context 'outside of migrations' do
- let(:active_record_model) do
- %q(
- class User < ApplicationRecord
- end
- )
- end
-
- it 'registers no offense' do
- inspect_source(active_record_model)
-
- expect(cop.offenses.size).to eq(0)
- end
- end
-end
diff --git a/spec/rubocop/cop/migration/add_limit_to_text_columns_spec.rb b/spec/rubocop/cop/migration/add_limit_to_text_columns_spec.rb
new file mode 100644
index 00000000000..514260a4306
--- /dev/null
+++ b/spec/rubocop/cop/migration/add_limit_to_text_columns_spec.rb
@@ -0,0 +1,160 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require 'rubocop'
+require 'rubocop/rspec/support'
+
+require_relative '../../../../rubocop/cop/migration/add_limit_to_text_columns'
+
+describe RuboCop::Cop::Migration::AddLimitToTextColumns do
+ include CopHelper
+
+ subject(:cop) { described_class.new }
+
+ context 'in migration' do
+ before do
+ allow(cop).to receive(:in_migration?).and_return(true)
+ end
+
+ context 'when text columns are defined without a limit' do
+ it 'registers an offense' do
+ expect_offense(<<~RUBY)
+ class TestTextLimits < ActiveRecord::Migration[6.0]
+ DOWNTIME = false
+ disable_ddl_transaction!
+
+ def up
+ create_table :test_text_limits, id: false do |t|
+ t.integer :test_id, null: false
+ t.text :name
+ ^^^^ #{described_class::MSG}
+ end
+
+ add_column :test_text_limits, :email, :text
+ ^^^^^^^^^^ #{described_class::MSG}
+
+ add_column_with_default :test_text_limits, :role, :text, default: 'default'
+ ^^^^^^^^^^^^^^^^^^^^^^^ #{described_class::MSG}
+
+ change_column_type_concurrently :test_text_limits, :test_id, :text
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{described_class::MSG}
+ end
+ end
+ RUBY
+
+ expect(cop.offenses.map(&:cop_name)).to all(eq('Migration/AddLimitToTextColumns'))
+ end
+ end
+
+ context 'when text columns are defined with a limit' do
+ it 'registers no offense' do
+ expect_no_offenses(<<~RUBY)
+ class TestTextLimits < ActiveRecord::Migration[6.0]
+ DOWNTIME = false
+ disable_ddl_transaction!
+
+ def up
+ create_table :test_text_limits, id: false do |t|
+ t.integer :test_id, null: false
+ t.text :name
+ end
+
+ add_column :test_text_limits, :email, :text
+ add_column_with_default :test_text_limits, :role, :text, default: 'default'
+ change_column_type_concurrently :test_text_limits, :test_id, :text
+
+ add_text_limit :test_text_limits, :name, 255
+ add_text_limit :test_text_limits, :email, 255
+ add_text_limit :test_text_limits, :role, 255
+ add_text_limit :test_text_limits, :test_id, 255
+ end
+ end
+ RUBY
+ end
+ end
+
+ # Make sure that the cop is properly checking for an `add_text_limit`
+ # over the same {table, attribute} as the one that triggered the offence
+ context 'when the limit is defined for a same name attribute but different table' do
+ it 'registers an offense' do
+ expect_offense(<<~RUBY)
+ class TestTextLimits < ActiveRecord::Migration[6.0]
+ DOWNTIME = false
+ disable_ddl_transaction!
+
+ def up
+ create_table :test_text_limits, id: false do |t|
+ t.integer :test_id, null: false
+ t.text :name
+ ^^^^ #{described_class::MSG}
+ end
+
+ add_column :test_text_limits, :email, :text
+ ^^^^^^^^^^ #{described_class::MSG}
+
+ add_column_with_default :test_text_limits, :role, :text, default: 'default'
+ ^^^^^^^^^^^^^^^^^^^^^^^ #{described_class::MSG}
+
+ change_column_type_concurrently :test_text_limits, :test_id, :text
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{described_class::MSG}
+
+ add_text_limit :wrong_table, :name, 255
+ add_text_limit :wrong_table, :email, 255
+ add_text_limit :wrong_table, :role, 255
+ add_text_limit :wrong_table, :test_id, 255
+ end
+ end
+ RUBY
+
+ expect(cop.offenses.map(&:cop_name)).to all(eq('Migration/AddLimitToTextColumns'))
+ end
+ end
+
+ context 'on down' do
+ it 'registers no offense' do
+ expect_no_offenses(<<~RUBY)
+ class TestTextLimits < ActiveRecord::Migration[6.0]
+ DOWNTIME = false
+
+ def up
+ drop_table :no_offence_on_down
+ end
+
+ def down
+ create_table :no_offence_on_down, id: false do |t|
+ t.integer :test_id, null: false
+ t.text :name
+ end
+
+ add_column :no_offence_on_down, :email, :text
+
+ add_column_with_default :no_offence_on_down, :role, :text, default: 'default'
+ end
+ end
+ RUBY
+ end
+ end
+ end
+
+ context 'outside of migration' do
+ it 'registers no offense' do
+ expect_no_offenses(<<~RUBY)
+ class TestTextLimits < ActiveRecord::Migration[6.0]
+ DOWNTIME = false
+ disable_ddl_transaction!
+
+ def up
+ create_table :test_text_limits, id: false do |t|
+ t.integer :test_id, null: false
+ t.text :name
+ end
+
+ add_column :test_text_limits, :email, :text
+ add_column_with_default :test_text_limits, :role, :text, default: 'default'
+ change_column_type_concurrently :test_text_limits, :test_id, :text
+ end
+ end
+ RUBY
+ end
+ end
+end
diff --git a/spec/rubocop/cop/migration/prevent_strings_spec.rb b/spec/rubocop/cop/migration/prevent_strings_spec.rb
new file mode 100644
index 00000000000..2702ce1c090
--- /dev/null
+++ b/spec/rubocop/cop/migration/prevent_strings_spec.rb
@@ -0,0 +1,143 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require 'rubocop'
+require 'rubocop/rspec/support'
+
+require_relative '../../../../rubocop/cop/migration/prevent_strings'
+
+describe RuboCop::Cop::Migration::PreventStrings do
+ include CopHelper
+
+ subject(:cop) { described_class.new }
+
+ context 'in migration' do
+ before do
+ allow(cop).to receive(:in_migration?).and_return(true)
+ end
+
+ context 'when the string data type is used' do
+ it 'registers an offense' do
+ expect_offense(<<~RUBY)
+ class Users < ActiveRecord::Migration[6.0]
+ DOWNTIME = false
+
+ def up
+ create_table :users do |t|
+ t.string :username, null: false
+ ^^^^^^ #{described_class::MSG}
+
+ t.timestamps_with_timezone null: true
+
+ t.string :password
+ ^^^^^^ #{described_class::MSG}
+ end
+
+ add_column(:users, :bio, :string)
+ ^^^^^^^^^^ #{described_class::MSG}
+
+ add_column_with_default(:users, :url, :string, default: '/-/user', allow_null: false, limit: 255)
+ ^^^^^^^^^^^^^^^^^^^^^^^ #{described_class::MSG}
+
+ change_column_type_concurrently :users, :commit_id, :string
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{described_class::MSG}
+ end
+ end
+ RUBY
+
+ expect(cop.offenses.map(&:cop_name)).to all(eq('Migration/PreventStrings'))
+ end
+ end
+
+ context 'when the string data type is not used' do
+ it 'registers no offense' do
+ expect_no_offenses(<<~RUBY)
+ class Users < ActiveRecord::Migration[6.0]
+ DOWNTIME = false
+
+ def up
+ create_table :users do |t|
+ t.integer :not_a_string, null: false
+ t.timestamps_with_timezone null: true
+ end
+
+ add_column(:users, :not_a_string, :integer)
+ end
+ end
+ RUBY
+ end
+ end
+
+ context 'when the text data type is used' do
+ it 'registers no offense' do
+ expect_no_offenses(<<~RUBY)
+ class Users < ActiveRecord::Migration[6.0]
+ DOWNTIME = false
+
+ def up
+ create_table :users do |t|
+ t.text :username, null: false
+ t.timestamps_with_timezone null: true
+ t.text :password
+ end
+
+ add_column(:users, :bio, :text)
+ add_column_with_default(:users, :url, :text, default: '/-/user', allow_null: false, limit: 255)
+ change_column_type_concurrently :users, :commit_id, :text
+ end
+ end
+ RUBY
+ end
+ end
+
+ context 'on down' do
+ it 'registers no offense' do
+ expect_no_offenses(<<~RUBY)
+ class Users < ActiveRecord::Migration[6.0]
+ DOWNTIME = false
+
+ def up
+ remove_column :users, :bio
+ remove_column :users, :url
+
+ drop_table :test_strings
+ end
+
+ def down
+ create_table :test_strings, id: false do |t|
+ t.integer :test_id, null: false
+ t.string :name
+ end
+
+ add_column(:users, :bio, :string)
+ add_column_with_default(:users, :url, :string, default: '/-/user', allow_null: false, limit: 255)
+ change_column_type_concurrently :users, :commit_id, :string
+ end
+ end
+ RUBY
+ end
+ end
+ end
+
+ context 'outside of migration' do
+ it 'registers no offense' do
+ expect_no_offenses(<<~RUBY)
+ class Users < ActiveRecord::Migration[6.0]
+ DOWNTIME = false
+
+ def up
+ create_table :users do |t|
+ t.string :username, null: false
+ t.timestamps_with_timezone null: true
+ t.string :password
+ end
+
+ add_column(:users, :bio, :string)
+ add_column_with_default(:users, :url, :string, default: '/-/user', allow_null: false, limit: 255)
+ change_column_type_concurrently :users, :commit_id, :string
+ end
+ end
+ RUBY
+ end
+ end
+end
diff --git a/spec/rubocop/cop/rspec/modify_sidekiq_middleware_spec.rb b/spec/rubocop/cop/rspec/modify_sidekiq_middleware_spec.rb
new file mode 100644
index 00000000000..938916d8d75
--- /dev/null
+++ b/spec/rubocop/cop/rspec/modify_sidekiq_middleware_spec.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'rubocop'
+require_relative '../../../support/helpers/expect_offense'
+require_relative '../../../../rubocop/cop/rspec/modify_sidekiq_middleware'
+
+describe RuboCop::Cop::RSpec::ModifySidekiqMiddleware do
+ include CopHelper
+ include ExpectOffense
+
+ subject(:cop) { described_class.new }
+
+ let(:source) do
+ <<~SRC
+ Sidekiq::Testing.server_middleware do |chain|
+ chain.add(MyCustomMiddleware)
+ end
+ SRC
+ end
+
+ let(:corrected) do
+ <<~SRC
+ with_sidekiq_server_middleware do |chain|
+ chain.add(MyCustomMiddleware)
+ end
+ SRC
+ end
+
+ it 'registers an offence' do
+ inspect_source(source)
+
+ expect(cop.offenses.size).to eq(1)
+ end
+
+ it 'can autocorrect the source' do
+ expect(autocorrect_source(source)).to eq(corrected)
+ end
+end
diff --git a/spec/rubocop/cop/static_translation_definition_spec.rb b/spec/rubocop/cop/static_translation_definition_spec.rb
new file mode 100644
index 00000000000..b85f9da9b4e
--- /dev/null
+++ b/spec/rubocop/cop/static_translation_definition_spec.rb
@@ -0,0 +1,109 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require 'rubocop'
+require 'rubocop/rspec/support'
+
+require_relative '../../../rubocop/cop/static_translation_definition'
+
+describe RuboCop::Cop::StaticTranslationDefinition do
+ include CopHelper
+
+ using RSpec::Parameterized::TableSyntax
+
+ subject(:cop) { described_class.new }
+
+ shared_examples 'offense' do |code, highlight, line|
+ it 'registers an offense' do
+ inspect_source(code)
+
+ expect(cop.offenses.size).to eq(1)
+ expect(cop.offenses.map(&:line)).to eq([line])
+ expect(cop.highlights).to eq([highlight])
+ end
+ end
+
+ shared_examples 'no offense' do |code|
+ it 'does not register an offense' do
+ inspect_source(code)
+
+ expect(cop.offenses).to be_empty
+ end
+ end
+
+ describe 'offenses' do
+ where(:code, :highlight, :line) do
+ [
+ ['A = _("a")', '_("a")', 1],
+ ['B = s_("b")', 's_("b")', 1],
+ ['C = n_("c")', 'n_("c")', 1],
+ [
+ <<~CODE,
+ module MyModule
+ A = {
+ b: {
+ c: _("a")
+ }
+ }
+ end
+ CODE
+ '_("a")',
+ 4
+ ],
+ [
+ <<~CODE,
+ class MyClass
+ B = [
+ [
+ s_("a")
+ ]
+ ]
+ end
+ CODE
+ 's_("a")',
+ 4
+ ]
+ ]
+ end
+
+ with_them do
+ include_examples 'offense', params[:code], params[:highlight], params[:line]
+ end
+ end
+
+ describe 'ignore' do
+ where(:code) do
+ [
+ 'CONSTANT_1 = __("a")',
+ 'CONSTANT_2 = s__("a")',
+ 'CONSTANT_3 = n__("a")',
+ <<~CODE,
+ def method
+ s_('a')
+ end
+ CODE
+ <<~CODE,
+ class MyClass
+ VALID = -> {
+ s_('hi')
+ }
+ end
+ CODE
+ <<~CODE
+ class MyClass
+ def hello
+ {
+ a: _('hi')
+ }
+ end
+ end
+ CODE
+ ]
+ end
+
+ with_them do
+ include_examples 'no offense', params[:code]
+ end
+ end
+end
diff --git a/spec/serializers/analytics_summary_serializer_spec.rb b/spec/serializers/analytics_summary_serializer_spec.rb
index 7950f89bcc7..ed126720a55 100644
--- a/spec/serializers/analytics_summary_serializer_spec.rb
+++ b/spec/serializers/analytics_summary_serializer_spec.rb
@@ -34,7 +34,10 @@ describe AnalyticsSummarySerializer do
end
context 'when representing with unit' do
- let(:resource) { { title: 'frequency', value: 1.12, unit: 'per day' } }
+ let(:resource) do
+ Gitlab::CycleAnalytics::Summary::DeploymentFrequency
+ .new(deployments: 10, from: 1.day.ago)
+ end
subject { described_class.new.represent(resource, with_unit: true) }
diff --git a/spec/serializers/merge_request_poll_widget_entity_spec.rb b/spec/serializers/merge_request_poll_widget_entity_spec.rb
index 29d35fdc811..ecc0589bfdf 100644
--- a/spec/serializers/merge_request_poll_widget_entity_spec.rb
+++ b/spec/serializers/merge_request_poll_widget_entity_spec.rb
@@ -71,6 +71,28 @@ describe MergeRequestPollWidgetEntity do
end
end
+ describe 'terraform_reports_path' do
+ context 'when merge request has terraform reports' do
+ before do
+ allow(resource).to receive(:has_terraform_reports?).and_return(true)
+ end
+
+ it 'set the path to poll data' do
+ expect(subject[:terraform_reports_path]).to be_present
+ end
+ end
+
+ context 'when merge request has no terraform reports' do
+ before do
+ allow(resource).to receive(:has_terraform_reports?).and_return(false)
+ end
+
+ it 'set the path to poll data' do
+ expect(subject[:terraform_reports_path]).to be_nil
+ end
+ end
+ end
+
describe 'exposed_artifacts_path' do
context 'when merge request has exposed artifacts' do
before do
@@ -94,6 +116,10 @@ describe MergeRequestPollWidgetEntity do
end
describe 'auto merge' do
+ before do
+ project.add_maintainer(user)
+ end
+
context 'when auto merge is enabled' do
let(:resource) { create(:merge_request, :merge_when_pipeline_succeeds) }
diff --git a/spec/serializers/merge_request_serializer_spec.rb b/spec/serializers/merge_request_serializer_spec.rb
index 871a47b0a02..90b3efae412 100644
--- a/spec/serializers/merge_request_serializer_spec.rb
+++ b/spec/serializers/merge_request_serializer_spec.rb
@@ -69,6 +69,22 @@ describe MergeRequestSerializer do
end
end
+ context 'poll cached widget merge request serialization' do
+ let(:serializer) { 'poll_cached_widget' }
+
+ it 'matches basic merge request json schema' do
+ expect(json_entity).to match_schema('entities/merge_request_poll_cached_widget')
+ end
+ end
+
+ context 'poll widget merge request serialization' do
+ let(:serializer) { 'poll_widget' }
+
+ it 'matches basic merge request json schema' do
+ expect(json_entity).to match_schema('entities/merge_request_poll_widget')
+ end
+ end
+
context 'no serializer' do
let(:serializer) { nil }
diff --git a/spec/serializers/test_suite_entity_spec.rb b/spec/serializers/test_suite_entity_spec.rb
index 6a9653954f3..bd88d235013 100644
--- a/spec/serializers/test_suite_entity_spec.rb
+++ b/spec/serializers/test_suite_entity_spec.rb
@@ -3,27 +3,65 @@
require 'spec_helper'
describe TestSuiteEntity do
- let(:pipeline) { create(:ci_pipeline, :with_test_reports) }
- let(:entity) { described_class.new(pipeline.test_reports.test_suites.each_value.first) }
+ let(:pipeline) { create(:ci_pipeline, :with_test_reports) }
+ let(:test_suite) { pipeline.test_reports.test_suites.each_value.first }
+ let(:entity) { described_class.new(test_suite) }
describe '#as_json' do
subject(:as_json) { entity.as_json }
it 'contains the suite name' do
- expect(as_json).to include(:name)
+ expect(as_json[:name]).to be_present
end
it 'contains the total time' do
- expect(as_json).to include(:total_time)
+ expect(as_json[:total_time]).to be_present
end
it 'contains the counts' do
- expect(as_json).to include(:total_count, :success_count, :failed_count, :skipped_count, :error_count)
+ expect(as_json[:total_count]).to eq(4)
+ expect(as_json[:success_count]).to eq(2)
+ expect(as_json[:failed_count]).to eq(2)
+ expect(as_json[:skipped_count]).to eq(0)
+ expect(as_json[:error_count]).to eq(0)
end
it 'contains the test cases' do
- expect(as_json).to include(:test_cases)
expect(as_json[:test_cases].count).to eq(4)
end
+
+ it 'contains an empty error message' do
+ expect(as_json[:suite_error]).to be_nil
+ end
+
+ context 'with a suite error' do
+ before do
+ test_suite.set_suite_error('a really bad error')
+ end
+
+ it 'contains the suite name' do
+ expect(as_json[:name]).to be_present
+ end
+
+ it 'contains the total time' do
+ expect(as_json[:total_time]).to be_present
+ end
+
+ it 'returns all the counts as 0' do
+ expect(as_json[:total_count]).to eq(0)
+ expect(as_json[:success_count]).to eq(0)
+ expect(as_json[:failed_count]).to eq(0)
+ expect(as_json[:skipped_count]).to eq(0)
+ expect(as_json[:error_count]).to eq(0)
+ end
+
+ it 'returns no test cases' do
+ expect(as_json[:test_cases]).to be_empty
+ end
+
+ it 'returns a suite error' do
+ expect(as_json[:suite_error]).to eq('a really bad error')
+ end
+ end
end
end
diff --git a/spec/services/auto_merge/merge_when_pipeline_succeeds_service_spec.rb b/spec/services/auto_merge/merge_when_pipeline_succeeds_service_spec.rb
index e03d87e9d49..b6e8d3c636a 100644
--- a/spec/services/auto_merge/merge_when_pipeline_succeeds_service_spec.rb
+++ b/spec/services/auto_merge/merge_when_pipeline_succeeds_service_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
describe AutoMerge::MergeWhenPipelineSucceedsService do
- let(:user) { create(:user) }
- let(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository) }
let(:mr_merge_if_green_enabled) do
create(:merge_request, merge_when_pipeline_succeeds: true, merge_user: user,
@@ -20,6 +20,10 @@ describe AutoMerge::MergeWhenPipelineSucceedsService do
described_class.new(project, user, commit_message: 'Awesome message')
end
+ before_all do
+ project.add_maintainer(user)
+ end
+
describe "#available_for?" do
subject { service.available_for?(mr_merge_if_green_enabled) }
@@ -34,11 +38,25 @@ describe AutoMerge::MergeWhenPipelineSucceedsService do
it { is_expected.to be_truthy }
+ it 'memoizes the result' do
+ expect(mr_merge_if_green_enabled).to receive(:can_be_merged_by?).once.and_call_original
+
+ 2.times { is_expected.to be_truthy }
+ end
+
context 'when the head pipeline succeeded' do
let(:pipeline_status) { :success }
it { is_expected.to be_falsy }
end
+
+ context 'when the user does not have permission to merge' do
+ before do
+ allow(mr_merge_if_green_enabled).to receive(:can_be_merged_by?) { false }
+ end
+
+ it { is_expected.to be_falsy }
+ end
end
describe "#execute" do
diff --git a/spec/services/auto_merge_service_spec.rb b/spec/services/auto_merge_service_spec.rb
index 221cf695331..bab69fb4aa3 100644
--- a/spec/services/auto_merge_service_spec.rb
+++ b/spec/services/auto_merge_service_spec.rb
@@ -3,22 +3,36 @@
require 'spec_helper'
describe AutoMergeService do
- let_it_be(:project) { create(:project) }
+ let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
let(:service) { described_class.new(project, user) }
- describe '.all_strategies' do
- subject { described_class.all_strategies }
+ before_all do
+ project.add_maintainer(user)
+ end
- it 'includes merge when pipeline succeeds' do
- is_expected.to include(AutoMergeService::STRATEGY_MERGE_WHEN_PIPELINE_SUCCEEDS)
+ describe '.all_strategies_ordered_by_preference' do
+ subject { described_class.all_strategies_ordered_by_preference }
+
+ it 'returns all strategies in preference order' do
+ if Gitlab.ee?
+ is_expected.to eq(
+ [AutoMergeService::STRATEGY_MERGE_TRAIN,
+ AutoMergeService::STRATEGY_ADD_TO_MERGE_TRAIN_WHEN_PIPELINE_SUCCEEDS,
+ AutoMergeService::STRATEGY_MERGE_WHEN_PIPELINE_SUCCEEDS])
+ else
+ is_expected.to eq([AutoMergeService::STRATEGY_MERGE_WHEN_PIPELINE_SUCCEEDS])
+ end
end
end
describe '#available_strategies' do
subject { service.available_strategies(merge_request) }
- let(:merge_request) { create(:merge_request) }
+ let(:merge_request) do
+ create(:merge_request, source_project: project)
+ end
+
let(:pipeline_status) { :running }
before do
@@ -42,6 +56,36 @@ describe AutoMergeService do
end
end
+ describe '#preferred_strategy' do
+ subject { service.preferred_strategy(merge_request) }
+
+ let(:merge_request) do
+ create(:merge_request, source_project: project)
+ end
+
+ let(:pipeline_status) { :running }
+
+ before do
+ create(:ci_pipeline, pipeline_status, ref: merge_request.source_branch,
+ sha: merge_request.diff_head_sha,
+ project: merge_request.source_project)
+
+ merge_request.update_head_pipeline
+ end
+
+ it 'returns preferred strategy' do
+ is_expected.to eq('merge_when_pipeline_succeeds')
+ end
+
+ context 'when the head pipeline succeeded' do
+ let(:pipeline_status) { :success }
+
+ it 'returns available strategies' do
+ is_expected.to be_nil
+ end
+ end
+ end
+
describe '.get_service_class' do
subject { described_class.get_service_class(strategy) }
@@ -63,7 +107,10 @@ describe AutoMergeService do
describe '#execute' do
subject { service.execute(merge_request, strategy) }
- let(:merge_request) { create(:merge_request) }
+ let(:merge_request) do
+ create(:merge_request, source_project: project)
+ end
+
let(:pipeline_status) { :running }
let(:strategy) { AutoMergeService::STRATEGY_MERGE_WHEN_PIPELINE_SUCCEEDS }
@@ -90,6 +137,14 @@ describe AutoMergeService do
is_expected.to eq(:failed)
end
end
+
+ context 'when strategy is not specified' do
+ let(:strategy) { }
+
+ it 'chooses the most preferred strategy' do
+ is_expected.to eq(:merge_when_pipeline_succeeds)
+ end
+ end
end
describe '#update' do
diff --git a/spec/services/ci/compare_test_reports_service_spec.rb b/spec/services/ci/compare_test_reports_service_spec.rb
index f5edd3a552d..46f4d2d42ff 100644
--- a/spec/services/ci/compare_test_reports_service_spec.rb
+++ b/spec/services/ci/compare_test_reports_service_spec.rb
@@ -38,9 +38,10 @@ describe Ci::CompareTestReportsService do
create(:ci_job_artifact, :junit_with_corrupted_data, job: build, project: project)
end
- it 'returns status and error message' do
- expect(subject[:status]).to eq(:error)
- expect(subject[:status_reason]).to include('XML parsing failed')
+ it 'returns a parsed TestReports success status and failure on the individual suite' do
+ expect(subject[:status]).to eq(:parsed)
+ expect(subject.dig(:data, 'status')).to eq('success')
+ expect(subject.dig(:data, 'suites', 0, 'status')).to eq('error')
end
end
end
diff --git a/spec/services/ci/create_cross_project_pipeline_service_spec.rb b/spec/services/ci/create_cross_project_pipeline_service_spec.rb
index a411244e57f..5c59aaa4ce9 100644
--- a/spec/services/ci/create_cross_project_pipeline_service_spec.rb
+++ b/spec/services/ci/create_cross_project_pipeline_service_spec.rb
@@ -475,5 +475,45 @@ describe Ci::CreateCrossProjectPipelineService, '#execute' do
expect(bridge.failure_reason).to eq 'insufficient_bridge_permissions'
end
end
+
+ context 'when there is no such branch in downstream project' do
+ let(:trigger) do
+ {
+ trigger: {
+ project: downstream_project.full_path,
+ branch: 'invalid_branch'
+ }
+ }
+ end
+
+ it 'does not create a pipeline and drops the bridge' do
+ service.execute(bridge)
+
+ expect(bridge.reload).to be_failed
+ expect(bridge.failure_reason).to eq('downstream_pipeline_creation_failed')
+ end
+ end
+
+ context 'when downstream pipeline has a branch rule and does not satisfy' do
+ before do
+ stub_ci_pipeline_yaml_file(config)
+ end
+
+ let(:config) do
+ <<-EOY
+ hello:
+ script: echo world
+ only:
+ - invalid_branch
+ EOY
+ end
+
+ it 'does not create a pipeline and drops the bridge' do
+ service.execute(bridge)
+
+ expect(bridge.reload).to be_failed
+ expect(bridge.failure_reason).to eq('downstream_pipeline_creation_failed')
+ end
+ end
end
end
diff --git a/spec/services/ci/generate_terraform_reports_service_spec.rb b/spec/services/ci/generate_terraform_reports_service_spec.rb
new file mode 100644
index 00000000000..4d2c60bed2c
--- /dev/null
+++ b/spec/services/ci/generate_terraform_reports_service_spec.rb
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Ci::GenerateTerraformReportsService do
+ let_it_be(:project) { create(:project, :repository) }
+
+ describe '#execute' do
+ let_it_be(:merge_request) { create(:merge_request, :with_terraform_reports, source_project: project) }
+
+ subject { described_class.new(project, nil, id: merge_request.id) }
+
+ context 'when head pipeline has terraform reports' do
+ it 'returns status and data' do
+ result = subject.execute(nil, merge_request.head_pipeline)
+
+ expect(result).to match(
+ status: :parsed,
+ data: match(
+ a_hash_including('tfplan.json' => a_hash_including('create' => 0, 'update' => 1, 'delete' => 0))
+ ),
+ key: an_instance_of(Array)
+ )
+ end
+ end
+
+ context 'when head pipeline has corrupted terraform reports' do
+ it 'returns status and error message' do
+ build = create(:ci_build, pipeline: merge_request.head_pipeline, project: project)
+ create(:ci_job_artifact, :terraform_with_corrupted_data, job: build, project: project)
+
+ result = subject.execute(nil, merge_request.head_pipeline)
+
+ expect(result).to match(
+ status: :error,
+ status_reason: 'An error occurred while fetching terraform reports.',
+ key: an_instance_of(Array)
+ )
+ end
+ end
+ end
+
+ describe '#latest?' do
+ let_it_be(:head_pipeline) { create(:ci_pipeline, :with_test_reports, project: project) }
+
+ subject { described_class.new(project) }
+
+ it 'returns true when cache key is latest' do
+ cache_key = subject.send(:key, nil, head_pipeline)
+
+ result = subject.latest?(nil, head_pipeline, key: cache_key)
+
+ expect(result).to eq(true)
+ end
+
+ it 'returns false when cache key is outdated' do
+ cache_key = subject.send(:key, nil, head_pipeline)
+ head_pipeline.update_column(:updated_at, 10.minutes.ago)
+
+ result = subject.latest?(nil, head_pipeline, key: cache_key)
+
+ expect(result).to eq(false)
+ end
+
+ it 'returns false when cache key is nil' do
+ result = subject.latest?(nil, head_pipeline, key: nil)
+
+ expect(result).to eq(false)
+ end
+ end
+end
diff --git a/spec/services/ci/update_runner_service_spec.rb b/spec/services/ci/update_runner_service_spec.rb
index 2b07dad7248..abe575eebc8 100644
--- a/spec/services/ci/update_runner_service_spec.rb
+++ b/spec/services/ci/update_runner_service_spec.rb
@@ -23,6 +23,19 @@ describe Ci::UpdateRunnerService do
end
end
+ context 'with cost factor params' do
+ let(:params) { { public_projects_minutes_cost_factor: 1.1, private_projects_minutes_cost_factor: 2.2 }}
+
+ it 'updates the runner cost factors' do
+ expect(update).to be_truthy
+
+ runner.reload
+
+ expect(runner.public_projects_minutes_cost_factor).to eq(1.1)
+ expect(runner.private_projects_minutes_cost_factor).to eq(2.2)
+ end
+ end
+
context 'when params are not valid' do
let(:params) { { run_untagged: false } }
diff --git a/spec/services/emails/destroy_service_spec.rb b/spec/services/emails/destroy_service_spec.rb
index 5abe8da2529..9e14a13aa4f 100644
--- a/spec/services/emails/destroy_service_spec.rb
+++ b/spec/services/emails/destroy_service_spec.rb
@@ -10,7 +10,10 @@ describe Emails::DestroyService do
describe '#execute' do
it 'removes an email' do
- expect { service.execute(email) }.to change { user.emails.count }.by(-1)
+ response = service.execute(email)
+
+ expect(user.emails).not_to include(email)
+ expect(response).to be true
end
end
end
diff --git a/spec/services/git/process_ref_changes_service_spec.rb b/spec/services/git/process_ref_changes_service_spec.rb
index fc5e379f51d..924e913a9ec 100644
--- a/spec/services/git/process_ref_changes_service_spec.rb
+++ b/spec/services/git/process_ref_changes_service_spec.rb
@@ -160,6 +160,49 @@ describe Git::ProcessRefChangesService do
let(:ref_prefix) { 'refs/heads' }
it_behaves_like 'service for processing ref changes', Git::BranchPushService
+
+ context 'when there are merge requests associated with branches' do
+ let(:tag_changes) do
+ [
+ { index: 0, oldrev: Gitlab::Git::BLANK_SHA, newrev: '789012', ref: "refs/tags/v10.0.0" }
+ ]
+ end
+ let(:branch_changes) do
+ [
+ { index: 0, oldrev: Gitlab::Git::BLANK_SHA, newrev: '789012', ref: "#{ref_prefix}/create1" },
+ { index: 1, oldrev: Gitlab::Git::BLANK_SHA, newrev: '789013', ref: "#{ref_prefix}/create2" },
+ { index: 2, oldrev: Gitlab::Git::BLANK_SHA, newrev: '789014', ref: "#{ref_prefix}/create3" }
+ ]
+ end
+ let(:git_changes) { double(branch_changes: branch_changes, tag_changes: tag_changes) }
+
+ it 'schedules job for existing merge requests' do
+ expect_next_instance_of(MergeRequests::PushedBranchesService) do |service|
+ expect(service).to receive(:execute).and_return(%w(create1 create2))
+ end
+
+ expect(UpdateMergeRequestsWorker).to receive(:perform_async)
+ .with(project.id, user.id, Gitlab::Git::BLANK_SHA, '789012', "#{ref_prefix}/create1").ordered
+ expect(UpdateMergeRequestsWorker).to receive(:perform_async)
+ .with(project.id, user.id, Gitlab::Git::BLANK_SHA, '789013', "#{ref_prefix}/create2").ordered
+ expect(UpdateMergeRequestsWorker).not_to receive(:perform_async)
+ .with(project.id, user.id, Gitlab::Git::BLANK_SHA, '789014', "#{ref_prefix}/create3").ordered
+
+ subject.execute
+ end
+
+ context 'refresh_only_existing_merge_requests_on_push disabled' do
+ before do
+ stub_feature_flags(refresh_only_existing_merge_requests_on_push: false)
+ end
+
+ it 'refreshes all merge requests' do
+ expect(UpdateMergeRequestsWorker).to receive(:perform_async).exactly(3).times
+
+ subject.execute
+ end
+ end
+ end
end
context 'tag changes' do
diff --git a/spec/services/issues/create_service_spec.rb b/spec/services/issues/create_service_spec.rb
index bd50d6b1001..21ccebc53f4 100644
--- a/spec/services/issues/create_service_spec.rb
+++ b/spec/services/issues/create_service_spec.rb
@@ -368,6 +368,8 @@ describe Issues::CreateService do
end
context 'checking spam' do
+ include_context 'includes Spam constants'
+
let(:title) { 'Legit issue' }
let(:description) { 'please fix' }
let(:opts) do
@@ -378,6 +380,8 @@ describe Issues::CreateService do
}
end
+ subject { described_class.new(project, user, opts) }
+
before do
stub_feature_flags(allow_possible_spam: false)
end
@@ -391,7 +395,7 @@ describe Issues::CreateService do
opts[:recaptcha_verified] = true
opts[:spam_log_id] = target_spam_log.id
- expect(Spam::AkismetService).not_to receive(:new)
+ expect(Spam::SpamVerdictService).not_to receive(:new)
end
it 'does not mark an issue as spam' do
@@ -402,7 +406,7 @@ describe Issues::CreateService do
expect(issue).to be_valid
end
- it 'does not assign a spam_log to an issue' do
+ it 'does not assign a spam_log to the issue' do
expect(issue.spam_log).to be_nil
end
@@ -421,23 +425,52 @@ describe Issues::CreateService do
context 'when recaptcha was not verified' do
before do
- expect_next_instance_of(Spam::SpamCheckService) do |spam_service|
+ expect_next_instance_of(Spam::SpamActionService) do |spam_service|
expect(spam_service).to receive_messages(check_for_spam?: true)
end
end
- context 'when akismet detects spam' do
+ context 'when SpamVerdictService requires reCAPTCHA' do
+ before do
+ expect_next_instance_of(Spam::SpamVerdictService) do |verdict_service|
+ expect(verdict_service).to receive(:execute).and_return(REQUIRE_RECAPTCHA)
+ end
+ end
+
+ it 'marks the issue as spam' do
+ expect(issue).to be_spam
+ end
+
+ it 'marks the issue as needing reCAPTCHA' do
+ expect(issue.needs_recaptcha?).to be_truthy
+ end
+
+ it 'invalidates the issue' do
+ expect(issue).to be_invalid
+ end
+
+ it 'creates a new spam_log' do
+ expect { issue }
+ .to have_spam_log(title: title, description: description, user_id: user.id, noteable_type: 'Issue')
+ end
+ end
+
+ context 'when SpamVerdictService disallows creation' do
before do
- expect_next_instance_of(Spam::AkismetService) do |akismet_service|
- expect(akismet_service).to receive_messages(spam?: true)
+ expect_next_instance_of(Spam::SpamVerdictService) do |verdict_service|
+ expect(verdict_service).to receive(:execute).and_return(DISALLOW)
end
end
context 'when allow_possible_spam feature flag is false' do
- it 'marks the issue as spam' do
+ it 'does not mark the issue as spam' do
expect(issue).to be_spam
end
+ it 'does not mark the issue as needing reCAPTCHA' do
+ expect(issue.needs_recaptcha?).to be_falsey
+ end
+
it 'invalidates the issue' do
expect(issue).to be_invalid
end
@@ -457,7 +490,7 @@ describe Issues::CreateService do
expect(issue).not_to be_spam
end
- it '​creates a valid issue' do
+ it 'creates a valid issue' do
expect(issue).to be_valid
end
@@ -468,10 +501,10 @@ describe Issues::CreateService do
end
end
- context 'when akismet does not detect spam' do
+ context 'when the SpamVerdictService allows creation' do
before do
- expect_next_instance_of(Spam::AkismetService) do |akismet_service|
- expect(akismet_service).to receive_messages(spam?: false)
+ expect_next_instance_of(Spam::SpamVerdictService) do |verdict_service|
+ expect(verdict_service).to receive(:execute).and_return(ALLOW)
end
end
diff --git a/spec/services/issues/export_csv_service_spec.rb b/spec/services/issues/export_csv_service_spec.rb
new file mode 100644
index 00000000000..419e29d92a8
--- /dev/null
+++ b/spec/services/issues/export_csv_service_spec.rb
@@ -0,0 +1,170 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Issues::ExportCsvService do
+ let_it_be(:user) { create(:user) }
+ let(:group) { create(:group) }
+ let(:project) { create(:project, :public, group: group) }
+ let!(:issue) { create(:issue, project: project, author: user) }
+ let!(:bad_issue) { create(:issue, project: project, author: user) }
+ let(:subject) { described_class.new(Issue.all, project) }
+
+ it 'renders csv to string' do
+ expect(subject.csv_data).to be_a String
+ end
+
+ describe '#email' do
+ it 'emails csv' do
+ expect { subject.email(user) }.to change(ActionMailer::Base.deliveries, :count)
+ end
+
+ it 'renders with a target filesize' do
+ expect(subject.csv_builder).to receive(:render).with(described_class::TARGET_FILESIZE)
+
+ subject.email(user)
+ end
+ end
+
+ def csv
+ CSV.parse(subject.csv_data, headers: true)
+ end
+
+ context 'includes' do
+ let(:milestone) { create(:milestone, title: 'v1.0', project: project) }
+ let(:idea_label) { create(:label, project: project, title: 'Idea') }
+ let(:feature_label) { create(:label, project: project, title: 'Feature') }
+
+ before do
+ # Creating a timelog touches the updated_at timestamp of issue,
+ # so create these first.
+ issue.timelogs.create(time_spent: 360, user: user)
+ issue.timelogs.create(time_spent: 200, user: user)
+ issue.update!(milestone: milestone,
+ assignees: [user],
+ description: 'Issue with details',
+ state: :opened,
+ due_date: DateTime.new(2014, 3, 2),
+ created_at: DateTime.new(2015, 4, 3, 2, 1, 0),
+ updated_at: DateTime.new(2016, 5, 4, 3, 2, 1),
+ closed_at: DateTime.new(2017, 6, 5, 4, 3, 2),
+ weight: 4,
+ discussion_locked: true,
+ labels: [feature_label, idea_label],
+ time_estimate: 72000)
+ end
+
+ it 'includes the columns required for import' do
+ expect(csv.headers).to include('Title', 'Description')
+ end
+
+ specify 'iid' do
+ expect(csv[0]['Issue ID']).to eq issue.iid.to_s
+ end
+
+ specify 'url' do
+ expect(csv[0]['URL']).to match(/http.*#{project.full_path}.*#{issue.iid}/)
+ end
+
+ specify 'title' do
+ expect(csv[0]['Title']).to eq issue.title
+ end
+
+ specify 'state' do
+ expect(csv[0]['State']).to eq 'Open'
+ end
+
+ specify 'description' do
+ expect(csv[0]['Description']).to eq issue.description
+ expect(csv[1]['Description']).to eq nil
+ end
+
+ specify 'author name' do
+ expect(csv[0]['Author']).to eq issue.author_name
+ end
+
+ specify 'author username' do
+ expect(csv[0]['Author Username']).to eq issue.author.username
+ end
+
+ specify 'assignee name' do
+ expect(csv[0]['Assignee']).to eq user.name
+ expect(csv[1]['Assignee']).to eq ''
+ end
+
+ specify 'assignee username' do
+ expect(csv[0]['Assignee Username']).to eq user.username
+ expect(csv[1]['Assignee Username']).to eq ''
+ end
+
+ specify 'confidential' do
+ expect(csv[0]['Confidential']).to eq 'No'
+ end
+
+ specify 'milestone' do
+ expect(csv[0]['Milestone']).to eq issue.milestone.title
+ expect(csv[1]['Milestone']).to eq nil
+ end
+
+ specify 'labels' do
+ expect(csv[0]['Labels']).to eq 'Feature,Idea'
+ expect(csv[1]['Labels']).to eq nil
+ end
+
+ specify 'due_date' do
+ expect(csv[0]['Due Date']).to eq '2014-03-02'
+ expect(csv[1]['Due Date']).to eq nil
+ end
+
+ specify 'created_at' do
+ expect(csv[0]['Created At (UTC)']).to eq '2015-04-03 02:01:00'
+ end
+
+ specify 'updated_at' do
+ expect(csv[0]['Updated At (UTC)']).to eq '2016-05-04 03:02:01'
+ end
+
+ specify 'closed_at' do
+ expect(csv[0]['Closed At (UTC)']).to eq '2017-06-05 04:03:02'
+ expect(csv[1]['Closed At (UTC)']).to eq nil
+ end
+
+ specify 'discussion_locked' do
+ expect(csv[0]['Locked']).to eq 'Yes'
+ end
+
+ specify 'weight' do
+ expect(csv[0]['Weight']).to eq '4'
+ end
+
+ specify 'time estimate' do
+ expect(csv[0]['Time Estimate']).to eq '72000'
+ expect(csv[1]['Time Estimate']).to eq '0'
+ end
+
+ specify 'time spent' do
+ expect(csv[0]['Time Spent']).to eq '560'
+ expect(csv[1]['Time Spent']).to eq '0'
+ end
+
+ context 'with issues filtered by labels and project' do
+ let(:subject) do
+ described_class.new(
+ IssuesFinder.new(user,
+ project_id: project.id,
+ label_name: %w(Idea Feature)).execute, project)
+ end
+
+ it 'returns only filtered objects' do
+ expect(csv.count).to eq(1)
+ expect(csv[0]['Issue ID']).to eq issue.iid.to_s
+ end
+ end
+ end
+
+ context 'with minimal details' do
+ it 'renders labels as nil' do
+ expect(csv[0]['Labels']).to eq nil
+ end
+ end
+end
diff --git a/spec/services/jira_import/start_import_service_spec.rb b/spec/services/jira_import/start_import_service_spec.rb
index ae0c4f63fee..1eefffe11fa 100644
--- a/spec/services/jira_import/start_import_service_spec.rb
+++ b/spec/services/jira_import/start_import_service_spec.rb
@@ -5,8 +5,9 @@ require 'spec_helper'
describe JiraImport::StartImportService do
let_it_be(:user) { create(:user) }
let_it_be(:project, reload: true) { create(:project) }
+ let(:key) { 'KEY' }
- subject { described_class.new(user, project, '').execute }
+ subject { described_class.new(user, project, key).execute }
context 'when feature flag disabled' do
before do
@@ -23,6 +24,8 @@ describe JiraImport::StartImportService do
context 'when user does not have permissions to run the import' do
before do
+ create(:jira_service, project: project, active: true)
+
project.add_developer(user)
end
@@ -38,19 +41,21 @@ describe JiraImport::StartImportService do
it_behaves_like 'responds with error', 'Jira integration not configured.'
end
- context 'when issues feature are disabled' do
- let_it_be(:project, reload: true) { create(:project, :issues_disabled) }
-
- it_behaves_like 'responds with error', 'Cannot import because issues are not available in this project.'
- end
-
context 'when Jira service exists' do
let!(:jira_service) { create(:jira_service, project: project, active: true) }
context 'when Jira project key is not provided' do
+ let(:key) { '' }
+
it_behaves_like 'responds with error', 'Unable to find Jira project to import data from.'
end
+ context 'when issues feature is disabled' do
+ let_it_be(:project, reload: true) { create(:project, :issues_disabled) }
+
+ it_behaves_like 'responds with error', 'Cannot import because issues are not available in this project.'
+ end
+
context 'when correct data provided' do
let(:fake_key) { 'some-key' }
@@ -62,18 +67,20 @@ describe JiraImport::StartImportService do
it_behaves_like 'responds with error', 'Jira import is already running.'
end
- it 'returns success response' do
- expect(subject).to be_a(ServiceResponse)
- expect(subject).to be_success
- end
+ context 'when everything is ok' do
+ it 'returns success response' do
+ expect(subject).to be_a(ServiceResponse)
+ expect(subject).to be_success
+ end
- it 'schedules jira import' do
- subject
+ it 'schedules Jira import' do
+ subject
- expect(project.latest_jira_import).to be_scheduled
+ expect(project.latest_jira_import).to be_scheduled
+ end
end
- it 'creates jira import data' do
+ it 'creates Jira import data' do
jira_import = subject.payload[:import_data]
expect(jira_import.jira_project_xid).to eq(0)
@@ -82,21 +89,21 @@ describe JiraImport::StartImportService do
expect(jira_import.user).to eq(user)
end
- it 'creates jira import label' do
+ it 'creates Jira import label' do
expect { subject }.to change { Label.count }.by(1)
end
- it 'creates jira label title with correct number' do
+ it 'creates Jira label title with correct number' do
jira_import = subject.payload[:import_data]
label_title = "jira-import::#{jira_import.jira_project_key}-1"
expect(jira_import.label.title).to eq(label_title)
end
- context 'when multiple jira imports for same jira project' do
+ context 'when multiple Jira imports for same Jira project' do
let!(:jira_imports) { create_list(:jira_import_state, 3, :finished, project: project, jira_project_key: fake_key)}
- it 'creates jira label title with correct number' do
+ it 'creates Jira label title with correct number' do
jira_import = subject.payload[:import_data]
label_title = "jira-import::#{jira_import.jira_project_key}-4"
diff --git a/spec/services/merge_requests/merge_orchestration_service_spec.rb b/spec/services/merge_requests/merge_orchestration_service_spec.rb
new file mode 100644
index 00000000000..c50f20d7703
--- /dev/null
+++ b/spec/services/merge_requests/merge_orchestration_service_spec.rb
@@ -0,0 +1,116 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe MergeRequests::MergeOrchestrationService do
+ let_it_be(:maintainer) { create(:user) }
+ let(:merge_params) { { sha: merge_request.diff_head_sha } }
+ let(:user) { maintainer }
+ let(:service) { described_class.new(project, user, merge_params) }
+
+ let!(:merge_request) do
+ create(:merge_request, source_project: project, source_branch: 'feature',
+ target_project: project, target_branch: 'master')
+ end
+
+ shared_context 'fresh repository' do
+ let_it_be(:project) { create(:project, :repository) }
+
+ before_all do
+ project.add_maintainer(maintainer)
+ end
+ end
+
+ describe '#execute' do
+ subject { service.execute(merge_request) }
+
+ include_context 'fresh repository'
+
+ context 'when merge request is mergeable' do
+ context 'when merge request can be merged automatically' do
+ before do
+ create(:ci_pipeline, :detached_merge_request_pipeline, project: project, merge_request: merge_request)
+ merge_request.update_head_pipeline
+ end
+
+ it 'schedules auto merge' do
+ expect_next_instance_of(AutoMergeService, project, user, merge_params) do |service|
+ expect(service).to receive(:execute).with(merge_request).and_call_original
+ end
+
+ subject
+
+ expect(merge_request).to be_auto_merge_enabled
+ expect(merge_request.auto_merge_strategy).to eq(AutoMergeService::STRATEGY_MERGE_WHEN_PIPELINE_SUCCEEDS)
+ expect(merge_request).not_to be_merged
+ end
+ end
+
+ context 'when merge request cannot be merged automatically' do
+ it 'merges immediately', :sidekiq_inline do
+ expect(merge_request)
+ .to receive(:merge_async).with(user.id, merge_params)
+ .and_call_original
+
+ subject
+
+ merge_request.reset
+ expect(merge_request).to be_merged
+ expect(merge_request).not_to be_auto_merge_enabled
+ end
+ end
+ end
+
+ context 'when merge request is not mergeable' do
+ before do
+ allow(merge_request).to receive(:mergeable_state?) { false }
+ end
+
+ it 'does nothing' do
+ subject
+
+ expect(merge_request).not_to be_auto_merge_enabled
+ expect(merge_request).not_to be_merged
+ end
+ end
+ end
+
+ describe '#can_merge?' do
+ subject { service.can_merge?(merge_request) }
+
+ include_context 'fresh repository'
+
+ context 'when merge request is mergeable' do
+ it { is_expected.to eq(true) }
+ end
+
+ context 'when merge request is not mergeable' do
+ before do
+ allow(merge_request).to receive(:mergeable_state?) { false }
+ end
+
+ it { is_expected.to eq(false) }
+ end
+ end
+
+ describe '#preferred_auto_merge_strategy' do
+ subject { service.preferred_auto_merge_strategy(merge_request) }
+
+ include_context 'fresh repository'
+
+ context 'when merge request can be merged automatically' do
+ before do
+ create(:ci_pipeline, :detached_merge_request_pipeline, project: project, merge_request: merge_request)
+ merge_request.update_head_pipeline
+ end
+
+ it 'fetches preferred auto merge strategy' do
+ is_expected.to eq(AutoMergeService::STRATEGY_MERGE_WHEN_PIPELINE_SUCCEEDS)
+ end
+ end
+
+ context 'when merge request cannot be merged automatically' do
+ it { is_expected.to be_nil }
+ end
+ end
+end
diff --git a/spec/services/merge_requests/pushed_branches_service_spec.rb b/spec/services/merge_requests/pushed_branches_service_spec.rb
new file mode 100644
index 00000000000..7b5d505f4d9
--- /dev/null
+++ b/spec/services/merge_requests/pushed_branches_service_spec.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe MergeRequests::PushedBranchesService do
+ let(:project) { create(:project) }
+ let!(:service) { described_class.new(project, nil, changes: pushed_branches) }
+
+ context 'when branches pushed' do
+ let(:pushed_branches) do
+ %w(branch1 branch2 extra1 extra2 extra3).map do |branch|
+ { ref: "refs/heads/#{branch}" }
+ end
+ end
+
+ it 'returns only branches which have a merge request' do
+ create(:merge_request, source_branch: 'branch1', source_project: project)
+ create(:merge_request, source_branch: 'branch2', source_project: project)
+ create(:merge_request, target_branch: 'branch2', source_project: project)
+ create(:merge_request, :closed, target_branch: 'extra1', source_project: project)
+ create(:merge_request, source_branch: 'extra2')
+
+ expect(service.execute).to contain_exactly('branch1', 'branch2')
+ end
+ end
+
+ context 'when tags pushed' do
+ let(:pushed_branches) do
+ %w(v10.0.0 v11.0.2 v12.1.0).map do |branch|
+ { ref: "refs/tags/#{branch}" }
+ end
+ end
+
+ it 'returns empty result without any SQL query performed' do
+ control_count = ActiveRecord::QueryRecorder.new do
+ expect(service.execute).to be_empty
+ end.count
+
+ expect(control_count).to be_zero
+ end
+ end
+end
diff --git a/spec/services/merge_requests/update_service_spec.rb b/spec/services/merge_requests/update_service_spec.rb
index dd5d90b2d07..8c1800c495f 100644
--- a/spec/services/merge_requests/update_service_spec.rb
+++ b/spec/services/merge_requests/update_service_spec.rb
@@ -208,7 +208,7 @@ describe MergeRequests::UpdateService, :mailer do
end
end
- context 'merge' do
+ shared_examples_for 'correct merge behavior' do
let(:opts) do
{
merge: merge_request.diff_head_sha
@@ -311,6 +311,18 @@ describe MergeRequests::UpdateService, :mailer do
end
end
+ describe 'merge' do
+ it_behaves_like 'correct merge behavior'
+
+ context 'when merge_orchestration_service feature flag is disabled' do
+ before do
+ stub_feature_flags(merge_orchestration_service: false)
+ end
+
+ it_behaves_like 'correct merge behavior'
+ end
+ end
+
context 'todos' do
let!(:pending_todo) { create(:todo, :assigned, user: user, project: project, target: merge_request, author: user2) }
diff --git a/spec/services/metrics/dashboard/transient_embed_service_spec.rb b/spec/services/metrics/dashboard/transient_embed_service_spec.rb
index fddfbe15281..4982f56cddc 100644
--- a/spec/services/metrics/dashboard/transient_embed_service_spec.rb
+++ b/spec/services/metrics/dashboard/transient_embed_service_spec.rb
@@ -38,21 +38,7 @@ describe Metrics::Dashboard::TransientEmbedService, :use_clean_rails_memory_stor
end
describe '#get_dashboard' do
- let(:embed_json) do
- {
- panel_groups: [{
- panels: [{
- type: 'line-graph',
- title: 'title',
- y_label: 'y_label',
- metrics: [{
- query_range: 'up',
- label: 'y_label'
- }]
- }]
- }]
- }.to_json
- end
+ let(:embed_json) { get_embed_json }
let(:service_params) { [project, user, { environment: environment, embedded: 'true', embed_json: embed_json }] }
let(:service_call) { described_class.new(*service_params).get_dashboard }
@@ -68,5 +54,39 @@ describe Metrics::Dashboard::TransientEmbedService, :use_clean_rails_memory_stor
described_class.new(*service_params).get_dashboard
described_class.new(*service_params).get_dashboard
end
+
+ it 'caches unique requests separately' do
+ alt_embed_json = get_embed_json('area-chart')
+ alt_service_params = [project, user, { environment: environment, embedded: 'true', embed_json: alt_embed_json }]
+
+ embed = described_class.new(*service_params).get_dashboard
+ alt_embed = described_class.new(*alt_service_params).get_dashboard
+
+ expect(embed).not_to eq(alt_embed)
+ expect(get_type_for_embed(embed)).to eq('line-graph')
+ expect(get_type_for_embed(alt_embed)).to eq('area-chart')
+ end
+
+ private
+
+ def get_embed_json(type = 'line-graph')
+ {
+ panel_groups: [{
+ panels: [{
+ type: type,
+ title: 'title',
+ y_label: 'y_label',
+ metrics: [{
+ query_range: 'up',
+ label: 'y_label'
+ }]
+ }]
+ }]
+ }.to_json
+ end
+
+ def get_type_for_embed(embed)
+ embed[:dashboard][:panel_groups][0][:panels][0][:type]
+ end
end
end
diff --git a/spec/services/namespaces/check_storage_size_service_spec.rb b/spec/services/namespaces/check_storage_size_service_spec.rb
new file mode 100644
index 00000000000..dcb16e2f52a
--- /dev/null
+++ b/spec/services/namespaces/check_storage_size_service_spec.rb
@@ -0,0 +1,101 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Namespaces::CheckStorageSizeService, '#execute' do
+ let_it_be(:root_group) { create(:group) }
+ let(:nested_group) { create(:group, parent: root_group) }
+ let(:service) { described_class.new(nested_group) }
+ let(:current_size) { 150.megabytes }
+ let(:limit) { 100 }
+
+ subject { service.execute }
+
+ before do
+ stub_application_setting(namespace_storage_size_limit: limit)
+
+ create(:namespace_root_storage_statistics, namespace: root_group, storage_size: current_size)
+ end
+
+ context 'feature flag' do
+ it 'is successful when disabled' do
+ stub_feature_flags(namespace_storage_limit: false)
+
+ expect(subject).to be_success
+ end
+
+ it 'errors when enabled' do
+ stub_feature_flags(namespace_storage_limit: true)
+
+ expect(subject).to be_error
+ end
+
+ it 'is successful when disabled for the current group' do
+ stub_feature_flags(namespace_storage_limit: { enabled: false, thing: root_group })
+
+ expect(subject).to be_success
+ end
+
+ it 'is successful when feature flag is activated for another group' do
+ stub_feature_flags(namespace_storage_limit: false)
+ stub_feature_flags(namespace_storage_limit: { enabled: true, thing: create(:group) })
+
+ expect(subject).to be_success
+ end
+
+ it 'errors when feature flag is activated for the current group' do
+ stub_feature_flags(namespace_storage_limit: { enabled: true, thing: root_group })
+
+ expect(subject).to be_error
+ end
+ end
+
+ context 'when limit is set to 0' do
+ let(:limit) { 0 }
+
+ it { is_expected.to be_success }
+
+ it 'does not respond with a payload' do
+ result = subject
+
+ expect(result.message).to be_nil
+ expect(result.payload).to be_empty
+ end
+ end
+
+ context 'when current size is below threshold to show an alert' do
+ let(:current_size) { 10.megabytes }
+
+ it { is_expected.to be_success }
+ end
+
+ context 'when current size exceeds limit' do
+ it 'returns an error with a payload' do
+ result = subject
+ current_usage_message = result.payload[:current_usage_message]
+
+ expect(result).to be_error
+ expect(result.message).to include("#{root_group.name} is now read-only.")
+ expect(current_usage_message).to include("150%")
+ expect(current_usage_message).to include(root_group.name)
+ expect(current_usage_message).to include("150 MB of 100 MB")
+ expect(result.payload[:usage_ratio]).to eq(1.5)
+ end
+ end
+
+ context 'when current size is below limit but should show an alert' do
+ let(:current_size) { 50.megabytes }
+
+ it 'returns success with a payload' do
+ result = subject
+ current_usage_message = result.payload[:current_usage_message]
+
+ expect(result).to be_success
+ expect(result.message).to be_present
+ expect(current_usage_message).to include("50%")
+ expect(current_usage_message).to include(root_group.name)
+ expect(current_usage_message).to include("50 MB of 100 MB")
+ expect(result.payload[:usage_ratio]).to eq(0.5)
+ end
+ end
+end
diff --git a/spec/services/personal_access_tokens/create_service_spec.rb b/spec/services/personal_access_tokens/create_service_spec.rb
new file mode 100644
index 00000000000..9190434b96a
--- /dev/null
+++ b/spec/services/personal_access_tokens/create_service_spec.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe PersonalAccessTokens::CreateService do
+ describe '#execute' do
+ context 'with valid params' do
+ it 'creates personal access token record' do
+ user = create(:user)
+ params = { name: 'Test token', impersonation: true, scopes: [:api], expires_at: Date.today + 1.month }
+
+ response = described_class.new(user, params).execute
+ personal_access_token = response.payload[:personal_access_token]
+
+ expect(response.success?).to be true
+ expect(personal_access_token.name).to eq(params[:name])
+ expect(personal_access_token.impersonation).to eq(params[:impersonation])
+ expect(personal_access_token.scopes).to eq(params[:scopes])
+ expect(personal_access_token.expires_at).to eq(params[:expires_at])
+ expect(personal_access_token.user).to eq(user)
+ end
+ end
+ end
+end
diff --git a/spec/services/pod_logs/elasticsearch_service_spec.rb b/spec/services/pod_logs/elasticsearch_service_spec.rb
index e3efce1134b..79efc91af41 100644
--- a/spec/services/pod_logs/elasticsearch_service_spec.rb
+++ b/spec/services/pod_logs/elasticsearch_service_spec.rb
@@ -225,7 +225,7 @@ describe ::PodLogs::ElasticsearchService do
.and_return(Elasticsearch::Transport::Client.new)
allow_any_instance_of(::Gitlab::Elasticsearch::Logs::Lines)
.to receive(:pod_logs)
- .with(namespace, pod_name: pod_name, container_name: container_name, search: search, start_time: start_time, end_time: end_time, cursor: cursor)
+ .with(namespace, pod_name: pod_name, container_name: container_name, search: search, start_time: start_time, end_time: end_time, cursor: cursor, filebeat7: true)
.and_return({ logs: expected_logs, cursor: expected_cursor })
result = subject.send(:pod_logs, result_arg)
diff --git a/spec/services/quick_actions/interpret_service_spec.rb b/spec/services/quick_actions/interpret_service_spec.rb
index 6cc2e2b6abe..36f9966c0ef 100644
--- a/spec/services/quick_actions/interpret_service_spec.rb
+++ b/spec/services/quick_actions/interpret_service_spec.rb
@@ -492,7 +492,7 @@ describe QuickActions::InterpretService do
end
end
- shared_examples 'merge command' do
+ shared_examples 'merge immediately command' do
let(:project) { create(:project, :repository) }
it 'runs merge command if content contains /merge' do
@@ -504,7 +504,18 @@ describe QuickActions::InterpretService do
it 'returns them merge message' do
_, _, message = service.execute(content, issuable)
- expect(message).to eq('Scheduled to merge this merge request when the pipeline succeeds.')
+ expect(message).to eq('Merged this merge request.')
+ end
+ end
+
+ shared_examples 'merge automatically command' do
+ let(:project) { create(:project, :repository) }
+
+ it 'runs merge command if content contains /merge and returns merge message' do
+ _, updates, message = service.execute(content, issuable)
+
+ expect(updates).to eq(merge: merge_request.diff_head_sha)
+ expect(message).to eq('Scheduled to merge this merge request (Merge when pipeline succeeds).')
end
end
@@ -675,11 +686,23 @@ describe QuickActions::InterpretService do
context 'merge command' do
let(:service) { described_class.new(project, developer, { merge_request_diff_head_sha: merge_request.diff_head_sha }) }
- it_behaves_like 'merge command' do
+ it_behaves_like 'merge immediately command' do
let(:content) { '/merge' }
let(:issuable) { merge_request }
end
+ context 'when the head pipeline of merge request is running' do
+ before do
+ create(:ci_pipeline, :detached_merge_request_pipeline, merge_request: merge_request)
+ merge_request.update_head_pipeline
+ end
+
+ it_behaves_like 'merge automatically command' do
+ let(:content) { '/merge' }
+ let(:issuable) { merge_request }
+ end
+ end
+
context 'can not be merged when logged user does not have permissions' do
let(:service) { described_class.new(project, create(:user)) }
diff --git a/spec/services/resources/create_access_token_service_spec.rb b/spec/services/resources/create_access_token_service_spec.rb
new file mode 100644
index 00000000000..8c108d9937a
--- /dev/null
+++ b/spec/services/resources/create_access_token_service_spec.rb
@@ -0,0 +1,163 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Resources::CreateAccessTokenService do
+ subject { described_class.new(resource_type, resource, user, params).execute }
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :private) }
+ let_it_be(:params) { {} }
+
+ describe '#execute' do
+ # Created shared_examples as it will easy to include specs for group bots in https://gitlab.com/gitlab-org/gitlab/-/issues/214046
+ shared_examples 'fails when user does not have the permission to create a Resource Bot' do
+ before do
+ resource.add_developer(user)
+ end
+
+ it 'returns error' do
+ response = subject
+
+ expect(response.error?).to be true
+ expect(response.message).to eq("User does not have permission to create #{resource_type} Access Token")
+ end
+ end
+
+ shared_examples 'fails when flag is disabled' do
+ before do
+ stub_feature_flags(resource_access_token: false)
+ end
+
+ it 'returns nil' do
+ expect(subject).to be nil
+ end
+ end
+
+ shared_examples 'allows creation of bot with valid params' do
+ it { expect { subject }.to change { User.count }.by(1) }
+
+ it 'creates resource bot user' do
+ response = subject
+
+ access_token = response.payload[:access_token]
+
+ expect(access_token.user.reload.user_type).to eq("#{resource_type}_bot")
+ end
+
+ context 'bot name' do
+ context 'when no value is passed' do
+ it 'uses default value' do
+ response = subject
+ access_token = response.payload[:access_token]
+
+ expect(access_token.user.name).to eq("#{resource.name.to_s.humanize} bot")
+ end
+ end
+
+ context 'when user provides value' do
+ let(:params) { { name: 'Random bot' } }
+
+ it 'overrides the default value' do
+ response = subject
+ access_token = response.payload[:access_token]
+
+ expect(access_token.user.name).to eq(params[:name])
+ end
+ end
+ end
+
+ it 'adds the bot user as a maintainer in the resource' do
+ response = subject
+ access_token = response.payload[:access_token]
+ bot_user = access_token.user
+
+ expect(resource.members.maintainers.map(&:user_id)).to include(bot_user.id)
+ end
+
+ context 'personal access token' do
+ it { expect { subject }.to change { PersonalAccessToken.count }.by(1) }
+
+ context 'when user does not provide scope' do
+ it 'has default scopes' do
+ response = subject
+ access_token = response.payload[:access_token]
+
+ expect(access_token.scopes).to eq(Gitlab::Auth::API_SCOPES + Gitlab::Auth::REPOSITORY_SCOPES + Gitlab::Auth.registry_scopes - [:read_user])
+ end
+ end
+
+ context 'when user provides scope explicitly' do
+ let(:params) { { scopes: Gitlab::Auth::REPOSITORY_SCOPES } }
+
+ it 'overrides the default value' do
+ response = subject
+ access_token = response.payload[:access_token]
+
+ expect(access_token.scopes).to eq(Gitlab::Auth::REPOSITORY_SCOPES)
+ end
+ end
+
+ context 'expires_at' do
+ context 'when no value is passed' do
+ it 'uses default value' do
+ response = subject
+ access_token = response.payload[:access_token]
+
+ expect(access_token.expires_at).to eq(nil)
+ end
+ end
+
+ context 'when user provides value' do
+ let(:params) { { expires_at: Date.today + 1.month } }
+
+ it 'overrides the default value' do
+ response = subject
+ access_token = response.payload[:access_token]
+
+ expect(access_token.expires_at).to eq(params[:expires_at])
+ end
+ end
+
+ context 'when invalid scope is passed' do
+ let(:params) { { scopes: [:invalid_scope] } }
+
+ it 'returns error' do
+ response = subject
+
+ expect(response.error?).to be true
+ end
+ end
+ end
+ end
+
+ context 'when access provisioning fails' do
+ before do
+ allow(resource).to receive(:add_maintainer).and_return(nil)
+ end
+
+ it 'returns error' do
+ response = subject
+
+ expect(response.error?).to be true
+ end
+ end
+ end
+
+ context 'when resource is a project' do
+ let(:resource_type) { 'project' }
+ let(:resource) { project }
+
+ it_behaves_like 'fails when user does not have the permission to create a Resource Bot'
+ it_behaves_like 'fails when flag is disabled'
+
+ context 'user with valid permission' do
+ before do
+ resource.add_maintainer(user)
+ end
+
+ it_behaves_like 'allows creation of bot with valid params'
+ end
+ end
+ end
+end
diff --git a/spec/services/snippets/create_service_spec.rb b/spec/services/snippets/create_service_spec.rb
index 690aa2c066e..c1a8a026b90 100644
--- a/spec/services/snippets/create_service_spec.rb
+++ b/spec/services/snippets/create_service_spec.rb
@@ -252,6 +252,39 @@ describe Snippets::CreateService do
end
end
+ shared_examples 'after_save callback to store_mentions' do
+ context 'when mentionable attributes change' do
+ let(:extra_opts) { { description: "Description with #{user.to_reference}" } }
+
+ it 'saves mentions' do
+ expect_next_instance_of(Snippet) do |instance|
+ expect(instance).to receive(:store_mentions!).and_call_original
+ end
+ expect(snippet.user_mentions.count).to eq 1
+ end
+ end
+
+ context 'when mentionable attributes do not change' do
+ it 'does not call store_mentions' do
+ expect_next_instance_of(Snippet) do |instance|
+ expect(instance).not_to receive(:store_mentions!)
+ end
+ expect(snippet.user_mentions.count).to eq 0
+ end
+ end
+
+ context 'when save fails' do
+ it 'does not call store_mentions' do
+ base_opts.delete(:title)
+
+ expect_next_instance_of(Snippet) do |instance|
+ expect(instance).not_to receive(:store_mentions!)
+ end
+ expect(snippet.valid?).to be false
+ end
+ end
+ end
+
context 'when ProjectSnippet' do
let_it_be(:project) { create(:project) }
@@ -265,6 +298,7 @@ describe Snippets::CreateService do
it_behaves_like 'snippet create data is tracked'
it_behaves_like 'an error service response when save fails'
it_behaves_like 'creates repository and files'
+ it_behaves_like 'after_save callback to store_mentions'
end
context 'when PersonalSnippet' do
@@ -276,6 +310,9 @@ describe Snippets::CreateService do
it_behaves_like 'snippet create data is tracked'
it_behaves_like 'an error service response when save fails'
it_behaves_like 'creates repository and files'
+ pending('See https://gitlab.com/gitlab-org/gitlab/issues/30742') do
+ it_behaves_like 'after_save callback to store_mentions'
+ end
end
end
end
diff --git a/spec/services/snippets/update_service_spec.rb b/spec/services/snippets/update_service_spec.rb
index 05fb725c065..4274a7d05d9 100644
--- a/spec/services/snippets/update_service_spec.rb
+++ b/spec/services/snippets/update_service_spec.rb
@@ -270,6 +270,35 @@ describe Snippets::UpdateService do
end
end
+ shared_examples 'committable attributes' do
+ context 'when file_name is updated' do
+ let(:options) { { file_name: 'snippet.rb' } }
+
+ it 'commits to repository' do
+ expect(service).to receive(:create_commit)
+ expect(subject).to be_success
+ end
+ end
+
+ context 'when content is updated' do
+ let(:options) { { content: 'puts "hello world"' } }
+
+ it 'commits to repository' do
+ expect(service).to receive(:create_commit)
+ expect(subject).to be_success
+ end
+ end
+
+ context 'when content or file_name is not updated' do
+ let(:options) { { title: 'Test snippet' } }
+
+ it 'does not perform any commit' do
+ expect(service).not_to receive(:create_commit)
+ expect(subject).to be_success
+ end
+ end
+ end
+
context 'when Project Snippet' do
let_it_be(:project) { create(:project) }
let!(:snippet) { create(:project_snippet, :repository, author: user, project: project) }
@@ -283,6 +312,7 @@ describe Snippets::UpdateService do
it_behaves_like 'snippet update data is tracked'
it_behaves_like 'updates repository content'
it_behaves_like 'commit operation fails'
+ it_behaves_like 'committable attributes'
context 'when snippet does not have a repository' do
let!(:snippet) { create(:project_snippet, author: user, project: project) }
@@ -301,6 +331,7 @@ describe Snippets::UpdateService do
it_behaves_like 'snippet update data is tracked'
it_behaves_like 'updates repository content'
it_behaves_like 'commit operation fails'
+ it_behaves_like 'committable attributes'
context 'when snippet does not have a repository' do
let!(:snippet) { create(:personal_snippet, author: user, project: project) }
diff --git a/spec/services/spam/spam_check_service_spec.rb b/spec/services/spam/spam_action_service_spec.rb
index 3d0cb1447bd..8c8685e9628 100644
--- a/spec/services/spam/spam_check_service_spec.rb
+++ b/spec/services/spam/spam_action_service_spec.rb
@@ -2,7 +2,9 @@
require 'spec_helper'
-describe Spam::SpamCheckService do
+describe Spam::SpamActionService do
+ include_context 'includes Spam constants'
+
let(:fake_ip) { '1.2.3.4' }
let(:fake_user_agent) { 'fake-user-agent' }
let(:fake_referrer) { 'fake-http-referrer' }
@@ -15,7 +17,7 @@ describe Spam::SpamCheckService do
let_it_be(:project) { create(:project, :public) }
let_it_be(:user) { create(:user) }
- let_it_be(:issue) { create(:issue, project: project, author: user) }
+ let(:issue) { create(:issue, project: project, author: user) }
before do
issue.spam = false
@@ -51,7 +53,7 @@ describe Spam::SpamCheckService do
shared_examples 'only checks for spam if a request is provided' do
context 'when request is missing' do
- let(:request) { nil }
+ subject { described_class.new(spammable: issue, request: nil) }
it "doesn't check as spam" do
subject
@@ -70,6 +72,7 @@ describe Spam::SpamCheckService do
describe '#execute' do
let(:request) { double(:request, env: env) }
+ let(:fake_verdict_service) { double(:spam_verdict_service) }
let_it_be(:existing_spam_log) { create(:spam_log, user: user, recaptcha_verified: false) }
@@ -78,13 +81,17 @@ describe Spam::SpamCheckService do
described_service.execute(user_id: user.id, api: nil, recaptcha_verified: recaptcha_verified, spam_log_id: existing_spam_log.id)
end
+ before do
+ allow(Spam::SpamVerdictService).to receive(:new).and_return(fake_verdict_service)
+ end
+
context 'when recaptcha was already verified' do
let(:recaptcha_verified) { true }
- it "updates spam log and doesn't check Akismet" do
+ it "doesn't check with the SpamVerdictService" do
aggregate_failures do
- expect(SpamLog).not_to receive(:create!)
- expect(an_instance_of(described_class)).not_to receive(:check)
+ expect(SpamLog).to receive(:verify_recaptcha!)
+ expect(fake_verdict_service).not_to receive(:execute)
end
subject
@@ -101,12 +108,6 @@ describe Spam::SpamCheckService do
context 'when spammable attributes have not changed' do
before do
issue.closed_at = Time.zone.now
-
- allow(Spam::AkismetService).to receive(:new).and_return(double(spam?: true))
- end
-
- it 'returns false' do
- expect(subject).to be_falsey
end
it 'does not create a spam log' do
@@ -120,9 +121,9 @@ describe Spam::SpamCheckService do
issue.description = 'SPAM!'
end
- context 'when indicated as spam by Akismet' do
+ context 'when disallowed by the spam action service' do
before do
- allow(Spam::AkismetService).to receive(:new).and_return(double(spam?: true))
+ allow(fake_verdict_service).to receive(:execute).and_return(DISALLOW)
end
context 'when allow_possible_spam feature flag is false' do
@@ -150,13 +151,9 @@ describe Spam::SpamCheckService do
end
end
- context 'when not indicated as spam by Akismet' do
+ context 'when spam action service allows creation' do
before do
- allow(Spam::AkismetService).to receive(:new).and_return(double(spam?: false))
- end
-
- it 'returns false' do
- expect(subject).to be_falsey
+ allow(fake_verdict_service).to receive(:execute).and_return(ALLOW)
end
it 'does not create a spam log' do
diff --git a/spec/services/spam/spam_verdict_service_spec.rb b/spec/services/spam/spam_verdict_service_spec.rb
new file mode 100644
index 00000000000..93460a5e7d7
--- /dev/null
+++ b/spec/services/spam/spam_verdict_service_spec.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Spam::SpamVerdictService do
+ include_context 'includes Spam constants'
+
+ let(:fake_ip) { '1.2.3.4' }
+ let(:fake_user_agent) { 'fake-user-agent' }
+ let(:fake_referrer) { 'fake-http-referrer' }
+ let(:env) do
+ { 'action_dispatch.remote_ip' => fake_ip,
+ 'HTTP_USER_AGENT' => fake_user_agent,
+ 'HTTP_REFERRER' => fake_referrer }
+ end
+ let(:request) { double(:request, env: env) }
+
+ let(:check_for_spam) { true }
+ let(:issue) { build(:issue) }
+ let(:service) do
+ described_class.new(target: issue, request: request, options: {})
+ end
+
+ describe '#execute' do
+ subject { service.execute }
+
+ before do
+ allow_next_instance_of(Spam::AkismetService) do |service|
+ allow(service).to receive(:spam?).and_return(spam_verdict)
+ end
+ end
+
+ context 'if Akismet considers it spam' do
+ let(:spam_verdict) { true }
+
+ context 'if reCAPTCHA is enabled' do
+ before do
+ stub_application_setting(recaptcha_enabled: true)
+ end
+
+ it 'requires reCAPTCHA' do
+ expect(subject).to eq REQUIRE_RECAPTCHA
+ end
+ end
+
+ context 'if reCAPTCHA is not enabled' do
+ before do
+ stub_application_setting(recaptcha_enabled: false)
+ end
+
+ it 'disallows the change' do
+ expect(subject).to eq DISALLOW
+ end
+ end
+ end
+
+ context 'if Akismet does not consider it spam' do
+ let(:spam_verdict) { false }
+
+ it 'allows the change' do
+ expect(subject).to eq ALLOW
+ end
+ end
+ end
+end
diff --git a/spec/services/terraform/remote_state_handler_spec.rb b/spec/services/terraform/remote_state_handler_spec.rb
new file mode 100644
index 00000000000..f4e1831b2e8
--- /dev/null
+++ b/spec/services/terraform/remote_state_handler_spec.rb
@@ -0,0 +1,143 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Terraform::RemoteStateHandler do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+
+ describe '#find_with_lock' do
+ context 'without a state name' do
+ subject { described_class.new(project, user) }
+
+ it 'raises an exception' do
+ expect { subject.find_with_lock }.to raise_error(ArgumentError)
+ end
+ end
+
+ context 'with a state name' do
+ subject { described_class.new(project, user, name: 'state') }
+
+ context 'with no matching state' do
+ it 'raises an exception' do
+ expect { subject.find_with_lock }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+ end
+
+ context 'with a matching state' do
+ let!(:state) { create(:terraform_state, project: project, name: 'state') }
+
+ it 'returns the state' do
+ expect(subject.find_with_lock).to eq(state)
+ end
+ end
+ end
+ end
+
+ describe '#create_or_find!' do
+ it 'requires passing a state name' do
+ handler = described_class.new(project, user)
+
+ expect { handler.create_or_find! }.to raise_error(ArgumentError)
+ end
+
+ it 'allows to create states with same name in different projects' do
+ project_b = create(:project)
+
+ state_a = described_class.new(project, user, name: 'my-state').create_or_find!
+ state_b = described_class.new(project_b, user, name: 'my-state').create_or_find!
+
+ expect(state_a).to be_persisted
+ expect(state_b).to be_persisted
+ expect(state_a.id).not_to eq state_b.id
+ end
+
+ it 'loads the same state upon subsequent call in the project scope' do
+ state_a = described_class.new(project, user, name: 'my-state').create_or_find!
+ state_b = described_class.new(project, user, name: 'my-state').create_or_find!
+
+ expect(state_a).to be_persisted
+ expect(state_a.id).to eq state_b.id
+ end
+ end
+
+ context 'when state locking is not being used' do
+ subject { described_class.new(project, user, name: 'my-state') }
+
+ describe '#handle_with_lock' do
+ it 'allows to modify a state using database locking' do
+ state = subject.handle_with_lock do |state|
+ state.name = 'updated-name'
+ end
+
+ expect(state.name).to eq 'updated-name'
+ end
+
+ it 'returns the state object itself' do
+ state = subject.create_or_find!
+
+ expect(state.name).to eq 'my-state'
+ end
+ end
+
+ describe '#lock!' do
+ it 'raises an error' do
+ expect { subject.lock! }.to raise_error(ArgumentError)
+ end
+ end
+ end
+
+ context 'when using locking' do
+ describe '#handle_with_lock' do
+ it 'handles a locked state using exclusive read lock' do
+ handler = described_class
+ .new(project, user, name: 'new-state', lock_id: 'abc-abc')
+
+ handler.lock!
+
+ state = handler.handle_with_lock do |state|
+ state.name = 'new-name'
+ end
+
+ expect(state.name).to eq 'new-name'
+ end
+ end
+
+ it 'raises exception if lock has not been acquired before' do
+ handler = described_class
+ .new(project, user, name: 'new-state', lock_id: 'abc-abc')
+
+ expect { handler.handle_with_lock }
+ .to raise_error(described_class::StateLockedError)
+ end
+
+ describe '#lock!' do
+ it 'allows to lock state if it does not exist yet' do
+ handler = described_class.new(project, user, name: 'new-state', lock_id: 'abc-abc')
+
+ state = handler.lock!
+
+ expect(state).to be_persisted
+ expect(state.name).to eq 'new-state'
+ end
+
+ it 'allows to lock state if it exists and is not locked' do
+ state = described_class.new(project, user, name: 'new-state').create_or_find!
+ handler = described_class.new(project, user, name: 'new-state', lock_id: 'abc-abc')
+
+ handler.lock!
+
+ expect(state.reload.lock_xid).to eq 'abc-abc'
+ expect(state).to be_locked
+ end
+
+ it 'raises an exception when trying to unlocked state locked by someone else' do
+ described_class.new(project, user, name: 'new-state', lock_id: 'abc-abc').lock!
+
+ handler = described_class.new(project, user, name: 'new-state', lock_id: '12a-23f')
+
+ expect { handler.lock! }.to raise_error(described_class::StateLockedError)
+ end
+ end
+ end
+end
diff --git a/spec/services/users/build_service_spec.rb b/spec/services/users/build_service_spec.rb
index 146819c7f44..7588be833ae 100644
--- a/spec/services/users/build_service_spec.rb
+++ b/spec/services/users/build_service_spec.rb
@@ -157,6 +157,26 @@ describe Users::BuildService do
end
end
+ context 'when user_type is provided' do
+ subject(:user) { service.execute }
+
+ context 'when project_bot' do
+ before do
+ params.merge!({ user_type: :project_bot })
+ end
+
+ it { expect(user.project_bot?).to be true}
+ end
+
+ context 'when not a project_bot' do
+ before do
+ params.merge!({ user_type: :alert_bot })
+ end
+
+ it { expect(user.user_type).to be nil }
+ end
+ end
+
context 'with "user_default_external" application setting' do
using RSpec::Parameterized::TableSyntax
diff --git a/spec/services/x509_certificate_revoke_service_spec.rb b/spec/services/x509_certificate_revoke_service_spec.rb
index ef76f616c93..c2b2576904c 100644
--- a/spec/services/x509_certificate_revoke_service_spec.rb
+++ b/spec/services/x509_certificate_revoke_service_spec.rb
@@ -24,8 +24,6 @@ describe X509CertificateRevokeService do
end
context 'for good certificates' do
- RSpec::Matchers.define_negated_matcher :not_change, :change
-
let(:x509_certificate) { create(:x509_certificate) }
it 'do not update any commit signature' do
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index 19d12a0f5cb..fe03621b9bf 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -136,6 +136,7 @@ RSpec.configure do |config|
config.include ExpectRequestWithStatus, type: :request
config.include IdempotentWorkerHelper, type: :worker
config.include RailsHelpers
+ config.include SidekiqMiddleware
if ENV['CI'] || ENV['RETRIES']
# This includes the first try, i.e. tests will be run 4 times before failing.
@@ -299,6 +300,22 @@ RSpec.configure do |config|
Labkit::Context.with_context { example.run }
end
+ config.around do |example|
+ with_sidekiq_server_middleware do |chain|
+ Gitlab::SidekiqMiddleware.server_configurator(
+ metrics: false, # The metrics don't go anywhere in tests
+ arguments_logger: false, # We're not logging the regular messages for inline jobs
+ memory_killer: false, # This is not a thing we want to do inline in tests
+ # Don't enable this if the request store is active in the spec itself
+ # This needs to run within the `request_store` around block defined above
+ request_store: !RequestStore.active?
+ ).call(chain)
+ chain.add DisableQueryLimit
+
+ example.run
+ end
+ end
+
config.after do
Fog.unmock! if Fog.mock?
Gitlab::CurrentSettings.clear_in_memory_application_settings!
diff --git a/spec/support/helpers/api_helpers.rb b/spec/support/helpers/api_helpers.rb
index b1e6078c4f2..eb9594a4fb6 100644
--- a/spec/support/helpers/api_helpers.rb
+++ b/spec/support/helpers/api_helpers.rb
@@ -40,6 +40,17 @@ module ApiHelpers
end
end
+ def basic_auth_header(user = nil)
+ return { 'HTTP_AUTHORIZATION' => user } unless user.respond_to?(:username)
+
+ {
+ 'HTTP_AUTHORIZATION' => ActionController::HttpAuthentication::Basic.encode_credentials(
+ user.username,
+ create(:personal_access_token, user: user).token
+ )
+ }
+ end
+
def expect_empty_array_response
expect_successful_response_with_paginated_array
expect(json_response.length).to eq(0)
diff --git a/spec/support/helpers/smime_helper.rb b/spec/support/helpers/smime_helper.rb
index 96da3d81708..261aef9518e 100644
--- a/spec/support/helpers/smime_helper.rb
+++ b/spec/support/helpers/smime_helper.rb
@@ -5,20 +5,24 @@ module SmimeHelper
SHORT_EXPIRY = 30.minutes
def generate_root
- issue(signed_by: nil, expires_in: INFINITE_EXPIRY, certificate_authority: true)
+ issue(cn: 'RootCA', signed_by: nil, expires_in: INFINITE_EXPIRY, certificate_authority: true)
end
- def generate_cert(root_ca:, expires_in: SHORT_EXPIRY)
- issue(signed_by: root_ca, expires_in: expires_in, certificate_authority: false)
+ def generate_intermediate(signer_ca:)
+ issue(cn: 'IntermediateCA', signed_by: signer_ca, expires_in: INFINITE_EXPIRY, certificate_authority: true)
+ end
+
+ def generate_cert(signer_ca:, expires_in: SHORT_EXPIRY)
+ issue(signed_by: signer_ca, expires_in: expires_in, certificate_authority: false)
end
# returns a hash { key:, cert: } containing a generated key, cert pair
- def issue(email_address: 'test@example.com', signed_by:, expires_in:, certificate_authority:)
+ def issue(email_address: 'test@example.com', cn: nil, signed_by:, expires_in:, certificate_authority:)
key = OpenSSL::PKey::RSA.new(4096)
public_key = key.public_key
subject = if certificate_authority
- OpenSSL::X509::Name.parse("/CN=EU")
+ OpenSSL::X509::Name.parse("/CN=#{cn}")
else
OpenSSL::X509::Name.parse("/CN=#{email_address}")
end
diff --git a/spec/support/import_export/configuration_helper.rb b/spec/support/import_export/configuration_helper.rb
index 4fe619225bb..4330c4314a8 100644
--- a/spec/support/import_export/configuration_helper.rb
+++ b/spec/support/import_export/configuration_helper.rb
@@ -44,8 +44,8 @@ module ConfigurationHelper
import_export_config = config_hash(config)
excluded_attributes = import_export_config[:excluded_attributes][relation_name.to_sym]
included_attributes = import_export_config[:included_attributes][relation_name.to_sym]
- attributes = attributes - JSON[excluded_attributes.to_json] if excluded_attributes
- attributes = attributes & JSON[included_attributes.to_json] if included_attributes
+ attributes = attributes - JSON.parse(excluded_attributes.to_json) if excluded_attributes
+ attributes = attributes & JSON.parse(included_attributes.to_json) if included_attributes
attributes
end
diff --git a/spec/support/matchers/exclude_matcher.rb b/spec/support/matchers/exclude_matcher.rb
new file mode 100644
index 00000000000..29ee251a466
--- /dev/null
+++ b/spec/support/matchers/exclude_matcher.rb
@@ -0,0 +1,3 @@
+# frozen_string_literal: true
+
+RSpec::Matchers.define_negated_matcher :exclude, :include
diff --git a/spec/support/shared_contexts/navbar_structure_context.rb b/spec/support/shared_contexts/navbar_structure_context.rb
index e4bc44c9d32..e08b8b93522 100644
--- a/spec/support/shared_contexts/navbar_structure_context.rb
+++ b/spec/support/shared_contexts/navbar_structure_context.rb
@@ -62,6 +62,7 @@ RSpec.shared_context 'project navbar structure' do
nav_item: _('Operations'),
nav_sub_items: [
_('Metrics'),
+ _('Alerts'),
_('Environments'),
_('Error Tracking'),
_('Serverless'),
@@ -111,6 +112,7 @@ RSpec.shared_context 'group navbar structure' do
nav_sub_items: [
_('General'),
_('Projects'),
+ _('Repository'),
_('CI / CD'),
_('Integrations'),
_('Webhooks'),
diff --git a/spec/support/shared_contexts/spam_constants.rb b/spec/support/shared_contexts/spam_constants.rb
new file mode 100644
index 00000000000..b6e92ea3050
--- /dev/null
+++ b/spec/support/shared_contexts/spam_constants.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+shared_context 'includes Spam constants' do
+ REQUIRE_RECAPTCHA = Spam::SpamConstants::REQUIRE_RECAPTCHA
+ DISALLOW = Spam::SpamConstants::DISALLOW
+ ALLOW = Spam::SpamConstants::ALLOW
+end
diff --git a/spec/support/shared_examples/controllers/deploy_token_shared_examples.rb b/spec/support/shared_examples/controllers/deploy_token_shared_examples.rb
index 791eb0b68e0..bd4eeff81a0 100644
--- a/spec/support/shared_examples/controllers/deploy_token_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/deploy_token_shared_examples.rb
@@ -1,12 +1,13 @@
# frozen_string_literal: true
RSpec.shared_examples 'a created deploy token' do
+ let(:read_repository) { '1' }
let(:deploy_token_params) do
{
name: 'deployer_token',
expires_at: 1.month.from_now.to_date.to_s,
username: 'deployer',
- read_repository: '1',
+ read_repository: read_repository,
deploy_token_type: deploy_token_type
}
end
@@ -19,4 +20,15 @@ RSpec.shared_examples 'a created deploy token' do
expect(response).to have_gitlab_http_status(:ok)
expect(response).to render_template(:show)
end
+
+ context 'when no scope is selected' do
+ let(:read_repository) { '0' }
+
+ it 'creates a variable with an errored deploy token' do
+ expect { create_deploy_token }.not_to change { DeployToken.active.count }
+
+ expect(assigns(:new_deploy_token)).to be_a(DeployToken)
+ expect(assigns(:new_deploy_token).errors.full_messages.first).to eq('Scopes can\'t be blank')
+ end
+ end
end
diff --git a/spec/support/shared_examples/features/deploy_token_shared_examples.rb b/spec/support/shared_examples/features/deploy_token_shared_examples.rb
index f358615ee9e..fd77297a490 100644
--- a/spec/support/shared_examples/features/deploy_token_shared_examples.rb
+++ b/spec/support/shared_examples/features/deploy_token_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-RSpec.shared_examples 'a deploy token in ci/cd settings' do
+RSpec.shared_examples 'a deploy token in settings' do
it 'view deploy tokens' do
within('.deploy-tokens') do
expect(page).to have_content(deploy_token.name)
diff --git a/spec/support/shared_examples/graphql/jira_import/jira_import_resolver_shared_examples.rb b/spec/support/shared_examples/graphql/jira_import/jira_import_resolver_shared_examples.rb
index 3d97fe10a47..2b96010477c 100644
--- a/spec/support/shared_examples/graphql/jira_import/jira_import_resolver_shared_examples.rb
+++ b/spec/support/shared_examples/graphql/jira_import/jira_import_resolver_shared_examples.rb
@@ -1,12 +1,12 @@
# frozen_string_literal: true
-shared_examples 'no jira import data present' do
+shared_examples 'no Jira import data present' do
it 'returns none' do
expect(resolve_imports).to eq JiraImportState.none
end
end
-shared_examples 'no jira import access' do
+shared_examples 'no Jira import access' do
it 'raises error' do
expect do
resolve_imports
diff --git a/spec/support/shared_examples/quick_actions/issuable/issuable_quick_actions_shared_examples.rb b/spec/support/shared_examples/quick_actions/issuable/issuable_quick_actions_shared_examples.rb
index b03da4471bc..50a8b81b518 100644
--- a/spec/support/shared_examples/quick_actions/issuable/issuable_quick_actions_shared_examples.rb
+++ b/spec/support/shared_examples/quick_actions/issuable/issuable_quick_actions_shared_examples.rb
@@ -18,6 +18,16 @@ RSpec.shared_examples 'issuable quick actions' do
end
end
+ let(:unlabel_expectation) do
+ ->(noteable, can_use_quick_action) {
+ if can_use_quick_action
+ expect(noteable.labels).to be_empty
+ else
+ expect(noteable.labels).not_to be_empty
+ end
+ }
+ end
+
# Quick actions shared by issues and merge requests
let(:issuable_quick_actions) do
[
@@ -136,13 +146,11 @@ RSpec.shared_examples 'issuable quick actions' do
),
QuickAction.new(
action_text: "/unlabel",
- expectation: ->(noteable, can_use_quick_action) {
- if can_use_quick_action
- expect(noteable.labels).to be_empty
- else
- expect(noteable.labels).not_to be_empty
- end
- }
+ expectation: unlabel_expectation
+ ),
+ QuickAction.new(
+ action_text: "/remove_label",
+ expectation: unlabel_expectation
),
QuickAction.new(
action_text: "/award :100:",
diff --git a/spec/support/shared_examples/quick_actions/merge_request/merge_quick_action_shared_examples.rb b/spec/support/shared_examples/quick_actions/merge_request/merge_quick_action_shared_examples.rb
index fa163b54405..e0edbc5637a 100644
--- a/spec/support/shared_examples/quick_actions/merge_request/merge_quick_action_shared_examples.rb
+++ b/spec/support/shared_examples/quick_actions/merge_request/merge_quick_action_shared_examples.rb
@@ -10,10 +10,27 @@ RSpec.shared_examples 'merge quick action' do
it 'merges the MR', :sidekiq_might_not_need_inline do
add_note("/merge")
- expect(page).to have_content 'Scheduled to merge this merge request when the pipeline succeeds.'
+ expect(page).to have_content 'Merged this merge request.'
expect(merge_request.reload).to be_merged
end
+
+ context 'when auto merge is available' do
+ before do
+ create(:ci_pipeline, :detached_merge_request_pipeline,
+ project: project, merge_request: merge_request)
+ merge_request.update_head_pipeline
+ end
+
+ it 'schedules to merge the MR' do
+ add_note("/merge")
+
+ expect(page).to have_content "Scheduled to merge this merge request (Merge when pipeline succeeds)."
+
+ expect(merge_request.reload).to be_auto_merge_enabled
+ expect(merge_request.reload).not_to be_merged
+ end
+ end
end
context 'when the head diff changes in the meanwhile' do
diff --git a/spec/support/shared_examples/requests/snippet_shared_examples.rb b/spec/support/shared_examples/requests/snippet_shared_examples.rb
index aa7f57ae903..e963ba140c6 100644
--- a/spec/support/shared_examples/requests/snippet_shared_examples.rb
+++ b/spec/support/shared_examples/requests/snippet_shared_examples.rb
@@ -23,21 +23,31 @@ RSpec.shared_examples 'update with repository actions' do
context 'when the repository does not exist' do
let(:snippet) { snippet_without_repo }
- it 'creates the repository' do
- update_snippet(snippet_id: snippet.id, params: { title: 'foo' })
+ context 'when update attributes do not include file_name or content' do
+ it 'does not create the repository' do
+ update_snippet(snippet_id: snippet.id, params: { title: 'foo' })
- expect(snippet.repository).to exist
+ expect(snippet.repository).not_to exist
+ end
end
- it 'commits the file to the repository' do
- content = 'New Content'
- file_name = 'file_name.rb'
+ context 'when update attributes include file_name or content' do
+ it 'creates the repository' do
+ update_snippet(snippet_id: snippet.id, params: { title: 'foo', file_name: 'foo' })
+
+ expect(snippet.repository).to exist
+ end
- update_snippet(snippet_id: snippet.id, params: { content: content, file_name: file_name })
+ it 'commits the file to the repository' do
+ content = 'New Content'
+ file_name = 'file_name.rb'
- blob = snippet.repository.blob_at('master', file_name)
- expect(blob).not_to be_nil
- expect(blob.data).to eq content
+ update_snippet(snippet_id: snippet.id, params: { content: content, file_name: file_name })
+
+ blob = snippet.repository.blob_at('master', file_name)
+ expect(blob).not_to be_nil
+ expect(blob.data).to eq content
+ end
end
end
end
@@ -48,3 +58,31 @@ RSpec.shared_examples 'snippet response without repository URLs' do
expect(json_response).not_to have_key('http_url_to_repo')
end
end
+
+RSpec.shared_examples 'snippet blob content' do
+ it 'returns content from repository' do
+ subject
+
+ expect(response.body).to eq(snippet.blobs.first.data)
+ end
+
+ context 'when feature flag :version_snippets is disabled' do
+ it 'returns content from database' do
+ stub_feature_flags(version_snippets: false)
+
+ subject
+
+ expect(response.body).to eq(snippet.content)
+ end
+ end
+
+ context 'when snippet repository is empty' do
+ let(:snippet) { snippet_with_empty_repo }
+
+ it 'returns content from database' do
+ subject
+
+ expect(response.body).to eq(snippet.content)
+ end
+ end
+end
diff --git a/spec/support/shared_examples/services/boards/lists_list_service_shared_examples.rb b/spec/support/shared_examples/services/boards/lists_list_service_shared_examples.rb
index 1b7fe626aea..07a6353296d 100644
--- a/spec/support/shared_examples/services/boards/lists_list_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/boards/lists_list_service_shared_examples.rb
@@ -18,6 +18,10 @@ RSpec.shared_examples 'lists list service' do
expect { service.execute(board) }.to change(board.lists, :count).by(1)
end
+ it 'does not create a backlog list when create_default_lists is false' do
+ expect { service.execute(board, create_default_lists: false) }.not_to change(board.lists, :count)
+ end
+
it "returns board's lists" do
expect(service.execute(board)).to eq [board.backlog_list, list, board.closed_list]
end
diff --git a/spec/support/shared_examples/workers/gitlab/jira_import/jira_import_workers_shared_examples.rb b/spec/support/shared_examples/workers/gitlab/jira_import/jira_import_workers_shared_examples.rb
index c0d17d6853d..ae8c82cb67c 100644
--- a/spec/support/shared_examples/workers/gitlab/jira_import/jira_import_workers_shared_examples.rb
+++ b/spec/support/shared_examples/workers/gitlab/jira_import/jira_import_workers_shared_examples.rb
@@ -20,7 +20,7 @@ shared_examples 'does not advance to next stage' do
end
end
-shared_examples 'cannot do jira import' do
+shared_examples 'cannot do Jira import' do
it 'does not advance to next stage' do
worker = described_class.new
expect(worker).not_to receive(:import)
diff --git a/spec/support/sidekiq_middleware.rb b/spec/support/sidekiq_middleware.rb
index f6694713101..1380f4394d8 100644
--- a/spec/support/sidekiq_middleware.rb
+++ b/spec/support/sidekiq_middleware.rb
@@ -2,6 +2,17 @@
require 'sidekiq/testing'
+# rubocop:disable RSpec/ModifySidekiqMiddleware
+module SidekiqMiddleware
+ def with_sidekiq_server_middleware(&block)
+ Sidekiq::Testing.server_middleware.clear
+ Sidekiq::Testing.server_middleware(&block)
+ ensure
+ Sidekiq::Testing.server_middleware.clear
+ end
+end
+# rubocop:enable RSpec/ModifySidekiqMiddleware
+
# If Sidekiq::Testing.inline! is used, SQL transactions done inside
# Sidekiq worker are included in the SQL query limit (in a real
# deployment sidekiq worker is executed separately). To avoid
@@ -20,8 +31,3 @@ class DisableQueryLimit
end
end
end
-
-Sidekiq::Testing.server_middleware do |chain|
- chain.add Gitlab::SidekiqStatus::ServerMiddleware
- chain.add DisableQueryLimit
-end
diff --git a/spec/uploaders/terraform/state_uploader_spec.rb b/spec/uploaders/terraform/state_uploader_spec.rb
index 4577a2c4738..cbcb6298eca 100644
--- a/spec/uploaders/terraform/state_uploader_spec.rb
+++ b/spec/uploaders/terraform/state_uploader_spec.rb
@@ -5,15 +5,15 @@ require 'spec_helper'
describe Terraform::StateUploader do
subject { terraform_state.file }
- let(:terraform_state) { create(:terraform_state, file: fixture_file_upload('spec/fixtures/terraform/terraform.tfstate')) }
+ let(:terraform_state) { create(:terraform_state, :with_file) }
before do
stub_terraform_state_object_storage
end
describe '#filename' do
- it 'contains the ID of the terraform state record' do
- expect(subject.filename).to include(terraform_state.id.to_s)
+ it 'contains the UUID of the terraform state record' do
+ expect(subject.filename).to include(terraform_state.uuid)
end
end
diff --git a/spec/views/shared/projects/_project.html.haml_spec.rb b/spec/views/shared/projects/_project.html.haml_spec.rb
index b123be42074..8c3b8768469 100644
--- a/spec/views/shared/projects/_project.html.haml_spec.rb
+++ b/spec/views/shared/projects/_project.html.haml_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe 'shared/projects/_project.html.haml' do
- let(:project) { create(:project) }
+ let_it_be(:project) { create(:project) }
before do
allow(view).to receive(:current_application_settings).and_return(Gitlab::CurrentSettings.current_application_settings)
diff --git a/spec/workers/create_commit_signature_worker_spec.rb b/spec/workers/create_commit_signature_worker_spec.rb
index d7235fcd907..f40482f2361 100644
--- a/spec/workers/create_commit_signature_worker_spec.rb
+++ b/spec/workers/create_commit_signature_worker_spec.rb
@@ -9,14 +9,14 @@ describe CreateCommitSignatureWorker do
let(:gpg_commit) { instance_double(Gitlab::Gpg::Commit) }
let(:x509_commit) { instance_double(Gitlab::X509::Commit) }
- context 'when a signature is found' do
- before do
- allow(Project).to receive(:find_by).with(id: project.id).and_return(project)
- allow(project).to receive(:commits_by).with(oids: commit_shas).and_return(commits)
- end
+ before do
+ allow(Project).to receive(:find_by).with(id: project.id).and_return(project)
+ allow(project).to receive(:commits_by).with(oids: commit_shas).and_return(commits)
+ end
- subject { described_class.new.perform(commit_shas, project.id) }
+ subject { described_class.new.perform(commit_shas, project.id) }
+ context 'when a signature is found' do
it 'calls Gitlab::Gpg::Commit#signature' do
commits.each do |commit|
allow(commit).to receive(:signature_type).and_return(:PGP)
@@ -67,9 +67,10 @@ describe CreateCommitSignatureWorker do
end
context 'handles when a string is passed in for the commit SHA' do
+ let(:commit_shas) { super().first }
+
before do
- allow(Project).to receive(:find_by).with(id: project.id).and_return(project)
- allow(project).to receive(:commits_by).with(oids: Array(commit_shas.first)).and_return(commits)
+ allow(project).to receive(:commits_by).with(oids: [commit_shas]).and_return(commits)
allow(commits.first).to receive(:signature_type).and_return(:PGP)
end
@@ -78,35 +79,65 @@ describe CreateCommitSignatureWorker do
expect(gpg_commit).to receive(:signature).once
- described_class.new.perform(commit_shas.first, project.id)
+ subject
end
end
context 'when Commit is not found' do
let(:nonexisting_commit_sha) { '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a34' }
+ let(:commit_shas) { [nonexisting_commit_sha] }
it 'does not raise errors' do
- expect { described_class.new.perform([nonexisting_commit_sha], project.id) }.not_to raise_error
+ expect { described_class.new.perform(commit_shas, project.id) }.not_to raise_error
end
end
context 'when Project is not found' do
- let(:nonexisting_project_id) { -1 }
+ let(:commits) { [] }
+ let(:project) { double(id: non_existing_record_id) }
it 'does not raise errors' do
- expect { described_class.new.perform(commit_shas, nonexisting_project_id) }.not_to raise_error
+ expect { subject }.not_to raise_error
end
it 'does not call Gitlab::Gpg::Commit#signature' do
expect_any_instance_of(Gitlab::Gpg::Commit).not_to receive(:signature)
- described_class.new.perform(commit_shas, nonexisting_project_id)
+ subject
end
it 'does not call Gitlab::X509::Commit#signature' do
expect_any_instance_of(Gitlab::X509::Commit).not_to receive(:signature)
- described_class.new.perform(commit_shas, nonexisting_project_id)
+ subject
+ end
+ end
+
+ context 'fetching signatures' do
+ before do
+ commits.each do |commit|
+ allow(commit).to receive(:signature_type).and_return(type)
+ end
+ end
+
+ context 'X509' do
+ let(:type) { :X509 }
+
+ it 'performs a single query for commit signatures' do
+ expect(X509CommitSignature).to receive(:by_commit_sha).with(commit_shas).once.and_return([])
+
+ subject
+ end
+ end
+
+ context 'PGP' do
+ let(:type) { :PGP }
+
+ it 'performs a single query for commit signatures' do
+ expect(GpgSignature).to receive(:by_commit_sha).with(commit_shas).once.and_return([])
+
+ subject
+ end
end
end
end
diff --git a/spec/workers/expire_pipeline_cache_worker_spec.rb b/spec/workers/expire_pipeline_cache_worker_spec.rb
index 8d898ffc13e..61ea22fbd32 100644
--- a/spec/workers/expire_pipeline_cache_worker_spec.rb
+++ b/spec/workers/expire_pipeline_cache_worker_spec.rb
@@ -11,7 +11,9 @@ describe ExpirePipelineCacheWorker do
describe '#perform' do
it 'executes the service' do
- expect_any_instance_of(Ci::ExpirePipelineCacheService).to receive(:execute).with(pipeline).and_call_original
+ expect_next_instance_of(Ci::ExpirePipelineCacheService) do |instance|
+ expect(instance).to receive(:execute).with(pipeline).and_call_original
+ end
subject.perform(pipeline.id)
end
@@ -31,5 +33,9 @@ describe ExpirePipelineCacheWorker do
subject.perform(pipeline.id)
end
+
+ it_behaves_like 'an idempotent worker' do
+ let(:job_args) { [pipeline.id] }
+ end
end
end
diff --git a/spec/workers/export_csv_worker_spec.rb b/spec/workers/export_csv_worker_spec.rb
new file mode 100644
index 00000000000..87285b6264a
--- /dev/null
+++ b/spec/workers/export_csv_worker_spec.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ExportCsvWorker do
+ let(:user) { create(:user) }
+ let(:project) { create(:project, creator: user) }
+
+ def perform(params = {})
+ described_class.new.perform(user.id, project.id, params)
+ end
+
+ it 'emails a CSV' do
+ expect {perform}.to change(ActionMailer::Base.deliveries, :size).by(1)
+ end
+
+ it 'ensures that project_id is passed to issues_finder' do
+ expect(IssuesFinder).to receive(:new).with(anything, hash_including(project_id: project.id)).and_call_original
+
+ perform
+ end
+
+ it 'removes sort parameter' do
+ expect(IssuesFinder).to receive(:new).with(anything, hash_not_including(:sort)).and_call_original
+
+ perform
+ end
+
+ it 'converts controller string keys to symbol keys for IssuesFinder' do
+ expect(IssuesFinder).to receive(:new).with(anything, hash_including(test_key: true)).and_call_original
+
+ perform('test_key' => true)
+ end
+end
diff --git a/spec/workers/gitlab/jira_import/stage/finish_import_worker_spec.rb b/spec/workers/gitlab/jira_import/stage/finish_import_worker_spec.rb
index 93e2a44223b..4cb6f5e28b8 100644
--- a/spec/workers/gitlab/jira_import/stage/finish_import_worker_spec.rb
+++ b/spec/workers/gitlab/jira_import/stage/finish_import_worker_spec.rb
@@ -16,30 +16,42 @@ describe Gitlab::JiraImport::Stage::FinishImportWorker do
stub_feature_flags(jira_issue_import: false)
end
- it_behaves_like 'cannot do jira import'
+ it_behaves_like 'cannot do Jira import'
end
context 'when feature flag enabled' do
- let_it_be(:jira_import) { create(:jira_import_state, :scheduled, project: project) }
+ let_it_be(:jira_import, reload: true) { create(:jira_import_state, :scheduled, project: project) }
before do
stub_feature_flags(jira_issue_import: true)
end
context 'when import did not start' do
- it_behaves_like 'cannot do jira import'
+ it_behaves_like 'cannot do Jira import'
end
context 'when import started' do
+ let_it_be(:import_label) { create(:label, project: project, title: 'jira-import') }
+ let_it_be(:imported_issues) { create_list(:labeled_issue, 3, project: project, labels: [import_label]) }
+
before do
+ expect(Gitlab::JiraImport).to receive(:get_import_label_id).and_return(import_label.id)
+ expect(Gitlab::JiraImport).to receive(:issue_failures).and_return(2)
+
jira_import.start!
+ worker.perform(project.id)
end
it 'changes import state to finished' do
- worker.perform(project.id)
-
expect(project.jira_import_status).to eq('finished')
end
+
+ it 'saves imported issues counts' do
+ latest_jira_import = project.latest_jira_import
+ expect(latest_jira_import.total_issue_count).to eq(5)
+ expect(latest_jira_import.failed_to_import_count).to eq(2)
+ expect(latest_jira_import.imported_issues_count).to eq(3)
+ end
end
end
end
diff --git a/spec/workers/gitlab/jira_import/stage/import_attachments_worker_spec.rb b/spec/workers/gitlab/jira_import/stage/import_attachments_worker_spec.rb
index 478cb447dc5..e6d41ae8bb4 100644
--- a/spec/workers/gitlab/jira_import/stage/import_attachments_worker_spec.rb
+++ b/spec/workers/gitlab/jira_import/stage/import_attachments_worker_spec.rb
@@ -15,7 +15,7 @@ describe Gitlab::JiraImport::Stage::ImportAttachmentsWorker do
stub_feature_flags(jira_issue_import: false)
end
- it_behaves_like 'cannot do jira import'
+ it_behaves_like 'cannot do Jira import'
it_behaves_like 'does not advance to next stage'
end
@@ -27,7 +27,7 @@ describe Gitlab::JiraImport::Stage::ImportAttachmentsWorker do
end
context 'when import did not start' do
- it_behaves_like 'cannot do jira import'
+ it_behaves_like 'cannot do Jira import'
it_behaves_like 'does not advance to next stage'
end
diff --git a/spec/workers/gitlab/jira_import/stage/import_issues_worker_spec.rb b/spec/workers/gitlab/jira_import/stage/import_issues_worker_spec.rb
index 6470a293461..e19acbebd66 100644
--- a/spec/workers/gitlab/jira_import/stage/import_issues_worker_spec.rb
+++ b/spec/workers/gitlab/jira_import/stage/import_issues_worker_spec.rb
@@ -16,7 +16,7 @@ describe Gitlab::JiraImport::Stage::ImportIssuesWorker do
stub_feature_flags(jira_issue_import: false)
end
- it_behaves_like 'cannot do jira import'
+ it_behaves_like 'cannot do Jira import'
it_behaves_like 'does not advance to next stage'
end
@@ -28,7 +28,7 @@ describe Gitlab::JiraImport::Stage::ImportIssuesWorker do
end
context 'when import did not start' do
- it_behaves_like 'cannot do jira import'
+ it_behaves_like 'cannot do Jira import'
it_behaves_like 'does not advance to next stage'
end
diff --git a/spec/workers/gitlab/jira_import/stage/import_labels_worker_spec.rb b/spec/workers/gitlab/jira_import/stage/import_labels_worker_spec.rb
index f1562395546..52df5b1215c 100644
--- a/spec/workers/gitlab/jira_import/stage/import_labels_worker_spec.rb
+++ b/spec/workers/gitlab/jira_import/stage/import_labels_worker_spec.rb
@@ -16,7 +16,7 @@ describe Gitlab::JiraImport::Stage::ImportLabelsWorker do
stub_feature_flags(jira_issue_import: false)
end
- it_behaves_like 'cannot do jira import'
+ it_behaves_like 'cannot do Jira import'
it_behaves_like 'does not advance to next stage'
end
@@ -28,7 +28,7 @@ describe Gitlab::JiraImport::Stage::ImportLabelsWorker do
end
context 'when import did not start' do
- it_behaves_like 'cannot do jira import'
+ it_behaves_like 'cannot do Jira import'
it_behaves_like 'does not advance to next stage'
end
diff --git a/spec/workers/gitlab/jira_import/stage/import_notes_worker_spec.rb b/spec/workers/gitlab/jira_import/stage/import_notes_worker_spec.rb
index 956898c1abc..f9bdbd669d8 100644
--- a/spec/workers/gitlab/jira_import/stage/import_notes_worker_spec.rb
+++ b/spec/workers/gitlab/jira_import/stage/import_notes_worker_spec.rb
@@ -15,7 +15,7 @@ describe Gitlab::JiraImport::Stage::ImportNotesWorker do
stub_feature_flags(jira_issue_import: false)
end
- it_behaves_like 'cannot do jira import'
+ it_behaves_like 'cannot do Jira import'
it_behaves_like 'does not advance to next stage'
end
@@ -27,7 +27,7 @@ describe Gitlab::JiraImport::Stage::ImportNotesWorker do
end
context 'when import did not start' do
- it_behaves_like 'cannot do jira import'
+ it_behaves_like 'cannot do Jira import'
it_behaves_like 'does not advance to next stage'
end
diff --git a/spec/workers/post_receive_spec.rb b/spec/workers/post_receive_spec.rb
index a51e0b79075..3d24b5f753a 100644
--- a/spec/workers/post_receive_spec.rb
+++ b/spec/workers/post_receive_spec.rb
@@ -352,6 +352,9 @@ describe PostReceive do
it "enqueues a UpdateMergeRequestsWorker job" do
allow(Project).to receive(:find_by).and_return(project)
+ expect_next_instance_of(MergeRequests::PushedBranchesService) do |service|
+ expect(service).to receive(:execute).and_return(%w(tést))
+ end
expect(UpdateMergeRequestsWorker).to receive(:perform_async).with(project.id, project.owner.id, any_args)
diff --git a/spec/workers/stage_update_worker_spec.rb b/spec/workers/stage_update_worker_spec.rb
index 8a57cc6bbff..dc7158cfd2f 100644
--- a/spec/workers/stage_update_worker_spec.rb
+++ b/spec/workers/stage_update_worker_spec.rb
@@ -12,6 +12,15 @@ describe StageUpdateWorker do
described_class.new.perform(stage.id)
end
+
+ it_behaves_like 'an idempotent worker' do
+ let(:job_args) { [stage.id] }
+
+ it 'results in the stage getting the skipped status' do
+ expect { subject }.to change { stage.reload.status }.from('pending').to('skipped')
+ expect { subject }.not_to change { stage.reload.status }
+ end
+ end
end
context 'when stage does not exist' do
diff --git a/spec/workers/x509_issuer_crl_check_worker_spec.rb b/spec/workers/x509_issuer_crl_check_worker_spec.rb
new file mode 100644
index 00000000000..f052812b86b
--- /dev/null
+++ b/spec/workers/x509_issuer_crl_check_worker_spec.rb
@@ -0,0 +1,90 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe X509IssuerCrlCheckWorker do
+ subject(:worker) { described_class.new }
+
+ let(:project) { create(:project, :public, :repository) }
+ let(:x509_signed_commit) { project.commit_by(oid: '189a6c924013fc3fe40d6f1ec1dc20214183bc97') }
+ let(:revoked_x509_signed_commit) { project.commit_by(oid: 'ed775cc81e5477df30c2abba7b6fdbb5d0baadae') }
+
+ describe '#perform' do
+ context 'valid crl' do
+ before do
+ stub_request(:get, "http://ch.siemens.com/pki?ZZZZZZA6.crl")
+ .to_return(status: 200, body: File.read('spec/fixtures/x509/ZZZZZZA6.crl'), headers: {})
+ end
+
+ it 'changes certificate status for revoked certificates' do
+ revoked_x509_commit = Gitlab::X509::Commit.new(revoked_x509_signed_commit)
+ x509_commit = Gitlab::X509::Commit.new(x509_signed_commit)
+ issuer = revoked_x509_commit.signature.x509_certificate.x509_issuer
+
+ expect(issuer).to eq(x509_commit.signature.x509_certificate.x509_issuer)
+ expect(revoked_x509_commit.signature.x509_certificate.good?).to be_truthy
+ expect(x509_commit.signature.x509_certificate.good?).to be_truthy
+
+ worker.perform
+ revoked_x509_commit.signature.reload
+
+ expect(revoked_x509_commit.signature.x509_certificate.revoked?).to be_truthy
+ expect(x509_commit.signature.x509_certificate.revoked?).to be_falsey
+ end
+ end
+
+ context 'invalid crl' do
+ before do
+ stub_request(:get, "http://ch.siemens.com/pki?ZZZZZZA6.crl")
+ .to_return(status: 200, body: "trash", headers: {})
+ end
+
+ it 'does not change certificate status' do
+ revoked_x509_commit = Gitlab::X509::Commit.new(revoked_x509_signed_commit)
+
+ expect(revoked_x509_commit.signature.x509_certificate.good?).to be_truthy
+
+ worker.perform
+ revoked_x509_commit.signature.reload
+
+ expect(revoked_x509_commit.signature.x509_certificate.revoked?).to be_falsey
+ end
+ end
+
+ context 'not found crl' do
+ before do
+ stub_request(:get, "http://ch.siemens.com/pki?ZZZZZZA6.crl")
+ .to_return(status: 404, body: "not found", headers: {})
+ end
+
+ it 'does not change certificate status' do
+ revoked_x509_commit = Gitlab::X509::Commit.new(revoked_x509_signed_commit)
+
+ expect(revoked_x509_commit.signature.x509_certificate.good?).to be_truthy
+
+ worker.perform
+ revoked_x509_commit.signature.reload
+
+ expect(revoked_x509_commit.signature.x509_certificate.revoked?).to be_falsey
+ end
+ end
+
+ context 'unreachable crl' do
+ before do
+ stub_request(:get, "http://ch.siemens.com/pki?ZZZZZZA6.crl")
+ .to_raise(SocketError.new('Some HTTP error'))
+ end
+
+ it 'does not change certificate status' do
+ revoked_x509_commit = Gitlab::X509::Commit.new(revoked_x509_signed_commit)
+
+ expect(revoked_x509_commit.signature.x509_certificate.good?).to be_truthy
+
+ worker.perform
+ revoked_x509_commit.signature.reload
+
+ expect(revoked_x509_commit.signature.x509_certificate.revoked?).to be_falsey
+ end
+ end
+ end
+end