diff options
25 files changed, 351 insertions, 308 deletions
@@ -148,7 +148,7 @@ gem 'creole', '~> 0.5.0' gem 'wikicloth', '0.8.1' gem 'asciidoctor', '~> 2.0.10' gem 'asciidoctor-include-ext', '~> 0.3.1', require: false -gem 'asciidoctor-plantuml', '0.0.10' +gem 'asciidoctor-plantuml', '~> 0.0.12' gem 'rouge', '~> 3.19.0' gem 'truncato', '~> 0.7.11' gem 'bootstrap_form', '~> 4.2.0' diff --git a/Gemfile.lock b/Gemfile.lock index e5631d860ee..ffff576e8b0 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -84,7 +84,7 @@ GEM asciidoctor (2.0.10) asciidoctor-include-ext (0.3.1) asciidoctor (>= 1.5.6, < 3.0.0) - asciidoctor-plantuml (0.0.10) + asciidoctor-plantuml (0.0.12) asciidoctor (>= 1.5.6, < 3.0.0) ast (2.4.0) atlassian-jwt (0.2.0) @@ -1162,7 +1162,7 @@ DEPENDENCIES asana (~> 0.9) asciidoctor (~> 2.0.10) asciidoctor-include-ext (~> 0.3.1) - asciidoctor-plantuml (= 0.0.10) + asciidoctor-plantuml (~> 0.0.12) atlassian-jwt (~> 0.2.0) attr_encrypted (~> 3.1.0) awesome_print diff --git a/app/assets/javascripts/monitoring/components/dashboard.vue b/app/assets/javascripts/monitoring/components/dashboard.vue index de73da67fcc..2018c706b11 100644 --- a/app/assets/javascripts/monitoring/components/dashboard.vue +++ b/app/assets/javascripts/monitoring/components/dashboard.vue @@ -39,6 +39,7 @@ import { timeRangeFromUrl, panelToUrl, expandedPanelPayloadFromUrl, + convertVariablesForURL, } from '../utils'; import { metricStates } from '../constants'; import { defaultTimeRange, timeRanges } from '~/vue_shared/constants'; @@ -272,7 +273,7 @@ export default { handler({ group, panel }) { const dashboardPath = this.currentDashboard || this.selectedDashboard?.path; updateHistory({ - url: panelToUrl(dashboardPath, group, panel), + url: panelToUrl(dashboardPath, convertVariablesForURL(this.promVariables), group, panel), title: document.title, }); }, @@ -343,7 +344,7 @@ export default { }, generatePanelUrl(groupKey, panel) { const dashboardPath = this.currentDashboard || this.selectedDashboard?.path; - return panelToUrl(dashboardPath, groupKey, 
panel); + return panelToUrl(dashboardPath, convertVariablesForURL(this.promVariables), groupKey, panel); }, hideAddMetricModal() { this.$refs.addMetricModal.hide(); diff --git a/app/assets/javascripts/monitoring/utils.js b/app/assets/javascripts/monitoring/utils.js index 2e4012855d5..1f028ffbcad 100644 --- a/app/assets/javascripts/monitoring/utils.js +++ b/app/assets/javascripts/monitoring/utils.js @@ -17,7 +17,7 @@ import { VARIABLE_PREFIX } from './constants'; * This will be removed once we add support for free text variables * via the dashboard yaml files in https://gitlab.com/gitlab-org/gitlab/-/issues/215689 */ -export const dashboardParams = ['dashboard', 'group', 'title', 'y_label']; +export const dashboardParams = ['dashboard', 'group', 'title', 'y_label', 'embedded']; /** * This method is used to validate if the graph data format for a chart component @@ -262,14 +262,22 @@ export const expandedPanelPayloadFromUrl = (dashboard, search = window.location. * If no group/panel is set, the dashboard URL is returned. 
* * @param {?String} dashboard - Dashboard path, used as identifier for a dashboard + * @param {?Object} promVariables - Custom variables that came from the URL * @param {?String} group - Group Identifier * @param {?Object} panel - Panel object from the dashboard * @param {?String} url - Base URL including current search params * @returns Dashboard URL which expands a panel (chart) */ -export const panelToUrl = (dashboard = null, group, panel, url = window.location.href) => { +export const panelToUrl = ( + dashboard = null, + promVariables, + group, + panel, + url = window.location.href, +) => { const params = { dashboard, + ...promVariables, }; if (group && panel) { diff --git a/app/controllers/snippets_controller.rb b/app/controllers/snippets_controller.rb index e877f3c7a54..425e0458b41 100644 --- a/app/controllers/snippets_controller.rb +++ b/app/controllers/snippets_controller.rb @@ -49,23 +49,19 @@ class SnippetsController < ApplicationController end def create - create_params = snippet_params.merge(spammable_params) + create_params = snippet_params.merge(files: params.delete(:files)) service_response = Snippets::CreateService.new(nil, current_user, create_params).execute @snippet = service_response.payload[:snippet] if service_response.error? && @snippet.errors[:repository].present? handle_repository_error(:new) else - move_temporary_files if @snippet.valid? 
&& params[:files] - recaptcha_check_with_fallback { render :new } end end def update - update_params = snippet_params.merge(spammable_params) - - service_response = Snippets::UpdateService.new(nil, current_user, update_params).execute(@snippet) + service_response = Snippets::UpdateService.new(nil, current_user, snippet_params).execute(@snippet) @snippet = service_response.payload[:snippet] handle_repository_error(:edit) @@ -150,12 +146,6 @@ class SnippetsController < ApplicationController end def snippet_params - params.require(:personal_snippet).permit(:title, :content, :file_name, :private, :visibility_level, :description) - end - - def move_temporary_files - params[:files].each do |file| - FileMover.new(file, from_model: current_user, to_model: @snippet).execute - end + params.require(:personal_snippet).permit(:title, :content, :file_name, :private, :visibility_level, :description).merge(spammable_params) end end diff --git a/app/services/snippets/create_service.rb b/app/services/snippets/create_service.rb index 2e2fdd5993f..bb58d1bc2bc 100644 --- a/app/services/snippets/create_service.rb +++ b/app/services/snippets/create_service.rb @@ -9,6 +9,8 @@ module Snippets def execute filter_spam_check_params + @files = Array(params.delete(:files).presence) + @snippet = if project project.snippets.build(params) else @@ -29,6 +31,8 @@ module Snippets UserAgentDetailService.new(@snippet, @request).create Gitlab::UsageDataCounters::SnippetCounter.count(:create) + move_temporary_files + ServiceResponse.success(payload: { snippet: @snippet } ) else snippet_error_response(@snippet, 400) @@ -83,5 +87,13 @@ module Snippets def snippet_files [{ file_path: params[:file_name], content: params[:content] }] end + + def move_temporary_files + return unless @snippet.is_a?(PersonalSnippet) + + @files.each do |file| + FileMover.new(file, from_model: current_user, to_model: @snippet).execute + end + end end end diff --git a/app/views/shared/_ref_dropdown.html.haml 
b/app/views/shared/_ref_dropdown.html.haml index 8b2a3bee407..ee2b2a17e21 100644 --- a/app/views/shared/_ref_dropdown.html.haml +++ b/app/views/shared/_ref_dropdown.html.haml @@ -1,7 +1,7 @@ - dropdown_class = local_assigns.fetch(:dropdown_class, '') .dropdown-menu.dropdown-menu-selectable.git-revision-dropdown{ class: dropdown_class } - = dropdown_title "Select Git revision" - = dropdown_filter "Filter by Git revision" + = dropdown_title _('Select Git revision') + = dropdown_filter _('Filter by Git revision') = dropdown_content = dropdown_loading diff --git a/changelogs/unreleased/22691-externelize-i18n-strings-from---app-views-shared-_ref_dropdown-html.yml b/changelogs/unreleased/22691-externelize-i18n-strings-from---app-views-shared-_ref_dropdown-html.yml new file mode 100644 index 00000000000..6c5da93f1e9 --- /dev/null +++ b/changelogs/unreleased/22691-externelize-i18n-strings-from---app-views-shared-_ref_dropdown-html.yml @@ -0,0 +1,5 @@ +--- +title: Externalize i18n strings from ./app/views/shared/_ref_dropdown.html.haml +merge_request: 32102 +author: Gilang Gumilar +type: changed diff --git a/changelogs/unreleased/jivanvl-ensure-links-generated-manage-variables.yml b/changelogs/unreleased/jivanvl-ensure-links-generated-manage-variables.yml new file mode 100644 index 00000000000..217b7ada151 --- /dev/null +++ b/changelogs/unreleased/jivanvl-ensure-links-generated-manage-variables.yml @@ -0,0 +1,5 @@ +--- +title: Ensure links generated by the copy link feature contain variables +merge_request: 31636 +author: +type: changed diff --git a/changelogs/unreleased/sh-update-plantutml-gem.yml b/changelogs/unreleased/sh-update-plantutml-gem.yml new file mode 100644 index 00000000000..ee138a974f2 --- /dev/null +++ b/changelogs/unreleased/sh-update-plantutml-gem.yml @@ -0,0 +1,5 @@ +--- +title: Update asciidoctor-plantuml gem to v0.0.12 +merge_request: 32376 +author: +type: other diff --git a/doc/administration/integration/plantuml.md 
b/doc/administration/integration/plantuml.md index 76655a945a3..682e47adc21 100644 --- a/doc/administration/integration/plantuml.md +++ b/doc/administration/integration/plantuml.md @@ -115,6 +115,19 @@ that, login with an Admin account and do following: - Check **Enable PlantUML** checkbox. - Set the PlantUML instance as `https://gitlab.example.com/-/plantuml/`. +NOTE: **Note:** If you are using a PlantUML server running v1.2020.9 and +above (for example, [plantuml.com](https://plantuml.com)), set the `PLANTUML_ENCODING` +environment variable to enable the `deflate` compression. On Omnibus, +this can be done by setting it in `/etc/gitlab.rb`: + +```ruby +gitlab_rails['env'] = { 'PLANTUML_ENCODING' => 'deflate' } +``` + +From GitLab 13.1 and later, PlantUML integration now +[requires a header prefix in the URL](https://github.com/plantuml/plantuml/issues/117#issuecomment-6235450160) +to distinguish different encoding types. + ## Creating Diagrams With PlantUML integration enabled and configured, we can start adding diagrams to diff --git a/doc/administration/troubleshooting/gitlab_rails_cheat_sheet.md b/doc/administration/troubleshooting/gitlab_rails_cheat_sheet.md index 33a64176357..2cbc994fb4c 100644 --- a/doc/administration/troubleshooting/gitlab_rails_cheat_sheet.md +++ b/doc/administration/troubleshooting/gitlab_rails_cheat_sheet.md @@ -572,29 +572,7 @@ Ci::Pipeline.where(project_id: p.id).where(status: 'pending').count ### Remove artifacts more than a week old -The Latest version of these steps can be found in the [job artifacts documentation](../job_artifacts.md) - -```ruby -### SELECTING THE BUILDS TO CLEAR -# For a single project: -project = Project.find_by_full_path('') -builds_with_artifacts = project.builds.with_downloadable_artifacts - -# Instance-wide: -builds_with_artifacts = Ci::Build.with_downloadable_artifacts - -# Prior to 10.6 the above lines would be: -# builds_with_artifacts = project.builds.with_artifacts -# builds_with_artifacts = 
Ci::Build.with_artifacts - -### CLEAR THEM OUT -# Note that this will also erase artifacts that developers marked to "Keep" -builds_to_clear = builds_with_artifacts.where("finished_at < ?", 1.week.ago) -builds_to_clear.each do |build| - build.artifacts_expire_at = Time.now - build.erase_erasable_artifacts! -end -``` +This section has been moved to the [job artifacts troubleshooting documentation](../job_artifacts.md#delete-job-artifacts-from-jobs-completed-before-a-specific-date). ### Find reason failure (for when build trace is empty) (Introduced in 10.3.0) diff --git a/doc/ci/jenkins/index.md b/doc/ci/jenkins/index.md index c9235c62a84..c4346005138 100644 --- a/doc/ci/jenkins/index.md +++ b/doc/ci/jenkins/index.md @@ -13,6 +13,10 @@ First of all, our [Quick Start Guide](../quick_start/README.md) contains a good You may also be interested in [Auto DevOps](../../topics/autodevops/index.md) which can potentially be used to build, test, and deploy your applications with little to no configuration needed at all. +For an example of how to convert a Jenkins pipeline into a GitLab CI/CD pipeline, +or how to use Auto DevOps to test your code automatically, watch the +[Migrating from Jenkins to GitLab](https://www.youtube.com/watch?v=RlEVGOpYF5Y) video. + For advanced CI/CD teams, [templates](#templates) can enable the reuse of pipeline configurations. Otherwise, read on for important information that will help you get the ball rolling. Welcome diff --git a/doc/ci/merge_request_pipelines/index.md b/doc/ci/merge_request_pipelines/index.md index 9e9d4ee403b..a724bf416b6 100644 --- a/doc/ci/merge_request_pipelines/index.md +++ b/doc/ci/merge_request_pipelines/index.md @@ -37,79 +37,23 @@ To enable pipelines for merge requests: ## Configuring pipelines for merge requests -To configure pipelines for merge requests, configure your [CI/CD configuration file](../yaml/README.md). -There are a few different ways to do this. 
+To configure pipelines for merge requests you need to configure your [CI/CD configuration file](../yaml/README.md). +There are a few different ways to do this: -### Enable pipelines for merge requests for all jobs +### Use `rules` to run pipelines for merge requests -The recommended method for enabling pipelines for merge requests for all jobs in -a pipeline is to use [`workflow:rules`](../yaml/README.md#workflowrules). - -In this example, the pipeline always runs for all merge requests, as well as for all changes -to the master branch: - -```yaml -workflow: - rules: - - if: $CI_MERGE_REQUEST_ID # Execute jobs in merge request context - - if: $CI_COMMIT_BRANCH == 'master' # Execute jobs when a new commit is pushed to master branch - -build: - stage: build - script: ./build - -test: - stage: test - script: ./test - -deploy: - stage: deploy - script: ./deploy -``` - -### Enable pipelines for merge requests for specific jobs - -To enable pipelines for merge requests for specific jobs, you can use -[`rules`](../yaml/README.md#rules). - -In the following example: - -- The `build` job runs for all changes to the `master` branch, as well as for all merge requests. -- The `test` job runs for all merge requests. -- The `deploy` job runs for all changes to the `master` branch, but does *not* run - for merge requests. 
- -```yaml -build: - stage: build - script: ./build - rules: - - if: $CI_COMMIT_BRANCH == 'master' # Execute jobs when a new commit is pushed to master branch - - if: $CI_MERGE_REQUEST_ID # Execute jobs in merge request context - -test: - stage: test - script: ./test - rules: - - if: $CI_MERGE_REQUEST_ID # Execute jobs in merge request context - -deploy: - stage: deploy - script: ./deploy - rules: - - if: $CI_COMMIT_BRANCH == 'master' # Execute jobs when a new commit is pushed to master branch -``` +When using `rules`, which is the preferred method, we recommend starting with one +of the [`workflow:rules` templates](../yaml/README.md#workflowrules-templates) to ensure +your basic configuration is correct. Instructions on how to do this, as well as how +to customize, are available at that link. ### Use `only` or `except` to run pipelines for merge requests -NOTE: **Note**: -The [`only` / `except`](../yaml/README.md#onlyexcept-basic) keywords are going to be deprecated -and you should not use them. - -To enable pipelines for merge requests, you can use `only / except`. When you use this method, -you have to specify `only: - merge_requests` for each job. +If you want to continue using `only/except`, this is possible but please review the drawbacks +below. -In this example, the pipeline contains a `test` job that is configured to run on merge requests. +When you use this method, you have to specify `only: - merge_requests` for each job. In this +example, the pipeline contains a `test` job that is configured to run on merge requests. The `build` and `deploy` jobs don't have the `only: - merge_requests` parameter, so they will not run on merge requests. @@ -259,60 +203,16 @@ The variable names begin with the `CI_MERGE_REQUEST_` prefix. ### Two pipelines created when pushing to a merge request -If two pipelines are created when you push a new change to a merge request, -check your CI configuration file. 
- -For example, with this `.gitlab-ci.yml` configuration: - -```yaml -test: - script: ./test - rules: - - if: $CI_MERGE_REQUEST_ID # Include this job in pipelines for merge request - - if: $CI_COMMIT_BRANCH # Include this job in all branch pipelines - # Or, if you are using the `only:` keyword: - # only: - # - merge_requests - # - branches -``` - -Two pipelines are created when you push a commit to a branch that also has a pending -merge request: - -- A merge request pipeline that runs for the changes in the merge request. In - **CI/CD > Pipelines**, the merge request icon (**{merge-request}**) - and the merge request ID are displayed. If you hover over the ID, the merge request name is displayed. - - ![MR pipeline icon example](img/merge_request_pipelines_doubled_MR_v12_09.png) - -- A "branch" pipeline that runs for the commit pushed to the branch. In **CI/CD > Pipelines**, - the branch icon (**{branch}**) and branch name are displayed. This pipeline is - created even if no merge request exists. - - ![branch pipeline icon example](img/merge_request_pipelines_doubled_branch_v12_09.png) - -With the example configuration above, there is overlap between these two events. -When you push a commit to a branch that also has an open merge request pending, -both types of pipelines are created. - -To fix this overlap, you must explicitly define which job should run for which -purpose, for example: - -```yaml -test: - script: ./test - rules: - - if: $CI_MERGE_REQUEST_ID # Include this job in pipelines for merge request - - if: $CI_COMMIT_BRANCH == 'master' # Include this job in master branch pipelines -``` +If you are experiencing duplicated pipelines when using `rules`, take a look at +the [key details when using `rules`](../yaml/README.md#key-details-when-using-rules), +which will help you get your starting configuration correct. -Similar `rules:` should be added to all jobs to avoid any overlapping pipelines. 
Alternatively, -you can use the [`workflow:`](../yaml/README.md#exclude-jobs-with-rules-from-certain-pipelines) -parameter to add the same rules to all jobs globally. +If you are seeing two pipelines when using `only/except`, please see the caveats +related to using `only/except` above (or, consider moving to `rules`). ### Two pipelines created when pushing an invalid CI configuration file -Similar to above, pushing to a branch with an invalid CI configuration file can trigger +Pushing to a branch with an invalid CI configuration file can trigger the creation of two types of failed pipelines. One pipeline is a failed merge request pipeline, and the other is a failed branch pipeline, but both are caused by the same invalid configuration. diff --git a/doc/ci/yaml/README.md b/doc/ci/yaml/README.md index b1112e68486..64ee2940eee 100644 --- a/doc/ci/yaml/README.md +++ b/doc/ci/yaml/README.md @@ -286,7 +286,45 @@ determine whether or not a pipeline is created. It currently accepts a single `rules:` key that operates similarly to [`rules:` defined within jobs](#rules), enabling dynamic configuration of the pipeline. -The configuration options currently available for `workflow:rules` are: +#### `workflow:rules` templates + +> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/217732) in GitLab 13.0. + +We provide pre-made templates for use with your pipelines that set up `workflow: rules` +for common scenarios. Usage of these will make things easier and prevent duplicate pipelines from running. + +The [`Branch-Pipelines` template](https://gitlab.com/gitlab-org/gitlab/-/tree/master/lib/gitlab/ci/templates/Workflows/Branch-Pipelines.gitlab-ci.yml) +makes your pipelines run for branches and tags. 
+ +Branch pipeline status will be displayed within merge requests that use that branch +as a source, but this pipeline type does not support any features offered by +[Merge Request Pipelines](../merge_request_pipelines/) like +[Pipelines for Merge Results](../merge_request_pipelines/#pipelines-for-merged-results-premium) +or [Merge Trains](../merge_request_pipelines/pipelines_for_merged_results/merge_trains/). +Use this template if you are intentionally avoiding those features. + +It is [included](#include) as follows: + +```yaml +include: + - template: 'Workflows/Branch-Pipelines.gitlab-ci.yml' +``` + + +The [`MergeRequest-Pipelines` template](https://gitlab.com/gitlab-org/gitlab/-/tree/master/lib/gitlab/ci/templates/Workflows/MergeRequest-Pipelines.gitlab-ci.yml) +makes your pipelines run for the default branch (usually `master`), tags, and +all types of merge request pipelines. Use this template if you use any of the +[Pipelines for Merge Requests features](../merge_request_pipelines/), as mentioned +above. + +It is [included](#include) as follows: + +```yaml +include: + - template: 'Workflows/MergeRequest-Pipelines.gitlab-ci.yml' +``` + +If you prefer to define your own rules, the configuration options currently available are: - [`if`](#rulesif): Define a rule. - [`when`](#when): May be set to `always` or `never` only. If not provided, the default value is `always`. @@ -858,10 +896,26 @@ the `.template` job, and uses the `alpine` Docker image as defined in the local `rules` allows for a list of individual rule objects to be evaluated *in order*, until one matches and dynamically provides attributes to the job. 
-Note that `rules` can't be used in combination with `only/except` since it's intended -to replace that functionality. If you attempt to do this the linter will return a + +CAUTION: **Caution:** +`rules` can't be used in combination with `only/except` as it is a replacement for that functionality. If you attempt to do this, the linter will return a `key may not be used with rules` error. +#### Key details when using `rules` + +A very important difference between `rules` and `only/except`, is that jobs defined +with `rules` trigger merge request pipelines by default, but `only/except` jobs do not. +This may be surprising if migrating from `only` and `except`, so new users of `rules` +can use one of the [`workflow: rules` templates](#workflowrules-templates) to get started. +This will ensure that the behavior is more stable as you start adding additional `rules` +blocks, and will avoid issues like creating a duplicate, merge request (detached) pipeline. + +We don't recommend mixing `only/except` jobs with `rules` jobs in the same pipeline. +It may not cause YAML errors, but debugging the exact execution behavior can be complex +due to the different default behaviors of `only/except` and `rules`. + +### Rules clauses + Available rule clauses include: - [`if`](#rulesif) (similar to [`only:variables`](#onlyvariablesexceptvariables)) @@ -995,47 +1049,6 @@ job: In this example, if the first rule matches, then the job will have `when: manual` and `allow_failure: true`. -#### Exclude jobs with `rules:` from certain pipelines - -Jobs with `rules:` can cause two pipelines to be created unexpectedly: - -- One pipeline from pushing a commit to a branch. -- A second ["detached" pipeline for a merge request](../merge_request_pipelines/index.md). - -`only` and `except` jobs don't trigger merge request pipelines by default, but this -is not the case for jobs with `rules:`, which may be surprising if migrating from `only` -and `except` to `rules:`. 
- -If you're using `rules:` and you see two pipelines for commits to branches that have -a merge request, you have two options: - -- Individually exclude each job that uses `rules:` from merge request pipelines. The - example below will cause the job to **not** run in *pipelines for merge requests*, - but it **will** run in pipelines for *new tags and pipelines running on branch refs*: - - ```yaml - job: - rules: - - if: $CI_MERGE_REQUEST_ID - when: never - - when: manual - script: - - echo hello - ``` - -- Add a global [`workflow: rules`](#workflowrules) to allow pipelines in only certain - situations. The example below will only run pipelines for merge requests, new tags and - changes to master. It will **not** run any pipelines *on any branch except master*, but - it will run **detached merge request pipelines** for any merge request, targeting any branch: - - ```yaml - workflow: - rules: - - if: $CI_MERGE_REQUEST_ID - - if: $CI_COMMIT_TAG - - if: $CI_COMMIT_BRANCH == "master" - ``` - #### Complex rule clauses To conjoin `if`, `changes`, and `exists` clauses with an AND, use them in the diff --git a/doc/integration/jenkins.md b/doc/integration/jenkins.md index 0afa9cbad50..485ca67c9fc 100644 --- a/doc/integration/jenkins.md +++ b/doc/integration/jenkins.md @@ -9,11 +9,9 @@ From GitLab, you can trigger a Jenkins build when you push code to a repository, request is created. In return, Jenkins shows the pipeline status on merge requests widgets and on the GitLab project's home page. 
-To better understand GitLab's Jenkins integration, watch the following videos: +To better understand GitLab's Jenkins integration, watch the following video: - [GitLab workflow with Jira issues and Jenkins pipelines](https://youtu.be/Jn-_fyra7xQ) -- [Migrating from Jenkins to GitLab](https://www.youtube.com/watch?v=RlEVGOpYF5Y) - Use the Jenkins integration with GitLab when: - You plan to migrate your CI from Jenkins to [GitLab CI/CD](../ci/README.md) in the future, but diff --git a/doc/user/application_security/sast/index.md b/doc/user/application_security/sast/index.md index 69db0fe9d03..c932f434684 100644 --- a/doc/user/application_security/sast/index.md +++ b/doc/user/application_security/sast/index.md @@ -67,7 +67,8 @@ The following table shows which languages, package managers and frameworks are s | Language (package managers) / framework | Scan tool | Introduced in GitLab Version | |-----------------------------------------------------------------------------|----------------------------------------------------------------------------------------|------------------------------| -| .NET | [Security Code Scan](https://security-code-scan.github.io) | 11.0 | +| .NET Core | [Security Code Scan](https://security-code-scan.github.io) | 11.0 | +| .NET Framework | [Security Code Scan](https://security-code-scan.github.io) | 13.0 | | Any | [Gitleaks](https://github.com/zricethezav/gitleaks) and [TruffleHog](https://github.com/dxa4481/truffleHog) | 11.9 | | Apex (Salesforce) | [PMD](https://pmd.github.io/pmd/index.html) | 12.1 | | C/C++ | [Flawfinder](https://dwheeler.com/flawfinder/) | 10.7 | diff --git a/lib/gitlab/ci/templates/Workflows/Branch-Pipelines.gitlab-ci.yml b/lib/gitlab/ci/templates/Workflows/Branch-Pipelines.gitlab-ci.yml new file mode 100644 index 00000000000..05635cf71be --- /dev/null +++ b/lib/gitlab/ci/templates/Workflows/Branch-Pipelines.gitlab-ci.yml @@ -0,0 +1,7 @@ +# Read more on when to use this template at +# 
https://docs.gitlab.com/ee/ci/yaml/#workflowrules + +workflow: + rules: + - if: $CI_COMMIT_TAG + - if: $CI_COMMIT_BRANCH diff --git a/lib/gitlab/ci/templates/Workflows/MergeRequest-Pipelines.gitlab-ci.yml b/lib/gitlab/ci/templates/Workflows/MergeRequest-Pipelines.gitlab-ci.yml new file mode 100644 index 00000000000..50ff4c1f60b --- /dev/null +++ b/lib/gitlab/ci/templates/Workflows/MergeRequest-Pipelines.gitlab-ci.yml @@ -0,0 +1,8 @@ +# Read more on when to use this template at +# https://docs.gitlab.com/ee/ci/yaml/#workflowrules + +workflow: + rules: + - if: $CI_MERGE_REQUEST_IID + - if: $CI_COMMIT_TAG + - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH diff --git a/locale/gitlab.pot b/locale/gitlab.pot index 06548367c04..037e4e30653 100644 --- a/locale/gitlab.pot +++ b/locale/gitlab.pot @@ -9473,6 +9473,9 @@ msgstr "" msgid "Filter by %{page_context_word} that are currently opened." msgstr "" +msgid "Filter by Git revision" +msgstr "" + msgid "Filter by commit message" msgstr "" @@ -18975,6 +18978,9 @@ msgstr "" msgid "Select Archive Format" msgstr "" +msgid "Select Git revision" +msgstr "" + msgid "Select GitLab project to link with your Slack team" msgstr "" diff --git a/spec/controllers/projects/logs_controller_spec.rb b/spec/controllers/projects/logs_controller_spec.rb index cc0690788c7..e86a42b03c8 100644 --- a/spec/controllers/projects/logs_controller_spec.rb +++ b/spec/controllers/projects/logs_controller_spec.rb @@ -16,16 +16,23 @@ describe Projects::LogsController do let(:container) { 'container-1' } before do - project.add_maintainer(user) - sign_in(user) end describe 'GET #index' do let(:empty_project) { create(:project) } + it 'returns 404 with developer access' do + project.add_developer(user) + + get :index, params: environment_params + + expect(response).to have_gitlab_http_status(:not_found) + end + it 'renders empty logs page if no environment exists' do empty_project.add_maintainer(user) + get :index, params: { namespace_id: empty_project.namespace, 
project_id: empty_project } expect(response).to be_ok @@ -33,6 +40,8 @@ describe Projects::LogsController do end it 'renders index template' do + project.add_maintainer(user) + get :index, params: environment_params expect(response).to be_ok @@ -60,70 +69,84 @@ describe Projects::LogsController do end end - it 'returns the service result' do + it 'returns 404 with developer access' do + project.add_developer(user) + get endpoint, params: environment_params(pod_name: pod_name, format: :json) - expect(response).to have_gitlab_http_status(:success) - expect(json_response).to eq(service_result_json) + expect(response).to have_gitlab_http_status(:not_found) end - it 'registers a usage of the endpoint' do - expect(::Gitlab::UsageCounters::PodLogs).to receive(:increment).with(project.id) + context 'with maintainer access' do + before do + project.add_maintainer(user) + end - get endpoint, params: environment_params(pod_name: pod_name, format: :json) + it 'returns the service result' do + get endpoint, params: environment_params(pod_name: pod_name, format: :json) - expect(response).to have_gitlab_http_status(:success) - end + expect(response).to have_gitlab_http_status(:success) + expect(json_response).to eq(service_result_json) + end - it 'sets the polling header' do - get endpoint, params: environment_params(pod_name: pod_name, format: :json) + it 'registers a usage of the endpoint' do + expect(::Gitlab::UsageCounters::PodLogs).to receive(:increment).with(project.id) - expect(response).to have_gitlab_http_status(:success) - expect(response.headers['Poll-Interval']).to eq('3000') - end + get endpoint, params: environment_params(pod_name: pod_name, format: :json) - context 'when service is processing' do - let(:service_result) { nil } + expect(response).to have_gitlab_http_status(:success) + end - it 'returns a 202' do + it 'sets the polling header' do get endpoint, params: environment_params(pod_name: pod_name, format: :json) - expect(response).to 
have_gitlab_http_status(:accepted) + expect(response).to have_gitlab_http_status(:success) + expect(response.headers['Poll-Interval']).to eq('3000') end - end - shared_examples 'unsuccessful execution response' do |message| - let(:service_result) do - { - status: :error, - message: message - } - end + context 'when service is processing' do + let(:service_result) { nil } - it 'returns the error' do - get endpoint, params: environment_params(pod_name: pod_name, format: :json) + it 'returns a 202' do + get endpoint, params: environment_params(pod_name: pod_name, format: :json) - expect(response).to have_gitlab_http_status(:bad_request) - expect(json_response).to eq(service_result_json) + expect(response).to have_gitlab_http_status(:accepted) + end end - end - context 'when service is failing' do - it_behaves_like 'unsuccessful execution response', 'some error' - end + shared_examples 'unsuccessful execution response' do |message| + let(:service_result) do + { + status: :error, + message: message + } + end - context 'when cluster is nil' do - let!(:cluster) { nil } + it 'returns the error' do + get endpoint, params: environment_params(pod_name: pod_name, format: :json) - it_behaves_like 'unsuccessful execution response', 'Environment does not have deployments' - end + expect(response).to have_gitlab_http_status(:bad_request) + expect(json_response).to eq(service_result_json) + end + end - context 'when namespace is empty' do - before do - allow(environment).to receive(:deployment_namespace).and_return('') + context 'when service is failing' do + it_behaves_like 'unsuccessful execution response', 'some error' + end + + context 'when cluster is nil' do + let!(:cluster) { nil } + + it_behaves_like 'unsuccessful execution response', 'Environment does not have deployments' end - it_behaves_like 'unsuccessful execution response', 'Environment does not have deployments' + context 'when namespace is empty' do + before do + allow(environment).to 
receive(:deployment_namespace).and_return('') + end + + it_behaves_like 'unsuccessful execution response', 'Environment does not have deployments' + end end end diff --git a/spec/controllers/snippets_controller_spec.rb b/spec/controllers/snippets_controller_spec.rb index aea0e4a097b..046ee40cec2 100644 --- a/spec/controllers/snippets_controller_spec.rb +++ b/spec/controllers/snippets_controller_spec.rb @@ -243,39 +243,13 @@ describe SnippetsController do end end - context 'when the snippet description contains a file' do - include FileMoverHelpers + context 'when the controller receives the files param' do + let(:files) { %w(foo bar) } - let(:picture_secret) { SecureRandom.hex } - let(:text_secret) { SecureRandom.hex } - let(:picture_file) { "/-/system/user/#{user.id}/#{picture_secret}/picture.jpg" } - let(:text_file) { "/-/system/user/#{user.id}/#{text_secret}/text.txt" } - let(:description) do - "Description with picture: ![picture](/uploads#{picture_file}) and "\ - "text: [text.txt](/uploads#{text_file})" - end - - before do - allow(FileUtils).to receive(:mkdir_p) - allow(FileUtils).to receive(:move) - stub_file_mover(text_file) - stub_file_mover(picture_file) - end - - subject { create_snippet({ description: description }, { files: [picture_file, text_file] }) } - - it 'creates the snippet' do - expect { subject }.to change { Snippet.count }.by(1) - end - - it 'stores the snippet description correctly' do - snippet = subject - - expected_description = "Description with picture: "\ - "![picture](/uploads/-/system/personal_snippet/#{snippet.id}/#{picture_secret}/picture.jpg) and "\ - "text: [text.txt](/uploads/-/system/personal_snippet/#{snippet.id}/#{text_secret}/text.txt)" + it 'passes the files param to the snippet create service' do + expect(Snippets::CreateService).to receive(:new).with(nil, user, hash_including(files: files)).and_call_original - expect(snippet.description).to eq(expected_description) + create_snippet({ title: nil }, { files: files }) end 
end diff --git a/spec/frontend/monitoring/utils_spec.js b/spec/frontend/monitoring/utils_spec.js index e78c17dc392..aa5a4459a72 100644 --- a/spec/frontend/monitoring/utils_spec.js +++ b/spec/frontend/monitoring/utils_spec.js @@ -315,24 +315,31 @@ describe('monitoring/utils', () => { const getUrlParams = url => urlUtils.queryToObject(url.split('?')[1]); it('returns URL for a panel when query parameters are given', () => { - const params = getUrlParams(panelToUrl(dashboard, panelGroup.group, panel)); - - expect(params).toEqual({ - dashboard, - group: panelGroup.group, - title: panel.title, - y_label: panel.y_label, - }); + const params = getUrlParams(panelToUrl(dashboard, {}, panelGroup.group, panel)); + + expect(params).toEqual( + expect.objectContaining({ + dashboard, + group: panelGroup.group, + title: panel.title, + y_label: panel.y_label, + }), + ); }); it('returns a dashboard only URL if group is missing', () => { - const params = getUrlParams(panelToUrl(dashboard, null, panel)); - expect(params).toEqual({ dashboard: 'metrics.yml' }); + const params = getUrlParams(panelToUrl(dashboard, {}, null, panel)); + expect(params).toEqual(expect.objectContaining({ dashboard: 'metrics.yml' })); }); it('returns a dashboard only URL if panel is missing', () => { - const params = getUrlParams(panelToUrl(dashboard, panelGroup.group, null)); - expect(params).toEqual({ dashboard: 'metrics.yml' }); + const params = getUrlParams(panelToUrl(dashboard, {}, panelGroup.group, null)); + expect(params).toEqual(expect.objectContaining({ dashboard: 'metrics.yml' })); + }); + + it('returns URL for a panel when query parameters are given including custom variables', () => { + const params = getUrlParams(panelToUrl(dashboard, { pod: 'pod' }, panelGroup.group, null)); + expect(params).toEqual(expect.objectContaining({ dashboard: 'metrics.yml', pod: 'pod' })); + }); }); diff --git a/spec/serializers/environment_entity_spec.rb b/spec/serializers/environment_entity_spec.rb index 
b4ea90d2141..36e971c467a 100644 --- a/spec/serializers/environment_entity_spec.rb +++ b/spec/serializers/environment_entity_spec.rb @@ -10,7 +10,13 @@ describe EnvironmentEntity do described_class.new(environment, request: spy('request')) end - let(:environment) { create(:environment) } + let_it_be(:user) { create(:user) } + let_it_be(:project) { create(:project) } + let_it_be(:environment) { create(:environment, project: project) } + + before do + allow(entity).to receive(:current_user).and_return(user) + end subject { entity.as_json } @@ -67,28 +73,48 @@ describe EnvironmentEntity do end context 'with auto_stop_in' do - let(:environment) { create(:environment, :will_auto_stop) } + let(:environment) { create(:environment, :will_auto_stop, project: project) } it 'exposes auto stop related information' do + project.add_maintainer(user) + expect(subject).to include(:cancel_auto_stop_path, :auto_stop_at) end end context 'pod_logs' do - it 'exposes logs keys' do - expect(subject).to include(:logs_path) - expect(subject).to include(:logs_api_path) - expect(subject).to include(:enable_advanced_logs_querying) - end + context 'with developer access' do + before do + project.add_developer(user) + end - it 'uses k8s api when ES is not available' do - expect(subject[:logs_api_path]).to eq(k8s_project_logs_path(environment.project, environment_name: environment.name, format: :json)) + it 'does not expose logs keys' do + expect(subject).not_to include(:logs_path) + expect(subject).not_to include(:logs_api_path) + expect(subject).not_to include(:enable_advanced_logs_querying) + end end - it 'uses ES api when ES is available' do - allow(environment).to receive(:elastic_stack_available?).and_return(true) + context 'with maintainer access' do + before do + project.add_maintainer(user) + end + + it 'exposes logs keys' do + expect(subject).to include(:logs_path) + expect(subject).to include(:logs_api_path) + expect(subject).to include(:enable_advanced_logs_querying) + end - 
expect(subject[:logs_api_path]).to eq(elasticsearch_project_logs_path(environment.project, environment_name: environment.name, format: :json)) + it 'uses k8s api when ES is not available' do + expect(subject[:logs_api_path]).to eq(k8s_project_logs_path(project, environment_name: environment.name, format: :json)) + end + + it 'uses ES api when ES is available' do + allow(environment).to receive(:elastic_stack_available?).and_return(true) + + expect(subject[:logs_api_path]).to eq(elasticsearch_project_logs_path(project, environment_name: environment.name, format: :json)) + end end end end diff --git a/spec/services/snippets/create_service_spec.rb b/spec/services/snippets/create_service_spec.rb index b545d7a8aa8..c0a0a2f6ac2 100644 --- a/spec/services/snippets/create_service_spec.rb +++ b/spec/services/snippets/create_service_spec.rb @@ -285,6 +285,20 @@ describe Snippets::CreateService do it_behaves_like 'an error service response when save fails' it_behaves_like 'creates repository and files' it_behaves_like 'after_save callback to store_mentions', ProjectSnippet + + context 'when uploaded files are passed to the service' do + let(:extra_opts) { { files: ['foo'] } } + + it 'does not move uploaded files to the snippet' do + expect_next_instance_of(described_class) do |instance| + expect(instance).to receive(:move_temporary_files).and_call_original + end + + expect_any_instance_of(FileMover).not_to receive(:execute) + + subject + end + end end context 'when PersonalSnippet' do @@ -297,6 +311,51 @@ describe Snippets::CreateService do it_behaves_like 'an error service response when save fails' it_behaves_like 'creates repository and files' it_behaves_like 'after_save callback to store_mentions', PersonalSnippet + + context 'when the snippet description contains files' do + include FileMoverHelpers + + let(:title) { 'Title' } + let(:picture_secret) { SecureRandom.hex } + let(:text_secret) { SecureRandom.hex } + let(:picture_file) { 
"/-/system/user/#{creator.id}/#{picture_secret}/picture.jpg" } + let(:text_file) { "/-/system/user/#{creator.id}/#{text_secret}/text.txt" } + let(:files) { [picture_file, text_file] } + let(:description) do + "Description with picture: ![picture](/uploads#{picture_file}) and "\ + "text: [text.txt](/uploads#{text_file})" + end + + before do + allow(FileUtils).to receive(:mkdir_p) + allow(FileUtils).to receive(:move) + end + + let(:extra_opts) { { description: description, title: title, files: files } } + + it 'stores the snippet description correctly' do + stub_file_mover(text_file) + stub_file_mover(picture_file) + + snippet = subject.payload[:snippet] + + expected_description = "Description with picture: "\ + "![picture](/uploads/-/system/personal_snippet/#{snippet.id}/#{picture_secret}/picture.jpg) and "\ + "text: [text.txt](/uploads/-/system/personal_snippet/#{snippet.id}/#{text_secret}/text.txt)" + + expect(snippet.description).to eq(expected_description) + end + + context 'when there is a validation error' do + let(:title) { nil } + + it 'does not move uploaded files to the snippet' do + expect_any_instance_of(described_class).not_to receive(:move_temporary_files) + + subject + end + end + end end end end |