github.com/microsoft/vscode.git
author    João Moreno <joao.moreno@microsoft.com>  2021-11-20 20:01:39 +0300
committer GitHub <noreply@github.com>               2021-11-20 20:01:39 +0300
commit    6f2ad1994cb7bf8b4288fc52e2d6ea72aecd9b39 (patch)
tree      1d5a88c42a8d6c3579b85cd4e628e74e89352c73 /build/azure-pipelines
parent    1d8839096bf7341f7d0ac6568177445cb6236c0f (diff)
More RBAC usage (#137579)
* ci: :construction_worker: remove secret references
* inline web storage account
* remove unused reference
* inline storage accounts
* formatting
* formatting
* drop ticino-storage-key, web-storage-key
* remove leftovers
* fix build
* fix build
* catch errors on all upload* scripts
* bump gulp-azure-storage
Diffstat (limited to 'build/azure-pipelines')
-rw-r--r--  build/azure-pipelines/darwin/product-build-darwin.yml  |  21
-rw-r--r--  build/azure-pipelines/distro-build.yml                  |   2
-rw-r--r--  build/azure-pipelines/exploration-build.yml             |   2
-rw-r--r--  build/azure-pipelines/linux/product-build-linux.yml     |   2
-rw-r--r--  build/azure-pipelines/product-build.yml                 |   4
-rw-r--r--  build/azure-pipelines/product-compile.yml               |  18
-rw-r--r--  build/azure-pipelines/product-publish.yml               |   2
-rw-r--r--  build/azure-pipelines/sdl-scan.yml                      | 396
-rw-r--r--  build/azure-pipelines/upload-cdn.js                     |  45
-rw-r--r--  build/azure-pipelines/upload-cdn.ts                     |  47
-rw-r--r--  build/azure-pipelines/upload-configuration.js           | 112
-rw-r--r--  build/azure-pipelines/upload-configuration.ts           | 132
-rw-r--r--  build/azure-pipelines/upload-nlsmetadata.js             | 147
-rw-r--r--  build/azure-pipelines/upload-nlsmetadata.ts             | 159
-rw-r--r--  build/azure-pipelines/upload-sourcemaps.js              |  33
-rw-r--r--  build/azure-pipelines/upload-sourcemaps.ts              |  38
-rw-r--r--  build/azure-pipelines/web/product-build-web.yml         |  31
-rw-r--r--  build/azure-pipelines/win32/product-build-win32.yml     |   2
18 files changed, 769 insertions(+), 424 deletions(-)
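
In summary, the upload scripts stop reading storage access keys (ticino-storage-key, web-storage-key, etc.) out of Key Vault and instead build an AAD ClientSecretCredential from the service-principal variables that the new AzureCLI@2 step exports via addSpnToEnvironment. A minimal sketch of that flow, assuming the @azure/identity and gulp-azure-storage packages as used in this patch (names other than those in the diff are illustrative):

// Sketch only: mirrors the credential pattern introduced in the upload-* scripts below.
import * as vfs from 'vinyl-fs';
import { ClientSecretCredential } from '@azure/identity';
const azure = require('gulp-azure-storage');

// The AzureCLI@2 task sets these pipeline variables from the service connection
// ("##vso[task.setvariable ...]" on tenantId / servicePrincipalId / servicePrincipalKey).
const credential = new ClientSecretCredential(
	process.env['AZURE_TENANT_ID']!,
	process.env['AZURE_CLIENT_ID']!,
	process.env['AZURE_CLIENT_SECRET']!
);

// gulp-azure-storage (as bumped by this change) takes the credential in place of a storage key.
function upload(container: string, prefix: string): Promise<void> {
	return new Promise((c, e) => {
		vfs.src('**', { dot: true })
			.pipe(azure.upload({
				account: process.env.AZURE_STORAGE_ACCOUNT, // now inlined in the pipeline, e.g. "ticino" or "vscodeweb"
				credential,
				container,
				prefix
			}))
			.on('end', () => c())
			.on('error', (err: any) => e(err));
	});
}
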
diff --git a/build/azure-pipelines/darwin/product-build-darwin.yml b/build/azure-pipelines/darwin/product-build-darwin.yml
index 928bd234e0c..f309fc1dab8 100644
--- a/build/azure-pipelines/darwin/product-build-darwin.yml
+++ b/build/azure-pipelines/darwin/product-build-darwin.yml
@@ -8,7 +8,7 @@ steps:
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
- SecretsFilter: 'github-distro-mixin-password,macos-developer-certificate,macos-developer-certificate-key,ticino-storage-key'
+ SecretsFilter: "github-distro-mixin-password,macos-developer-certificate,macos-developer-certificate-key"
- task: DownloadPipelineArtifact@2
inputs:
@@ -301,10 +301,25 @@ steps:
displayName: Publish web server archive
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), ne(variables['VSCODE_PUBLISH'], 'false'))
+ - task: AzureCLI@2
+ inputs:
+ azureSubscription: "vscode-builds-subscription"
+ scriptType: pscore
+ scriptLocation: inlineScript
+ addSpnToEnvironment: true
+ inlineScript: |
+ Write-Host "##vso[task.setvariable variable=AZURE_TENANT_ID]$env:tenantId"
+ Write-Host "##vso[task.setvariable variable=AZURE_CLIENT_ID]$env:servicePrincipalId"
+ Write-Host "##vso[task.setvariable variable=AZURE_CLIENT_SECRET;issecret=true]$env:servicePrincipalKey"
+
- script: |
- AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \
+ set -e
+ AZURE_STORAGE_ACCOUNT="ticino" \
+ AZURE_TENANT_ID="$(AZURE_TENANT_ID)" \
+ AZURE_CLIENT_ID="$(AZURE_CLIENT_ID)" \
+ AZURE_CLIENT_SECRET="$(AZURE_CLIENT_SECRET)" \
VSCODE_ARCH="$(VSCODE_ARCH)" \
- yarn gulp upload-vscode-configuration
+ node build/azure-pipelines/upload-configuration
displayName: Upload configuration (for Bing settings search)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), ne(variables['VSCODE_PUBLISH'], 'false'))
continueOnError: true
diff --git a/build/azure-pipelines/distro-build.yml b/build/azure-pipelines/distro-build.yml
index 6031405fe37..53b62b47a4e 100644
--- a/build/azure-pipelines/distro-build.yml
+++ b/build/azure-pipelines/distro-build.yml
@@ -18,7 +18,7 @@ steps:
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
- SecretsFilter: 'github-distro-mixin-password'
+ SecretsFilter: "github-distro-mixin-password"
- script: |
set -e
diff --git a/build/azure-pipelines/exploration-build.yml b/build/azure-pipelines/exploration-build.yml
index ca78aca17f4..5b6599d8dab 100644
--- a/build/azure-pipelines/exploration-build.yml
+++ b/build/azure-pipelines/exploration-build.yml
@@ -14,7 +14,7 @@ steps:
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
- SecretsFilter: 'github-distro-mixin-password'
+ SecretsFilter: "github-distro-mixin-password"
- script: |
set -e
diff --git a/build/azure-pipelines/linux/product-build-linux.yml b/build/azure-pipelines/linux/product-build-linux.yml
index dc232cc0219..641be042f73 100644
--- a/build/azure-pipelines/linux/product-build-linux.yml
+++ b/build/azure-pipelines/linux/product-build-linux.yml
@@ -8,7 +8,7 @@ steps:
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
- SecretsFilter: "github-distro-mixin-password,builds-docdb-key-readwrite,vscode-storage-key,ESRP-PKI,esrp-aad-username,esrp-aad-password"
+ SecretsFilter: "github-distro-mixin-password,ESRP-PKI,esrp-aad-username,esrp-aad-password"
- task: DownloadPipelineArtifact@2
inputs:
diff --git a/build/azure-pipelines/product-build.yml b/build/azure-pipelines/product-build.yml
index e87939db37e..c0b619cab09 100644
--- a/build/azure-pipelines/product-build.yml
+++ b/build/azure-pipelines/product-build.yml
@@ -113,10 +113,6 @@ variables:
value: https://az764295.vo.msecnd.net
- name: AZURE_DOCUMENTDB_ENDPOINT
value: https://vscode.documents.azure.com:443/
- - name: AZURE_STORAGE_ACCOUNT
- value: ticino
- - name: AZURE_STORAGE_ACCOUNT_2
- value: vscode
- name: MOONCAKE_CDN_URL
value: https://vscode.cdn.azure.cn
- name: VSCODE_MIXIN_REPO
diff --git a/build/azure-pipelines/product-compile.yml b/build/azure-pipelines/product-compile.yml
index 709d8e39f49..88af1af2918 100644
--- a/build/azure-pipelines/product-compile.yml
+++ b/build/azure-pipelines/product-compile.yml
@@ -8,7 +8,7 @@ steps:
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
- SecretsFilter: "github-distro-mixin-password,ticino-storage-key"
+ SecretsFilter: "github-distro-mixin-password"
- script: |
set -e
@@ -103,9 +103,23 @@ steps:
displayName: Compile test suites
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
+ - task: AzureCLI@2
+ inputs:
+ azureSubscription: "vscode-builds-subscription"
+ scriptType: pscore
+ scriptLocation: inlineScript
+ addSpnToEnvironment: true
+ inlineScript: |
+ Write-Host "##vso[task.setvariable variable=AZURE_TENANT_ID]$env:tenantId"
+ Write-Host "##vso[task.setvariable variable=AZURE_CLIENT_ID]$env:servicePrincipalId"
+ Write-Host "##vso[task.setvariable variable=AZURE_CLIENT_SECRET;issecret=true]$env:servicePrincipalKey"
+
- script: |
set -e
- AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \
+ AZURE_STORAGE_ACCOUNT="ticino" \
+ AZURE_TENANT_ID="$(AZURE_TENANT_ID)" \
+ AZURE_CLIENT_ID="$(AZURE_CLIENT_ID)" \
+ AZURE_CLIENT_SECRET="$(AZURE_CLIENT_SECRET)" \
node build/azure-pipelines/upload-sourcemaps
displayName: Upload sourcemaps
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
diff --git a/build/azure-pipelines/product-publish.yml b/build/azure-pipelines/product-publish.yml
index 44516763081..c43180ea0a3 100644
--- a/build/azure-pipelines/product-publish.yml
+++ b/build/azure-pipelines/product-publish.yml
@@ -8,7 +8,7 @@ steps:
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
- SecretsFilter: "builds-docdb-key-readwrite,github-distro-mixin-password,ticino-storage-key,vscode-storage-key,vscode-mooncake-storage-key"
+ SecretsFilter: "github-distro-mixin-password"
- pwsh: |
. build/azure-pipelines/win32/exec.ps1
diff --git a/build/azure-pipelines/sdl-scan.yml b/build/azure-pipelines/sdl-scan.yml
index b1976140bd0..d1cd72b3d95 100644
--- a/build/azure-pipelines/sdl-scan.yml
+++ b/build/azure-pipelines/sdl-scan.yml
@@ -32,201 +32,201 @@ variables:
value: x64
stages:
-- stage: Windows
- condition: eq(variables.SCAN_WINDOWS, 'true')
- pool:
- vmImage: VS2017-Win2016
- jobs:
- - job: WindowsJob
- timeoutInMinutes: 0
- steps:
- - task: CredScan@3
- continueOnError: true
- inputs:
- scanFolder: '$(Build.SourcesDirectory)'
- outputFormat: 'pre'
- - task: NodeTool@0
- inputs:
- versionSpec: "14.x"
-
- - task: AzureKeyVault@1
- displayName: "Azure Key Vault: Get Secrets"
- inputs:
- azureSubscription: "vscode-builds-subscription"
- KeyVaultName: vscode
- SecretsFilter: "github-distro-mixin-password"
-
- - powershell: |
- . build/azure-pipelines/win32/exec.ps1
- $ErrorActionPreference = "Stop"
- "machine github.com`nlogin vscode`npassword $(github-distro-mixin-password)" | Out-File "$env:USERPROFILE\_netrc" -Encoding ASCII
-
- exec { git config user.email "vscode@microsoft.com" }
- exec { git config user.name "VSCode" }
- displayName: Prepare tooling
-
- - powershell: |
- . build/azure-pipelines/win32/exec.ps1
- $ErrorActionPreference = "Stop"
- exec { git pull --no-rebase https://github.com/$(VSCODE_MIXIN_REPO).git $(node -p "require('./package.json').distro") }
- displayName: Merge distro
-
- - powershell: |
- . build/azure-pipelines/win32/exec.ps1
- $ErrorActionPreference = "Stop"
- exec { npx https://aka.ms/enablesecurefeed standAlone }
- timeoutInMinutes: 5
- condition: and(succeeded(), eq(variables['ENABLE_TERRAPIN'], 'true'))
- displayName: Switch to Terrapin packages
-
- - task: Semmle@1
- inputs:
- sourceCodeDirectory: '$(Build.SourcesDirectory)'
- language: 'cpp'
- buildCommandsString: 'yarn --frozen-lockfile'
- querySuite: 'Required'
- timeout: '1800'
- ram: '16384'
- addProjectDirToScanningExclusionList: true
- env:
- npm_config_arch: "$(NPM_ARCH)"
- PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
- GITHUB_TOKEN: "$(github-distro-mixin-password)"
- displayName: CodeQL
-
- - powershell: |
- . build/azure-pipelines/win32/exec.ps1
- . build/azure-pipelines/win32/retry.ps1
- $ErrorActionPreference = "Stop"
- retry { exec { yarn --frozen-lockfile } }
- env:
- npm_config_arch: "$(NPM_ARCH)"
- PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
- GITHUB_TOKEN: "$(github-distro-mixin-password)"
- CHILD_CONCURRENCY: 1
- displayName: Install dependencies
-
- - powershell: |
- . build/azure-pipelines/win32/exec.ps1
- $ErrorActionPreference = "Stop"
- exec { yarn gulp "vscode-symbols-win32-$(VSCODE_ARCH)" }
- displayName: Download Symbols
-
- - task: BinSkim@4
- inputs:
- InputType: 'Basic'
- Function: 'analyze'
- TargetPattern: 'guardianGlob'
- AnalyzeTargetGlob: '$(agent.builddirectory)\scanbin\**.dll;$(agent.builddirectory)\scanbin\**.exe;$(agent.builddirectory)\scanbin\**.node'
- AnalyzeLocalSymbolDirectories: '$(agent.builddirectory)\scanbin\VSCode-win32-$(VSCODE_ARCH)\pdb'
-
- - task: TSAUpload@2
- inputs:
- GdnPublishTsaOnboard: true
- GdnPublishTsaConfigFile: '$(Build.SourcesDirectory)\build\azure-pipelines\.gdntsa'
-
-- stage: Linux
- dependsOn: []
- condition: eq(variables.SCAN_LINUX, 'true')
- pool:
- vmImage: "Ubuntu-18.04"
- jobs:
- - job: LinuxJob
- steps:
- - task: CredScan@2
- inputs:
- toolMajorVersion: 'V2'
- - task: NodeTool@0
- inputs:
- versionSpec: "14.x"
-
- - task: AzureKeyVault@1
- displayName: "Azure Key Vault: Get Secrets"
- inputs:
- azureSubscription: "vscode-builds-subscription"
- KeyVaultName: vscode
- SecretsFilter: "github-distro-mixin-password"
-
- - script: |
- set -e
- cat << EOF > ~/.netrc
- machine github.com
- login vscode
- password $(github-distro-mixin-password)
- EOF
-
- git config user.email "vscode@microsoft.com"
- git config user.name "VSCode"
- displayName: Prepare tooling
-
- - script: |
- set -e
- git pull --no-rebase https://github.com/$(VSCODE_MIXIN_REPO).git $(node -p "require('./package.json').distro")
- displayName: Merge distro
-
- - script: |
- set -e
- npx https://aka.ms/enablesecurefeed standAlone
- timeoutInMinutes: 5
- condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
- displayName: Switch to Terrapin packages
-
- - script: |
- set -e
- yarn --cwd build
- yarn --cwd build compile
- displayName: Compile build tools
-
- - script: |
- set -e
- export npm_config_arch=$(NPM_ARCH)
-
- if [ -z "$CC" ] || [ -z "$CXX" ]; then
- # Download clang based on chromium revision used by vscode
- curl -s https://raw.githubusercontent.com/chromium/chromium/91.0.4472.164/tools/clang/scripts/update.py | python - --output-dir=$PWD/.build/CR_Clang --host-os=linux
- # Download libcxx headers and objects from upstream electron releases
- DEBUG=libcxx-fetcher \
- VSCODE_LIBCXX_OBJECTS_DIR=$PWD/.build/libcxx-objects \
- VSCODE_LIBCXX_HEADERS_DIR=$PWD/.build/libcxx_headers \
- VSCODE_LIBCXXABI_HEADERS_DIR=$PWD/.build/libcxxabi_headers \
- VSCODE_ARCH="$(NPM_ARCH)" \
- node build/linux/libcxx-fetcher.js
- # Set compiler toolchain
- export CC=$PWD/.build/CR_Clang/bin/clang
- export CXX=$PWD/.build/CR_Clang/bin/clang++
- export CXXFLAGS="-nostdinc++ -D_LIBCPP_HAS_NO_VENDOR_AVAILABILITY_ANNOTATIONS -isystem$PWD/.build/libcxx_headers/include -isystem$PWD/.build/libcxxabi_headers/include -fPIC -flto=thin -fsplit-lto-unit"
- export LDFLAGS="-stdlib=libc++ -fuse-ld=lld -flto=thin -fsplit-lto-unit -L$PWD/.build/libcxx-objects -lc++abi"
- fi
-
- if [ "$VSCODE_ARCH" == "x64" ]; then
- export VSCODE_REMOTE_CC=$(which gcc-4.8)
- export VSCODE_REMOTE_CXX=$(which g++-4.8)
- fi
-
- for i in {1..3}; do # try 3 times, for Terrapin
- yarn --frozen-lockfile && break
- if [ $i -eq 3 ]; then
- echo "Yarn failed too many times" >&2
- exit 1
- fi
- echo "Yarn failed $i, trying again..."
- done
- env:
- PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
- GITHUB_TOKEN: "$(github-distro-mixin-password)"
- displayName: Install dependencies
-
- - script: |
- set -e
- yarn gulp vscode-symbols-linux-$(VSCODE_ARCH)
- displayName: Build
-
- - task: BinSkim@3
- inputs:
- toolVersion: Latest
- InputType: CommandLine
- arguments: analyze $(agent.builddirectory)\scanbin\exe\*.* --recurse --local-symbol-directories $(agent.builddirectory)\scanbin\VSCode-linux-$(VSCODE_ARCH)\pdb
-
- - task: TSAUpload@2
- inputs:
- GdnPublishTsaConfigFile: '$(Build.SourceDirectory)\build\azure-pipelines\.gdntsa'
+ - stage: Windows
+ condition: eq(variables.SCAN_WINDOWS, 'true')
+ pool:
+ vmImage: VS2017-Win2016
+ jobs:
+ - job: WindowsJob
+ timeoutInMinutes: 0
+ steps:
+ - task: CredScan@3
+ continueOnError: true
+ inputs:
+ scanFolder: "$(Build.SourcesDirectory)"
+ outputFormat: "pre"
+ - task: NodeTool@0
+ inputs:
+ versionSpec: "14.x"
+
+ - task: AzureKeyVault@1
+ displayName: "Azure Key Vault: Get Secrets"
+ inputs:
+ azureSubscription: "vscode-builds-subscription"
+ KeyVaultName: vscode
+ SecretsFilter: "github-distro-mixin-password"
+
+ - powershell: |
+ . build/azure-pipelines/win32/exec.ps1
+ $ErrorActionPreference = "Stop"
+ "machine github.com`nlogin vscode`npassword $(github-distro-mixin-password)" | Out-File "$env:USERPROFILE\_netrc" -Encoding ASCII
+
+ exec { git config user.email "vscode@microsoft.com" }
+ exec { git config user.name "VSCode" }
+ displayName: Prepare tooling
+
+ - powershell: |
+ . build/azure-pipelines/win32/exec.ps1
+ $ErrorActionPreference = "Stop"
+ exec { git pull --no-rebase https://github.com/$(VSCODE_MIXIN_REPO).git $(node -p "require('./package.json').distro") }
+ displayName: Merge distro
+
+ - powershell: |
+ . build/azure-pipelines/win32/exec.ps1
+ $ErrorActionPreference = "Stop"
+ exec { npx https://aka.ms/enablesecurefeed standAlone }
+ timeoutInMinutes: 5
+ condition: and(succeeded(), eq(variables['ENABLE_TERRAPIN'], 'true'))
+ displayName: Switch to Terrapin packages
+
+ - task: Semmle@1
+ inputs:
+ sourceCodeDirectory: "$(Build.SourcesDirectory)"
+ language: "cpp"
+ buildCommandsString: "yarn --frozen-lockfile"
+ querySuite: "Required"
+ timeout: "1800"
+ ram: "16384"
+ addProjectDirToScanningExclusionList: true
+ env:
+ npm_config_arch: "$(NPM_ARCH)"
+ PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
+ GITHUB_TOKEN: "$(github-distro-mixin-password)"
+ displayName: CodeQL
+
+ - powershell: |
+ . build/azure-pipelines/win32/exec.ps1
+ . build/azure-pipelines/win32/retry.ps1
+ $ErrorActionPreference = "Stop"
+ retry { exec { yarn --frozen-lockfile } }
+ env:
+ npm_config_arch: "$(NPM_ARCH)"
+ PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
+ GITHUB_TOKEN: "$(github-distro-mixin-password)"
+ CHILD_CONCURRENCY: 1
+ displayName: Install dependencies
+
+ - powershell: |
+ . build/azure-pipelines/win32/exec.ps1
+ $ErrorActionPreference = "Stop"
+ exec { yarn gulp "vscode-symbols-win32-$(VSCODE_ARCH)" }
+ displayName: Download Symbols
+
+ - task: BinSkim@4
+ inputs:
+ InputType: "Basic"
+ Function: "analyze"
+ TargetPattern: "guardianGlob"
+ AnalyzeTargetGlob: '$(agent.builddirectory)\scanbin\**.dll;$(agent.builddirectory)\scanbin\**.exe;$(agent.builddirectory)\scanbin\**.node'
+ AnalyzeLocalSymbolDirectories: '$(agent.builddirectory)\scanbin\VSCode-win32-$(VSCODE_ARCH)\pdb'
+
+ - task: TSAUpload@2
+ inputs:
+ GdnPublishTsaOnboard: true
+ GdnPublishTsaConfigFile: '$(Build.SourcesDirectory)\build\azure-pipelines\.gdntsa'
+
+ - stage: Linux
+ dependsOn: []
+ condition: eq(variables.SCAN_LINUX, 'true')
+ pool:
+ vmImage: "Ubuntu-18.04"
+ jobs:
+ - job: LinuxJob
+ steps:
+ - task: CredScan@2
+ inputs:
+ toolMajorVersion: "V2"
+ - task: NodeTool@0
+ inputs:
+ versionSpec: "14.x"
+
+ - task: AzureKeyVault@1
+ displayName: "Azure Key Vault: Get Secrets"
+ inputs:
+ azureSubscription: "vscode-builds-subscription"
+ KeyVaultName: vscode
+ SecretsFilter: "github-distro-mixin-password"
+
+ - script: |
+ set -e
+ cat << EOF > ~/.netrc
+ machine github.com
+ login vscode
+ password $(github-distro-mixin-password)
+ EOF
+
+ git config user.email "vscode@microsoft.com"
+ git config user.name "VSCode"
+ displayName: Prepare tooling
+
+ - script: |
+ set -e
+ git pull --no-rebase https://github.com/$(VSCODE_MIXIN_REPO).git $(node -p "require('./package.json').distro")
+ displayName: Merge distro
+
+ - script: |
+ set -e
+ npx https://aka.ms/enablesecurefeed standAlone
+ timeoutInMinutes: 5
+ condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
+ displayName: Switch to Terrapin packages
+
+ - script: |
+ set -e
+ yarn --cwd build
+ yarn --cwd build compile
+ displayName: Compile build tools
+
+ - script: |
+ set -e
+ export npm_config_arch=$(NPM_ARCH)
+
+ if [ -z "$CC" ] || [ -z "$CXX" ]; then
+ # Download clang based on chromium revision used by vscode
+ curl -s https://raw.githubusercontent.com/chromium/chromium/91.0.4472.164/tools/clang/scripts/update.py | python - --output-dir=$PWD/.build/CR_Clang --host-os=linux
+ # Download libcxx headers and objects from upstream electron releases
+ DEBUG=libcxx-fetcher \
+ VSCODE_LIBCXX_OBJECTS_DIR=$PWD/.build/libcxx-objects \
+ VSCODE_LIBCXX_HEADERS_DIR=$PWD/.build/libcxx_headers \
+ VSCODE_LIBCXXABI_HEADERS_DIR=$PWD/.build/libcxxabi_headers \
+ VSCODE_ARCH="$(NPM_ARCH)" \
+ node build/linux/libcxx-fetcher.js
+ # Set compiler toolchain
+ export CC=$PWD/.build/CR_Clang/bin/clang
+ export CXX=$PWD/.build/CR_Clang/bin/clang++
+ export CXXFLAGS="-nostdinc++ -D_LIBCPP_HAS_NO_VENDOR_AVAILABILITY_ANNOTATIONS -isystem$PWD/.build/libcxx_headers/include -isystem$PWD/.build/libcxxabi_headers/include -fPIC -flto=thin -fsplit-lto-unit"
+ export LDFLAGS="-stdlib=libc++ -fuse-ld=lld -flto=thin -fsplit-lto-unit -L$PWD/.build/libcxx-objects -lc++abi"
+ fi
+
+ if [ "$VSCODE_ARCH" == "x64" ]; then
+ export VSCODE_REMOTE_CC=$(which gcc-4.8)
+ export VSCODE_REMOTE_CXX=$(which g++-4.8)
+ fi
+
+ for i in {1..3}; do # try 3 times, for Terrapin
+ yarn --frozen-lockfile && break
+ if [ $i -eq 3 ]; then
+ echo "Yarn failed too many times" >&2
+ exit 1
+ fi
+ echo "Yarn failed $i, trying again..."
+ done
+ env:
+ PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
+ GITHUB_TOKEN: "$(github-distro-mixin-password)"
+ displayName: Install dependencies
+
+ - script: |
+ set -e
+ yarn gulp vscode-symbols-linux-$(VSCODE_ARCH)
+ displayName: Build
+
+ - task: BinSkim@3
+ inputs:
+ toolVersion: Latest
+ InputType: CommandLine
+ arguments: analyze $(agent.builddirectory)\scanbin\exe\*.* --recurse --local-symbol-directories $(agent.builddirectory)\scanbin\VSCode-linux-$(VSCODE_ARCH)\pdb
+
+ - task: TSAUpload@2
+ inputs:
+ GdnPublishTsaConfigFile: '$(Build.SourceDirectory)\build\azure-pipelines\.gdntsa'
diff --git a/build/azure-pipelines/upload-cdn.js b/build/azure-pipelines/upload-cdn.js
index 16a072905a0..fe3817c9183 100644
--- a/build/azure-pipelines/upload-cdn.js
+++ b/build/azure-pipelines/upload-cdn.js
@@ -10,26 +10,35 @@ const vfs = require("vinyl-fs");
const util = require("../lib/util");
const filter = require("gulp-filter");
const gzip = require("gulp-gzip");
+const identity_1 = require("@azure/identity");
const azure = require('gulp-azure-storage');
const root = path.dirname(path.dirname(__dirname));
const commit = util.getVersion(root);
+const credential = new identity_1.ClientSecretCredential(process.env['AZURE_TENANT_ID'], process.env['AZURE_CLIENT_ID'], process.env['AZURE_CLIENT_SECRET']);
function main() {
- return vfs.src('**', { cwd: '../vscode-web', base: '../vscode-web', dot: true })
- .pipe(filter(f => !f.isDirectory()))
- .pipe(gzip({ append: false }))
- .pipe(es.through(function (data) {
- console.log('Uploading CDN file:', data.relative); // debug
- this.emit('data', data);
- }))
- .pipe(azure.upload({
- account: process.env.AZURE_STORAGE_ACCOUNT,
- key: process.env.AZURE_STORAGE_ACCESS_KEY,
- container: process.env.VSCODE_QUALITY,
- prefix: commit + '/',
- contentSettings: {
- contentEncoding: 'gzip',
- cacheControl: 'max-age=31536000, public'
- }
- }));
+ return new Promise((c, e) => {
+ vfs.src('**', { cwd: '../vscode-web', base: '../vscode-web', dot: true })
+ .pipe(filter(f => !f.isDirectory()))
+ .pipe(gzip({ append: false }))
+ .pipe(es.through(function (data) {
+ console.log('Uploading CDN file:', data.relative); // debug
+ this.emit('data', data);
+ }))
+ .pipe(azure.upload({
+ account: process.env.AZURE_STORAGE_ACCOUNT,
+ credential,
+ container: process.env.VSCODE_QUALITY,
+ prefix: commit + '/',
+ contentSettings: {
+ contentEncoding: 'gzip',
+ cacheControl: 'max-age=31536000, public'
+ }
+ }))
+ .on('end', () => c())
+ .on('error', (err) => e(err));
+ });
}
-main();
+main().catch(err => {
+ console.error(err);
+ process.exit(1);
+});
diff --git a/build/azure-pipelines/upload-cdn.ts b/build/azure-pipelines/upload-cdn.ts
index 71589033867..c35582017d7 100644
--- a/build/azure-pipelines/upload-cdn.ts
+++ b/build/azure-pipelines/upload-cdn.ts
@@ -12,29 +12,38 @@ import * as vfs from 'vinyl-fs';
import * as util from '../lib/util';
import * as filter from 'gulp-filter';
import * as gzip from 'gulp-gzip';
+import { ClientSecretCredential } from '@azure/identity';
const azure = require('gulp-azure-storage');
const root = path.dirname(path.dirname(__dirname));
const commit = util.getVersion(root);
+const credential = new ClientSecretCredential(process.env['AZURE_TENANT_ID']!, process.env['AZURE_CLIENT_ID']!, process.env['AZURE_CLIENT_SECRET']!);
-function main() {
- return vfs.src('**', { cwd: '../vscode-web', base: '../vscode-web', dot: true })
- .pipe(filter(f => !f.isDirectory()))
- .pipe(gzip({ append: false }))
- .pipe(es.through(function (data: Vinyl) {
- console.log('Uploading CDN file:', data.relative); // debug
- this.emit('data', data);
- }))
- .pipe(azure.upload({
- account: process.env.AZURE_STORAGE_ACCOUNT,
- key: process.env.AZURE_STORAGE_ACCESS_KEY,
- container: process.env.VSCODE_QUALITY,
- prefix: commit + '/',
- contentSettings: {
- contentEncoding: 'gzip',
- cacheControl: 'max-age=31536000, public'
- }
- }));
+function main(): Promise<void> {
+ return new Promise((c, e) => {
+ vfs.src('**', { cwd: '../vscode-web', base: '../vscode-web', dot: true })
+ .pipe(filter(f => !f.isDirectory()))
+ .pipe(gzip({ append: false }))
+ .pipe(es.through(function (data: Vinyl) {
+ console.log('Uploading CDN file:', data.relative); // debug
+ this.emit('data', data);
+ }))
+ .pipe(azure.upload({
+ account: process.env.AZURE_STORAGE_ACCOUNT,
+ credential,
+ container: process.env.VSCODE_QUALITY,
+ prefix: commit + '/',
+ contentSettings: {
+ contentEncoding: 'gzip',
+ cacheControl: 'max-age=31536000, public'
+ }
+ }))
+ .on('end', () => c())
+ .on('error', (err: any) => e(err));
+ });
}
-main();
+main().catch(err => {
+ console.error(err);
+ process.exit(1);
+});
diff --git a/build/azure-pipelines/upload-configuration.js b/build/azure-pipelines/upload-configuration.js
new file mode 100644
index 00000000000..689d99fdae0
--- /dev/null
+++ b/build/azure-pipelines/upload-configuration.js
@@ -0,0 +1,112 @@
+/*---------------------------------------------------------------------------------------------
+ * Copyright (c) Microsoft Corporation. All rights reserved.
+ * Licensed under the MIT License. See License.txt in the project root for license information.
+ *--------------------------------------------------------------------------------------------*/
+'use strict';
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.getSettingsSearchBuildId = exports.shouldSetupSettingsSearch = void 0;
+const path = require("path");
+const os = require("os");
+const cp = require("child_process");
+const vfs = require("vinyl-fs");
+const util = require("../lib/util");
+const identity_1 = require("@azure/identity");
+const azure = require('gulp-azure-storage');
+const packageJson = require("../../package.json");
+const root = path.dirname(path.dirname(__dirname));
+const commit = util.getVersion(root);
+function generateVSCodeConfigurationTask() {
+ return new Promise((resolve, reject) => {
+ const buildDir = process.env['AGENT_BUILDDIRECTORY'];
+ if (!buildDir) {
+ return reject(new Error('$AGENT_BUILDDIRECTORY not set'));
+ }
+ if (!shouldSetupSettingsSearch()) {
+ console.log(`Only runs on main and release branches, not ${process.env.BUILD_SOURCEBRANCH}`);
+ return resolve(undefined);
+ }
+ if (process.env.VSCODE_QUALITY !== 'insider' && process.env.VSCODE_QUALITY !== 'stable') {
+ console.log(`Only runs on insider and stable qualities, not ${process.env.VSCODE_QUALITY}`);
+ return resolve(undefined);
+ }
+ const result = path.join(os.tmpdir(), 'configuration.json');
+ const userDataDir = path.join(os.tmpdir(), 'tmpuserdata');
+ const extensionsDir = path.join(os.tmpdir(), 'tmpextdir');
+ const arch = process.env['VSCODE_ARCH'];
+ const appRoot = path.join(buildDir, `VSCode-darwin-${arch}`);
+ const appName = process.env.VSCODE_QUALITY === 'insider' ? 'Visual\\ Studio\\ Code\\ -\\ Insiders.app' : 'Visual\\ Studio\\ Code.app';
+ const appPath = path.join(appRoot, appName, 'Contents', 'Resources', 'app', 'bin', 'code');
+ const codeProc = cp.exec(`${appPath} --export-default-configuration='${result}' --wait --user-data-dir='${userDataDir}' --extensions-dir='${extensionsDir}'`, (err, stdout, stderr) => {
+ clearTimeout(timer);
+ if (err) {
+ console.log(`err: ${err} ${err.message} ${err.toString()}`);
+ reject(err);
+ }
+ if (stdout) {
+ console.log(`stdout: ${stdout}`);
+ }
+ if (stderr) {
+ console.log(`stderr: ${stderr}`);
+ }
+ resolve(result);
+ });
+ const timer = setTimeout(() => {
+ codeProc.kill();
+ reject(new Error('export-default-configuration process timed out'));
+ }, 12 * 1000);
+ codeProc.on('error', err => {
+ clearTimeout(timer);
+ reject(err);
+ });
+ });
+}
+function shouldSetupSettingsSearch() {
+ const branch = process.env.BUILD_SOURCEBRANCH;
+ return !!(branch && (/\/main$/.test(branch) || branch.indexOf('/release/') >= 0));
+}
+exports.shouldSetupSettingsSearch = shouldSetupSettingsSearch;
+function getSettingsSearchBuildId(packageJson) {
+ try {
+ const branch = process.env.BUILD_SOURCEBRANCH;
+ const branchId = branch.indexOf('/release/') >= 0 ? 0 :
+ /\/main$/.test(branch) ? 1 :
+ 2; // Some unexpected branch
+ const out = cp.execSync(`git rev-list HEAD --count`);
+ const count = parseInt(out.toString());
+ // <version number><commit count><branchId (avoid unlikely conflicts)>
+ // 1.25.1, 1,234,567 commits, main = 1250112345671
+ return util.versionStringToNumber(packageJson.version) * 1e8 + count * 10 + branchId;
+ }
+ catch (e) {
+ throw new Error('Could not determine build number: ' + e.toString());
+ }
+}
+exports.getSettingsSearchBuildId = getSettingsSearchBuildId;
+async function main() {
+ const configPath = await generateVSCodeConfigurationTask();
+ if (!configPath) {
+ return;
+ }
+ const settingsSearchBuildId = getSettingsSearchBuildId(packageJson);
+ if (!settingsSearchBuildId) {
+ throw new Error('Failed to compute build number');
+ }
+ const credential = new identity_1.ClientSecretCredential(process.env['AZURE_TENANT_ID'], process.env['AZURE_CLIENT_ID'], process.env['AZURE_CLIENT_SECRET']);
+ return new Promise((c, e) => {
+ vfs.src(configPath)
+ .pipe(azure.upload({
+ account: process.env.AZURE_STORAGE_ACCOUNT,
+ credential,
+ container: 'configuration',
+ prefix: `${settingsSearchBuildId}/${commit}/`
+ }))
+ .on('end', () => c())
+ .on('error', (err) => e(err));
+ });
+}
+if (require.main === module) {
+ main().catch(err => {
+ console.error(err);
+ process.exit(1);
+ });
+}
diff --git a/build/azure-pipelines/upload-configuration.ts b/build/azure-pipelines/upload-configuration.ts
new file mode 100644
index 00000000000..3acc337e749
--- /dev/null
+++ b/build/azure-pipelines/upload-configuration.ts
@@ -0,0 +1,132 @@
+/*---------------------------------------------------------------------------------------------
+ * Copyright (c) Microsoft Corporation. All rights reserved.
+ * Licensed under the MIT License. See License.txt in the project root for license information.
+ *--------------------------------------------------------------------------------------------*/
+
+'use strict';
+
+import * as path from 'path';
+import * as os from 'os';
+import * as cp from 'child_process';
+import * as vfs from 'vinyl-fs';
+import * as util from '../lib/util';
+import { ClientSecretCredential } from '@azure/identity';
+const azure = require('gulp-azure-storage');
+import * as packageJson from '../../package.json';
+
+const root = path.dirname(path.dirname(__dirname));
+const commit = util.getVersion(root);
+
+function generateVSCodeConfigurationTask(): Promise<string | undefined> {
+ return new Promise((resolve, reject) => {
+ const buildDir = process.env['AGENT_BUILDDIRECTORY'];
+ if (!buildDir) {
+ return reject(new Error('$AGENT_BUILDDIRECTORY not set'));
+ }
+
+ if (!shouldSetupSettingsSearch()) {
+ console.log(`Only runs on main and release branches, not ${process.env.BUILD_SOURCEBRANCH}`);
+ return resolve(undefined);
+ }
+
+ if (process.env.VSCODE_QUALITY !== 'insider' && process.env.VSCODE_QUALITY !== 'stable') {
+ console.log(`Only runs on insider and stable qualities, not ${process.env.VSCODE_QUALITY}`);
+ return resolve(undefined);
+ }
+
+ const result = path.join(os.tmpdir(), 'configuration.json');
+ const userDataDir = path.join(os.tmpdir(), 'tmpuserdata');
+ const extensionsDir = path.join(os.tmpdir(), 'tmpextdir');
+ const arch = process.env['VSCODE_ARCH'];
+ const appRoot = path.join(buildDir, `VSCode-darwin-${arch}`);
+ const appName = process.env.VSCODE_QUALITY === 'insider' ? 'Visual\\ Studio\\ Code\\ -\\ Insiders.app' : 'Visual\\ Studio\\ Code.app';
+ const appPath = path.join(appRoot, appName, 'Contents', 'Resources', 'app', 'bin', 'code');
+ const codeProc = cp.exec(
+ `${appPath} --export-default-configuration='${result}' --wait --user-data-dir='${userDataDir}' --extensions-dir='${extensionsDir}'`,
+ (err, stdout, stderr) => {
+ clearTimeout(timer);
+ if (err) {
+ console.log(`err: ${err} ${err.message} ${err.toString()}`);
+ reject(err);
+ }
+
+ if (stdout) {
+ console.log(`stdout: ${stdout}`);
+ }
+
+ if (stderr) {
+ console.log(`stderr: ${stderr}`);
+ }
+
+ resolve(result);
+ }
+ );
+ const timer = setTimeout(() => {
+ codeProc.kill();
+ reject(new Error('export-default-configuration process timed out'));
+ }, 12 * 1000);
+
+ codeProc.on('error', err => {
+ clearTimeout(timer);
+ reject(err);
+ });
+ });
+}
+
+export function shouldSetupSettingsSearch(): boolean {
+ const branch = process.env.BUILD_SOURCEBRANCH;
+ return !!(branch && (/\/main$/.test(branch) || branch.indexOf('/release/') >= 0));
+}
+
+export function getSettingsSearchBuildId(packageJson: { version: string }) {
+ try {
+ const branch = process.env.BUILD_SOURCEBRANCH!;
+ const branchId = branch.indexOf('/release/') >= 0 ? 0 :
+ /\/main$/.test(branch) ? 1 :
+ 2; // Some unexpected branch
+
+ const out = cp.execSync(`git rev-list HEAD --count`);
+ const count = parseInt(out.toString());
+
+ // <version number><commit count><branchId (avoid unlikely conflicts)>
+ // 1.25.1, 1,234,567 commits, main = 1250112345671
+ return util.versionStringToNumber(packageJson.version) * 1e8 + count * 10 + branchId;
+ } catch (e) {
+ throw new Error('Could not determine build number: ' + e.toString());
+ }
+}
+
+async function main(): Promise<void> {
+ const configPath = await generateVSCodeConfigurationTask();
+
+ if (!configPath) {
+ return;
+ }
+
+ const settingsSearchBuildId = getSettingsSearchBuildId(packageJson);
+
+ if (!settingsSearchBuildId) {
+ throw new Error('Failed to compute build number');
+ }
+
+ const credential = new ClientSecretCredential(process.env['AZURE_TENANT_ID']!, process.env['AZURE_CLIENT_ID']!, process.env['AZURE_CLIENT_SECRET']!);
+
+ return new Promise((c, e) => {
+ vfs.src(configPath)
+ .pipe(azure.upload({
+ account: process.env.AZURE_STORAGE_ACCOUNT,
+ credential,
+ container: 'configuration',
+ prefix: `${settingsSearchBuildId}/${commit}/`
+ }))
+ .on('end', () => c())
+ .on('error', (err: any) => e(err));
+ });
+}
+
+if (require.main === module) {
+ main().catch(err => {
+ console.error(err);
+ process.exit(1);
+ });
+}
diff --git a/build/azure-pipelines/upload-nlsmetadata.js b/build/azure-pipelines/upload-nlsmetadata.js
index 27c9438187f..a09d569f7f6 100644
--- a/build/azure-pipelines/upload-nlsmetadata.js
+++ b/build/azure-pipelines/upload-nlsmetadata.js
@@ -10,79 +10,88 @@ const vfs = require("vinyl-fs");
const util = require("../lib/util");
const merge = require("gulp-merge-json");
const gzip = require("gulp-gzip");
+const identity_1 = require("@azure/identity");
const azure = require('gulp-azure-storage');
const root = path.dirname(path.dirname(__dirname));
const commit = util.getVersion(root);
+const credential = new identity_1.ClientSecretCredential(process.env['AZURE_TENANT_ID'], process.env['AZURE_CLIENT_ID'], process.env['AZURE_CLIENT_SECRET']);
function main() {
- return es.merge(vfs.src('out-vscode-web-min/nls.metadata.json', { base: 'out-vscode-web-min' }), vfs.src('.build/extensions/**/nls.metadata.json', { base: '.build/extensions' }), vfs.src('.build/extensions/**/nls.metadata.header.json', { base: '.build/extensions' }), vfs.src('.build/extensions/**/package.nls.json', { base: '.build/extensions' }))
- .pipe(merge({
- fileName: 'combined.nls.metadata.json',
- jsonSpace: '',
- edit: (parsedJson, file) => {
- let key;
- if (file.base === 'out-vscode-web-min') {
- return { vscode: parsedJson };
- }
- // Handle extensions and follow the same structure as the Core nls file.
- switch (file.basename) {
- case 'package.nls.json':
- // put package.nls.json content in Core NlsMetadata format
- // language packs use the key "package" to specify that
- // translations are for the package.json file
- parsedJson = {
- messages: {
- package: Object.values(parsedJson)
- },
- keys: {
- package: Object.keys(parsedJson)
- },
- bundles: {
- main: ['package']
- }
- };
- break;
- case 'nls.metadata.header.json':
- parsedJson = { header: parsedJson };
- break;
- case 'nls.metadata.json':
- // put nls.metadata.json content in Core NlsMetadata format
- const modules = Object.keys(parsedJson);
- const json = {
- keys: {},
- messages: {},
- bundles: {
- main: []
+ return new Promise((c, e) => {
+ es.merge(vfs.src('out-vscode-web-min/nls.metadata.json', { base: 'out-vscode-web-min' }), vfs.src('.build/extensions/**/nls.metadata.json', { base: '.build/extensions' }), vfs.src('.build/extensions/**/nls.metadata.header.json', { base: '.build/extensions' }), vfs.src('.build/extensions/**/package.nls.json', { base: '.build/extensions' }))
+ .pipe(merge({
+ fileName: 'combined.nls.metadata.json',
+ jsonSpace: '',
+ edit: (parsedJson, file) => {
+ let key;
+ if (file.base === 'out-vscode-web-min') {
+ return { vscode: parsedJson };
+ }
+ // Handle extensions and follow the same structure as the Core nls file.
+ switch (file.basename) {
+ case 'package.nls.json':
+ // put package.nls.json content in Core NlsMetadata format
+ // language packs use the key "package" to specify that
+ // translations are for the package.json file
+ parsedJson = {
+ messages: {
+ package: Object.values(parsedJson)
+ },
+ keys: {
+ package: Object.keys(parsedJson)
+ },
+ bundles: {
+ main: ['package']
+ }
+ };
+ break;
+ case 'nls.metadata.header.json':
+ parsedJson = { header: parsedJson };
+ break;
+ case 'nls.metadata.json':
+ // put nls.metadata.json content in Core NlsMetadata format
+ const modules = Object.keys(parsedJson);
+ const json = {
+ keys: {},
+ messages: {},
+ bundles: {
+ main: []
+ }
+ };
+ for (const module of modules) {
+ json.messages[module] = parsedJson[module].messages;
+ json.keys[module] = parsedJson[module].keys;
+ json.bundles.main.push(module);
}
- };
- for (const module of modules) {
- json.messages[module] = parsedJson[module].messages;
- json.keys[module] = parsedJson[module].keys;
- json.bundles.main.push(module);
- }
- parsedJson = json;
- break;
+ parsedJson = json;
+ break;
+ }
+ key = 'vscode.' + file.relative.split('/')[0];
+ return { [key]: parsedJson };
+ },
+ }))
+ .pipe(gzip({ append: false }))
+ .pipe(vfs.dest('./nlsMetadata'))
+ .pipe(es.through(function (data) {
+ console.log(`Uploading ${data.path}`);
+ // trigger artifact upload
+ console.log(`##vso[artifact.upload containerfolder=nlsmetadata;artifactname=combined.nls.metadata.json]${data.path}`);
+ this.emit('data', data);
+ }))
+ .pipe(azure.upload({
+ account: process.env.AZURE_STORAGE_ACCOUNT,
+ credential,
+ container: 'nlsmetadata',
+ prefix: commit + '/',
+ contentSettings: {
+ contentEncoding: 'gzip',
+ cacheControl: 'max-age=31536000, public'
}
- key = 'vscode.' + file.relative.split('/')[0];
- return { [key]: parsedJson };
- },
- }))
- .pipe(gzip({ append: false }))
- .pipe(vfs.dest('./nlsMetadata'))
- .pipe(es.through(function (data) {
- console.log(`Uploading ${data.path}`);
- // trigger artifact upload
- console.log(`##vso[artifact.upload containerfolder=nlsmetadata;artifactname=combined.nls.metadata.json]${data.path}`);
- this.emit('data', data);
- }))
- .pipe(azure.upload({
- account: process.env.AZURE_STORAGE_ACCOUNT,
- key: process.env.AZURE_STORAGE_ACCESS_KEY,
- container: 'nlsmetadata',
- prefix: commit + '/',
- contentSettings: {
- contentEncoding: 'gzip',
- cacheControl: 'max-age=31536000, public'
- }
- }));
+ }))
+ .on('end', () => c())
+ .on('error', (err) => e(err));
+ });
}
-main();
+main().catch(err => {
+ console.error(err);
+ process.exit(1);
+});
diff --git a/build/azure-pipelines/upload-nlsmetadata.ts b/build/azure-pipelines/upload-nlsmetadata.ts
index 72a6701dddd..b3a19b218e3 100644
--- a/build/azure-pipelines/upload-nlsmetadata.ts
+++ b/build/azure-pipelines/upload-nlsmetadata.ts
@@ -12,10 +12,12 @@ import * as vfs from 'vinyl-fs';
import * as util from '../lib/util';
import * as merge from 'gulp-merge-json';
import * as gzip from 'gulp-gzip';
+import { ClientSecretCredential } from '@azure/identity';
const azure = require('gulp-azure-storage');
const root = path.dirname(path.dirname(__dirname));
const commit = util.getVersion(root);
+const credential = new ClientSecretCredential(process.env['AZURE_TENANT_ID']!, process.env['AZURE_CLIENT_ID']!, process.env['AZURE_CLIENT_SECRET']!);
interface NlsMetadata {
keys: { [module: string]: string },
@@ -23,85 +25,94 @@ interface NlsMetadata {
bundles: { [bundle: string]: string[] },
}
-function main() {
- return es.merge(
- vfs.src('out-vscode-web-min/nls.metadata.json', { base: 'out-vscode-web-min' }),
- vfs.src('.build/extensions/**/nls.metadata.json', { base: '.build/extensions' }),
- vfs.src('.build/extensions/**/nls.metadata.header.json', { base: '.build/extensions' }),
- vfs.src('.build/extensions/**/package.nls.json', { base: '.build/extensions' }))
- .pipe(merge({
- fileName: 'combined.nls.metadata.json',
- jsonSpace: '',
- edit: (parsedJson, file) => {
- let key;
- if (file.base === 'out-vscode-web-min') {
- return { vscode: parsedJson };
- }
+function main(): Promise<void> {
+ return new Promise((c, e) => {
- // Handle extensions and follow the same structure as the Core nls file.
- switch (file.basename) {
- case 'package.nls.json':
- // put package.nls.json content in Core NlsMetadata format
- // language packs use the key "package" to specify that
- // translations are for the package.json file
- parsedJson = {
- messages: {
- package: Object.values(parsedJson)
- },
- keys: {
- package: Object.keys(parsedJson)
- },
- bundles: {
- main: ['package']
- }
- };
- break;
+ es.merge(
+ vfs.src('out-vscode-web-min/nls.metadata.json', { base: 'out-vscode-web-min' }),
+ vfs.src('.build/extensions/**/nls.metadata.json', { base: '.build/extensions' }),
+ vfs.src('.build/extensions/**/nls.metadata.header.json', { base: '.build/extensions' }),
+ vfs.src('.build/extensions/**/package.nls.json', { base: '.build/extensions' }))
+ .pipe(merge({
+ fileName: 'combined.nls.metadata.json',
+ jsonSpace: '',
+ edit: (parsedJson, file) => {
+ let key;
+ if (file.base === 'out-vscode-web-min') {
+ return { vscode: parsedJson };
+ }
- case 'nls.metadata.header.json':
- parsedJson = { header: parsedJson };
- break;
+ // Handle extensions and follow the same structure as the Core nls file.
+ switch (file.basename) {
+ case 'package.nls.json':
+ // put package.nls.json content in Core NlsMetadata format
+ // language packs use the key "package" to specify that
+ // translations are for the package.json file
+ parsedJson = {
+ messages: {
+ package: Object.values(parsedJson)
+ },
+ keys: {
+ package: Object.keys(parsedJson)
+ },
+ bundles: {
+ main: ['package']
+ }
+ };
+ break;
- case 'nls.metadata.json':
- // put nls.metadata.json content in Core NlsMetadata format
- const modules = Object.keys(parsedJson);
+ case 'nls.metadata.header.json':
+ parsedJson = { header: parsedJson };
+ break;
- const json: NlsMetadata = {
- keys: {},
- messages: {},
- bundles: {
- main: []
+ case 'nls.metadata.json':
+ // put nls.metadata.json content in Core NlsMetadata format
+ const modules = Object.keys(parsedJson);
+
+ const json: NlsMetadata = {
+ keys: {},
+ messages: {},
+ bundles: {
+ main: []
+ }
+ };
+ for (const module of modules) {
+ json.messages[module] = parsedJson[module].messages;
+ json.keys[module] = parsedJson[module].keys;
+ json.bundles.main.push(module);
}
- };
- for (const module of modules) {
- json.messages[module] = parsedJson[module].messages;
- json.keys[module] = parsedJson[module].keys;
- json.bundles.main.push(module);
- }
- parsedJson = json;
- break;
+ parsedJson = json;
+ break;
+ }
+ key = 'vscode.' + file.relative.split('/')[0];
+ return { [key]: parsedJson };
+ },
+ }))
+ .pipe(gzip({ append: false }))
+ .pipe(vfs.dest('./nlsMetadata'))
+ .pipe(es.through(function (data: Vinyl) {
+ console.log(`Uploading ${data.path}`);
+ // trigger artifact upload
+ console.log(`##vso[artifact.upload containerfolder=nlsmetadata;artifactname=combined.nls.metadata.json]${data.path}`);
+ this.emit('data', data);
+ }))
+ .pipe(azure.upload({
+ account: process.env.AZURE_STORAGE_ACCOUNT,
+ credential,
+ container: 'nlsmetadata',
+ prefix: commit + '/',
+ contentSettings: {
+ contentEncoding: 'gzip',
+ cacheControl: 'max-age=31536000, public'
}
- key = 'vscode.' + file.relative.split('/')[0];
- return { [key]: parsedJson };
- },
- }))
- .pipe(gzip({ append: false }))
- .pipe(vfs.dest('./nlsMetadata'))
- .pipe(es.through(function (data: Vinyl) {
- console.log(`Uploading ${data.path}`);
- // trigger artifact upload
- console.log(`##vso[artifact.upload containerfolder=nlsmetadata;artifactname=combined.nls.metadata.json]${data.path}`);
- this.emit('data', data);
- }))
- .pipe(azure.upload({
- account: process.env.AZURE_STORAGE_ACCOUNT,
- key: process.env.AZURE_STORAGE_ACCESS_KEY,
- container: 'nlsmetadata',
- prefix: commit + '/',
- contentSettings: {
- contentEncoding: 'gzip',
- cacheControl: 'max-age=31536000, public'
- }
- }));
+ }))
+ .on('end', () => c())
+ .on('error', (err: any) => e(err));
+ });
}
-main();
+main().catch(err => {
+ console.error(err);
+ process.exit(1);
+});
+
diff --git a/build/azure-pipelines/upload-sourcemaps.js b/build/azure-pipelines/upload-sourcemaps.js
index b2a886f6c7c..4edcd2ccd70 100644
--- a/build/azure-pipelines/upload-sourcemaps.js
+++ b/build/azure-pipelines/upload-sourcemaps.js
@@ -10,9 +10,11 @@ const vfs = require("vinyl-fs");
const util = require("../lib/util");
// @ts-ignore
const deps = require("../lib/dependencies");
+const identity_1 = require("@azure/identity");
const azure = require('gulp-azure-storage');
const root = path.dirname(path.dirname(__dirname));
const commit = util.getVersion(root);
+const credential = new identity_1.ClientSecretCredential(process.env['AZURE_TENANT_ID'], process.env['AZURE_CLIENT_ID'], process.env['AZURE_CLIENT_SECRET']);
// optionally allow to pass in explicit base/maps to upload
const [, , base, maps] = process.argv;
function src(base, maps = `${base}/**/*.map`) {
@@ -40,16 +42,23 @@ function main() {
else {
sources.push(src(base, maps));
}
- return es.merge(...sources)
- .pipe(es.through(function (data) {
- console.log('Uploading Sourcemap', data.relative); // debug
- this.emit('data', data);
- }))
- .pipe(azure.upload({
- account: process.env.AZURE_STORAGE_ACCOUNT,
- key: process.env.AZURE_STORAGE_ACCESS_KEY,
- container: 'sourcemaps',
- prefix: commit + '/'
- }));
+ return new Promise((c, e) => {
+ es.merge(...sources)
+ .pipe(es.through(function (data) {
+ console.log('Uploading Sourcemap', data.relative); // debug
+ this.emit('data', data);
+ }))
+ .pipe(azure.upload({
+ account: process.env.AZURE_STORAGE_ACCOUNT,
+ credential,
+ container: 'sourcemaps',
+ prefix: commit + '/'
+ }))
+ .on('end', () => c())
+ .on('error', (err) => e(err));
+ });
}
-main();
+main().catch(err => {
+ console.error(err);
+ process.exit(1);
+});
diff --git a/build/azure-pipelines/upload-sourcemaps.ts b/build/azure-pipelines/upload-sourcemaps.ts
index 769e224e6f7..f065ff2cf38 100644
--- a/build/azure-pipelines/upload-sourcemaps.ts
+++ b/build/azure-pipelines/upload-sourcemaps.ts
@@ -12,10 +12,12 @@ import * as vfs from 'vinyl-fs';
import * as util from '../lib/util';
// @ts-ignore
import * as deps from '../lib/dependencies';
+import { ClientSecretCredential } from '@azure/identity';
const azure = require('gulp-azure-storage');
const root = path.dirname(path.dirname(__dirname));
const commit = util.getVersion(root);
+const credential = new ClientSecretCredential(process.env['AZURE_TENANT_ID']!, process.env['AZURE_CLIENT_ID']!, process.env['AZURE_CLIENT_SECRET']!);
// optionally allow to pass in explicit base/maps to upload
const [, , base, maps] = process.argv;
@@ -28,8 +30,8 @@ function src(base: string, maps = `${base}/**/*.map`) {
}));
}
-function main() {
- const sources = [];
+function main(): Promise<void> {
+ const sources: any[] = [];
// vscode client maps (default)
if (!base) {
@@ -51,17 +53,25 @@ function main() {
sources.push(src(base, maps));
}
- return es.merge(...sources)
- .pipe(es.through(function (data: Vinyl) {
- console.log('Uploading Sourcemap', data.relative); // debug
- this.emit('data', data);
- }))
- .pipe(azure.upload({
- account: process.env.AZURE_STORAGE_ACCOUNT,
- key: process.env.AZURE_STORAGE_ACCESS_KEY,
- container: 'sourcemaps',
- prefix: commit + '/'
- }));
+ return new Promise((c, e) => {
+ es.merge(...sources)
+ .pipe(es.through(function (data: Vinyl) {
+ console.log('Uploading Sourcemap', data.relative); // debug
+ this.emit('data', data);
+ }))
+ .pipe(azure.upload({
+ account: process.env.AZURE_STORAGE_ACCOUNT,
+ credential,
+ container: 'sourcemaps',
+ prefix: commit + '/'
+ }))
+ .on('end', () => c())
+ .on('error', (err: any) => e(err));
+ });
}
-main();
+main().catch(err => {
+ console.error(err);
+ process.exit(1);
+});
+
diff --git a/build/azure-pipelines/web/product-build-web.yml b/build/azure-pipelines/web/product-build-web.yml
index c12cdf063bf..2a467124141 100644
--- a/build/azure-pipelines/web/product-build-web.yml
+++ b/build/azure-pipelines/web/product-build-web.yml
@@ -8,7 +8,7 @@ steps:
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
- SecretsFilter: "github-distro-mixin-password,web-storage-account,web-storage-key,ticino-storage-key"
+ SecretsFilter: "github-distro-mixin-password"
- task: DownloadPipelineArtifact@2
inputs:
@@ -99,11 +99,24 @@ steps:
yarn gulp vscode-web-min-ci
displayName: Build
+ - task: AzureCLI@2
+ inputs:
+ azureSubscription: "vscode-builds-subscription"
+ scriptType: pscore
+ scriptLocation: inlineScript
+ addSpnToEnvironment: true
+ inlineScript: |
+ Write-Host "##vso[task.setvariable variable=AZURE_TENANT_ID]$env:tenantId"
+ Write-Host "##vso[task.setvariable variable=AZURE_CLIENT_ID]$env:servicePrincipalId"
+ Write-Host "##vso[task.setvariable variable=AZURE_CLIENT_SECRET;issecret=true]$env:servicePrincipalKey"
+
- script: |
set -e
- AZURE_STORAGE_ACCOUNT="$(web-storage-account)" \
- AZURE_STORAGE_ACCESS_KEY="$(web-storage-key)" \
- node build/azure-pipelines/upload-cdn.js
+ AZURE_STORAGE_ACCOUNT="vscodeweb" \
+ AZURE_TENANT_ID="$(AZURE_TENANT_ID)" \
+ AZURE_CLIENT_ID="$(AZURE_CLIENT_ID)" \
+ AZURE_CLIENT_SECRET="$(AZURE_CLIENT_SECRET)" \
+ node build/azure-pipelines/upload-cdn
displayName: Upload to CDN
# upload only the workbench.web.api.js source maps because
@@ -111,13 +124,19 @@ steps:
# general task to upload source maps has already been run
- script: |
set -e
- AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \
+ AZURE_STORAGE_ACCOUNT="ticino" \
+ AZURE_TENANT_ID="$(AZURE_TENANT_ID)" \
+ AZURE_CLIENT_ID="$(AZURE_CLIENT_ID)" \
+ AZURE_CLIENT_SECRET="$(AZURE_CLIENT_SECRET)" \
node build/azure-pipelines/upload-sourcemaps out-vscode-web-min out-vscode-web-min/vs/workbench/workbench.web.api.js.map
displayName: Upload sourcemaps (Web)
- script: |
set -e
- AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \
+ AZURE_STORAGE_ACCOUNT="ticino" \
+ AZURE_TENANT_ID="$(AZURE_TENANT_ID)" \
+ AZURE_CLIENT_ID="$(AZURE_CLIENT_ID)" \
+ AZURE_CLIENT_SECRET="$(AZURE_CLIENT_SECRET)" \
node build/azure-pipelines/upload-nlsmetadata
displayName: Upload NLS Metadata
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
diff --git a/build/azure-pipelines/win32/product-build-win32.yml b/build/azure-pipelines/win32/product-build-win32.yml
index f74a431c522..6ff2da9724a 100644
--- a/build/azure-pipelines/win32/product-build-win32.yml
+++ b/build/azure-pipelines/win32/product-build-win32.yml
@@ -13,7 +13,7 @@ steps:
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
- SecretsFilter: "github-distro-mixin-password,vscode-storage-key,builds-docdb-key-readwrite,ESRP-PKI,esrp-aad-username,esrp-aad-password"
+ SecretsFilter: "github-distro-mixin-password,ESRP-PKI,esrp-aad-username,esrp-aad-password"
- task: DownloadPipelineArtifact@2
inputs: