Merge from vscode 2c306f762bf9c3db82dc06c7afaa56ef46d72f79 (#14050)

* Merge from vscode 2c306f762bf9c3db82dc06c7afaa56ef46d72f79

* Fix breaks

* Extension management fixes

* Fix breaks in windows bundling

* Fix/skip failing tests

* Update distro

* Add clear to nuget.config

* Add hygiene task

* Bump distro

* Fix hygiene issue

* Add build to hygiene exclusion

* Update distro

* Update hygiene

* Hygiene exclusions

* Update tsconfig

* Bump distro for server breaks

* Update build config

* Update darwin path

* Add done calls to notebook tests

* Skip failing tests

* Disable smoke tests
This commit is contained in:
Karl Burtram
2021-02-09 16:15:05 -08:00
committed by GitHub
parent 6f192f9af5
commit ce612a3d96
1929 changed files with 68012 additions and 34564 deletions

View File

@@ -1 +1,3 @@
* text eol=lf
*.exe binary
*.dll binary

View File

@@ -8,6 +8,10 @@
**/LICENSE
**/CONTRIBUTORS
**/docs/**
**/example/**
**/examples/**
jschardet/index.js
jschardet/src/**
jschardet/dist/jschardet.js

View File

@@ -24,7 +24,7 @@ const files = [
];
async function main() {
return new Promise((resolve, reject) => {
return new Promise<void>((resolve, reject) => {
const stream = vfs.src(files, { base: '.build', allowEmpty: true })
.pipe(es.through(file => {
const filePath = path.join(process.env.BUILD_ARTIFACTSTAGINGDIRECTORY!,

View File

@@ -128,7 +128,7 @@ function createOrUpdate(commit: string, quality: string, platform: string, type:
}
async function assertContainer(blobService: azure.BlobService, quality: string): Promise<void> {
await new Promise((c, e) => blobService.createContainerIfNotExists(quality, { publicAccessLevel: 'blob' }, err => err ? e(err) : c()));
await new Promise<void>((c, e) => blobService.createContainerIfNotExists(quality, { publicAccessLevel: 'blob' }, err => err ? e(err) : c()));
}
async function doesAssetExist(blobService: azure.BlobService, quality: string, blobName: string): Promise<boolean | undefined> {
@@ -144,7 +144,7 @@ async function uploadBlob(blobService: azure.BlobService, quality: string, blobN
}
};
await new Promise((c, e) => blobService.createBlockBlobFromLocalFile(quality, blobName, file, blobOptions, err => err ? e(err) : c()));
await new Promise<void>((c, e) => blobService.createBlockBlobFromLocalFile(quality, blobName, file, blobOptions, err => err ? e(err) : c()));
}
interface PublishOptions {

View File

@@ -1,7 +1,7 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "12.14.1"
- task: NodeTool@0
inputs:
versionSpec: "12.14.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 # {{SQL CARBON EDIT}} update version
inputs:
@@ -14,10 +14,10 @@ steps:
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
- script: |
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
displayName: Install Dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- script: |
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
displayName: Install Dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
displayName: Save Cache - Node Modules # {{SQL CARBON EDIT}}
@@ -35,21 +35,21 @@ steps:
# yarn monaco-compile-check
# displayName: Run Monaco Editor Checks
- script: |
yarn valid-layers-check
displayName: Run Valid Layers Checks
- script: |
yarn valid-layers-check
displayName: Run Valid Layers Checks
- script: |
yarn compile
displayName: Compile Sources
- script: |
yarn compile
displayName: Compile Sources
# - script: | {{SQL CARBON EDIT}} remove step
# yarn download-builtin-extensions
# displayName: Download Built-in Extensions
- script: |
./scripts/test.sh --tfs "Unit Tests"
displayName: Run Unit Tests (Electron)
- script: |
./scripts/test.sh --tfs "Unit Tests"
displayName: Run Unit Tests (Electron)
# - script: | {{SQL CARBON EDIT}} disable
# yarn test-browser --browser chromium --browser webkit --browser firefox --tfs "Browser Unit Tests"
@@ -59,17 +59,17 @@ steps:
# ./scripts/test-integration.sh --tfs "Integration Tests"
# displayName: Run Integration Tests (Electron)
- task: PublishPipelineArtifact@0
inputs:
artifactName: crash-dump-macos
targetPath: .build/crashes
displayName: 'Publish Crash Reports'
continueOnError: true
condition: failed()
- task: PublishPipelineArtifact@0
inputs:
artifactName: crash-dump-macos
targetPath: .build/crashes
displayName: "Publish Crash Reports"
continueOnError: true
condition: failed()
- task: PublishTestResults@2
displayName: Publish Tests Results
inputs:
testResultsFiles: '*-results.xml'
searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
condition: succeededOrFailed()
- task: PublishTestResults@2
displayName: Publish Tests Results
inputs:
testResultsFiles: "*-results.xml"
searchFolder: "$(Build.ArtifactStagingDirectory)/test-results"
condition: succeededOrFailed()

View File

@@ -1,265 +1,337 @@
steps:
- script: |
mkdir -p .build
echo -n $BUILD_SOURCEVERSION > .build/commit
echo -n $VSCODE_QUALITY > .build/quality
displayName: Prepare cache flag
- script: |
mkdir -p .build
echo -n $BUILD_SOURCEVERSION > .build/commit
echo -n $VSCODE_QUALITY > .build/quality
echo -n $ENABLE_TERRAPIN > .build/terrapin
displayName: Prepare compilation cache flags
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .build/commit, .build/quality'
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
vstsFeed: 'npm-vscode'
platformIndependent: true
alias: 'Compilation'
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: "build/.cachesalt, .build/commit, .build/quality, .build/terrapin"
targetfolder: ".build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min"
vstsFeed: "npm-vscode"
platformIndependent: true
alias: "Compilation"
- script: |
set -e
exit 1
displayName: Check RestoreCache
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- script: |
set -e
exit 1
displayName: Check RestoreCache
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- task: NodeTool@0
inputs:
versionSpec: "12.14.1"
- task: NodeTool@0
inputs:
versionSpec: "12.14.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
inputs:
azureSubscription: 'vscode-builds-subscription'
KeyVaultName: vscode
- task: AzureKeyVault@1
displayName: "Azure Key Vault: Get Secrets"
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
- script: |
set -e
- script: |
set -e
cat << EOF > ~/.netrc
machine github.com
login vscode
password $(github-distro-mixin-password)
EOF
cat << EOF > ~/.netrc
machine github.com
login vscode
password $(github-distro-mixin-password)
EOF
git config user.email "vscode@microsoft.com"
git config user.name "VSCode"
displayName: Prepare tooling
git config user.email "vscode@microsoft.com"
git config user.name "VSCode"
displayName: Prepare tooling
- script: |
set -e
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
git fetch distro
git merge $(node -p "require('./package.json').distro")
displayName: Merge distro
- script: |
set -e
sudo xcode-select -s /Applications/Xcode_12.2.app
displayName: Switch to Xcode 12
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'arm64'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: 'npm-vscode'
- script: |
set -e
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
git fetch distro
git merge $(node -p "require('./package.json').distro")
displayName: Merge distro
- script: |
set -e
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
displayName: Install dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- script: |
npx https://aka.ms/enablesecurefeed standAlone
displayName: Switch to Terrapin packages
timeoutInMinutes: 5
condition: and(succeeded(), eq(variables['ENABLE_TERRAPIN'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: 'npm-vscode'
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- script: |
echo -n $(VSCODE_ARCH) > .build/arch
displayName: Prepare yarn cache flags
- script: |
set -e
yarn postinstall
displayName: Run postinstall scripts
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
vstsFeed: "npm-vscode"
- script: |
set -e
node build/azure-pipelines/mixin
displayName: Mix in quality
- script: |
set -e
npm install -g node-gyp@7.1.0
node-gyp --version
displayName: Update node-gyp
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- script: |
set -e
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn gulp vscode-darwin-min-ci
displayName: Build
- script: |
set -e
export npm_config_arch=$(VSCODE_ARCH)
export npm_config_node_gyp=$(which node-gyp)
export SDKROOT=/Applications/Xcode_12.2.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX11.0.sdk
export CHILD_CONCURRENCY="1"
- script: |
set -e
./scripts/test.sh --build --tfs "Unit Tests"
displayName: Run unit tests (Electron)
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
for i in {1..3}; do # try 3 times, for Terrapin
yarn --frozen-lockfile && break
if [ $i -eq 3 ]; then
echo "Yarn failed too many times" >&2
exit 1
fi
echo "Yarn failed $i, trying again..."
done
displayName: Install dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- script: |
set -e
yarn test-browser --build --browser chromium --browser webkit --browser firefox --tfs "Browser Unit Tests"
displayName: Run unit tests (Browser)
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
vstsFeed: "npm-vscode"
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- script: |
# Figure out the full absolute path of the product we just built
# including the remote server and configure the integration tests
# to run with these builds instead of running out of sources.
set -e
APP_ROOT=$(agent.builddirectory)/VSCode-darwin
APP_NAME="`ls $APP_ROOT | head -n 1`"
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME/Contents/MacOS/Electron" \
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-darwin" \
./scripts/test-integration.sh --build --tfs "Integration Tests"
displayName: Run integration tests (Electron)
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
export npm_config_arch=$(VSCODE_ARCH)
export npm_config_node_gyp=$(which node-gyp)
export SDKROOT=/Applications/Xcode_12.2.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX11.0.sdk
ls /Applications/Xcode_12.2.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/
yarn postinstall
displayName: Run postinstall scripts
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
- script: |
set -e
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-darwin" \
./resources/server/test/test-web-integration.sh --browser webkit
displayName: Run integration tests (Browser)
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
export npm_config_arch=$(VSCODE_ARCH)
export npm_config_node_gyp=$(which node-gyp)
export npm_config_build_from_source=true
export SDKROOT=/Applications/Xcode_12.2.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX11.0.sdk
ls /Applications/Xcode_12.2.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/
yarn electron-rebuild
cd ./node_modules/keytar
node-gyp rebuild
displayName: Rebuild native modules for ARM64
condition: eq(variables['VSCODE_ARCH'], 'arm64')
- script: |
set -e
APP_ROOT=$(agent.builddirectory)/VSCode-darwin
APP_NAME="`ls $APP_ROOT | head -n 1`"
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME/Contents/MacOS/Electron" \
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-darwin" \
./resources/server/test/test-remote-integration.sh
displayName: Run remote integration tests (Electron)
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
node build/azure-pipelines/mixin
displayName: Mix in quality
- script: |
set -e
APP_ROOT=$(agent.builddirectory)/VSCode-darwin
APP_NAME="`ls $APP_ROOT | head -n 1`"
yarn smoketest --build "$APP_ROOT/$APP_NAME"
continueOnError: true
displayName: Run smoke tests (Electron)
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn gulp vscode-darwin-$(VSCODE_ARCH)-min-ci
displayName: Build
- script: |
set -e
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-darwin" \
yarn smoketest --web --headless
continueOnError: true
displayName: Run smoke tests (Browser)
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn gulp vscode-reh-darwin-min-ci
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn gulp vscode-reh-web-darwin-min-ci
displayName: Build reh
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'))
- task: PublishPipelineArtifact@0
inputs:
artifactName: crash-dump-macos
targetPath: .build/crashes
displayName: 'Publish Crash Reports'
continueOnError: true
condition: failed()
- script: |
set -e
yarn electron $(VSCODE_ARCH)
displayName: Download Electron
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- task: PublishTestResults@2
displayName: Publish Tests Results
inputs:
testResultsFiles: '*-results.xml'
searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
condition: succeededOrFailed()
- script: |
set -e
security create-keychain -p pwd $(agent.tempdirectory)/buildagent.keychain
security default-keychain -s $(agent.tempdirectory)/buildagent.keychain
security unlock-keychain -p pwd $(agent.tempdirectory)/buildagent.keychain
echo "$(macos-developer-certificate)" | base64 -D > $(agent.tempdirectory)/cert.p12
security import $(agent.tempdirectory)/cert.p12 -k $(agent.tempdirectory)/buildagent.keychain -P "$(macos-developer-certificate-key)" -T /usr/bin/codesign
security set-key-partition-list -S apple-tool:,apple:,codesign: -s -k pwd $(agent.tempdirectory)/buildagent.keychain
VSCODE_ARCH="$(VSCODE_ARCH)" DEBUG=electron-osx-sign* node build/darwin/sign.js
displayName: Set Hardened Entitlements
- script: |
set -e
security create-keychain -p pwd $(agent.tempdirectory)/buildagent.keychain
security default-keychain -s $(agent.tempdirectory)/buildagent.keychain
security unlock-keychain -p pwd $(agent.tempdirectory)/buildagent.keychain
echo "$(macos-developer-certificate)" | base64 -D > $(agent.tempdirectory)/cert.p12
security import $(agent.tempdirectory)/cert.p12 -k $(agent.tempdirectory)/buildagent.keychain -P "$(macos-developer-certificate-key)" -T /usr/bin/codesign
security set-key-partition-list -S apple-tool:,apple:,codesign: -s -k pwd $(agent.tempdirectory)/buildagent.keychain
DEBUG=electron-osx-sign* node build/darwin/sign.js
displayName: Set Hardened Entitlements
- script: |
set -e
./scripts/test.sh --build --tfs "Unit Tests"
displayName: Run unit tests (Electron)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
pushd $(agent.builddirectory)/VSCode-darwin && zip -r -X -y $(agent.builddirectory)/VSCode-darwin.zip * && popd
displayName: Archive build
- script: |
set -e
yarn test-browser --build --browser chromium --browser webkit --browser firefox --tfs "Browser Unit Tests"
displayName: Run unit tests (Browser)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
inputs:
ConnectedServiceName: 'ESRP CodeSign'
FolderPath: '$(agent.builddirectory)'
Pattern: 'VSCode-darwin.zip'
signConfigType: inlineSignParams
inlineOperation: |
[
{
"keyCode": "CP-401337-Apple",
"operationSetCode": "MacAppDeveloperSign",
"parameters": [
{
"parameterName": "Hardening",
"parameterValue": "--options=runtime"
}
],
"toolName": "sign",
"toolVersion": "1.0"
}
]
SessionTimeout: 60
displayName: Codesign
- script: |
# Figure out the full absolute path of the product we just built
# including the remote server and configure the integration tests
# to run with these builds instead of running out of sources.
set -e
APP_ROOT=$(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH)
APP_NAME="`ls $APP_ROOT | head -n 1`"
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME/Contents/MacOS/Electron" \
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-darwin" \
./scripts/test-integration.sh --build --tfs "Integration Tests"
displayName: Run integration tests (Electron)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
zip -d $(agent.builddirectory)/VSCode-darwin.zip "*.pkg"
displayName: Clean Archive
- script: |
set -e
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-darwin" \
./resources/server/test/test-web-integration.sh --browser webkit
displayName: Run integration tests (Browser)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
APP_ROOT=$(agent.builddirectory)/VSCode-darwin
APP_NAME="`ls $APP_ROOT | head -n 1`"
BUNDLE_IDENTIFIER=$(node -p "require(\"$APP_ROOT/$APP_NAME/Contents/Resources/app/product.json\").darwinBundleIdentifier")
echo "##vso[task.setvariable variable=BundleIdentifier]$BUNDLE_IDENTIFIER"
displayName: Export bundle identifier
- script: |
set -e
APP_ROOT=$(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH)
APP_NAME="`ls $APP_ROOT | head -n 1`"
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME/Contents/MacOS/Electron" \
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-darwin" \
./resources/server/test/test-remote-integration.sh
displayName: Run remote integration tests (Electron)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
inputs:
ConnectedServiceName: 'ESRP CodeSign'
FolderPath: '$(agent.builddirectory)'
Pattern: 'VSCode-darwin.zip'
signConfigType: inlineSignParams
inlineOperation: |
[
{
"keyCode": "CP-401337-Apple",
"operationSetCode": "MacAppNotarize",
"parameters": [
{
"parameterName": "BundleId",
"parameterValue": "$(BundleIdentifier)"
}
],
"toolName": "sign",
"toolVersion": "1.0"
}
]
SessionTimeout: 60
displayName: Notarization
- script: |
set -e
APP_ROOT=$(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH)
APP_NAME="`ls $APP_ROOT | head -n 1`"
yarn smoketest --build "$APP_ROOT/$APP_NAME"
continueOnError: true
displayName: Run smoke tests (Electron)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
APP_ROOT=$(agent.builddirectory)/VSCode-darwin
APP_NAME="`ls $APP_ROOT | head -n 1`"
"$APP_ROOT/$APP_NAME/Contents/Resources/app/bin/code" --export-default-configuration=.build
displayName: Verify start after signing (export configuration)
- script: |
set -e
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-darwin" \
yarn smoketest --web --headless
continueOnError: true
displayName: Run smoke tests (Browser)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
./build/azure-pipelines/darwin/publish.sh
displayName: Publish
- task: PublishPipelineArtifact@0
inputs:
artifactName: crash-dump-macos-$(VSCODE_ARCH)
targetPath: .build/crashes
displayName: "Publish Crash Reports"
continueOnError: true
condition: failed()
- script: |
AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \
yarn gulp upload-vscode-configuration
displayName: Upload configuration (for Bing settings search)
continueOnError: true
- task: PublishTestResults@2
displayName: Publish Tests Results
inputs:
testResultsFiles: "*-results.xml"
searchFolder: "$(Build.ArtifactStagingDirectory)/test-results"
condition: succeededOrFailed()
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: 'Component Detection'
continueOnError: true
- script: |
set -e
pushd $(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH) && zip -r -X -y $(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH).zip * && popd
displayName: Archive build
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
inputs:
ConnectedServiceName: "ESRP CodeSign"
FolderPath: "$(agent.builddirectory)"
Pattern: "VSCode-darwin-$(VSCODE_ARCH).zip"
signConfigType: inlineSignParams
inlineOperation: |
[
{
"keyCode": "CP-401337-Apple",
"operationSetCode": "MacAppDeveloperSign",
"parameters": [
{
"parameterName": "Hardening",
"parameterValue": "--options=runtime"
}
],
"toolName": "sign",
"toolVersion": "1.0"
}
]
SessionTimeout: 60
displayName: Codesign
- script: |
zip -d $(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH).zip "*.pkg"
displayName: Clean Archive
- script: |
APP_ROOT=$(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH)
APP_NAME="`ls $APP_ROOT | head -n 1`"
BUNDLE_IDENTIFIER=$(node -p "require(\"$APP_ROOT/$APP_NAME/Contents/Resources/app/product.json\").darwinBundleIdentifier")
echo "##vso[task.setvariable variable=BundleIdentifier]$BUNDLE_IDENTIFIER"
displayName: Export bundle identifier
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
inputs:
ConnectedServiceName: "ESRP CodeSign"
FolderPath: "$(agent.builddirectory)"
Pattern: "VSCode-darwin-$(VSCODE_ARCH).zip"
signConfigType: inlineSignParams
inlineOperation: |
[
{
"keyCode": "CP-401337-Apple",
"operationSetCode": "MacAppNotarize",
"parameters": [
{
"parameterName": "BundleId",
"parameterValue": "$(BundleIdentifier)"
}
],
"toolName": "sign",
"toolVersion": "1.0"
}
]
SessionTimeout: 60
displayName: Notarization
- script: |
set -e
APP_ROOT=$(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH)
APP_NAME="`ls $APP_ROOT | head -n 1`"
"$APP_ROOT/$APP_NAME/Contents/Resources/app/bin/code" --export-default-configuration=.build
displayName: Verify start after signing (export configuration)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'))
- script: |
set -e
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
VSCODE_ARCH="$(VSCODE_ARCH)" \
./build/azure-pipelines/darwin/publish.sh
displayName: Publish
- script: |
AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \
VSCODE_ARCH="$(VSCODE_ARCH)" \
yarn gulp upload-vscode-configuration
displayName: Upload configuration (for Bing settings search)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'))
continueOnError: true
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: "Component Detection"
continueOnError: true

View File

@@ -1,19 +1,27 @@
#!/usr/bin/env bash
set -e
# Publish DEB
case $VSCODE_ARCH in
x64) ASSET_ID="darwin" ;;
arm64) ASSET_ID="darwin-arm64" ;;
esac
# publish the build
node build/azure-pipelines/common/createAsset.js \
darwin \
"$ASSET_ID" \
archive \
"VSCode-darwin-$VSCODE_QUALITY.zip" \
../VSCode-darwin.zip
"VSCode-$ASSET_ID.zip" \
../VSCode-darwin-$VSCODE_ARCH.zip
# package Remote Extension Host
pushd .. && mv vscode-reh-darwin vscode-server-darwin && zip -Xry vscode-server-darwin.zip vscode-server-darwin && popd
if [ "$VSCODE_ARCH" == "x64" ]; then
# package Remote Extension Host
pushd .. && mv vscode-reh-darwin vscode-server-darwin && zip -Xry vscode-server-darwin.zip vscode-server-darwin && popd
# publish Remote Extension Host
node build/azure-pipelines/common/createAsset.js \
server-darwin \
archive-unsigned \
"vscode-server-darwin.zip" \
../vscode-server-darwin.zip
# publish Remote Extension Host
node build/azure-pipelines/common/createAsset.js \
server-darwin \
archive-unsigned \
"vscode-server-darwin.zip" \
../vscode-server-darwin.zip
fi

View File

@@ -98,7 +98,7 @@ steps:
- script: |
set -e
yarn gulp package-rebuild-extensions
yarn gulp vscode-darwin-min-ci
yarn gulp vscode-darwin-x64-min-ci
displayName: Build
env:
VSCODE_MIXIN_PASSWORD: $(github-distro-mixin-password)
@@ -114,7 +114,7 @@ steps:
# including the remote server and configure the integration tests
# to run with these builds instead of running out of sources.
set -e
APP_ROOT=$(agent.builddirectory)/azuredatastudio-darwin
APP_ROOT=$(agent.builddirectory)/azuredatastudio-darwin-x64
APP_NAME="`ls $APP_ROOT | head -n 1`"
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME/Contents/MacOS/Electron" \
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/azuredatastudio-reh-darwin" \
@@ -122,14 +122,14 @@ steps:
displayName: Run integration tests (Electron)
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
- script: |
set -e
APP_ROOT=$(agent.builddirectory)/azuredatastudio-darwin
APP_NAME="`ls $APP_ROOT | head -n 1`"
yarn smoketest --build "$APP_ROOT/$APP_NAME" --screenshots "$(build.artifactstagingdirectory)/smokeshots" --log "$(build.artifactstagingdirectory)/logs/darwin/smoke.log"
displayName: Run smoke tests (Electron)
continueOnError: true
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
# - script: |
# set -e
# APP_ROOT=$(agent.builddirectory)/azuredatastudio-darwin-x64
# APP_NAME="`ls $APP_ROOT | head -n 1`"
# yarn smoketest --build "$APP_ROOT/$APP_NAME" --screenshots "$(build.artifactstagingdirectory)/smokeshots" --log "$(build.artifactstagingdirectory)/logs/darwin/smoke.log"
# displayName: Run smoke tests (Electron)
# continueOnError: true
# condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
# - script: |
# set -e
@@ -142,7 +142,7 @@ steps:
- script: |
set -e
pushd ../azuredatastudio-darwin
pushd ../azuredatastudio-darwin-x64
ls
echo "Cleaning the application"
@@ -172,7 +172,7 @@ steps:
- script: |
set -e
mkdir -p .build/darwin/archive
pushd ../azuredatastudio-darwin
pushd ../azuredatastudio-darwin-x64
ditto -c -k --keepParent *.app $(Build.SourcesDirectory)/.build/darwin/archive/azuredatastudio-darwin.zip
popd
displayName: 'Archive (no signing)'
@@ -181,7 +181,7 @@ steps:
- script: |
set -e
mkdir -p .build/darwin/archive
pushd ../azuredatastudio-darwin
pushd ../azuredatastudio-darwin-x64
ditto -c -k --keepParent *.app $(Build.SourcesDirectory)/.build/darwin/archive/azuredatastudio-darwin-unsigned.zip
popd
displayName: 'Archive'

View File

@@ -9,9 +9,9 @@ pr:
include: ['main', 'release/*']
steps:
- task: NodeTool@0
inputs:
versionSpec: "12.14.1"
- task: NodeTool@0
inputs:
versionSpec: "12.14.1"
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
@@ -19,8 +19,8 @@ steps:
azureSubscription: 'azuredatastudio-adointegration'
KeyVaultName: ado-secrets
- script: |
set -e
- script: |
set -e
cat << EOF > ~/.netrc
machine github.com
@@ -37,9 +37,9 @@ steps:
# Push main branch into oss/master
git push distro origin/main:refs/heads/oss/master
# Push every release branch into oss/release
git for-each-ref --format="%(refname:short)" refs/remotes/origin/release/* | sed 's/^origin\/\(.*\)$/\0:refs\/heads\/oss\/\1/' | xargs git push distro
# Push every release branch into oss/release
git for-each-ref --format="%(refname:short)" refs/remotes/origin/release/* | sed 's/^origin\/\(.*\)$/\0:refs\/heads\/oss\/\1/' | xargs git push distro
git merge $(node -p "require('./package.json').distro")
git merge $(node -p "require('./package.json').distro")
displayName: Sync & Merge Distro
displayName: Sync & Merge Distro

View File

@@ -1,40 +1,40 @@
pool:
vmImage: 'Ubuntu-16.04'
vmImage: "Ubuntu-16.04"
trigger:
branches:
include: ['main']
include: ["main"]
pr:
branches:
include: ['main']
include: ["main"]
steps:
- task: NodeTool@0
inputs:
versionSpec: "12.14.1"
- task: NodeTool@0
inputs:
versionSpec: "12.14.1"
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
inputs:
azureSubscription: 'vscode-builds-subscription'
KeyVaultName: vscode
- task: AzureKeyVault@1
displayName: "Azure Key Vault: Get Secrets"
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
- script: |
set -e
- script: |
set -e
cat << EOF > ~/.netrc
machine github.com
login vscode
password $(github-distro-mixin-password)
EOF
cat << EOF > ~/.netrc
machine github.com
login vscode
password $(github-distro-mixin-password)
EOF
git config user.email "vscode@microsoft.com"
git config user.name "VSCode"
git config user.email "vscode@microsoft.com"
git config user.name "VSCode"
git checkout origin/electron-11.x.y
git merge origin/master
git checkout origin/electron-11.x.y
git merge origin/master
# Push master branch into exploration branch
git push origin HEAD:electron-11.x.y
# Push master branch into exploration branch
git push origin HEAD:electron-11.x.y
displayName: Sync & Merge Exploration
displayName: Sync & Merge Exploration

View File

@@ -0,0 +1,5 @@
#!/usr/bin/env bash
set -e
echo "Installing remote dependencies"
(cd remote && rm -rf node_modules && yarn)

View File

@@ -0,0 +1,28 @@
#!/usr/bin/env bash
set -e
REPO="$(pwd)"
ROOT="$REPO/.."
PLATFORM_LINUX="linux-alpine"
# Publish Remote Extension Host
LEGACY_SERVER_BUILD_NAME="vscode-reh-$PLATFORM_LINUX"
SERVER_BUILD_NAME="vscode-server-$PLATFORM_LINUX"
SERVER_TARBALL_FILENAME="vscode-server-$PLATFORM_LINUX.tar.gz"
SERVER_TARBALL_PATH="$ROOT/$SERVER_TARBALL_FILENAME"
rm -rf $ROOT/vscode-server-*.tar.*
(cd $ROOT && mv $LEGACY_SERVER_BUILD_NAME $SERVER_BUILD_NAME && tar --owner=0 --group=0 -czf $SERVER_TARBALL_PATH $SERVER_BUILD_NAME)
node build/azure-pipelines/common/createAsset.js "server-$PLATFORM_LINUX" archive-unsigned "$SERVER_TARBALL_FILENAME" "$SERVER_TARBALL_PATH"
# Publish Remote Extension Host (Web)
LEGACY_SERVER_BUILD_NAME="vscode-reh-web-$PLATFORM_LINUX"
SERVER_BUILD_NAME="vscode-server-$PLATFORM_LINUX-web"
SERVER_TARBALL_FILENAME="vscode-server-$PLATFORM_LINUX-web.tar.gz"
SERVER_TARBALL_PATH="$ROOT/$SERVER_TARBALL_FILENAME"
rm -rf $ROOT/vscode-server-*.tar.*
(cd $ROOT && mv $LEGACY_SERVER_BUILD_NAME $SERVER_BUILD_NAME && tar --owner=0 --group=0 -czf $SERVER_TARBALL_PATH $SERVER_BUILD_NAME)
node build/azure-pipelines/common/createAsset.js "server-$PLATFORM_LINUX-web" archive-unsigned "$SERVER_TARBALL_FILENAME" "$SERVER_TARBALL_PATH"

View File

@@ -1,99 +1,99 @@
steps:
- script: |
set -e
sudo apt-get update
sudo apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0 libkrb5-dev #{{SQL CARBON EDIT}} add kerberos dep
sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
sudo chmod +x /etc/init.d/xvfb
sudo update-rc.d xvfb defaults
sudo service xvfb start
- script: |
set -e
sudo apt-get update
sudo apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0 libkrb5-dev #{{SQL CARBON EDIT}} add kerberos dep
sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
sudo chmod +x /etc/init.d/xvfb
sudo update-rc.d xvfb defaults
sudo service xvfb start
- task: NodeTool@0
inputs:
versionSpec: "12.14.1"
- task: NodeTool@0
inputs:
versionSpec: "12.14.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
inputs:
versionSpec: "1.x"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
inputs:
versionSpec: "1.x"
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
displayName: Restore Cache - Node Modules # {{SQL CARBON EDIT}}
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
inputs:
keyfile: "build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules"
vstsFeed: "npm-cache" # {{SQL CARBON EDIT}} update build cache
- script: |
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
displayName: Install Dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- script: |
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
displayName: Install Dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
displayName: Save Cache - Node Modules # {{SQL CARBON EDIT}}
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
inputs:
keyfile: "build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules"
vstsFeed: "npm-cache" # {{SQL CARBON EDIT}} update build cache
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- script: |
yarn electron x64
displayName: Download Electron
- script: |
yarn electron x64
displayName: Download Electron
- script: |
yarn gulp hygiene
displayName: Run Hygiene Checks
- script: |
yarn gulp hygiene
displayName: Run Hygiene Checks
- script: | # {{SQL CARBON EDIT}} add strict null check
yarn strict-vscode
displayName: Run Strict Null Check
- script: | # {{SQL CARBON EDIT}} add strict null check
yarn strict-vscode
displayName: Run Strict Null Check
# - script: | {{SQL CARBON EDIT}} remove monaco editor checks
# yarn monaco-compile-check
# displayName: Run Monaco Editor Checks
# - script: | {{SQL CARBON EDIT}} remove monaco editor checks
# yarn monaco-compile-check
# displayName: Run Monaco Editor Checks
- script: |
yarn valid-layers-check
displayName: Run Valid Layers Checks
- script: |
yarn valid-layers-check
displayName: Run Valid Layers Checks
- script: |
yarn compile
displayName: Compile Sources
- script: |
yarn compile
displayName: Compile Sources
# - script: | {{SQL CARBON EDIT}} remove step
# yarn download-builtin-extensions
# displayName: Download Built-in Extensions
# - script: | {{SQL CARBON EDIT}} remove step
# yarn download-builtin-extensions
# displayName: Download Built-in Extensions
- script: |
DISPLAY=:10 ./scripts/test.sh --tfs "Unit Tests"
displayName: Run Unit Tests (Electron)
- script: |
DISPLAY=:10 ./scripts/test.sh --tfs "Unit Tests"
displayName: Run Unit Tests (Electron)
# - script: | {{SQL CARBON EDIT}} disable
# DISPLAY=:10 yarn test-browser --browser chromium --tfs "Browser Unit Tests"
# displayName: Run Unit Tests (Browser)
# - script: | {{SQL CARBON EDIT}} disable
# DISPLAY=:10 yarn test-browser --browser chromium --tfs "Browser Unit Tests"
# displayName: Run Unit Tests (Browser)
# - script: | {{SQL CARBON EDIT}} disable
# DISPLAY=:10 ./scripts/test-integration.sh --tfs "Integration Tests"
# displayName: Run Integration Tests (Electron)
# - script: | {{SQL CARBON EDIT}} disable
# DISPLAY=:10 ./scripts/test-integration.sh --tfs "Integration Tests"
# displayName: Run Integration Tests (Electron)
# - task: PublishPipelineArtifact@0
# inputs:
# artifactName: crash-dump-linux
# targetPath: .build/crashes
# displayName: 'Publish Crash Reports'
# condition: succeededOrFailed()
# - task: PublishPipelineArtifact@0
# inputs:
# artifactName: crash-dump-linux
# targetPath: .build/crashes
# displayName: 'Publish Crash Reports'
# condition: succeededOrFailed()
- task: PublishPipelineArtifact@0
inputs:
artifactName: crash-dump-linux
targetPath: .build/crashes
displayName: 'Publish Crash Reports'
continueOnError: true
condition: failed()
- task: PublishPipelineArtifact@0
inputs:
artifactName: crash-dump-linux
targetPath: .build/crashes
displayName: "Publish Crash Reports"
continueOnError: true
condition: failed()
- task: PublishTestResults@2
displayName: Publish Tests Results
inputs:
testResultsFiles: '*-results.xml'
searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
condition: succeededOrFailed()
- task: PublishTestResults@2
displayName: Publish Tests Results
inputs:
testResultsFiles: "*-results.xml"
searchFolder: "$(Build.ArtifactStagingDirectory)/test-results"
condition: succeededOrFailed()

View File

@@ -1,3 +0,0 @@
#!/usr/bin/env bash
set -e
echo 'noop'

View File

@@ -1,3 +0,0 @@
#!/usr/bin/env bash
set -e
echo 'noop'

View File

@@ -1,3 +0,0 @@
#!/usr/bin/env bash
set -e
echo 'noop'

View File

@@ -1,3 +0,0 @@
#!/usr/bin/env bash
set -e
echo 'noop'

View File

@@ -1,3 +0,0 @@
#!/usr/bin/env bash
set -e
echo 'noop'

View File

@@ -1,3 +0,0 @@
#!/usr/bin/env bash
set -e
echo 'noop'

View File

@@ -1,3 +0,0 @@
#!/usr/bin/env bash
set -e
echo 'noop'

View File

@@ -1,3 +0,0 @@
#!/usr/bin/env bash
set -e
echo 'noop'

View File

@@ -1,3 +0,0 @@
#!/usr/bin/env bash
set -e
echo 'noop'

View File

@@ -0,0 +1,135 @@
steps:
- script: |
mkdir -p .build
echo -n $BUILD_SOURCEVERSION > .build/commit
echo -n $VSCODE_QUALITY > .build/quality
echo -n $ENABLE_TERRAPIN > .build/terrapin
displayName: Prepare compilation cache flags
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: "build/.cachesalt, .build/commit, .build/quality, .build/terrapin"
targetfolder: ".build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min"
vstsFeed: "npm-vscode"
platformIndependent: true
alias: "Compilation"
- script: |
set -e
exit 1
displayName: Check RestoreCache
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- task: NodeTool@0
inputs:
versionSpec: "12.14.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
- task: AzureKeyVault@1
displayName: "Azure Key Vault: Get Secrets"
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
- task: Docker@1
displayName: "Pull image"
inputs:
azureSubscriptionEndpoint: "vscode-builds-subscription"
azureContainerRegistry: vscodehub.azurecr.io
command: "Run an image"
imageName: "vscode-linux-build-agent:alpine"
containerCommand: uname
- script: |
set -e
cat << EOF > ~/.netrc
machine github.com
login vscode
password $(github-distro-mixin-password)
EOF
git config user.email "vscode@microsoft.com"
git config user.name "VSCode"
displayName: Prepare tooling
- script: |
set -e
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
git fetch distro
git merge $(node -p "require('./package.json').distro")
displayName: Merge distro
- script: |
npx https://aka.ms/enablesecurefeed standAlone
displayName: Switch to Terrapin packages
timeoutInMinutes: 5
condition: and(succeeded(), eq(variables['ENABLE_TERRAPIN'], 'true'))
- script: |
echo -n "alpine" > .build/arch
displayName: Prepare yarn cache flags
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
vstsFeed: "npm-vscode"
- script: |
set -e
export CHILD_CONCURRENCY="1"
for i in {1..3}; do # try 3 times, for Terrapin
yarn --frozen-lockfile && break
if [ $i -eq 3 ]; then
echo "Yarn failed too many times" >&2
exit 1
fi
echo "Yarn failed $i, trying again..."
done
displayName: Install dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
vstsFeed: "npm-vscode"
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- script: |
set -e
yarn postinstall
displayName: Run postinstall scripts
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
- script: |
set -e
node build/azure-pipelines/mixin
displayName: Mix in quality
- script: |
set -e
docker run -e VSCODE_QUALITY -e CHILD_CONCURRENCY=1 -v $(pwd):/root/vscode -v ~/.netrc:/root/.netrc vscodehub.azurecr.io/vscode-linux-build-agent:alpine /root/vscode/build/azure-pipelines/linux/alpine/install-dependencies.sh
displayName: Prebuild
- script: |
set -e
yarn gulp vscode-reh-linux-alpine-min-ci
yarn gulp vscode-reh-web-linux-alpine-min-ci
displayName: Build
- script: |
set -e
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
./build/azure-pipelines/linux/alpine/publish.sh
displayName: Publish
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: "Component Detection"
continueOnError: true

View File

@@ -1,115 +0,0 @@
steps:
- script: |
mkdir -p .build
echo -n $BUILD_SOURCEVERSION > .build/commit
echo -n $VSCODE_QUALITY > .build/quality
displayName: Prepare cache flag
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .build/commit, .build/quality'
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
vstsFeed: 'npm-vscode'
platformIndependent: true
alias: 'Compilation'
- script: |
set -e
exit 1
displayName: Check RestoreCache
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- task: NodeTool@0
inputs:
versionSpec: "12.14.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
inputs:
azureSubscription: 'vscode-builds-subscription'
KeyVaultName: vscode
- task: Docker@1
displayName: 'Pull image'
inputs:
azureSubscriptionEndpoint: 'vscode-builds-subscription'
azureContainerRegistry: vscodehub.azurecr.io
command: 'Run an image'
imageName: 'vscode-linux-build-agent:$(VSCODE_ARCH)'
containerCommand: uname
- script: |
set -e
cat << EOF > ~/.netrc
machine github.com
login vscode
password $(github-distro-mixin-password)
EOF
git config user.email "vscode@microsoft.com"
git config user.name "VSCode"
displayName: Prepare tooling
- script: |
set -e
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
git fetch distro
git merge $(node -p "require('./package.json').distro")
displayName: Merge distro
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: 'npm-vscode'
- script: |
set -e
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
displayName: Install dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: 'npm-vscode'
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- script: |
set -e
yarn postinstall
displayName: Run postinstall scripts
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
- script: |
set -e
node build/azure-pipelines/mixin
displayName: Mix in quality
- script: |
set -e
CHILD_CONCURRENCY=1 ./build/azure-pipelines/linux/multiarch/$(VSCODE_ARCH)/prebuild.sh
displayName: Prebuild
- script: |
set -e
./build/azure-pipelines/linux/multiarch/$(VSCODE_ARCH)/build.sh
displayName: Build
- script: |
set -e
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
./build/azure-pipelines/linux/multiarch/$(VSCODE_ARCH)/publish.sh
displayName: Publish
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: 'Component Detection'
continueOnError: true

View File

@@ -1,217 +1,231 @@
steps:
- script: |
mkdir -p .build
echo -n $BUILD_SOURCEVERSION > .build/commit
echo -n $VSCODE_QUALITY > .build/quality
displayName: Prepare cache flag
- script: |
mkdir -p .build
echo -n $BUILD_SOURCEVERSION > .build/commit
echo -n $VSCODE_QUALITY > .build/quality
echo -n $ENABLE_TERRAPIN > .build/terrapin
displayName: Prepare compilation cache flags
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .build/commit, .build/quality'
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
vstsFeed: 'npm-vscode'
platformIndependent: true
alias: 'Compilation'
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: "build/.cachesalt, .build/commit, .build/quality, .build/terrapin"
targetfolder: ".build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min"
vstsFeed: "npm-vscode"
platformIndependent: true
alias: "Compilation"
- script: |
set -e
exit 1
displayName: Check RestoreCache
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- script: |
set -e
exit 1
displayName: Check RestoreCache
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- task: NodeTool@0
inputs:
versionSpec: "12.14.1"
- task: NodeTool@0
inputs:
versionSpec: "12.14.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
inputs:
azureSubscription: 'vscode-builds-subscription'
KeyVaultName: vscode
- task: AzureKeyVault@1
displayName: "Azure Key Vault: Get Secrets"
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
- script: |
set -e
cat << EOF > ~/.netrc
machine github.com
login vscode
password $(github-distro-mixin-password)
EOF
- script: |
set -e
cat << EOF > ~/.netrc
machine github.com
login vscode
password $(github-distro-mixin-password)
EOF
git config user.email "vscode@microsoft.com"
git config user.name "VSCode"
displayName: Prepare tooling
git config user.email "vscode@microsoft.com"
git config user.name "VSCode"
displayName: Prepare tooling
- script: |
set -e
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
git fetch distro
git merge $(node -p "require('./package.json').distro")
displayName: Merge distro
- script: |
set -e
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
git fetch distro
git merge $(node -p "require('./package.json').distro")
displayName: Merge distro
- script: |
echo -n $VSCODE_ARCH > .build/arch
displayName: Prepare arch cache flag
- script: |
npx https://aka.ms/enablesecurefeed standAlone
displayName: Switch to Terrapin packages
timeoutInMinutes: 5
condition: and(succeeded(), eq(variables['ENABLE_TERRAPIN'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: '.build/arch, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: 'npm-vscode'
- script: |
echo -n $(VSCODE_ARCH) > .build/arch
displayName: Prepare yarn cache flags
- script: |
set -e
CHILD_CONCURRENCY=1 npm_config_arch=$(NPM_ARCH) yarn --frozen-lockfile
displayName: Install dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
vstsFeed: "npm-vscode"
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: '.build/arch, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: 'npm-vscode'
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- script: |
set -e
export npm_config_arch=$(NPM_ARCH)
export CHILD_CONCURRENCY="1"
for i in {1..3}; do # try 3 times, for Terrapin
yarn --frozen-lockfile && break
if [ $i -eq 3 ]; then
echo "Yarn failed too many times" >&2
exit 1
fi
echo "Yarn failed $i, trying again..."
done
displayName: Install dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- script: |
set -e
yarn postinstall
displayName: Run postinstall scripts
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
vstsFeed: "npm-vscode"
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- script: |
set -e
node build/azure-pipelines/mixin
displayName: Mix in quality
- script: |
set -e
yarn postinstall
displayName: Run postinstall scripts
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
- script: |
set -e
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn gulp vscode-linux-$(VSCODE_ARCH)-min-ci
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn gulp vscode-reh-linux-$(VSCODE_ARCH)-min-ci
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn gulp vscode-reh-web-linux-$(VSCODE_ARCH)-min-ci
displayName: Build
- script: |
set -e
node build/azure-pipelines/mixin
displayName: Mix in quality
- script: |
set -e
service xvfb start
displayName: Start xvfb
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn gulp vscode-linux-$(VSCODE_ARCH)-min-ci
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn gulp vscode-reh-linux-$(VSCODE_ARCH)-min-ci
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn gulp vscode-reh-web-linux-$(VSCODE_ARCH)-min-ci
displayName: Build
- script: |
set -e
DISPLAY=:10 ./scripts/test.sh --build --tfs "Unit Tests"
displayName: Run unit tests (Electron)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
service xvfb start
displayName: Start xvfb
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
DISPLAY=:10 yarn test-browser --build --browser chromium --tfs "Browser Unit Tests"
displayName: Run unit tests (Browser)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
DISPLAY=:10 ./scripts/test.sh --build --tfs "Unit Tests"
displayName: Run unit tests (Electron)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
# Figure out the full absolute path of the product we just built
# including the remote server and configure the integration tests
# to run with these builds instead of running out of sources.
set -e
APP_ROOT=$(agent.builddirectory)/VSCode-linux-$(VSCODE_ARCH)
APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-linux-$(VSCODE_ARCH)" \
DISPLAY=:10 ./scripts/test-integration.sh --build --tfs "Integration Tests"
displayName: Run integration tests (Electron)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
DISPLAY=:10 yarn test-browser --build --browser chromium --tfs "Browser Unit Tests"
displayName: Run unit tests (Browser)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-linux-$(VSCODE_ARCH)" \
DISPLAY=:10 ./resources/server/test/test-web-integration.sh --browser chromium
displayName: Run integration tests (Browser)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
# Figure out the full absolute path of the product we just built
# including the remote server and configure the integration tests
# to run with these builds instead of running out of sources.
set -e
APP_ROOT=$(agent.builddirectory)/VSCode-linux-$(VSCODE_ARCH)
APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-linux-$(VSCODE_ARCH)" \
DISPLAY=:10 ./scripts/test-integration.sh --build --tfs "Integration Tests"
displayName: Run integration tests (Electron)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
APP_ROOT=$(agent.builddirectory)/VSCode-linux-$(VSCODE_ARCH)
APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-linux-$(VSCODE_ARCH)" \
DISPLAY=:10 ./resources/server/test/test-remote-integration.sh
displayName: Run remote integration tests (Electron)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-linux-$(VSCODE_ARCH)" \
DISPLAY=:10 ./resources/server/test/test-web-integration.sh --browser chromium
displayName: Run integration tests (Browser)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- task: PublishPipelineArtifact@0
inputs:
artifactName: 'crash-dump-linux-$(VSCODE_ARCH)'
targetPath: .build/crashes
displayName: 'Publish Crash Reports'
continueOnError: true
condition: failed()
- script: |
set -e
APP_ROOT=$(agent.builddirectory)/VSCode-linux-$(VSCODE_ARCH)
APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-linux-$(VSCODE_ARCH)" \
DISPLAY=:10 ./resources/server/test/test-remote-integration.sh
displayName: Run remote integration tests (Electron)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- task: PublishTestResults@2
displayName: Publish Tests Results
inputs:
testResultsFiles: '*-results.xml'
searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
condition: succeededOrFailed()
- task: PublishPipelineArtifact@0
inputs:
artifactName: "crash-dump-linux-$(VSCODE_ARCH)"
targetPath: .build/crashes
displayName: "Publish Crash Reports"
continueOnError: true
condition: failed()
- script: |
set -e
yarn gulp "vscode-linux-$(VSCODE_ARCH)-build-deb"
yarn gulp "vscode-linux-$(VSCODE_ARCH)-build-rpm"
displayName: Build deb, rpm packages
- task: PublishTestResults@2
displayName: Publish Tests Results
inputs:
testResultsFiles: "*-results.xml"
searchFolder: "$(Build.ArtifactStagingDirectory)/test-results"
condition: and(succeededOrFailed(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
yarn gulp "vscode-linux-$(VSCODE_ARCH)-prepare-snap"
displayName: Prepare snap package
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'))
- script: |
set -e
yarn gulp "vscode-linux-$(VSCODE_ARCH)-build-deb"
yarn gulp "vscode-linux-$(VSCODE_ARCH)-build-rpm"
displayName: Build deb, rpm packages
# needed for code signing
- task: UseDotNet@2
displayName: 'Install .NET Core SDK 2.x'
inputs:
version: 2.x
- script: |
set -e
yarn gulp "vscode-linux-$(VSCODE_ARCH)-prepare-snap"
displayName: Prepare snap package
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
inputs:
ConnectedServiceName: 'ESRP CodeSign'
FolderPath: '.build/linux/rpm'
Pattern: '*.rpm'
signConfigType: inlineSignParams
inlineOperation: |
[
{
"keyCode": "CP-450779-Pgp",
"operationSetCode": "LinuxSign",
"parameters": [ ],
"toolName": "sign",
"toolVersion": "1.0"
}
]
SessionTimeout: 120
displayName: Codesign rpm
# needed for code signing
- task: UseDotNet@2
displayName: "Install .NET Core SDK 2.x"
inputs:
version: 2.x
- script: |
set -e
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
VSCODE_ARCH="$(VSCODE_ARCH)" \
./build/azure-pipelines/linux/publish.sh
displayName: Publish
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
inputs:
ConnectedServiceName: "ESRP CodeSign"
FolderPath: ".build/linux/rpm"
Pattern: "*.rpm"
signConfigType: inlineSignParams
inlineOperation: |
[
{
"keyCode": "CP-450779-Pgp",
"operationSetCode": "LinuxSign",
"parameters": [ ],
"toolName": "sign",
"toolVersion": "1.0"
}
]
SessionTimeout: 120
displayName: Codesign rpm
- task: PublishPipelineArtifact@0
displayName: 'Publish Pipeline Artifact'
inputs:
artifactName: 'snap-$(VSCODE_ARCH)'
targetPath: .build/linux/snap-tarball
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'))
- script: |
set -e
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
VSCODE_ARCH="$(VSCODE_ARCH)" \
./build/azure-pipelines/linux/publish.sh
displayName: Publish
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: 'Component Detection'
continueOnError: true
- task: PublishPipelineArtifact@0
displayName: "Publish Pipeline Artifact"
inputs:
artifactName: "snap-$(VSCODE_ARCH)"
targetPath: .build/linux/snap-tarball
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: "Component Detection"
continueOnError: true

View File

@@ -52,11 +52,9 @@ RPM_PATH="$REPO/.build/linux/rpm/$RPM_ARCH/$RPM_FILENAME"
node build/azure-pipelines/common/createAsset.js "$PLATFORM_RPM" package "$RPM_FILENAME" "$RPM_PATH"
if [ "$VSCODE_ARCH" == "x64" ]; then
# Publish Snap
# Pack snap tarball artifact, in order to preserve file perms
mkdir -p $REPO/.build/linux/snap-tarball
SNAP_TARBALL_PATH="$REPO/.build/linux/snap-tarball/snap-$VSCODE_ARCH.tar.gz"
rm -rf $SNAP_TARBALL_PATH
(cd .build/linux && tar -czf $SNAP_TARBALL_PATH snap)
fi
# Publish Snap
# Pack snap tarball artifact, in order to preserve file perms
mkdir -p $REPO/.build/linux/snap-tarball
SNAP_TARBALL_PATH="$REPO/.build/linux/snap-tarball/snap-$VSCODE_ARCH.tar.gz"
rm -rf $SNAP_TARBALL_PATH
(cd .build/linux && tar -czf $SNAP_TARBALL_PATH snap)

View File

@@ -1,52 +1,56 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "12.14.1"
- task: NodeTool@0
inputs:
versionSpec: "12.14.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
inputs:
azureSubscription: 'vscode-builds-subscription'
KeyVaultName: vscode
- task: AzureKeyVault@1
displayName: "Azure Key Vault: Get Secrets"
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
- task: DownloadPipelineArtifact@0
displayName: 'Download Pipeline Artifact'
inputs:
artifactName: snap-x64
targetPath: .build/linux/snap-tarball
- task: DownloadPipelineArtifact@0
displayName: "Download Pipeline Artifact"
inputs:
artifactName: snap-$(VSCODE_ARCH)
targetPath: .build/linux/snap-tarball
- script: |
set -e
- script: |
set -e
# Get snapcraft version
snapcraft --version
# Get snapcraft version
snapcraft --version
# Make sure we get latest packages
sudo apt-get update
sudo apt-get upgrade -y
# Make sure we get latest packages
sudo apt-get update
sudo apt-get upgrade -y
# Define variables
REPO="$(pwd)"
SNAP_ROOT="$REPO/.build/linux/snap/x64"
# Define variables
REPO="$(pwd)"
SNAP_ROOT="$REPO/.build/linux/snap/$(VSCODE_ARCH)"
# Install build dependencies
(cd build && yarn)
# Install build dependencies
(cd build && yarn)
# Unpack snap tarball artifact, in order to preserve file perms
SNAP_TARBALL_PATH="$REPO/.build/linux/snap-tarball/snap-x64.tar.gz"
(cd .build/linux && tar -xzf $SNAP_TARBALL_PATH)
# Unpack snap tarball artifact, in order to preserve file perms
SNAP_TARBALL_PATH="$REPO/.build/linux/snap-tarball/snap-$(VSCODE_ARCH).tar.gz"
(cd .build/linux && tar -xzf $SNAP_TARBALL_PATH)
# Create snap package
BUILD_VERSION="$(date +%s)"
SNAP_FILENAME="code-$VSCODE_QUALITY-$BUILD_VERSION.snap"
SNAP_PATH="$SNAP_ROOT/$SNAP_FILENAME"
(cd $SNAP_ROOT/code-* && sudo --preserve-env snapcraft snap --output "$SNAP_PATH")
# Create snap package
BUILD_VERSION="$(date +%s)"
SNAP_FILENAME="code-$VSCODE_QUALITY-$(VSCODE_ARCH)-$BUILD_VERSION.snap"
SNAP_PATH="$SNAP_ROOT/$SNAP_FILENAME"
case $(VSCODE_ARCH) in
x64) SNAPCRAFT_TARGET_ARGS="" ;;
*) SNAPCRAFT_TARGET_ARGS="--target-arch $(VSCODE_ARCH)" ;;
esac
(cd $SNAP_ROOT/code-* && sudo --preserve-env snapcraft snap $SNAPCRAFT_TARGET_ARGS --output "$SNAP_PATH")
# Publish snap package
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
node build/azure-pipelines/common/createAsset.js "linux-snap-x64" package "$SNAP_FILENAME" "$SNAP_PATH"
# Publish snap package
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
node build/azure-pipelines/common/createAsset.js "linux-snap-$(VSCODE_ARCH)" package "$SNAP_FILENAME" "$SNAP_PATH"

View File

@@ -2,158 +2,201 @@ trigger: none
pr: none
schedules:
- cron: "0 5 * * Mon-Fri"
displayName: Mon-Fri at 7:00
branches:
include:
- master
- cron: "0 5 * * Mon-Fri"
displayName: Mon-Fri at 7:00
branches:
include:
- master
resources:
containers:
- container: vscode-x64
image: vscodehub.azurecr.io/vscode-linux-build-agent:x64
endpoint: VSCodeHub
- container: vscode-arm64
image: vscodehub.azurecr.io/vscode-linux-build-agent:stretch-arm64
endpoint: VSCodeHub
- container: vscode-armhf
image: vscodehub.azurecr.io/vscode-linux-build-agent:stretch-armhf
endpoint: VSCodeHub
- container: snapcraft
image: snapcore/snapcraft:stable
- container: vscode-x64
image: vscodehub.azurecr.io/vscode-linux-build-agent:x64
endpoint: VSCodeHub
- container: vscode-arm64
image: vscodehub.azurecr.io/vscode-linux-build-agent:stretch-arm64
endpoint: VSCodeHub
- container: vscode-armhf
image: vscodehub.azurecr.io/vscode-linux-build-agent:stretch-armhf
endpoint: VSCodeHub
- container: snapcraft
image: snapcore/snapcraft:stable
stages:
- stage: Compile
jobs:
- job: Compile
pool:
vmImage: 'Ubuntu-16.04'
container: vscode-x64
steps:
- template: product-compile.yml
- stage: Compile
jobs:
- job: Compile
pool:
vmImage: "Ubuntu-16.04"
container: vscode-x64
variables:
VSCODE_ARCH: x64
steps:
- template: product-compile.yml
- stage: Windows
dependsOn:
- Compile
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'))
pool:
vmImage: VS2017-Win2016
jobs:
- job: Windows
condition: and(succeeded(), eq(variables['VSCODE_BUILD_WIN32'], 'true'))
variables:
VSCODE_ARCH: x64
steps:
- template: win32/product-build-win32.yml
- job: Windows32
condition: and(succeeded(), eq(variables['VSCODE_BUILD_WIN32_32BIT'], 'true'))
variables:
VSCODE_ARCH: ia32
steps:
- template: win32/product-build-win32.yml
- job: WindowsARM64
condition: and(succeeded(), eq(variables['VSCODE_BUILD_WIN32_ARM64'], 'true'))
variables:
VSCODE_ARCH: arm64
steps:
- template: win32/product-build-win32-arm64.yml
- stage: Linux
dependsOn:
- Compile
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'))
pool:
vmImage: 'Ubuntu-16.04'
jobs:
- job: Linux
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX'], 'true'))
container: vscode-x64
variables:
VSCODE_ARCH: x64
NPM_ARCH: x64
steps:
- template: linux/product-build-linux.yml
- job: LinuxSnap
- stage: Windows
dependsOn:
- Linux
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX'], 'true'))
container: snapcraft
variables:
VSCODE_ARCH: x64
steps:
- template: linux/snap-build-linux.yml
- Compile
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'))
pool:
vmImage: VS2017-Win2016
jobs:
- job: Windows
condition: and(succeeded(), eq(variables['VSCODE_BUILD_WIN32'], 'true'))
timeoutInMinutes: 90
variables:
VSCODE_ARCH: x64
steps:
- template: win32/product-build-win32.yml
- job: LinuxArmhf
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX_ARMHF'], 'true'))
container: vscode-armhf
variables:
VSCODE_ARCH: armhf
NPM_ARCH: armv7l
steps:
- template: linux/product-build-linux.yml
- job: Windows32
condition: and(succeeded(), eq(variables['VSCODE_BUILD_WIN32_32BIT'], 'true'))
timeoutInMinutes: 90
variables:
VSCODE_ARCH: ia32
steps:
- template: win32/product-build-win32.yml
- job: LinuxArm64
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX_ARM64'], 'true'))
container: vscode-arm64
variables:
VSCODE_ARCH: arm64
NPM_ARCH: arm64
steps:
- template: linux/product-build-linux.yml
- job: WindowsARM64
condition: and(succeeded(), eq(variables['VSCODE_BUILD_WIN32_ARM64'], 'true'))
timeoutInMinutes: 90
variables:
VSCODE_ARCH: arm64
steps:
- template: win32/product-build-win32.yml
- job: LinuxAlpine
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX_ALPINE'], 'true'))
variables:
VSCODE_ARCH: alpine
steps:
- template: linux/product-build-linux-multiarch.yml
- stage: Linux
dependsOn:
- Compile
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'))
pool:
vmImage: "Ubuntu-16.04"
jobs:
- job: Linux
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX'], 'true'))
container: vscode-x64
variables:
VSCODE_ARCH: x64
NPM_ARCH: x64
steps:
- template: linux/product-build-linux.yml
- job: LinuxWeb
condition: and(succeeded(), eq(variables['VSCODE_BUILD_WEB'], 'true'))
variables:
VSCODE_ARCH: x64
steps:
- template: web/product-build-web.yml
- job: LinuxSnap
dependsOn:
- Linux
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX'], 'true'))
container: snapcraft
variables:
VSCODE_ARCH: x64
steps:
- template: linux/snap-build-linux.yml
- stage: macOS
dependsOn:
- Compile
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'))
pool:
vmImage: macOS-latest
jobs:
- job: macOS
condition: and(succeeded(), eq(variables['VSCODE_BUILD_MACOS'], 'true'))
steps:
- template: darwin/product-build-darwin.yml
- job: LinuxArmhf
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX_ARMHF'], 'true'))
container: vscode-armhf
variables:
VSCODE_ARCH: armhf
NPM_ARCH: armv7l
steps:
- template: linux/product-build-linux.yml
- stage: Mooncake
dependsOn:
- Windows
- Linux
- macOS
condition: and(succeededOrFailed(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'))
pool:
vmImage: 'Ubuntu-16.04'
jobs:
- job: SyncMooncake
displayName: Sync Mooncake
steps:
- template: sync-mooncake.yml
- job: LinuxSnapArmhf
dependsOn:
- LinuxArmhf
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX_ARMHF'], 'true'))
container: snapcraft
variables:
VSCODE_ARCH: armhf
steps:
- template: linux/snap-build-linux.yml
- stage: Publish
dependsOn:
- Windows
- Linux
- macOS
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), or(eq(variables['VSCODE_RELEASE'], 'true'), and(or(eq(variables['VSCODE_QUALITY'], 'insider'), eq(variables['VSCODE_QUALITY'], 'exploration')), eq(variables['Build.Reason'], 'Schedule'))))
pool:
vmImage: 'Ubuntu-16.04'
jobs:
- job: BuildService
displayName: Build Service
steps:
- template: release.yml
- job: LinuxArm64
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX_ARM64'], 'true'))
container: vscode-arm64
variables:
VSCODE_ARCH: arm64
NPM_ARCH: arm64
steps:
- template: linux/product-build-linux.yml
- job: LinuxSnapArm64
dependsOn:
- LinuxArm64
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX_ARM64'], 'true'))
container: snapcraft
variables:
VSCODE_ARCH: arm64
steps:
- template: linux/snap-build-linux.yml
- job: LinuxAlpine
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX_ALPINE'], 'true'))
steps:
- template: linux/product-build-alpine.yml
- job: LinuxWeb
condition: and(succeeded(), eq(variables['VSCODE_BUILD_WEB'], 'true'))
variables:
VSCODE_ARCH: x64
steps:
- template: web/product-build-web.yml
- stage: macOS
dependsOn:
- Compile
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'))
pool:
vmImage: macOS-latest
jobs:
- job: macOS
condition: and(succeeded(), eq(variables['VSCODE_BUILD_MACOS'], 'true'))
timeoutInMinutes: 90
variables:
VSCODE_ARCH: x64
steps:
- template: darwin/product-build-darwin.yml
- stage: macOSARM64
dependsOn:
- Compile
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'))
pool:
vmImage: macOS-latest
jobs:
- job: macOSARM64
condition: and(succeeded(), eq(variables['VSCODE_BUILD_MACOS_ARM64'], 'true'))
timeoutInMinutes: 90
variables:
VSCODE_ARCH: arm64
steps:
- template: darwin/product-build-darwin.yml
- stage: Mooncake
dependsOn:
- Windows
- Linux
- macOS
- macOSARM64
condition: and(succeededOrFailed(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'))
pool:
vmImage: "Ubuntu-16.04"
jobs:
- job: SyncMooncake
displayName: Sync Mooncake
steps:
- template: sync-mooncake.yml
- stage: Publish
dependsOn:
- Windows
- Linux
- macOS
- macOSARM64
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), or(eq(variables['VSCODE_RELEASE'], 'true'), and(or(eq(variables['VSCODE_QUALITY'], 'insider'), eq(variables['VSCODE_QUALITY'], 'exploration')), eq(variables['Build.Reason'], 'Schedule'))))
pool:
vmImage: "Ubuntu-16.04"
jobs:
- job: BuildService
displayName: Build Service
steps:
- template: release.yml

View File

@@ -1,146 +1,161 @@
steps:
- script: |
mkdir -p .build
echo -n $BUILD_SOURCEVERSION > .build/commit
echo -n $VSCODE_QUALITY > .build/quality
displayName: Prepare cache flag
- script: |
mkdir -p .build
echo -n $BUILD_SOURCEVERSION > .build/commit
echo -n $VSCODE_QUALITY > .build/quality
echo -n $ENABLE_TERRAPIN > .build/terrapin
displayName: Prepare compilation cache flag
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .build/commit, .build/quality'
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
vstsFeed: 'npm-vscode'
platformIndependent: true
alias: 'Compilation'
dryRun: true
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: "build/.cachesalt, .build/commit, .build/quality, .build/terrapin"
targetfolder: ".build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min"
vstsFeed: "npm-vscode"
platformIndependent: true
alias: "Compilation"
dryRun: true
- task: NodeTool@0
inputs:
versionSpec: "12.14.1"
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- task: NodeTool@0
inputs:
versionSpec: "12.14.1"
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
inputs:
azureSubscription: 'vscode-builds-subscription'
KeyVaultName: vscode
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- task: AzureKeyVault@1
displayName: "Azure Key Vault: Get Secrets"
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- script: |
set -e
cat << EOF > ~/.netrc
machine github.com
login vscode
password $(github-distro-mixin-password)
EOF
- script: |
set -e
cat << EOF > ~/.netrc
machine github.com
login vscode
password $(github-distro-mixin-password)
EOF
git config user.email "vscode@microsoft.com"
git config user.name "VSCode"
displayName: Prepare tooling
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
git config user.email "vscode@microsoft.com"
git config user.name "VSCode"
displayName: Prepare tooling
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- script: |
set -e
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
git fetch distro
git merge $(node -p "require('./package.json').distro")
displayName: Merge distro
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- script: |
set -e
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
git fetch distro
git merge $(node -p "require('./package.json').distro")
displayName: Merge distro
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- script: |
echo -n $VSCODE_ARCH > .build/arch
displayName: Prepare arch cache flag
- script: |
npx https://aka.ms/enablesecurefeed standAlone
displayName: Switch to Terrapin packages
timeoutInMinutes: 5
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: '.build/arch, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: 'npm-vscode'
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- script: |
echo -n $(VSCODE_ARCH) > .build/arch
displayName: Prepare yarn cache flags
- script: |
set -e
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
displayName: Install dependencies
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
vstsFeed: "npm-vscode"
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: '.build/arch, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: 'npm-vscode'
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))
- script: |
set -e
export CHILD_CONCURRENCY="1"
for i in {1..3}; do # try 3 times, for Terrapin
yarn --frozen-lockfile && break
if [ $i -eq 3 ]; then
echo "Yarn failed too many times" >&2
exit 1
fi
echo "Yarn failed $i, trying again..."
done
displayName: Install dependencies
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))
- script: |
set -e
yarn postinstall
displayName: Run postinstall scripts
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'), eq(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
vstsFeed: "npm-vscode"
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))
# Mixin must run before optimize, because the CSS loader will
# inline small SVGs
- script: |
set -e
node build/azure-pipelines/mixin
displayName: Mix in quality
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- script: |
set -e
yarn postinstall
displayName: Run postinstall scripts
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'), eq(variables['CacheRestored'], 'true'))
- script: |
set -e
yarn gulp hygiene
yarn monaco-compile-check
yarn valid-layers-check
displayName: Run hygiene, monaco compile & valid layers checks
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
# Mixin must run before optimize, because the CSS loader will
# inline small SVGs
- script: |
set -e
node build/azure-pipelines/mixin
displayName: Mix in quality
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- script: |
set -
./build/azure-pipelines/common/extract-telemetry.sh
displayName: Extract Telemetry
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- script: |
set -e
yarn gulp hygiene
yarn monaco-compile-check
yarn valid-layers-check
displayName: Run hygiene, monaco compile & valid layers checks
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
AZURE_WEBVIEW_STORAGE_ACCESS_KEY="$(vscode-webview-storage-key)" \
./build/azure-pipelines/common/publish-webview.sh
displayName: Publish Webview
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- script: |
set -
./build/azure-pipelines/common/extract-telemetry.sh
displayName: Extract Telemetry
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- script: |
set -e
yarn gulp compile-build
yarn gulp compile-extensions-build
yarn gulp minify-vscode
yarn gulp vscode-reh-linux-x64-min
yarn gulp vscode-reh-web-linux-x64-min
displayName: Compile
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- script: |
set -e
AZURE_WEBVIEW_STORAGE_ACCESS_KEY="$(vscode-webview-storage-key)" \
./build/azure-pipelines/common/publish-webview.sh
displayName: Publish Webview
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- script: |
set -e
AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \
node build/azure-pipelines/upload-sourcemaps
displayName: Upload sourcemaps
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- script: |
set -e
yarn gulp compile-build
yarn gulp compile-extensions-build
yarn gulp minify-vscode
yarn gulp vscode-reh-linux-x64-min
yarn gulp vscode-reh-web-linux-x64-min
displayName: Compile
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- script: |
set -e
VERSION=`node -p "require(\"./package.json\").version"`
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
node build/azure-pipelines/common/createBuild.js $VERSION
displayName: Create build
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- script: |
set -e
AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \
node build/azure-pipelines/upload-sourcemaps
displayName: Upload sourcemaps
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: 'build/.cachesalt, .build/commit, .build/quality'
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
vstsFeed: 'npm-vscode'
platformIndependent: true
alias: 'Compilation'
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- script: |
set -e
VERSION=`node -p "require(\"./package.json\").version"`
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
node build/azure-pipelines/common/createBuild.js $VERSION
displayName: Create build
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: "build/.cachesalt, .build/commit, .build/quality, .build/terrapin"
targetfolder: ".build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min"
vstsFeed: "npm-vscode"
platformIndependent: true
alias: "Compilation"
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))

View File

@@ -2,82 +2,82 @@
trigger:
branches:
include: ['refs/tags/*']
include: ["refs/tags/*"]
pr: none
steps:
- task: NodeTool@0
inputs:
versionSpec: "12.14.1"
- task: NodeTool@0
inputs:
versionSpec: "12.14.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
- bash: |
TAG_VERSION=$(git describe --tags `git rev-list --tags --max-count=1`)
CHANNEL="G1C14HJ2F"
- bash: |
TAG_VERSION=$(git describe --tags `git rev-list --tags --max-count=1`)
CHANNEL="G1C14HJ2F"
if [ "$TAG_VERSION" == "1.999.0" ]; then
MESSAGE="<!here>. Someone pushed 1.999.0 tag. Please delete it ASAP from remote and local."
if [ "$TAG_VERSION" == "1.999.0" ]; then
MESSAGE="<!here>. Someone pushed 1.999.0 tag. Please delete it ASAP from remote and local."
curl -X POST -H "Authorization: Bearer $(SLACK_TOKEN)" \
-H 'Content-type: application/json; charset=utf-8' \
--data '{"channel":"'"$CHANNEL"'", "link_names": true, "text":"'"$MESSAGE"'"}' \
https://slack.com/api/chat.postMessage
exit 1
fi
displayName: Check 1.999.0 tag
- bash: |
# Install build dependencies
(cd build && yarn)
node build/azure-pipelines/publish-types/check-version.js
displayName: Check version
- bash: |
git config --global user.email "vscode@microsoft.com"
git config --global user.name "VSCode"
git clone https://$(GITHUB_TOKEN)@github.com/DefinitelyTyped/DefinitelyTyped.git --depth=1
node build/azure-pipelines/publish-types/update-types.js
TAG_VERSION=$(git describe --tags `git rev-list --tags --max-count=1`)
cd DefinitelyTyped
git diff --color | cat
git add -A
git status
git checkout -b "vscode-types-$TAG_VERSION"
git commit -m "VS Code $TAG_VERSION Extension API"
git push origin "vscode-types-$TAG_VERSION"
displayName: Push update to DefinitelyTyped
- bash: |
TAG_VERSION=$(git describe --tags `git rev-list --tags --max-count=1`)
CHANNEL="G1C14HJ2F"
MESSAGE="DefinitelyTyped/DefinitelyTyped#vscode-types-$TAG_VERSION created. Endgame master, please open this link, examine changes and create a PR:"
LINK="https://github.com/DefinitelyTyped/DefinitelyTyped/compare/vscode-types-$TAG_VERSION?quick_pull=1&body=Updating%20VS%20Code%20Extension%20API.%20See%20https%3A%2F%2Fgithub.com%2Fmicrosoft%2Fvscode%2Fissues%2F70175%20for%20details."
MESSAGE2="[@eamodio, @jrieken, @kmaetzel, @egamma]. Please review and merge PR to publish @types/vscode."
curl -X POST -H "Authorization: Bearer $(SLACK_TOKEN)" \
-H 'Content-type: application/json; charset=utf-8' \
--data '{"channel":"'"$CHANNEL"'", "link_names": true, "text":"'"$MESSAGE"'"}' \
https://slack.com/api/chat.postMessage
exit 1
fi
displayName: Check 1.999.0 tag
curl -X POST -H "Authorization: Bearer $(SLACK_TOKEN)" \
-H 'Content-type: application/json; charset=utf-8' \
--data '{"channel":"'"$CHANNEL"'", "link_names": true, "text":"'"$LINK"'"}' \
https://slack.com/api/chat.postMessage
- bash: |
# Install build dependencies
(cd build && yarn)
node build/azure-pipelines/publish-types/check-version.js
displayName: Check version
curl -X POST -H "Authorization: Bearer $(SLACK_TOKEN)" \
-H 'Content-type: application/json; charset=utf-8' \
--data '{"channel":"'"$CHANNEL"'", "link_names": true, "text":"'"$MESSAGE2"'"}' \
https://slack.com/api/chat.postMessage
- bash: |
git config --global user.email "vscode@microsoft.com"
git config --global user.name "VSCode"
git clone https://$(GITHUB_TOKEN)@github.com/DefinitelyTyped/DefinitelyTyped.git --depth=1
node build/azure-pipelines/publish-types/update-types.js
TAG_VERSION=$(git describe --tags `git rev-list --tags --max-count=1`)
cd DefinitelyTyped
git diff --color | cat
git add -A
git status
git checkout -b "vscode-types-$TAG_VERSION"
git commit -m "VS Code $TAG_VERSION Extension API"
git push origin "vscode-types-$TAG_VERSION"
displayName: Push update to DefinitelyTyped
- bash: |
TAG_VERSION=$(git describe --tags `git rev-list --tags --max-count=1`)
CHANNEL="G1C14HJ2F"
MESSAGE="DefinitelyTyped/DefinitelyTyped#vscode-types-$TAG_VERSION created. Endgame master, please open this link, examine changes and create a PR:"
LINK="https://github.com/DefinitelyTyped/DefinitelyTyped/compare/vscode-types-$TAG_VERSION?quick_pull=1&body=Updating%20VS%20Code%20Extension%20API.%20See%20https%3A%2F%2Fgithub.com%2Fmicrosoft%2Fvscode%2Fissues%2F70175%20for%20details."
MESSAGE2="[@eamodio, @jrieken, @kmaetzel, @egamma]. Please review and merge PR to publish @types/vscode."
curl -X POST -H "Authorization: Bearer $(SLACK_TOKEN)" \
-H 'Content-type: application/json; charset=utf-8' \
--data '{"channel":"'"$CHANNEL"'", "link_names": true, "text":"'"$MESSAGE"'"}' \
https://slack.com/api/chat.postMessage
curl -X POST -H "Authorization: Bearer $(SLACK_TOKEN)" \
-H 'Content-type: application/json; charset=utf-8' \
--data '{"channel":"'"$CHANNEL"'", "link_names": true, "text":"'"$LINK"'"}' \
https://slack.com/api/chat.postMessage
curl -X POST -H "Authorization: Bearer $(SLACK_TOKEN)" \
-H 'Content-type: application/json; charset=utf-8' \
--data '{"channel":"'"$CHANNEL"'", "link_names": true, "text":"'"$MESSAGE2"'"}' \
https://slack.com/api/chat.postMessage
displayName: Send message on Slack
displayName: Send message on Slack

View File

@@ -1,22 +1,22 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "10.x"
- task: NodeTool@0
inputs:
versionSpec: "10.x"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
inputs:
azureSubscription: 'vscode-builds-subscription'
KeyVaultName: vscode
- task: AzureKeyVault@1
displayName: "Azure Key Vault: Get Secrets"
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
- script: |
set -e
- script: |
set -e
(cd build ; yarn)
(cd build ; yarn)
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
node build/azure-pipelines/common/releaseBuild.js
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
node build/azure-pipelines/common/releaseBuild.js

View File

@@ -46,7 +46,8 @@ jobs:
steps:
- template: linux/sql-product-build-linux.yml
parameters:
extensionsToUnitTest: ["admin-tool-ext-win", "agent", "azdata", "azurecore", "cms", "dacpac", "import", "schema-compare", "notebook", "resource-deployment", "machine-learning", "sql-database-projects", "data-workspace"]
# extensionsToUnitTest: ["admin-tool-ext-win", "agent", "azdata", "azurecore", "cms", "dacpac", "import", "schema-compare", "notebook", "resource-deployment", "machine-learning", "sql-database-projects", "data-workspace"]
extensionsToUnitTest: ["admin-tool-ext-win", "agent", "azdata", "azurecore", "cms", "dacpac", "import", "schema-compare", "resource-deployment", "machine-learning", "sql-database-projects", "data-workspace"]
timeoutInMinutes: 70
- job: LinuxWeb

View File

@@ -1,24 +1,24 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "12.14.1"
- task: NodeTool@0
inputs:
versionSpec: "12.14.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
inputs:
azureSubscription: 'vscode-builds-subscription'
KeyVaultName: vscode
- task: AzureKeyVault@1
displayName: "Azure Key Vault: Get Secrets"
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
- script: |
set -e
- script: |
set -e
(cd build ; yarn)
(cd build ; yarn)
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
MOONCAKE_STORAGE_ACCESS_KEY="$(vscode-mooncake-storage-key)" \
node build/azure-pipelines/common/sync-mooncake.js "$VSCODE_QUALITY"
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
MOONCAKE_STORAGE_ACCESS_KEY="$(vscode-mooncake-storage-key)" \
node build/azure-pipelines/common/sync-mooncake.js "$VSCODE_QUALITY"

View File

@@ -0,0 +1,35 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const path = require("path");
const es = require("event-stream");
const vfs = require("vinyl-fs");
const util = require("../lib/util");
const filter = require("gulp-filter");
const gzip = require("gulp-gzip");
const azure = require('gulp-azure-storage');
const root = path.dirname(path.dirname(__dirname));
const commit = util.getVersion(root);
function main() {
return vfs.src('**', { cwd: '../vscode-web', base: '../vscode-web', dot: true })
.pipe(filter(f => !f.isDirectory()))
.pipe(gzip({ append: false }))
.pipe(es.through(function (data) {
console.log('Uploading CDN file:', data.relative); // debug
this.emit('data', data);
}))
.pipe(azure.upload({
account: process.env.AZURE_STORAGE_ACCOUNT,
key: process.env.AZURE_STORAGE_ACCESS_KEY,
container: process.env.VSCODE_QUALITY,
prefix: commit + '/',
contentSettings: {
contentEncoding: 'gzip',
cacheControl: 'max-age=31536000, public'
}
}));
}
main();

View File

@@ -0,0 +1,40 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import * as path from 'path';
import * as es from 'event-stream';
import * as Vinyl from 'vinyl';
import * as vfs from 'vinyl-fs';
import * as util from '../lib/util';
import * as filter from 'gulp-filter';
import * as gzip from 'gulp-gzip';
const azure = require('gulp-azure-storage');
const root = path.dirname(path.dirname(__dirname));
const commit = util.getVersion(root);
function main() {
return vfs.src('**', { cwd: '../vscode-web', base: '../vscode-web', dot: true })
.pipe(filter(f => !f.isDirectory()))
.pipe(gzip({ append: false }))
.pipe(es.through(function (data: Vinyl) {
console.log('Uploading CDN file:', data.relative); // debug
this.emit('data', data);
}))
.pipe(azure.upload({
account: process.env.AZURE_STORAGE_ACCOUNT,
key: process.env.AZURE_STORAGE_ACCESS_KEY,
container: process.env.VSCODE_QUALITY,
prefix: commit + '/',
contentSettings: {
contentEncoding: 'gzip',
cacheControl: 'max-age=31536000, public'
}
}));
}
main();

View File

@@ -1,106 +1,132 @@
steps:
- script: |
mkdir -p .build
echo -n $BUILD_SOURCEVERSION > .build/commit
echo -n $VSCODE_QUALITY > .build/quality
displayName: Prepare cache flag
- script: |
mkdir -p .build
echo -n $BUILD_SOURCEVERSION > .build/commit
echo -n $VSCODE_QUALITY > .build/quality
echo -n $ENABLE_TERRAPIN > .build/terrapin
displayName: Prepare compilation cache flag
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .build/commit, .build/quality'
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
vstsFeed: 'npm-vscode'
platformIndependent: true
alias: 'Compilation'
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: "build/.cachesalt, .build/commit, .build/quality, .build/terrapin"
targetfolder: ".build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min"
vstsFeed: "npm-vscode"
platformIndependent: true
alias: "Compilation"
- script: |
set -e
exit 1
displayName: Check RestoreCache
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- script: |
set -e
exit 1
displayName: Check RestoreCache
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- task: NodeTool@0
inputs:
versionSpec: "12.14.1"
- task: NodeTool@0
inputs:
versionSpec: "12.14.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
inputs:
azureSubscription: 'vscode-builds-subscription'
KeyVaultName: vscode
- task: AzureKeyVault@1
displayName: "Azure Key Vault: Get Secrets"
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
- script: |
set -e
cat << EOF > ~/.netrc
machine github.com
login vscode
password $(github-distro-mixin-password)
EOF
- script: |
set -e
cat << EOF > ~/.netrc
machine github.com
login vscode
password $(github-distro-mixin-password)
EOF
git config user.email "vscode@microsoft.com"
git config user.name "VSCode"
displayName: Prepare tooling
git config user.email "vscode@microsoft.com"
git config user.name "VSCode"
displayName: Prepare tooling
- script: |
set -e
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
git fetch distro
git merge $(node -p "require('./package.json').distro")
displayName: Merge distro
- script: |
set -e
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
git fetch distro
git merge $(node -p "require('./package.json').distro")
displayName: Merge distro
# - task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
# inputs:
# keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
# targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
# vstsFeed: 'npm-vscode'
- script: |
npx https://aka.ms/enablesecurefeed standAlone
displayName: Switch to Terrapin packages
timeoutInMinutes: 5
condition: and(succeeded(), eq(variables['ENABLE_TERRAPIN'], 'true'))
- script: |
set -e
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
displayName: Install dependencies
# condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- script: |
echo -n "web" > .build/arch
displayName: Prepare yarn cache flag
# - task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
# inputs:
# keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
# targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
# vstsFeed: 'npm-vscode'
# condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
vstsFeed: "npm-vscode"
# - script: |
# set -e
# yarn postinstall
# displayName: Run postinstall scripts
# condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
- script: |
set -e
export CHILD_CONCURRENCY="1"
for i in {1..3}; do # try 3 times, for Terrapin
yarn --frozen-lockfile && break
if [ $i -eq 3 ]; then
echo "Yarn failed too many times" >&2
exit 1
fi
echo "Yarn failed $i, trying again..."
done
displayName: Install dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- script: |
set -e
node build/azure-pipelines/mixin
displayName: Mix in quality
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
vstsFeed: "npm-vscode"
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- script: |
set -e
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn gulp vscode-web-min-ci
displayName: Build
- script: |
set -e
yarn postinstall
displayName: Run postinstall scripts
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
# upload only the workbench.web.api.js source maps because
# we just compiled these bits in the previous step and the
# general task to upload source maps has already been run
- script: |
set -e
AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \
node build/azure-pipelines/upload-sourcemaps out-vscode-web-min out-vscode-web-min/vs/workbench/workbench.web.api.js.map
displayName: Upload sourcemaps (Web)
- script: |
set -e
node build/azure-pipelines/mixin
displayName: Mix in quality
- script: |
set -e
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
./build/azure-pipelines/web/publish.sh
displayName: Publish
- script: |
set -e
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn gulp vscode-web-min-ci
displayName: Build
- script: |
set -e
AZURE_STORAGE_ACCOUNT="$(web-storage-account)" \
AZURE_STORAGE_ACCESS_KEY="$(web-storage-key)" \
node build/azure-pipelines/upload-cdn.js
displayName: Upload to CDN
# upload only the workbench.web.api.js source maps because
# we just compiled these bits in the previous step and the
# general task to upload source maps has already been run
- script: |
set -e
AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \
node build/azure-pipelines/upload-sourcemaps out-vscode-web-min out-vscode-web-min/vs/workbench/workbench.web.api.js.map
displayName: Upload sourcemaps (Web)
- script: |
set -e
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
./build/azure-pipelines/web/publish.sh
displayName: Publish

View File

@@ -1,7 +1,7 @@
<?xml version="1.0" encoding="utf-8"?>
<configuration>
<packageSources>
<clear />
<clear/>
<add key="ESRP" value="https://microsoft.pkgs.visualstudio.com/_packaging/ESRP/nuget/v3/index.json" />
</packageSources>
</configuration>

View File

@@ -1,4 +1,4 @@
<?xml version="1.0" encoding="utf-8"?>
<packages>
<package id="Microsoft.ESRPClient" version="1.2.25" />
<package id="Microsoft.ESRPClient" version="1.2.47" />
</packages>

View File

@@ -1,82 +1,82 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "12.14.1"
- task: NodeTool@0
inputs:
versionSpec: "12.14.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 # {{SQL CARBON EDIT}} update version
inputs:
versionSpec: "1.x"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 # {{SQL CARBON EDIT}} update version
inputs:
versionSpec: "1.x"
- task: UsePythonVersion@0
inputs:
versionSpec: '2.x'
addToPath: true
- task: UsePythonVersion@0
inputs:
versionSpec: "2.x"
addToPath: true
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
displayName: Restore Cache - Node Modules # {{SQL CARBON EDIT}}
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
inputs:
keyfile: "build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules"
vstsFeed: "npm-cache" # {{SQL CARBON EDIT}} update build cache
- powershell: |
yarn --frozen-lockfile
env:
CHILD_CONCURRENCY: "1"
displayName: Install Dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- powershell: |
yarn --frozen-lockfile
env:
CHILD_CONCURRENCY: "1"
displayName: Install Dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
displayName: Save Cache - Node Modules # {{SQL CARBON EDIT}}
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
inputs:
keyfile: "build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules"
vstsFeed: "npm-cache" # {{SQL CARBON EDIT}} update build cache
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- powershell: |
yarn electron
displayName: Download Electron
- powershell: |
yarn electron
displayName: Download Electron
# - powershell: | {{SQL CARBON EDIT}} remove editor check
# yarn monaco-compile-check
# displayName: Run Monaco Editor Checks
# - powershell: | {{SQL CARBON EDIT}} remove editor check
# yarn monaco-compile-check
# displayName: Run Monaco Editor Checks
- script: |
yarn valid-layers-check
displayName: Run Valid Layers Checks
- script: |
yarn valid-layers-check
displayName: Run Valid Layers Checks
- powershell: |
yarn compile
displayName: Compile Sources
- powershell: |
yarn compile
displayName: Compile Sources
# - powershell: | {{SQL CARBON EDIT}} remove step
# yarn download-builtin-extensions
# displayName: Download Built-in Extensions
# - powershell: | {{SQL CARBON EDIT}} remove step
# yarn download-builtin-extensions
# displayName: Download Built-in Extensions
- powershell: |
.\scripts\test.bat --tfs "Unit Tests"
displayName: Run Unit Tests (Electron)
- powershell: |
.\scripts\test.bat --tfs "Unit Tests"
displayName: Run Unit Tests (Electron)
# - powershell: | {{SQL CARBON EDIT}} disable
# yarn test-browser --browser chromium --browser firefox --tfs "Browser Unit Tests"
# displayName: Run Unit Tests (Browser)
# - powershell: | {{SQL CARBON EDIT}} disable
# yarn test-browser --browser chromium --browser firefox --tfs "Browser Unit Tests"
# displayName: Run Unit Tests (Browser)
# - powershell: | {{SQL CARBON EDIT}} disable
# .\scripts\test-integration.bat --tfs "Integration Tests"
# displayName: Run Integration Tests (Electron)
# - powershell: | {{SQL CARBON EDIT}} disable
# .\scripts\test-integration.bat --tfs "Integration Tests"
# displayName: Run Integration Tests (Electron)
- task: PublishPipelineArtifact@0
displayName: 'Publish Crash Reports'
inputs:
artifactName: crash-dump-windows
targetPath: .build\crashes
continueOnError: true
condition: failed()
- task: PublishPipelineArtifact@0
displayName: "Publish Crash Reports"
inputs:
artifactName: crash-dump-windows
targetPath: .build\crashes
continueOnError: true
condition: failed()
- task: PublishTestResults@2
displayName: Publish Tests Results
inputs:
testResultsFiles: '*-results.xml'
searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
condition: succeededOrFailed()
- task: PublishTestResults@2
displayName: Publish Tests Results
inputs:
testResultsFiles: "*-results.xml"
searchFolder: "$(Build.ArtifactStagingDirectory)/test-results"
condition: succeededOrFailed()

View File

@@ -1,14 +1,14 @@
Param(
[string]$AuthCertificateBase64,
[string]$AuthCertificateKey
)
param ($CertBase64)
$ErrorActionPreference = "Stop"
# Import auth certificate
$AuthCertificateFileName = [System.IO.Path]::GetTempFileName()
$AuthCertificateBytes = [Convert]::FromBase64String($AuthCertificateBase64)
[IO.File]::WriteAllBytes($AuthCertificateFileName, $AuthCertificateBytes)
$AuthCertificate = Import-PfxCertificate -FilePath $AuthCertificateFileName -CertStoreLocation Cert:\LocalMachine\My -Password (ConvertTo-SecureString $AuthCertificateKey -AsPlainText -Force)
rm $AuthCertificateFileName
$ESRPAuthCertificateSubjectName = $AuthCertificate.Subject
$CertBytes = [System.Convert]::FromBase64String($CertBase64)
$CertCollection = New-Object System.Security.Cryptography.X509Certificates.X509Certificate2Collection
$CertCollection.Import($CertBytes, $null, [System.Security.Cryptography.X509Certificates.X509KeyStorageFlags]::Exportable)
Write-Output ("##vso[task.setvariable variable=ESRPAuthCertificateSubjectName;]$ESRPAuthCertificateSubjectName")
$CertStore = New-Object System.Security.Cryptography.X509Certificates.X509Store("My","LocalMachine")
$CertStore.Open("ReadWrite")
$CertStore.AddRange($CertCollection)
$CertStore.Close()
$ESRPAuthCertificateSubjectName = $CertCollection[0].Subject
Write-Output ("##vso[task.setvariable variable=ESRPAuthCertificateSubjectName;]$ESRPAuthCertificateSubjectName")

View File

@@ -1,190 +0,0 @@
steps:
- powershell: |
mkdir .build -ea 0
"$env:BUILD_SOURCEVERSION" | Out-File -Encoding ascii -NoNewLine .build\commit
"$env:VSCODE_QUALITY" | Out-File -Encoding ascii -NoNewLine .build\quality
displayName: Prepare cache flag
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .build/commit, .build/quality'
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
vstsFeed: 'npm-vscode'
platformIndependent: true
alias: 'Compilation'
- powershell: |
$ErrorActionPreference = "Stop"
exit 1
displayName: Check RestoreCache
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- task: NodeTool@0
inputs:
versionSpec: "12.14.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
- task: UsePythonVersion@0
inputs:
versionSpec: '2.x'
addToPath: true
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
inputs:
azureSubscription: 'vscode-builds-subscription'
KeyVaultName: vscode
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
"machine github.com`nlogin vscode`npassword $(github-distro-mixin-password)" | Out-File "$env:USERPROFILE\_netrc" -Encoding ASCII
exec { git config user.email "vscode@microsoft.com" }
exec { git config user.name "VSCode" }
mkdir .build -ea 0
"$(VSCODE_ARCH)" | Out-File -Encoding ascii -NoNewLine .build\arch
displayName: Prepare tooling
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git" }
exec { git fetch distro }
exec { git merge $(node -p "require('./package.json').distro") }
displayName: Merge distro
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .build/arch, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: 'npm-vscode'
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$env:npm_config_arch="$(VSCODE_ARCH)"
$env:CHILD_CONCURRENCY="1"
exec { yarn --frozen-lockfile }
displayName: Install dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: 'build/.cachesalt, .build/arch, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: 'npm-vscode'
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn postinstall }
displayName: Run postinstall scripts
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { node build/azure-pipelines/mixin }
displayName: Mix in quality
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$env:VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)"
exec { yarn gulp "vscode-win32-$env:VSCODE_ARCH-min-ci" }
exec { yarn gulp "vscode-win32-$env:VSCODE_ARCH-code-helper" }
exec { yarn gulp "vscode-win32-$env:VSCODE_ARCH-inno-updater" }
displayName: Build
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
inputs:
ConnectedServiceName: 'ESRP CodeSign'
FolderPath: '$(agent.builddirectory)/VSCode-win32-$(VSCODE_ARCH)'
Pattern: '*.dll,*.exe,*.node'
signConfigType: inlineSignParams
inlineOperation: |
[
{
"keyCode": "CP-230012",
"operationSetCode": "SigntoolSign",
"parameters": [
{
"parameterName": "OpusName",
"parameterValue": "VS Code"
},
{
"parameterName": "OpusInfo",
"parameterValue": "https://code.visualstudio.com/"
},
{
"parameterName": "Append",
"parameterValue": "/as"
},
{
"parameterName": "FileDigest",
"parameterValue": "/fd \"SHA256\""
},
{
"parameterName": "PageHash",
"parameterValue": "/NPH"
},
{
"parameterName": "TimeStamp",
"parameterValue": "/tr \"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\" /td sha256"
}
],
"toolName": "sign",
"toolVersion": "1.0"
},
{
"keyCode": "CP-230012",
"operationSetCode": "SigntoolVerify",
"parameters": [
{
"parameterName": "VerifyAll",
"parameterValue": "/all"
}
],
"toolName": "sign",
"toolVersion": "1.0"
}
]
SessionTimeout: 120
- task: NuGetCommand@2
displayName: Install ESRPClient.exe
inputs:
restoreSolution: 'build\azure-pipelines\win32\ESRPClient\packages.config'
feedsToUse: config
nugetConfigPath: 'build\azure-pipelines\win32\ESRPClient\NuGet.config'
externalFeedCredentials: 3fc0b7f7-da09-4ae7-a9c8-d69824b1819b
restoreDirectory: packages
- task: ESRPImportCertTask@1
displayName: Import ESRP Request Signing Certificate
inputs:
ESRP: 'ESRP CodeSign'
- powershell: |
$ErrorActionPreference = "Stop"
.\build\azure-pipelines\win32\import-esrp-auth-cert.ps1 -AuthCertificateBase64 $(esrp-auth-certificate) -AuthCertificateKey $(esrp-auth-certificate-key)
displayName: Import ESRP Auth Certificate
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$env:AZURE_STORAGE_ACCESS_KEY_2 = "$(vscode-storage-key)"
$env:AZURE_DOCUMENTDB_MASTERKEY = "$(builds-docdb-key-readwrite)"
$env:VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)"
.\build\azure-pipelines\win32\publish.ps1
displayName: Publish
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: 'Component Detection'
continueOnError: true

View File

@@ -1,252 +1,272 @@
steps:
- powershell: |
mkdir .build -ea 0
"$env:BUILD_SOURCEVERSION" | Out-File -Encoding ascii -NoNewLine .build\commit
"$env:VSCODE_QUALITY" | Out-File -Encoding ascii -NoNewLine .build\quality
displayName: Prepare cache flag
- powershell: |
mkdir .build -ea 0
"$env:BUILD_SOURCEVERSION" | Out-File -Encoding ascii -NoNewLine .build\commit
"$env:VSCODE_QUALITY" | Out-File -Encoding ascii -NoNewLine .build\quality
"$env:ENABLE_TERRAPIN" | Out-File -Encoding ascii -NoNewLine .build\terrapin
displayName: Prepare compilation cache flags
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .build/commit, .build/quality'
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
vstsFeed: 'npm-vscode'
platformIndependent: true
alias: 'Compilation'
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: "build/.cachesalt, .build/commit, .build/quality, .build/terrapin"
targetfolder: ".build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min"
vstsFeed: "npm-vscode"
platformIndependent: true
alias: "Compilation"
- powershell: |
$ErrorActionPreference = "Stop"
exit 1
displayName: Check RestoreCache
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- powershell: |
$ErrorActionPreference = "Stop"
exit 1
displayName: Check RestoreCache
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- task: NodeTool@0
inputs:
versionSpec: "12.14.1"
- task: NodeTool@0
inputs:
versionSpec: "12.14.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
- task: UsePythonVersion@0
inputs:
versionSpec: '2.x'
addToPath: true
- task: UsePythonVersion@0
inputs:
versionSpec: "2.x"
addToPath: true
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
inputs:
azureSubscription: 'vscode-builds-subscription'
KeyVaultName: vscode
- task: AzureKeyVault@1
displayName: "Azure Key Vault: Get Secrets"
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
"machine github.com`nlogin vscode`npassword $(github-distro-mixin-password)" | Out-File "$env:USERPROFILE\_netrc" -Encoding ASCII
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
"machine github.com`nlogin vscode`npassword $(github-distro-mixin-password)" | Out-File "$env:USERPROFILE\_netrc" -Encoding ASCII
exec { git config user.email "vscode@microsoft.com" }
exec { git config user.name "VSCode" }
exec { git config user.email "vscode@microsoft.com" }
exec { git config user.name "VSCode" }
displayName: Prepare tooling
mkdir .build -ea 0
"$(VSCODE_ARCH)" | Out-File -Encoding ascii -NoNewLine .build\arch
displayName: Prepare tooling
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git" }
exec { git fetch distro }
exec { git merge $(node -p "require('./package.json').distro") }
displayName: Merge distro
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git" }
exec { git fetch distro }
exec { git merge $(node -p "require('./package.json').distro") }
displayName: Merge distro
- script: |
npx https://aka.ms/enablesecurefeed standAlone
displayName: Switch to Terrapin packages
timeoutInMinutes: 5
condition: and(succeeded(), eq(variables['ENABLE_TERRAPIN'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .build/arch, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: 'npm-vscode'
- powershell: |
"$(VSCODE_ARCH)" | Out-File -Encoding ascii -NoNewLine .build\arch
displayName: Prepare yarn cache flags
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$env:npm_config_arch="$(VSCODE_ARCH)"
$env:CHILD_CONCURRENCY="1"
exec { yarn --frozen-lockfile }
displayName: Install dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
vstsFeed: "npm-vscode"
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: 'build/.cachesalt, .build/arch, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: 'npm-vscode'
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
. build/azure-pipelines/win32/retry.ps1
$ErrorActionPreference = "Stop"
$env:npm_config_arch="$(VSCODE_ARCH)"
$env:CHILD_CONCURRENCY="1"
retry { exec { yarn --frozen-lockfile } }
displayName: Install dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn postinstall }
displayName: Run postinstall scripts
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
vstsFeed: "npm-vscode"
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { node build/azure-pipelines/mixin }
displayName: Mix in quality
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn postinstall }
displayName: Run postinstall scripts
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$env:VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)"
exec { yarn gulp "vscode-win32-$env:VSCODE_ARCH-min-ci" }
exec { yarn gulp "vscode-reh-win32-$env:VSCODE_ARCH-min-ci" }
exec { yarn gulp "vscode-reh-web-win32-$env:VSCODE_ARCH-min-ci" }
exec { yarn gulp "vscode-win32-$env:VSCODE_ARCH-code-helper" }
exec { yarn gulp "vscode-win32-$env:VSCODE_ARCH-inno-updater" }
displayName: Build
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { node build/azure-pipelines/mixin }
displayName: Mix in quality
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn electron $(VSCODE_ARCH) }
exec { .\scripts\test.bat --build --tfs "Unit Tests" }
displayName: Run unit tests (Electron)
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$env:VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)"
exec { yarn gulp "vscode-win32-$(VSCODE_ARCH)-min-ci" }
exec { yarn gulp "vscode-win32-$(VSCODE_ARCH)-code-helper" }
exec { yarn gulp "vscode-win32-$(VSCODE_ARCH)-inno-updater" }
echo "##vso[task.setvariable variable=CodeSigningFolderPath]$(agent.builddirectory)/VSCode-win32-$(VSCODE_ARCH)"
displayName: Build
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn test-browser --build --browser chromium --browser firefox --tfs "Browser Unit Tests" }
displayName: Run unit tests (Browser)
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$env:VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)"
exec { yarn gulp "vscode-reh-win32-$(VSCODE_ARCH)-min-ci" }
exec { yarn gulp "vscode-reh-web-win32-$(VSCODE_ARCH)-min-ci" }
echo "##vso[task.setvariable variable=CodeSigningFolderPath]$(CodeSigningFolderPath),$(agent.builddirectory)/vscode-reh-win32-$(VSCODE_ARCH)"
displayName: Build Server
condition: and(succeeded(), ne(variables['VSCODE_ARCH'], 'arm64'))
- powershell: |
# Figure out the full absolute path of the product we just built
# including the remote server and configure the integration tests
# to run with these builds instead of running out of sources.
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$AppRoot = "$(agent.builddirectory)\VSCode-win32-$(VSCODE_ARCH)"
$AppProductJson = Get-Content -Raw -Path "$AppRoot\resources\app\product.json" | ConvertFrom-Json
$AppNameShort = $AppProductJson.nameShort
exec { $env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe"; $env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\vscode-reh-win32-$(VSCODE_ARCH)"; .\scripts\test-integration.bat --build --tfs "Integration Tests" }
displayName: Run integration tests (Electron)
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn electron $(VSCODE_ARCH) }
exec { .\scripts\test.bat --build --tfs "Unit Tests" }
displayName: Run unit tests (Electron)
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { $env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\vscode-reh-web-win32-$(VSCODE_ARCH)"; .\resources\server\test\test-web-integration.bat --browser firefox }
displayName: Run integration tests (Browser)
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn test-browser --build --browser chromium --browser firefox --tfs "Browser Unit Tests" }
displayName: Run unit tests (Browser)
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$AppRoot = "$(agent.builddirectory)\VSCode-win32-$(VSCODE_ARCH)"
$AppProductJson = Get-Content -Raw -Path "$AppRoot\resources\app\product.json" | ConvertFrom-Json
$AppNameShort = $AppProductJson.nameShort
exec { $env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe"; $env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\vscode-reh-win32-$(VSCODE_ARCH)"; .\resources\server\test\test-remote-integration.bat }
displayName: Run remote integration tests (Electron)
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- powershell: |
# Figure out the full absolute path of the product we just built
# including the remote server and configure the integration tests
# to run with these builds instead of running out of sources.
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$AppRoot = "$(agent.builddirectory)\VSCode-win32-$(VSCODE_ARCH)"
$AppProductJson = Get-Content -Raw -Path "$AppRoot\resources\app\product.json" | ConvertFrom-Json
$AppNameShort = $AppProductJson.nameShort
exec { $env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe"; $env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\vscode-reh-win32-$(VSCODE_ARCH)"; .\scripts\test-integration.bat --build --tfs "Integration Tests" }
displayName: Run integration tests (Electron)
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
- task: PublishPipelineArtifact@0
inputs:
artifactName: crash-dump-windows-$(VSCODE_ARCH)
targetPath: .build\crashes
displayName: 'Publish Crash Reports'
continueOnError: true
condition: failed()
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { $env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\vscode-reh-web-win32-$(VSCODE_ARCH)"; .\resources\server\test\test-web-integration.bat --browser firefox }
displayName: Run integration tests (Browser)
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
- task: PublishTestResults@2
displayName: Publish Tests Results
inputs:
testResultsFiles: '*-results.xml'
searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
condition: succeededOrFailed()
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$AppRoot = "$(agent.builddirectory)\VSCode-win32-$(VSCODE_ARCH)"
$AppProductJson = Get-Content -Raw -Path "$AppRoot\resources\app\product.json" | ConvertFrom-Json
$AppNameShort = $AppProductJson.nameShort
exec { $env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe"; $env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\vscode-reh-win32-$(VSCODE_ARCH)"; .\resources\server\test\test-remote-integration.bat }
displayName: Run remote integration tests (Electron)
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
inputs:
ConnectedServiceName: 'ESRP CodeSign'
FolderPath: '$(agent.builddirectory)/VSCode-win32-$(VSCODE_ARCH),$(agent.builddirectory)/vscode-reh-win32-$(VSCODE_ARCH)'
Pattern: '*.dll,*.exe,*.node'
signConfigType: inlineSignParams
inlineOperation: |
[
{
"keyCode": "CP-230012",
"operationSetCode": "SigntoolSign",
"parameters": [
{
"parameterName": "OpusName",
"parameterValue": "VS Code"
},
{
"parameterName": "OpusInfo",
"parameterValue": "https://code.visualstudio.com/"
},
{
"parameterName": "Append",
"parameterValue": "/as"
},
{
"parameterName": "FileDigest",
"parameterValue": "/fd \"SHA256\""
},
{
"parameterName": "PageHash",
"parameterValue": "/NPH"
},
{
"parameterName": "TimeStamp",
"parameterValue": "/tr \"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\" /td sha256"
}
],
"toolName": "sign",
"toolVersion": "1.0"
},
{
"keyCode": "CP-230012",
"operationSetCode": "SigntoolVerify",
"parameters": [
{
"parameterName": "VerifyAll",
"parameterValue": "/all"
}
],
"toolName": "sign",
"toolVersion": "1.0"
}
]
SessionTimeout: 120
- task: PublishPipelineArtifact@0
inputs:
artifactName: crash-dump-windows-$(VSCODE_ARCH)
targetPath: .build\crashes
displayName: "Publish Crash Reports"
continueOnError: true
condition: failed()
- task: NuGetCommand@2
displayName: Install ESRPClient.exe
inputs:
restoreSolution: 'build\azure-pipelines\win32\ESRPClient\packages.config'
feedsToUse: config
nugetConfigPath: 'build\azure-pipelines\win32\ESRPClient\NuGet.config'
externalFeedCredentials: 'ESRP Nuget'
restoreDirectory: packages
- task: PublishTestResults@2
displayName: Publish Tests Results
inputs:
testResultsFiles: "*-results.xml"
searchFolder: "$(Build.ArtifactStagingDirectory)/test-results"
condition: and(succeededOrFailed(), ne(variables['VSCODE_ARCH'], 'arm64'))
- task: ESRPImportCertTask@1
displayName: Import ESRP Request Signing Certificate
inputs:
ESRP: 'ESRP CodeSign'
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
inputs:
ConnectedServiceName: "ESRP CodeSign"
FolderPath: "$(CodeSigningFolderPath)"
Pattern: "*.dll,*.exe,*.node"
signConfigType: inlineSignParams
inlineOperation: |
[
{
"keyCode": "CP-230012",
"operationSetCode": "SigntoolSign",
"parameters": [
{
"parameterName": "OpusName",
"parameterValue": "VS Code"
},
{
"parameterName": "OpusInfo",
"parameterValue": "https://code.visualstudio.com/"
},
{
"parameterName": "Append",
"parameterValue": "/as"
},
{
"parameterName": "FileDigest",
"parameterValue": "/fd \"SHA256\""
},
{
"parameterName": "PageHash",
"parameterValue": "/NPH"
},
{
"parameterName": "TimeStamp",
"parameterValue": "/tr \"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\" /td sha256"
}
],
"toolName": "sign",
"toolVersion": "1.0"
},
{
"keyCode": "CP-230012",
"operationSetCode": "SigntoolVerify",
"parameters": [
{
"parameterName": "VerifyAll",
"parameterValue": "/all"
}
],
"toolName": "sign",
"toolVersion": "1.0"
}
]
SessionTimeout: 120
- powershell: |
$ErrorActionPreference = "Stop"
.\build\azure-pipelines\win32\import-esrp-auth-cert.ps1 -AuthCertificateBase64 $(esrp-auth-certificate) -AuthCertificateKey $(esrp-auth-certificate-key)
displayName: Import ESRP Auth Certificate
- task: NuGetCommand@2
displayName: Install ESRPClient.exe
inputs:
restoreSolution: 'build\azure-pipelines\win32\ESRPClient\packages.config'
feedsToUse: config
nugetConfigPath: 'build\azure-pipelines\win32\ESRPClient\NuGet.config'
externalFeedCredentials: "ESRP Nuget"
restoreDirectory: packages
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$env:AZURE_STORAGE_ACCESS_KEY_2 = "$(vscode-storage-key)"
$env:AZURE_DOCUMENTDB_MASTERKEY = "$(builds-docdb-key-readwrite)"
$env:VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)"
.\build\azure-pipelines\win32\publish.ps1
displayName: Publish
- task: ESRPImportCertTask@1
displayName: Import ESRP Request Signing Certificate
inputs:
ESRP: "ESRP CodeSign"
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: 'Component Detection'
continueOnError: true
- task: PowerShell@2
inputs:
targetType: filePath
filePath: .\build\azure-pipelines\win32\import-esrp-auth-cert.ps1
arguments: "$(ESRP-SSL-AADAuth)"
displayName: Import ESRP Auth Certificate
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$env:AZURE_STORAGE_ACCESS_KEY_2 = "$(vscode-storage-key)"
$env:AZURE_DOCUMENTDB_MASTERKEY = "$(builds-docdb-key-readwrite)"
$env:VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)"
.\build\azure-pipelines\win32\publish.ps1
displayName: Publish
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: "Component Detection"
continueOnError: true

View File

@@ -0,0 +1,19 @@
function Retry
{
[CmdletBinding()]
param(
[Parameter(Position=0,Mandatory=1)][scriptblock]$cmd
)
$retry = 0
while ($retry++ -lt 3) {
try {
& $cmd
return
} catch {
# noop
}
}
throw "Max retries reached"
}

View File

@@ -12,6 +12,7 @@ $Auth = Create-TmpJson @{
SubjectName = $env:ESRPAuthCertificateSubjectName
StoreLocation = "LocalMachine"
StoreName = "My"
SendX5c = "true"
}
RequestSigningCert = @{
SubjectName = $env:ESRPCertificateSubjectName
@@ -67,4 +68,4 @@ $Input = Create-TmpJson @{
$Output = [System.IO.Path]::GetTempFileName()
$ScriptPath = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent
& "$ScriptPath\ESRPClient\packages\Microsoft.ESRPClient.1.2.25\tools\ESRPClient.exe" Sign -a $Auth -p $Policy -i $Input -o $Output
& "$ScriptPath\ESRPClient\packages\Microsoft.ESRPClient.*\tools\ESRPClient.exe" Sign -a $Auth -p $Policy -i $Input -o $Output

View File

@@ -11,6 +11,7 @@ const product = require("../../product.json");
async function main() {
const buildDir = process.env['AGENT_BUILDDIRECTORY'];
const tempDir = process.env['AGENT_TEMPDIRECTORY'];
const arch = process.env['VSCODE_ARCH'];
if (!buildDir) {
throw new Error('$AGENT_BUILDDIRECTORY not set');
}
@@ -18,7 +19,7 @@ async function main() {
throw new Error('$AGENT_TEMPDIRECTORY not set');
}
const baseDir = path.dirname(__dirname);
const appRoot = path.join(buildDir, 'VSCode-darwin');
const appRoot = path.join(buildDir, `VSCode-darwin-${arch}`);
const appName = product.nameLong + '.app';
const appFrameworkPath = path.join(appRoot, appName, 'Contents', 'Frameworks');
const helperAppBaseName = product.nameShort;

View File

@@ -13,6 +13,7 @@ import * as product from '../../product.json';
async function main(): Promise<void> {
const buildDir = process.env['AGENT_BUILDDIRECTORY'];
const tempDir = process.env['AGENT_TEMPDIRECTORY'];
const arch = process.env['VSCODE_ARCH'];
if (!buildDir) {
throw new Error('$AGENT_BUILDDIRECTORY not set');
@@ -23,7 +24,7 @@ async function main(): Promise<void> {
}
const baseDir = path.dirname(__dirname);
const appRoot = path.join(buildDir, 'VSCode-darwin');
const appRoot = path.join(buildDir, `VSCode-darwin-${arch}`);
const appName = product.nameLong + '.app';
const appFrameworkPath = path.join(appRoot, appName, 'Contents', 'Frameworks');
const helperAppBaseName = product.nameShort;

View File

@@ -434,7 +434,7 @@ function createTscCompileTask(watch) {
// stdio: [null, 'pipe', 'inherit']
});
let errors = [];
let reporter = createReporter();
let reporter = createReporter('monaco');
let report;
// eslint-disable-next-line no-control-regex
let magic = /[\u001b\u009b][[()#;?]*(?:[0-9]{1,4}(?:;[0-9]{0,4})*)?[0-9A-ORZcf-nqry=><]/g; // https://stackoverflow.com/questions/25245716/remove-all-ansi-colors-styles-from-strings

View File

@@ -70,7 +70,7 @@ const tasks = compilations.map(function (tsconfigFile) {
}
function createPipeline(build, emitError) {
const reporter = createReporter();
const reporter = createReporter('extensions');
overrideOptions.inlineSources = Boolean(build);
overrideOptions.base = path.dirname(absolutePath);

View File

@@ -3,484 +3,46 @@
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
const gulp = require('gulp');
const filter = require('gulp-filter');
const es = require('event-stream');
const gulpeslint = require('gulp-eslint');
const tsfmt = require('typescript-formatter');
const VinylFile = require('vinyl');
const vfs = require('vinyl-fs');
const path = require('path');
const fs = require('fs');
const pall = require('p-all');
const task = require('./lib/task');
/**
* Hygiene works by creating cascading subsets of all our files and
* passing them through a sequence of checks. Here are the current subsets,
* named according to the checks performed on them. Each subset contains
* the following one, as described in mathematical notation:
*
* all ⊃ eol ⊇ indentation ⊃ copyright ⊃ typescript
*/
const all = [
'*',
'build/**/*',
'extensions/**/*',
'scripts/**/*',
'src/**/*',
'test/**/*',
'!test/**/out/**',
'!**/node_modules/**',
'!build/actions/**/*.js' // {{ SQL CARBON EDIT }}
];
const indentationFilter = [
'**',
// except specific files
'!**/ThirdPartyNotices.txt',
'!**/LICENSE.{txt,rtf}',
'!LICENSES.chromium.html',
'!**/LICENSE',
'!src/vs/nls.js',
'!src/vs/nls.build.js',
'!src/vs/css.js',
'!src/vs/css.build.js',
'!src/vs/loader.js',
'!src/vs/base/common/insane/insane.js',
'!src/vs/base/common/marked/marked.js',
'!src/vs/base/node/terminateProcess.sh',
'!src/vs/base/node/cpuUsage.sh',
'!test/unit/assert.js',
// except specific folders
'!test/automation/out/**',
'!test/smoke/out/**',
'!extensions/typescript-language-features/test-workspace/**',
'!extensions/vscode-api-tests/testWorkspace/**',
'!extensions/vscode-api-tests/testWorkspace2/**',
'!build/monaco/**',
'!build/win32/**',
// except multiple specific files
'!**/package.json',
'!**/yarn.lock',
'!**/yarn-error.log',
// except multiple specific folders
'!**/codicon/**',
'!**/fixtures/**',
'!**/lib/**',
'!extensions/**/out/**',
'!extensions/**/snippets/**',
'!extensions/**/syntaxes/**',
'!extensions/**/themes/**',
'!extensions/**/colorize-fixtures/**',
// except specific file types
'!src/vs/*/**/*.d.ts',
'!src/typings/**/*.d.ts',
'!extensions/**/*.d.ts',
'!**/*.{svg,exe,png,bmp,scpt,bat,cmd,cur,ttf,woff,eot,md,ps1,template,yaml,yml,d.ts.recipe,ico,icns,plist}',
'!build/{lib,download,darwin}/**/*.js',
'!build/**/*.sh',
'!build/azure-pipelines/**/*.js',
'!build/azure-pipelines/**/*.config',
'!**/Dockerfile',
'!**/Dockerfile.*',
'!**/*.Dockerfile',
'!**/*.dockerfile',
'!extensions/markdown-language-features/media/*.js',
// {{SQL CARBON EDIT}}
'!**/*.gif',
'!build/actions/**/*.js',
'!**/*.{xlf,lcl,docx,sql,vsix,bacpac,ipynb,jpg}',
'!extensions/mssql/sqltoolsservice/**',
'!extensions/import/flatfileimportservice/**',
'!extensions/admin-tool-ext-win/ssmsmin/**',
'!extensions/resource-deployment/notebooks/**',
'!extensions/mssql/notebooks/**',
'!extensions/azurehybridtoolkit/notebooks/**',
'!extensions/integration-tests/testData/**',
'!extensions/arc/src/controller/generated/**',
'!extensions/sql-database-projects/resources/templates/*.xml',
'!extensions/sql-database-projects/src/test/baselines/*.xml',
'!extensions/sql-database-projects/src/test/baselines/*.json',
'!extensions/sql-database-projects/src/test/baselines/*.sqlproj',
'!extensions/sql-database-projects/BuildDirectory/SystemDacpacs/**',
'!extensions/big-data-cluster/src/bigDataCluster/controller/apiGenerated.ts',
'!extensions/big-data-cluster/src/bigDataCluster/controller/clusterApiGenerated2.ts',
'!resources/linux/snap/electron-launch'
];
const copyrightFilter = [
'**',
'!**/*.desktop',
'!**/*.json',
'!**/*.html',
'!**/*.template',
'!**/*.md',
'!**/*.bat',
'!**/*.cmd',
'!**/*.ico',
'!**/*.icns',
'!**/*.xml',
'!**/*.sh',
'!**/*.txt',
'!**/*.xpm',
'!**/*.opts',
'!**/*.disabled',
'!**/*.code-workspace',
'!**/*.js.map',
'!build/**/*.init',
'!resources/linux/snap/snapcraft.yaml',
'!resources/linux/snap/electron-launch',
'!resources/win32/bin/code.js',
'!resources/web/code-web.js',
'!resources/completions/**',
'!extensions/markdown-language-features/media/highlight.css',
'!extensions/html-language-features/server/src/modes/typescript/*',
'!extensions/*/server/bin/*',
'!src/vs/editor/test/node/classification/typescript-test.ts',
'!scripts/code-web.js',
'!resources/serverless/code-web.js',
'!src/vs/editor/test/node/classification/typescript-test.ts',
// {{SQL CARBON EDIT}}
'!extensions/notebook/src/intellisense/text.ts',
'!extensions/mssql/src/hdfs/webhdfs.ts',
'!src/sql/workbench/contrib/notebook/browser/outputs/tableRenderers.ts',
'!src/sql/workbench/contrib/notebook/common/models/url.ts',
'!src/sql/workbench/services/notebook/browser/outputs/renderMimeInterfaces.ts',
'!src/sql/workbench/contrib/notebook/browser/models/outputProcessor.ts',
'!src/sql/workbench/services/notebook/browser/outputs/mimemodel.ts',
'!src/sql/workbench/contrib/notebook/browser/cellViews/media/*.css',
'!src/sql/base/browser/ui/table/plugins/rowSelectionModel.plugin.ts',
'!src/sql/base/browser/ui/table/plugins/rowDetailView.ts',
'!src/sql/base/browser/ui/table/plugins/headerFilter.plugin.ts',
'!src/sql/base/browser/ui/table/plugins/checkboxSelectColumn.plugin.ts',
'!src/sql/base/browser/ui/table/plugins/cellSelectionModel.plugin.ts',
'!src/sql/base/browser/ui/table/plugins/autoSizeColumns.plugin.ts',
'!src/sql/workbench/services/notebook/browser/outputs/sanitizer.ts',
'!src/sql/workbench/contrib/notebook/browser/outputs/renderers.ts',
'!src/sql/workbench/services/notebook/browser/outputs/registry.ts',
'!src/sql/workbench/services/notebook/browser/outputs/factories.ts',
'!src/sql/workbench/services/notebook/common/nbformat.ts',
'!extensions/markdown-language-features/media/tomorrow.css',
'!src/sql/workbench/browser/modelComponents/media/highlight.css',
'!src/sql/workbench/contrib/notebook/electron-browser/cellViews/media/highlight.css',
'!src/sql/workbench/contrib/notebook/browser/turndownPluginGfm.ts',
'!extensions/mssql/sqltoolsservice/**',
'!extensions/import/flatfileimportservice/**',
'!extensions/notebook/src/prompts/**',
'!extensions/mssql/src/prompts/**',
'!extensions/kusto/src/prompts/**',
'!extensions/notebook/resources/jupyter_config/**',
'!extensions/azurehybridtoolkit/notebooks/**',
'!extensions/query-history/images/**',
'!extensions/sql/build/update-grammar.js',
'!**/*.gif',
'!**/*.xlf',
'!**/*.dacpac',
'!**/*.bacpac',
'!**/*.py'
];
const jsHygieneFilter = [
'src/**/*.js',
'build/gulpfile.*.js',
'!src/vs/loader.js',
'!src/vs/css.js',
'!src/vs/nls.js',
'!src/vs/css.build.js',
'!src/vs/nls.build.js',
'!src/**/insane.js',
'!src/**/marked.js',
'!**/test/**'
];
const tsHygieneFilter = [
'src/**/*.ts',
'test/**/*.ts',
'extensions/**/*.ts',
'!**/fixtures/**',
'!**/typings/**',
'!**/node_modules/**',
'!extensions/typescript-basics/test/colorize-fixtures/**',
'!extensions/vscode-api-tests/testWorkspace/**',
'!extensions/vscode-api-tests/testWorkspace2/**',
'!extensions/**/*.test.ts',
'!extensions/html-language-features/server/lib/jquery.d.ts',
'!extensions/big-data-cluster/src/bigDataCluster/controller/apiGenerated.ts', // {{SQL CARBON EDIT}},
'!extensions/big-data-cluster/src/bigDataCluster/controller/tokenApiGenerated.ts', // {{SQL CARBON EDIT}},
'!src/vs/workbench/services/themes/common/textMateScopeMatcher.ts', // {{SQL CARBON EDIT}} skip this because we have no plans on touching this and its not ours
'!src/vs/workbench/contrib/extensions/browser/extensionRecommendationsService.ts' // {{SQL CARBON EDIT}} skip this because known issue
];
const copyrightHeaderLines = [
'/*---------------------------------------------------------------------------------------------',
' * Copyright (c) Microsoft Corporation. All rights reserved.',
' * Licensed under the Source EULA. See License.txt in the project root for license information.',
' *--------------------------------------------------------------------------------------------*/'
];
gulp.task('eslint', () => {
return vfs.src(all, { base: '.', follow: true, allowEmpty: true })
.pipe(filter(jsHygieneFilter.concat(tsHygieneFilter)))
.pipe(gulpeslint({
configFile: '.eslintrc.json',
rulePaths: ['./build/lib/eslint']
}))
.pipe(gulpeslint.formatEach('compact'))
.pipe(gulpeslint.results(results => {
if (results.warningCount > 0 || results.errorCount > 0) {
throw new Error('eslint failed with warnings and/or errors');
}
}));
});
const { hygiene } = require('./hygiene');
function checkPackageJSON(actualPath) {
const actual = require(path.join(__dirname, '..', actualPath));
const rootPackageJSON = require('../package.json');
const checkIncluded = (set1, set2) => {
for (let depName in set1) {
const depVersion = set1[depName];
const rootDepVersion = set2[depName];
if (!rootDepVersion) {
// missing in root is allowed
continue;
}
if (depVersion !== rootDepVersion) {
this.emit(
'error',
`The dependency ${depName} in '${actualPath}' (${depVersion}) is different than in the root package.json (${rootDepVersion})`
);
}
}
};
for (let depName in actual.dependencies) {
const depVersion = actual.dependencies[depName];
const rootDepVersion = rootPackageJSON.dependencies[depName];
if (!rootDepVersion) {
// missing in root is allowed
continue;
}
if (depVersion !== rootDepVersion) {
this.emit('error', `The dependency ${depName} in '${actualPath}' (${depVersion}) is different than in the root package.json (${rootDepVersion})`);
}
}
checkIncluded(actual.dependencies, rootPackageJSON.dependencies);
checkIncluded(actual.devDependencies, rootPackageJSON.devDependencies);
}
const checkPackageJSONTask = task.define('check-package-json', () => {
return gulp.src('package.json')
.pipe(es.through(function () {
return gulp.src('package.json').pipe(
es.through(function () {
checkPackageJSON.call(this, 'remote/package.json');
checkPackageJSON.call(this, 'remote/web/package.json');
}));
checkPackageJSON.call(this, 'build/package.json');
})
);
});
gulp.task(checkPackageJSONTask);
function hygiene(some) {
let errorCount = 0;
const productJson = es.through(function (file) {
// const product = JSON.parse(file.contents.toString('utf8'));
// if (product.extensionsGallery) { // {{SQL CARBON EDIT}} @todo we need to research on what the point of this is
// console.error('product.json: Contains "extensionsGallery"');
// errorCount++;
// }
this.emit('data', file);
});
const indentation = es.through(function (file) {
const lines = file.contents.toString('utf8').split(/\r\n|\r|\n/);
file.__lines = lines;
lines
.forEach((line, i) => {
if (/^\s*$/.test(line)) {
// empty or whitespace lines are OK
} else if (/^[\t]*[^\s]/.test(line)) {
// good indent
} else if (/^[\t]* \*/.test(line)) {
// block comment using an extra space
} else {
console.error(file.relative + '(' + (i + 1) + ',1): Bad whitespace indentation');
errorCount++;
}
});
this.emit('data', file);
});
const copyrights = es.through(function (file) {
const lines = file.__lines;
for (let i = 0; i < copyrightHeaderLines.length; i++) {
if (lines[i] !== copyrightHeaderLines[i]) {
console.error(file.relative + ': Missing or bad copyright statement');
errorCount++;
break;
}
}
this.emit('data', file);
});
const formatting = es.map(function (file, cb) {
tsfmt.processString(file.path, file.contents.toString('utf8'), {
verify: false,
tsfmt: true,
// verbose: true,
// keep checkJS happy
editorconfig: undefined,
replace: undefined,
tsconfig: undefined,
tsconfigFile: undefined,
tsfmtFile: undefined,
vscode: undefined,
vscodeFile: undefined
}).then(result => {
let original = result.src.replace(/\r\n/gm, '\n');
let formatted = result.dest.replace(/\r\n/gm, '\n');
if (original !== formatted) {
console.error('File not formatted. Run the \'Format Document\' command to fix it:', file.relative);
errorCount++;
}
cb(null, file);
}, err => {
cb(err);
});
});
let input;
if (Array.isArray(some) || typeof some === 'string' || !some) {
const options = { base: '.', follow: true, allowEmpty: true };
if (some) {
input = vfs.src(some, options).pipe(filter(all)); // split this up to not unnecessarily filter all a second time
} else {
input = vfs.src(all, options);
}
} else {
input = some;
}
const productJsonFilter = filter('product.json', { restore: true });
const result = input
.pipe(filter(f => !f.stat.isDirectory()))
.pipe(productJsonFilter)
.pipe(process.env['BUILD_SOURCEVERSION'] ? es.through() : productJson)
.pipe(productJsonFilter.restore)
.pipe(filter(indentationFilter))
.pipe(indentation)
.pipe(filter(copyrightFilter))
.pipe(copyrights);
const typescript = result
.pipe(filter(tsHygieneFilter))
.pipe(formatting);
const javascript = result
.pipe(filter(jsHygieneFilter.concat(tsHygieneFilter)))
.pipe(gulpeslint({
configFile: '.eslintrc.json',
rulePaths: ['./build/lib/eslint']
}))
.pipe(gulpeslint.formatEach('compact'))
.pipe(gulpeslint.results(results => {
errorCount += results.warningCount;
errorCount += results.errorCount;
}));
let count = 0;
return es.merge(typescript, javascript)
.pipe(es.through(function (data) {
count++;
if (process.env['TRAVIS'] && count % 10 === 0) {
process.stdout.write('.');
}
this.emit('data', data);
}, function () {
process.stdout.write('\n');
if (errorCount > 0) {
this.emit('error', 'Hygiene failed with ' + errorCount + ' errors. Check \'build/gulpfile.hygiene.js\'.');
} else {
this.emit('end');
}
}));
}
function createGitIndexVinyls(paths) {
const cp = require('child_process');
const repositoryPath = process.cwd();
const fns = paths.map(relativePath => () => new Promise((c, e) => {
const fullPath = path.join(repositoryPath, relativePath);
fs.stat(fullPath, (err, stat) => {
if (err && err.code === 'ENOENT') { // ignore deletions
return c(null);
} else if (err) {
return e(err);
}
cp.exec(`git show :${relativePath}`, { maxBuffer: 2000 * 1024, encoding: 'buffer' }, (err, out) => {
if (err) {
return e(err);
}
c(new VinylFile({
path: fullPath,
base: repositoryPath,
contents: out,
stat
}));
});
});
}));
return pall(fns, { concurrency: 4 })
.then(r => r.filter(p => !!p));
}
gulp.task('hygiene', task.series(checkPackageJSONTask, () => hygiene()));
// this allows us to run hygiene as a git pre-commit hook
if (require.main === module) {
const cp = require('child_process');
process.on('unhandledRejection', (reason, p) => {
console.log('Unhandled Rejection at: Promise', p, 'reason:', reason);
process.exit(1);
});
if (process.argv.length > 2) {
hygiene(process.argv.slice(2)).on('error', err => {
console.error();
console.error(err);
process.exit(1);
});
} else {
cp.exec('git diff --cached --name-only', { maxBuffer: 2000 * 1024 }, (err, out) => {
if (err) {
console.error();
console.error(err);
process.exit(1);
return;
}
const some = out
.split(/\r?\n/)
.filter(l => !!l);
if (some.length > 0) {
console.log('Reading git index versions...');
createGitIndexVinyls(some)
.then(vinyls => new Promise((c, e) => hygiene(es.readArray(vinyls))
.on('end', () => c())
.on('error', e)))
.catch(err => {
console.error();
console.error(err);
process.exit(1);
});
}
});
}
}
const hygieneTask = task.define('hygiene', task.series(checkPackageJSONTask, () => hygiene(undefined, false)));
gulp.task(hygieneTask);

View File

@@ -221,13 +221,15 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op
const telemetry = gulp.src('.build/telemetry/**', { base: '.build/telemetry', dot: true });
const jsFilter = util.filter(data => !data.isDirectory() &&/\.js$/.test(data.path));
const root = path.resolve(path.join(__dirname, '..'));
const dependenciesSrc = _.flatten(productionDependencies.map(d => path.relative(root, d.path)).map(d => [`${d}/**`, `!${d}/**/{test,tests}/**`]));
// {{SQL CARBON EDIT}} - fix runtime module load break
const deps = gulp.src(dependenciesSrc, { base: '.', dot: true })
.pipe(filter(['**', '!**/package-lock.json']))
.pipe(util.cleanNodeModules(path.join(__dirname, '.nativeignore')))
.pipe(createAsar(path.join(process.cwd(), 'node_modules'), ['**/*.node', '**/vscode-ripgrep/bin/*', '**/node-pty/build/Release/*', '**/*.wasm'], 'app/node_modules.asar'));
.pipe(util.cleanNodeModules(path.join(__dirname, '.moduleignore')))
.pipe(createAsar(path.join(process.cwd(), 'node_modules'), ['**/*.node', '**/vscode-ripgrep/bin/*', '**/node-pty/build/Release/*', '**/*.wasm'], 'node_modules.asar'));
let all = es.merge(
packageJsonStream,
@@ -342,7 +344,8 @@ const BUILD_TARGETS = [
{ platform: 'win32', arch: 'ia32' },
{ platform: 'win32', arch: 'x64' },
{ platform: 'win32', arch: 'arm64' },
{ platform: 'darwin', arch: null, opts: { stats: true } },
{ platform: 'darwin', arch: 'x64', opts: { stats: true } },
{ platform: 'darwin', arch: 'arm64', opts: { stats: true } },
{ platform: 'linux', arch: 'ia32' },
{ platform: 'linux', arch: 'x64' },
{ platform: 'linux', arch: 'armhf' },
@@ -471,8 +474,10 @@ const generateVSCodeConfigurationTask = task.define('generate-vscode-configurati
const userDataDir = path.join(os.tmpdir(), 'tmpuserdata');
const extensionsDir = path.join(os.tmpdir(), 'tmpextdir');
const arch = process.env['VSCODE_ARCH'];
const appRoot = path.join(buildDir, `VSCode-darwin-${arch}`);
const appName = process.env.VSCODE_QUALITY === 'insider' ? 'Visual\\ Studio\\ Code\\ -\\ Insiders.app' : 'Visual\\ Studio\\ Code.app';
const appPath = path.join(buildDir, `VSCode-darwin/${appName}/Contents/Resources/app/bin/code`);
const appPath = path.join(appRoot, appName, 'Contents', 'Resources', 'app', 'bin', 'code');
const codeProc = cp.exec(
`${appPath} --export-default-configuration='${allConfigDetailsPath}' --wait --user-data-dir='${userDataDir}' --extensions-dir='${extensionsDir}'`,
(err, stdout, stderr) => {

View File

@@ -240,6 +240,7 @@ function prepareSnapPackage(arch) {
const snapcraft = gulp.src('resources/linux/snap/snapcraft.yaml', { base: '.' })
.pipe(replace('@@NAME@@', product.applicationName))
.pipe(replace('@@VERSION@@', commit.substr(0, 8)))
.pipe(replace('@@ARCHITECTURE@@', arch))
.pipe(rename('snap/snapcraft.yaml'));
const electronLaunch = gulp.src('resources/linux/snap/electron-launch', { base: '.' })

View File

@@ -55,7 +55,13 @@ function packageInnoSetup(iss, options, cb) {
cp.spawn(innoSetupPath, args, { stdio: ['ignore', 'inherit', 'inherit'] })
.on('error', cb)
.on('exit', () => cb(null));
.on('exit', code => {
if (code === 0) {
cb(null);
} else {
cb(new Error(`InnoSetup returned exit code: ${code}`));
}
});
}
function buildWin32Setup(arch, target) {

482
build/hygiene.js Normal file
View File

@@ -0,0 +1,482 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
const filter = require('gulp-filter');
const es = require('event-stream');
const gulpeslint = require('gulp-eslint');
const tsfmt = require('typescript-formatter');
const VinylFile = require('vinyl');
const vfs = require('vinyl-fs');
const path = require('path');
const fs = require('fs');
const pall = require('p-all');
/**
* Hygiene works by creating cascading subsets of all our files and
* passing them through a sequence of checks. Here are the current subsets,
* named according to the checks performed on them. Each subset contains
* the following one, as described in mathematical notation:
*
* all ⊃ eol ⊇ indentation ⊃ copyright ⊃ typescript
*/
const all = [
'*',
'extensions/**/*',
'scripts/**/*',
'src/**/*',
'test/**/*',
'!test/**/out/**',
'!**/node_modules/**',
'!build/actions/**/*.js', // {{ SQL CARBON EDIT }}
'!build/**/*' // {{SQL CARBON EDIT}}
];
module.exports.all = all;
const indentationFilter = [
'**',
// except specific files
'!**/ThirdPartyNotices.txt',
'!**/LICENSE.{txt,rtf}',
'!LICENSES.chromium.html',
'!**/LICENSE',
'!src/vs/nls.js',
'!src/vs/nls.build.js',
'!src/vs/css.js',
'!src/vs/css.build.js',
'!src/vs/loader.js',
'!src/vs/base/common/insane/insane.js',
'!src/vs/base/common/marked/marked.js',
'!src/vs/base/common/semver/semver.js',
'!src/vs/base/node/terminateProcess.sh',
'!src/vs/base/node/cpuUsage.sh',
'!test/unit/assert.js',
'!resources/linux/snap/electron-launch',
// except specific folders
'!test/automation/out/**',
'!test/smoke/out/**',
'!extensions/typescript-language-features/test-workspace/**',
'!extensions/vscode-api-tests/testWorkspace/**',
'!extensions/vscode-api-tests/testWorkspace2/**',
'!build/monaco/**',
'!build/win32/**',
// except multiple specific files
'!**/package.json',
'!**/yarn.lock',
'!**/yarn-error.log',
// except multiple specific folders
'!**/codicon/**',
'!**/fixtures/**',
'!**/lib/**',
'!extensions/**/out/**',
'!extensions/**/snippets/**',
'!extensions/**/syntaxes/**',
'!extensions/**/themes/**',
'!extensions/**/colorize-fixtures/**',
// except specific file types
'!src/vs/*/**/*.d.ts',
'!src/typings/**/*.d.ts',
'!extensions/**/*.d.ts',
'!**/*.{svg,exe,png,bmp,jpg,scpt,bat,cmd,cur,ttf,woff,eot,md,ps1,template,yaml,yml,d.ts.recipe,ico,icns,plist}',
'!build/{lib,download,darwin}/**/*.js',
'!build/**/*.sh',
'!build/azure-pipelines/**/*.js',
'!build/azure-pipelines/**/*.config',
'!**/Dockerfile',
'!**/Dockerfile.*',
'!**/*.Dockerfile',
'!**/*.dockerfile',
'!extensions/markdown-language-features/media/*.js',
// {{SQL CARBON EDIT}}
'!**/*.gif',
'!build/actions/**/*.js',
'!**/*.{xlf,lcl,docx,sql,vsix,bacpac,ipynb,jpg}',
'!extensions/mssql/sqltoolsservice/**',
'!extensions/import/flatfileimportservice/**',
'!extensions/admin-tool-ext-win/ssmsmin/**',
'!extensions/resource-deployment/notebooks/**',
'!extensions/mssql/notebooks/**',
'!extensions/azurehybridtoolkit/notebooks/**',
'!extensions/integration-tests/testData/**',
'!extensions/arc/src/controller/generated/**',
'!extensions/sql-database-projects/resources/templates/*.xml',
'!extensions/sql-database-projects/src/test/baselines/*.xml',
'!extensions/sql-database-projects/src/test/baselines/*.json',
'!extensions/sql-database-projects/src/test/baselines/*.sqlproj',
'!extensions/sql-database-projects/BuildDirectory/SystemDacpacs/**',
'!extensions/big-data-cluster/src/bigDataCluster/controller/apiGenerated.ts',
'!extensions/big-data-cluster/src/bigDataCluster/controller/clusterApiGenerated2.ts',
'!resources/linux/snap/electron-launch',
'!build/**/*' // {{SQL CARBON EDIT}}
];
const copyrightFilter = [
'**',
'!**/*.desktop',
'!**/*.json',
'!**/*.html',
'!**/*.template',
'!**/*.md',
'!**/*.bat',
'!**/*.cmd',
'!**/*.ico',
'!**/*.icns',
'!**/*.xml',
'!**/*.sh',
'!**/*.txt',
'!**/*.xpm',
'!**/*.opts',
'!**/*.disabled',
'!**/*.code-workspace',
'!**/*.js.map',
'!build/**/*.init',
'!resources/linux/snap/snapcraft.yaml',
'!resources/win32/bin/code.js',
'!resources/web/code-web.js',
'!resources/completions/**',
'!extensions/configuration-editing/build/inline-allOf.ts',
'!extensions/markdown-language-features/media/highlight.css',
'!extensions/html-language-features/server/src/modes/typescript/*',
'!extensions/*/server/bin/*',
'!src/vs/editor/test/node/classification/typescript-test.ts',
'!scripts/code-web.js',
'!resources/serverless/code-web.js',
'!src/vs/editor/test/node/classification/typescript-test.ts',
// {{SQL CARBON EDIT}}
'!extensions/notebook/src/intellisense/text.ts',
'!extensions/mssql/src/hdfs/webhdfs.ts',
'!src/sql/workbench/contrib/notebook/browser/outputs/tableRenderers.ts',
'!src/sql/workbench/contrib/notebook/common/models/url.ts',
'!src/sql/workbench/services/notebook/browser/outputs/renderMimeInterfaces.ts',
'!src/sql/workbench/contrib/notebook/browser/models/outputProcessor.ts',
'!src/sql/workbench/services/notebook/browser/outputs/mimemodel.ts',
'!src/sql/workbench/contrib/notebook/browser/cellViews/media/*.css',
'!src/sql/base/browser/ui/table/plugins/rowSelectionModel.plugin.ts',
'!src/sql/base/browser/ui/table/plugins/rowDetailView.ts',
'!src/sql/base/browser/ui/table/plugins/headerFilter.plugin.ts',
'!src/sql/base/browser/ui/table/plugins/checkboxSelectColumn.plugin.ts',
'!src/sql/base/browser/ui/table/plugins/cellSelectionModel.plugin.ts',
'!src/sql/base/browser/ui/table/plugins/autoSizeColumns.plugin.ts',
'!src/sql/workbench/services/notebook/browser/outputs/sanitizer.ts',
'!src/sql/workbench/contrib/notebook/browser/outputs/renderers.ts',
'!src/sql/workbench/services/notebook/browser/outputs/registry.ts',
'!src/sql/workbench/services/notebook/browser/outputs/factories.ts',
'!src/sql/workbench/services/notebook/common/nbformat.ts',
'!extensions/markdown-language-features/media/tomorrow.css',
'!src/sql/workbench/browser/modelComponents/media/highlight.css',
'!src/sql/workbench/contrib/notebook/electron-browser/cellViews/media/highlight.css',
'!src/sql/workbench/contrib/notebook/browser/turndownPluginGfm.ts',
'!extensions/mssql/sqltoolsservice/**',
'!extensions/import/flatfileimportservice/**',
'!extensions/notebook/src/prompts/**',
'!extensions/mssql/src/prompts/**',
'!extensions/kusto/src/prompts/**',
'!extensions/notebook/resources/jupyter_config/**',
'!extensions/azurehybridtoolkit/notebooks/**',
'!extensions/query-history/images/**',
'!extensions/sql/build/update-grammar.js',
'!**/*.gif',
'!**/*.xlf',
'!**/*.dacpac',
'!**/*.bacpac',
'!**/*.py'
];
const jsHygieneFilter = [
'src/**/*.js',
'build/gulpfile.*.js',
'!src/vs/loader.js',
'!src/vs/css.js',
'!src/vs/nls.js',
'!src/vs/css.build.js',
'!src/vs/nls.build.js',
'!src/**/insane.js',
'!src/**/marked.js',
'!src/**/semver.js',
'!**/test/**',
'!build/**/*' // {{SQL CARBON EDIT}}
];
module.exports.jsHygieneFilter = jsHygieneFilter;
const tsHygieneFilter = [
'src/**/*.ts',
'test/**/*.ts',
'extensions/**/*.ts',
'!**/fixtures/**',
'!**/typings/**',
'!**/node_modules/**',
'!extensions/typescript-basics/test/colorize-fixtures/**',
'!extensions/vscode-api-tests/testWorkspace/**',
'!extensions/vscode-api-tests/testWorkspace2/**',
'!extensions/**/*.test.ts',
'!extensions/html-language-features/server/lib/jquery.d.ts',
'!extensions/big-data-cluster/src/bigDataCluster/controller/apiGenerated.ts', // {{SQL CARBON EDIT}}
'!extensions/big-data-cluster/src/bigDataCluster/controller/tokenApiGenerated.ts', // {{SQL CARBON EDIT}}
'!src/vs/workbench/services/themes/common/textMateScopeMatcher.ts', // {{SQL CARBON EDIT}} skip this because we have no plans on touching this and its not ours
'!src/vs/workbench/contrib/extensions/browser/extensionRecommendationsService.ts', // {{SQL CARBON EDIT}} skip this because known issue
'!build/**/*' // {{SQL CARBON EDIT}}
];
module.exports.tsHygieneFilter = tsHygieneFilter;
const copyrightHeaderLines = [
'/*---------------------------------------------------------------------------------------------',
' * Copyright (c) Microsoft Corporation. All rights reserved.',
' * Licensed under the Source EULA. See License.txt in the project root for license information.',
' *--------------------------------------------------------------------------------------------*/',
];
function hygiene(some) {
let errorCount = 0;
const productJson = es.through(function (file) {
// const product = JSON.parse(file.contents.toString('utf8'));
// if (product.extensionsGallery) { // {{SQL CARBON EDIT}} @todo we need to research on what the point of this is
// console.error('product.json: Contains "extensionsGallery"');
// errorCount++;
// }
this.emit('data', file);
});
const indentation = es.through(function (file) {
const lines = file.contents.toString('utf8').split(/\r\n|\r|\n/);
file.__lines = lines;
lines.forEach((line, i) => {
if (/^\s*$/.test(line)) {
// empty or whitespace lines are OK
} else if (/^[\t]*[^\s]/.test(line)) {
// good indent
} else if (/^[\t]* \*/.test(line)) {
// block comment using an extra space
} else {
console.error(
file.relative + '(' + (i + 1) + ',1): Bad whitespace indentation'
);
errorCount++;
}
});
this.emit('data', file);
});
const copyrights = es.through(function (file) {
const lines = file.__lines;
for (let i = 0; i < copyrightHeaderLines.length; i++) {
if (lines[i] !== copyrightHeaderLines[i]) {
//console.error(file.relative + ': Missing or bad copyright statement');
//errorCount++;
break;
}
}
this.emit('data', file);
});
const formatting = es.map(function (file, cb) {
tsfmt
.processString(file.path, file.contents.toString('utf8'), {
verify: false,
tsfmt: true,
// verbose: true,
// keep checkJS happy
editorconfig: undefined,
replace: undefined,
tsconfig: undefined,
tsconfigFile: undefined,
tsfmtFile: undefined,
vscode: undefined,
vscodeFile: undefined,
})
.then(
(result) => {
let original = result.src.replace(/\r\n/gm, '\n');
let formatted = result.dest.replace(/\r\n/gm, '\n');
if (original !== formatted) {
console.error(
`File not formatted. Run the 'Format Document' command to fix it:`,
file.relative
);
errorCount++;
}
cb(null, file);
},
(err) => {
cb(err);
}
);
});
let input;
if (Array.isArray(some) || typeof some === 'string' || !some) {
const options = { base: '.', follow: true, allowEmpty: true };
if (some) {
input = vfs.src(some, options).pipe(filter(all)); // split this up to not unnecessarily filter all a second time
} else {
input = vfs.src(all, options);
}
} else {
input = some;
}
const productJsonFilter = filter('product.json', { restore: true });
const result = input
.pipe(filter((f) => !f.stat.isDirectory()))
.pipe(productJsonFilter)
.pipe(process.env['BUILD_SOURCEVERSION'] ? es.through() : productJson)
.pipe(productJsonFilter.restore)
.pipe(filter(indentationFilter))
.pipe(indentation)
.pipe(filter(copyrightFilter))
.pipe(copyrights);
const typescript = result.pipe(filter(tsHygieneFilter)).pipe(formatting);
const javascript = result
.pipe(filter(jsHygieneFilter.concat(tsHygieneFilter)))
.pipe(
gulpeslint({
configFile: '.eslintrc.json',
rulePaths: ['./build/lib/eslint'],
})
)
.pipe(gulpeslint.formatEach('compact'))
.pipe(
gulpeslint.results((results) => {
errorCount += results.warningCount;
errorCount += results.errorCount;
})
);
let count = 0;
return es.merge(typescript, javascript).pipe(
es.through(
function (data) {
count++;
if (process.env['TRAVIS'] && count % 10 === 0) {
process.stdout.write('.');
}
this.emit('data', data);
},
function () {
process.stdout.write('\n');
if (errorCount > 0) {
this.emit(
'error',
'Hygiene failed with ' +
errorCount +
` errors. Check 'build / gulpfile.hygiene.js'.`
);
} else {
this.emit('end');
}
}
)
);
}
module.exports.hygiene = hygiene;
function createGitIndexVinyls(paths) {
const cp = require('child_process');
const repositoryPath = process.cwd();
const fns = paths.map((relativePath) => () =>
new Promise((c, e) => {
const fullPath = path.join(repositoryPath, relativePath);
fs.stat(fullPath, (err, stat) => {
if (err && err.code === 'ENOENT') {
// ignore deletions
return c(null);
} else if (err) {
return e(err);
}
cp.exec(
`git show :${relativePath}`,
{ maxBuffer: 2000 * 1024, encoding: 'buffer' },
(err, out) => {
if (err) {
return e(err);
}
c(
new VinylFile({
path: fullPath,
base: repositoryPath,
contents: out,
stat,
})
);
}
);
});
})
);
return pall(fns, { concurrency: 4 }).then((r) => r.filter((p) => !!p));
}
// this allows us to run hygiene as a git pre-commit hook
if (require.main === module) {
const cp = require('child_process');
process.on('unhandledRejection', (reason, p) => {
console.log('Unhandled Rejection at: Promise', p, 'reason:', reason);
process.exit(1);
});
if (process.argv.length > 2) {
hygiene(process.argv.slice(2)).on('error', (err) => {
console.error();
console.error(err);
process.exit(1);
});
} else {
cp.exec(
'git diff --cached --name-only',
{ maxBuffer: 2000 * 1024 },
(err, out) => {
if (err) {
console.error();
console.error(err);
process.exit(1);
}
const some = out.split(/\r?\n/).filter((l) => !!l);
if (some.length > 0) {
console.log('Reading git index versions...');
createGitIndexVinyls(some)
.then(
(vinyls) =>
new Promise((c, e) =>
hygiene(es.readArray(vinyls))
.on('end', () => c())
.on('error', e)
)
)
.catch((err) => {
console.error();
console.error(err);
process.exit(1);
});
}
}
);
}
}

View File

@@ -70,8 +70,7 @@ function createAsar(folderPath, unpackGlobs, destFilename) {
// The file goes outside of xx.asar, in a folder xx.asar.unpacked
const relative = path.relative(folderPath, file.path);
this.queue(new VinylFile({
cwd: folderPath,
base: folderPath,
base: '.',
path: path.join(destFilename + '.unpacked', relative),
stat: file.stat,
contents: file.contents
@@ -96,8 +95,7 @@ function createAsar(folderPath, unpackGlobs, destFilename) {
const contents = Buffer.concat(out);
out.length = 0;
this.queue(new VinylFile({
cwd: folderPath,
base: folderPath,
base: '.',
path: destFilename,
contents: contents
}));

View File

@@ -87,8 +87,7 @@ export function createAsar(folderPath: string, unpackGlobs: string[], destFilena
// The file goes outside of xx.asar, in a folder xx.asar.unpacked
const relative = path.relative(folderPath, file.path);
this.queue(new VinylFile({
cwd: folderPath,
base: folderPath,
base: '.',
path: path.join(destFilename + '.unpacked', relative),
stat: file.stat,
contents: file.contents
@@ -117,8 +116,7 @@ export function createAsar(folderPath: string, unpackGlobs: string[], destFilena
out.length = 0;
this.queue(new VinylFile({
cwd: folderPath,
base: folderPath,
base: '.',
path: destFilename,
contents: contents
}));

View File

@@ -21,7 +21,7 @@ function darwinBundleDocumentType(extensions, icon) {
return {
name: product.nameLong + ' document',
role: 'Editor',
ostypes: ["TEXT", "utxt", "TUTX", "****"],
ostypes: ['TEXT', 'utxt', 'TUTX', '****'],
extensions: extensions,
iconFile: icon
};

View File

@@ -25,7 +25,7 @@ function darwinBundleDocumentType(extensions: string[], icon: string) {
return {
name: product.nameLong + ' document',
role: 'Editor',
ostypes: ["TEXT", "utxt", "TUTX", "****"],
ostypes: ['TEXT', 'utxt', 'TUTX', '****'],
extensions: extensions,
iconFile: icon
};

View File

@@ -37,7 +37,7 @@ module.exports = new (_a = class NoUnexternalizedStrings {
// extract key so that it can be checked later
let key;
if (isStringLiteral(keyNode)) {
doubleQuotedStringLiterals.delete(keyNode); //todo@joh reconsider
doubleQuotedStringLiterals.delete(keyNode);
key = keyNode.value;
}
else if (keyNode.type === experimental_utils_1.AST_NODE_TYPES.ObjectExpression) {
@@ -45,7 +45,7 @@ module.exports = new (_a = class NoUnexternalizedStrings {
if (property.type === experimental_utils_1.AST_NODE_TYPES.Property && !property.computed) {
if (property.key.type === experimental_utils_1.AST_NODE_TYPES.Identifier && property.key.name === 'key') {
if (isStringLiteral(property.value)) {
doubleQuotedStringLiterals.delete(property.value); //todo@joh reconsider
doubleQuotedStringLiterals.delete(property.value);
key = property.value.value;
break;
}

View File

@@ -47,7 +47,7 @@ export = new class NoUnexternalizedStrings implements eslint.Rule.RuleModule {
// extract key so that it can be checked later
let key: string | undefined;
if (isStringLiteral(keyNode)) {
doubleQuotedStringLiterals.delete(keyNode); //todo@joh reconsider
doubleQuotedStringLiterals.delete(keyNode);
key = keyNode.value;
} else if (keyNode.type === AST_NODE_TYPES.ObjectExpression) {
@@ -55,7 +55,7 @@ export = new class NoUnexternalizedStrings implements eslint.Rule.RuleModule {
if (property.type === AST_NODE_TYPES.Property && !property.computed) {
if (property.key.type === AST_NODE_TYPES.Identifier && property.key.name === 'key') {
if (isStringLiteral(property.value)) {
doubleQuotedStringLiterals.delete(property.value); //todo@joh reconsider
doubleQuotedStringLiterals.delete(property.value);
key = property.value.value;
break;
}
@@ -123,4 +123,3 @@ export = new class NoUnexternalizedStrings implements eslint.Rule.RuleModule {
};
}
};

View File

@@ -199,7 +199,7 @@ const excludedExtensions = [
'ms-vscode.node-debug',
'ms-vscode.node-debug2',
'vscode-notebook-tests',
'integration-tests',
'integration-tests', // {{SQL CARBON EDIT}}
];
// {{SQL CARBON EDIT}}
const externalExtensions = [

View File

@@ -1038,7 +1038,7 @@ function createI18nFile(originalFilePath, messages) {
contents: Buffer.from(content, 'utf8')
});
}
const i18nPackVersion = "1.0.0";
const i18nPackVersion = '1.0.0';
function pullI18nPackFiles(apiHostname, username, password, language, resultingTranslationPaths) {
return pullCoreAndExtensionsXlfFiles(apiHostname, username, password, language, exports.externalExtensionsWithTranslations)
.pipe(prepareI18nPackFiles(exports.externalExtensionsWithTranslations, resultingTranslationPaths, language.id === 'ps'));

View File

@@ -62,6 +62,10 @@
"name": "vs/workbench/contrib/debug",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/dialogs",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/emmet",
"project": "vscode-workbench"
@@ -210,6 +214,10 @@
"name": "vs/workbench/contrib/webviewPanel",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/workspaces",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/customEditor",
"project": "vscode-workbench"
@@ -361,6 +369,14 @@
{
"name": "vs/workbench/services/authentication",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/extensionRecommendations",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/gettingStarted",
"project": "vscode-workbench"
}
]
}

View File

@@ -1196,7 +1196,7 @@ interface I18nPack {
};
}
const i18nPackVersion = "1.0.0";
const i18nPackVersion = '1.0.0';
export interface TranslationPath {
id: string;

View File

@@ -24,8 +24,8 @@ const minimatch_1 = require("minimatch");
// Feel free to add more core types as you see needed if present in node.js and browsers
const CORE_TYPES = [
'require',
'atob',
'btoa',
// 'atob',
// 'btoa',
'setTimeout',
'clearTimeout',
'setInterval',

View File

@@ -25,8 +25,8 @@ import { match } from 'minimatch';
// Feel free to add more core types as you see needed if present in node.js and browsers
const CORE_TYPES = [
'require', // from our AMD loader
'atob',
'btoa',
// 'atob',
// 'btoa',
'setTimeout',
'clearTimeout',
'setInterval',

627
build/lib/monaco-api.js Normal file
View File

@@ -0,0 +1,627 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.execute = exports.run3 = exports.DeclarationResolver = exports.FSProvider = exports.RECIPE_PATH = void 0;
const fs = require("fs");
const ts = require("typescript");
const path = require("path");
const fancyLog = require("fancy-log");
const ansiColors = require("ansi-colors");
const dtsv = '3';
const tsfmt = require('../../tsfmt.json');
const SRC = path.join(__dirname, '../../src');
exports.RECIPE_PATH = path.join(__dirname, '../monaco/monaco.d.ts.recipe');
const DECLARATION_PATH = path.join(__dirname, '../../src/vs/monaco.d.ts');
function logErr(message, ...rest) {
fancyLog(ansiColors.yellow(`[monaco.d.ts]`), message, ...rest);
}
function isDeclaration(a) {
return (a.kind === ts.SyntaxKind.InterfaceDeclaration
|| a.kind === ts.SyntaxKind.EnumDeclaration
|| a.kind === ts.SyntaxKind.ClassDeclaration
|| a.kind === ts.SyntaxKind.TypeAliasDeclaration
|| a.kind === ts.SyntaxKind.FunctionDeclaration
|| a.kind === ts.SyntaxKind.ModuleDeclaration);
}
function visitTopLevelDeclarations(sourceFile, visitor) {
let stop = false;
let visit = (node) => {
if (stop) {
return;
}
switch (node.kind) {
case ts.SyntaxKind.InterfaceDeclaration:
case ts.SyntaxKind.EnumDeclaration:
case ts.SyntaxKind.ClassDeclaration:
case ts.SyntaxKind.VariableStatement:
case ts.SyntaxKind.TypeAliasDeclaration:
case ts.SyntaxKind.FunctionDeclaration:
case ts.SyntaxKind.ModuleDeclaration:
stop = visitor(node);
}
if (stop) {
return;
}
ts.forEachChild(node, visit);
};
visit(sourceFile);
}
function getAllTopLevelDeclarations(sourceFile) {
let all = [];
visitTopLevelDeclarations(sourceFile, (node) => {
if (node.kind === ts.SyntaxKind.InterfaceDeclaration || node.kind === ts.SyntaxKind.ClassDeclaration || node.kind === ts.SyntaxKind.ModuleDeclaration) {
let interfaceDeclaration = node;
let triviaStart = interfaceDeclaration.pos;
let triviaEnd = interfaceDeclaration.name.pos;
let triviaText = getNodeText(sourceFile, { pos: triviaStart, end: triviaEnd });
if (triviaText.indexOf('@internal') === -1) {
all.push(node);
}
}
else {
let nodeText = getNodeText(sourceFile, node);
if (nodeText.indexOf('@internal') === -1) {
all.push(node);
}
}
return false /*continue*/;
});
return all;
}
function getTopLevelDeclaration(sourceFile, typeName) {
let result = null;
visitTopLevelDeclarations(sourceFile, (node) => {
if (isDeclaration(node) && node.name) {
if (node.name.text === typeName) {
result = node;
return true /*stop*/;
}
return false /*continue*/;
}
// node is ts.VariableStatement
if (getNodeText(sourceFile, node).indexOf(typeName) >= 0) {
result = node;
return true /*stop*/;
}
return false /*continue*/;
});
return result;
}
function getNodeText(sourceFile, node) {
return sourceFile.getFullText().substring(node.pos, node.end);
}
function hasModifier(modifiers, kind) {
if (modifiers) {
for (let i = 0; i < modifiers.length; i++) {
let mod = modifiers[i];
if (mod.kind === kind) {
return true;
}
}
}
return false;
}
function isStatic(member) {
return hasModifier(member.modifiers, ts.SyntaxKind.StaticKeyword);
}
function isDefaultExport(declaration) {
return (hasModifier(declaration.modifiers, ts.SyntaxKind.DefaultKeyword)
&& hasModifier(declaration.modifiers, ts.SyntaxKind.ExportKeyword));
}
function getMassagedTopLevelDeclarationText(sourceFile, declaration, importName, usage, enums) {
let result = getNodeText(sourceFile, declaration);
if (declaration.kind === ts.SyntaxKind.InterfaceDeclaration || declaration.kind === ts.SyntaxKind.ClassDeclaration) {
let interfaceDeclaration = declaration;
const staticTypeName = (isDefaultExport(interfaceDeclaration)
? `${importName}.default`
: `${importName}.${declaration.name.text}`);
let instanceTypeName = staticTypeName;
const typeParametersCnt = (interfaceDeclaration.typeParameters ? interfaceDeclaration.typeParameters.length : 0);
if (typeParametersCnt > 0) {
let arr = [];
for (let i = 0; i < typeParametersCnt; i++) {
arr.push('any');
}
instanceTypeName = `${instanceTypeName}<${arr.join(',')}>`;
}
const members = interfaceDeclaration.members;
members.forEach((member) => {
try {
let memberText = getNodeText(sourceFile, member);
if (memberText.indexOf('@internal') >= 0 || memberText.indexOf('private') >= 0) {
result = result.replace(memberText, '');
}
else {
const memberName = member.name.text;
const memberAccess = (memberName.indexOf('.') >= 0 ? `['${memberName}']` : `.${memberName}`);
if (isStatic(member)) {
usage.push(`a = ${staticTypeName}${memberAccess};`);
}
else {
usage.push(`a = (<${instanceTypeName}>b)${memberAccess};`);
}
}
}
catch (err) {
// life..
}
});
}
else if (declaration.kind === ts.SyntaxKind.VariableStatement) {
const jsDoc = result.substr(0, declaration.getLeadingTriviaWidth(sourceFile));
if (jsDoc.indexOf('@monacodtsreplace') >= 0) {
const jsDocLines = jsDoc.split(/\r\n|\r|\n/);
let directives = [];
for (const jsDocLine of jsDocLines) {
const m = jsDocLine.match(/^\s*\* \/([^/]+)\/([^/]+)\/$/);
if (m) {
directives.push([new RegExp(m[1], 'g'), m[2]]);
}
}
// remove the jsdoc
result = result.substr(jsDoc.length);
if (directives.length > 0) {
// apply replace directives
const replacer = createReplacerFromDirectives(directives);
result = replacer(result);
}
}
}
result = result.replace(/export default /g, 'export ');
result = result.replace(/export declare /g, 'export ');
result = result.replace(/declare /g, '');
let lines = result.split(/\r\n|\r|\n/);
for (let i = 0; i < lines.length; i++) {
if (/\s*\*/.test(lines[i])) {
// very likely a comment
continue;
}
lines[i] = lines[i].replace(/"/g, '\'');
}
result = lines.join('\n');
if (declaration.kind === ts.SyntaxKind.EnumDeclaration) {
result = result.replace(/const enum/, 'enum');
enums.push({
enumName: declaration.name.getText(sourceFile),
text: result
});
}
return result;
}
function format(text, endl) {
const REALLY_FORMAT = false;
text = preformat(text, endl);
if (!REALLY_FORMAT) {
return text;
}
// Parse the source text
let sourceFile = ts.createSourceFile('file.ts', text, ts.ScriptTarget.Latest, /*setParentPointers*/ true);
// Get the formatting edits on the input sources
let edits = ts.formatting.formatDocument(sourceFile, getRuleProvider(tsfmt), tsfmt);
// Apply the edits on the input code
return applyEdits(text, edits);
function countParensCurly(text) {
let cnt = 0;
for (let i = 0; i < text.length; i++) {
if (text.charAt(i) === '(' || text.charAt(i) === '{') {
cnt++;
}
if (text.charAt(i) === ')' || text.charAt(i) === '}') {
cnt--;
}
}
return cnt;
}
function repeatStr(s, cnt) {
let r = '';
for (let i = 0; i < cnt; i++) {
r += s;
}
return r;
}
function preformat(text, endl) {
let lines = text.split(endl);
let inComment = false;
let inCommentDeltaIndent = 0;
let indent = 0;
for (let i = 0; i < lines.length; i++) {
let line = lines[i].replace(/\s$/, '');
let repeat = false;
let lineIndent = 0;
do {
repeat = false;
if (line.substring(0, 4) === ' ') {
line = line.substring(4);
lineIndent++;
repeat = true;
}
if (line.charAt(0) === '\t') {
line = line.substring(1);
lineIndent++;
repeat = true;
}
} while (repeat);
if (line.length === 0) {
continue;
}
if (inComment) {
if (/\*\//.test(line)) {
inComment = false;
}
lines[i] = repeatStr('\t', lineIndent + inCommentDeltaIndent) + line;
continue;
}
if (/\/\*/.test(line)) {
inComment = true;
inCommentDeltaIndent = indent - lineIndent;
lines[i] = repeatStr('\t', indent) + line;
continue;
}
const cnt = countParensCurly(line);
let shouldUnindentAfter = false;
let shouldUnindentBefore = false;
if (cnt < 0) {
if (/[({]/.test(line)) {
shouldUnindentAfter = true;
}
else {
shouldUnindentBefore = true;
}
}
else if (cnt === 0) {
shouldUnindentBefore = /^\}/.test(line);
}
let shouldIndentAfter = false;
if (cnt > 0) {
shouldIndentAfter = true;
}
else if (cnt === 0) {
shouldIndentAfter = /{$/.test(line);
}
if (shouldUnindentBefore) {
indent--;
}
lines[i] = repeatStr('\t', indent) + line;
if (shouldUnindentAfter) {
indent--;
}
if (shouldIndentAfter) {
indent++;
}
}
return lines.join(endl);
}
function getRuleProvider(options) {
// Share this between multiple formatters using the same options.
// This represents the bulk of the space the formatter uses.
return ts.formatting.getFormatContext(options);
}
function applyEdits(text, edits) {
// Apply edits in reverse on the existing text
let result = text;
for (let i = edits.length - 1; i >= 0; i--) {
let change = edits[i];
let head = result.slice(0, change.span.start);
let tail = result.slice(change.span.start + change.span.length);
result = head + change.newText + tail;
}
return result;
}
}
function createReplacerFromDirectives(directives) {
return (str) => {
for (let i = 0; i < directives.length; i++) {
str = str.replace(directives[i][0], directives[i][1]);
}
return str;
};
}
function createReplacer(data) {
data = data || '';
let rawDirectives = data.split(';');
let directives = [];
rawDirectives.forEach((rawDirective) => {
if (rawDirective.length === 0) {
return;
}
let pieces = rawDirective.split('=>');
let findStr = pieces[0];
let replaceStr = pieces[1];
findStr = findStr.replace(/[\-\\\{\}\*\+\?\|\^\$\.\,\[\]\(\)\#\s]/g, '\\$&');
findStr = '\\b' + findStr + '\\b';
directives.push([new RegExp(findStr, 'g'), replaceStr]);
});
return createReplacerFromDirectives(directives);
}
function generateDeclarationFile(recipe, sourceFileGetter) {
const endl = /\r\n/.test(recipe) ? '\r\n' : '\n';
let lines = recipe.split(endl);
let result = [];
let usageCounter = 0;
let usageImports = [];
let usage = [];
let failed = false;
usage.push(`var a: any;`);
usage.push(`var b: any;`);
const generateUsageImport = (moduleId) => {
let importName = 'm' + (++usageCounter);
usageImports.push(`import * as ${importName} from './${moduleId.replace(/\.d\.ts$/, '')}';`);
return importName;
};
let enums = [];
let version = null;
lines.forEach(line => {
if (failed) {
return;
}
let m0 = line.match(/^\/\/dtsv=(\d+)$/);
if (m0) {
version = m0[1];
}
let m1 = line.match(/^\s*#include\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
if (m1) {
let moduleId = m1[1];
const sourceFile = sourceFileGetter(moduleId);
if (!sourceFile) {
logErr(`While handling ${line}`);
logErr(`Cannot find ${moduleId}`);
failed = true;
return;
}
const importName = generateUsageImport(moduleId);
let replacer = createReplacer(m1[2]);
let typeNames = m1[3].split(/,/);
typeNames.forEach((typeName) => {
typeName = typeName.trim();
if (typeName.length === 0) {
return;
}
let declaration = getTopLevelDeclaration(sourceFile, typeName);
if (!declaration) {
logErr(`While handling ${line}`);
logErr(`Cannot find ${typeName}`);
failed = true;
return;
}
result.push(replacer(getMassagedTopLevelDeclarationText(sourceFile, declaration, importName, usage, enums)));
});
return;
}
let m2 = line.match(/^\s*#includeAll\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
if (m2) {
let moduleId = m2[1];
const sourceFile = sourceFileGetter(moduleId);
if (!sourceFile) {
logErr(`While handling ${line}`);
logErr(`Cannot find ${moduleId}`);
failed = true;
return;
}
const importName = generateUsageImport(moduleId);
let replacer = createReplacer(m2[2]);
let typeNames = m2[3].split(/,/);
let typesToExcludeMap = {};
let typesToExcludeArr = [];
typeNames.forEach((typeName) => {
typeName = typeName.trim();
if (typeName.length === 0) {
return;
}
typesToExcludeMap[typeName] = true;
typesToExcludeArr.push(typeName);
});
getAllTopLevelDeclarations(sourceFile).forEach((declaration) => {
if (isDeclaration(declaration) && declaration.name) {
if (typesToExcludeMap[declaration.name.text]) {
return;
}
}
else {
// node is ts.VariableStatement
let nodeText = getNodeText(sourceFile, declaration);
for (let i = 0; i < typesToExcludeArr.length; i++) {
if (nodeText.indexOf(typesToExcludeArr[i]) >= 0) {
return;
}
}
}
result.push(replacer(getMassagedTopLevelDeclarationText(sourceFile, declaration, importName, usage, enums)));
});
return;
}
result.push(line);
});
if (failed) {
return null;
}
if (version !== dtsv) {
if (!version) {
logErr(`gulp watch restart required. 'monaco.d.ts.recipe' is written before versioning was introduced.`);
}
else {
logErr(`gulp watch restart required. 'monaco.d.ts.recipe' v${version} does not match runtime v${dtsv}.`);
}
return null;
}
let resultTxt = result.join(endl);
resultTxt = resultTxt.replace(/\bURI\b/g, 'Uri');
resultTxt = resultTxt.replace(/\bEvent</g, 'IEvent<');
resultTxt = resultTxt.split(/\r\n|\n|\r/).join(endl);
resultTxt = format(resultTxt, endl);
resultTxt = resultTxt.split(/\r\n|\n|\r/).join(endl);
enums.sort((e1, e2) => {
if (e1.enumName < e2.enumName) {
return -1;
}
if (e1.enumName > e2.enumName) {
return 1;
}
return 0;
});
let resultEnums = [
'/*---------------------------------------------------------------------------------------------',
' * Copyright (c) Microsoft Corporation. All rights reserved.',
' * Licensed under the Source EULA. See License.txt in the project root for license information.',
' *--------------------------------------------------------------------------------------------*/',
'',
'// THIS IS A GENERATED FILE. DO NOT EDIT DIRECTLY.',
''
].concat(enums.map(e => e.text)).join(endl);
resultEnums = resultEnums.split(/\r\n|\n|\r/).join(endl);
resultEnums = format(resultEnums, endl);
resultEnums = resultEnums.split(/\r\n|\n|\r/).join(endl);
return {
result: resultTxt,
usageContent: `${usageImports.join('\n')}\n\n${usage.join('\n')}`,
enums: resultEnums
};
}
function _run(sourceFileGetter) {
const recipe = fs.readFileSync(exports.RECIPE_PATH).toString();
const t = generateDeclarationFile(recipe, sourceFileGetter);
if (!t) {
return null;
}
const result = t.result;
const usageContent = t.usageContent;
const enums = t.enums;
const currentContent = fs.readFileSync(DECLARATION_PATH).toString();
const one = currentContent.replace(/\r\n/gm, '\n');
const other = result.replace(/\r\n/gm, '\n');
const isTheSame = (one === other);
return {
content: result,
usageContent: usageContent,
enums: enums,
filePath: DECLARATION_PATH,
isTheSame
};
}
class FSProvider {
existsSync(filePath) {
return fs.existsSync(filePath);
}
statSync(filePath) {
return fs.statSync(filePath);
}
readFileSync(_moduleId, filePath) {
return fs.readFileSync(filePath);
}
}
exports.FSProvider = FSProvider;
class CacheEntry {
constructor(sourceFile, mtime) {
this.sourceFile = sourceFile;
this.mtime = mtime;
}
}
class DeclarationResolver {
constructor(_fsProvider) {
this._fsProvider = _fsProvider;
this._sourceFileCache = Object.create(null);
}
invalidateCache(moduleId) {
this._sourceFileCache[moduleId] = null;
}
getDeclarationSourceFile(moduleId) {
if (this._sourceFileCache[moduleId]) {
// Since we cannot trust file watching to invalidate the cache, check also the mtime
const fileName = this._getFileName(moduleId);
const mtime = this._fsProvider.statSync(fileName).mtime.getTime();
if (this._sourceFileCache[moduleId].mtime !== mtime) {
this._sourceFileCache[moduleId] = null;
}
}
if (!this._sourceFileCache[moduleId]) {
this._sourceFileCache[moduleId] = this._getDeclarationSourceFile(moduleId);
}
return this._sourceFileCache[moduleId] ? this._sourceFileCache[moduleId].sourceFile : null;
}
_getFileName(moduleId) {
if (/\.d\.ts$/.test(moduleId)) {
return path.join(SRC, moduleId);
}
return path.join(SRC, `${moduleId}.ts`);
}
_getDeclarationSourceFile(moduleId) {
const fileName = this._getFileName(moduleId);
if (!this._fsProvider.existsSync(fileName)) {
return null;
}
const mtime = this._fsProvider.statSync(fileName).mtime.getTime();
if (/\.d\.ts$/.test(moduleId)) {
// const mtime = this._fsProvider.statFileSync()
const fileContents = this._fsProvider.readFileSync(moduleId, fileName).toString();
return new CacheEntry(ts.createSourceFile(fileName, fileContents, ts.ScriptTarget.ES5), mtime);
}
const fileContents = this._fsProvider.readFileSync(moduleId, fileName).toString();
const fileMap = {
'file.ts': fileContents
};
const service = ts.createLanguageService(new TypeScriptLanguageServiceHost({}, fileMap, {}));
const text = service.getEmitOutput('file.ts', true, true).outputFiles[0].text;
return new CacheEntry(ts.createSourceFile(fileName, text, ts.ScriptTarget.ES5), mtime);
}
}
exports.DeclarationResolver = DeclarationResolver;
function run3(resolver) {
const sourceFileGetter = (moduleId) => resolver.getDeclarationSourceFile(moduleId);
return _run(sourceFileGetter);
}
exports.run3 = run3;
class TypeScriptLanguageServiceHost {
constructor(libs, files, compilerOptions) {
this._libs = libs;
this._files = files;
this._compilerOptions = compilerOptions;
}
// --- language service host ---------------
getCompilationSettings() {
return this._compilerOptions;
}
getScriptFileNames() {
return ([]
.concat(Object.keys(this._libs))
.concat(Object.keys(this._files)));
}
getScriptVersion(_fileName) {
return '1';
}
getProjectVersion() {
return '1';
}
getScriptSnapshot(fileName) {
if (this._files.hasOwnProperty(fileName)) {
return ts.ScriptSnapshot.fromString(this._files[fileName]);
}
else if (this._libs.hasOwnProperty(fileName)) {
return ts.ScriptSnapshot.fromString(this._libs[fileName]);
}
else {
return ts.ScriptSnapshot.fromString('');
}
}
getScriptKind(_fileName) {
return ts.ScriptKind.TS;
}
getCurrentDirectory() {
return '';
}
getDefaultLibFileName(_options) {
return 'defaultLib:es5';
}
isDefaultLibFileName(fileName) {
return fileName === this.getDefaultLibFileName(this._compilerOptions);
}
}
function execute() {
let r = run3(new DeclarationResolver(new FSProvider()));
if (!r) {
throw new Error(`monaco.d.ts generation error - Cannot continue`);
}
return r;
}
exports.execute = execute;

View File

@@ -61,7 +61,7 @@ function loader(src, bundledFileHeader, bundleLoader) {
isFirst = false;
this.emit('data', new VinylFile({
path: 'fake',
base: '',
base: '.',
contents: Buffer.from(bundledFileHeader)
}));
this.emit('data', data);
@@ -92,7 +92,7 @@ function toConcatStream(src, bundledFileHeader, sources, dest, fileContentMapper
}
const treatedSources = sources.map(function (source) {
const root = source.path ? REPO_ROOT_PATH.replace(/\\/g, '/') : '';
const base = source.path ? root + `/${src}` : '';
const base = source.path ? root + `/${src}` : '.';
const path = source.path ? root + '/' + source.path.replace(/\\/g, '/') : 'fake';
const contents = source.path ? fileContentMapper(source.contents, path) : source.contents;
return new VinylFile({

View File

@@ -69,7 +69,7 @@ function loader(src: string, bundledFileHeader: string, bundleLoader: boolean):
isFirst = false;
this.emit('data', new VinylFile({
path: 'fake',
base: '',
base: '.',
contents: Buffer.from(bundledFileHeader)
}));
this.emit('data', data);
@@ -104,7 +104,7 @@ function toConcatStream(src: string, bundledFileHeader: string, sources: bundle.
const treatedSources = sources.map(function (source) {
const root = source.path ? REPO_ROOT_PATH.replace(/\\/g, '/') : '';
const base = source.path ? root + `/${src}` : '';
const base = source.path ? root + `/${src}` : '.';
const path = source.path ? root + '/' + source.path.replace(/\\/g, '/') : 'fake';
const contents = source.path ? fileContentMapper(source.contents, path) : source.contents;

View File

@@ -11,65 +11,81 @@ const fancyLog = require("fancy-log");
const ansiColors = require("ansi-colors");
const fs = require("fs");
const path = require("path");
const allErrors = [];
let startTime = null;
let count = 0;
function onStart() {
if (count++ > 0) {
return;
class ErrorLog {
constructor(id) {
this.id = id;
this.allErrors = [];
this.startTime = null;
this.count = 0;
}
startTime = new Date().getTime();
fancyLog(`Starting ${ansiColors.green('compilation')}...`);
}
function onEnd() {
if (--count > 0) {
return;
onStart() {
if (this.count++ > 0) {
return;
}
this.startTime = new Date().getTime();
fancyLog(`Starting ${ansiColors.green('compilation')}${this.id ? ansiColors.blue(` ${this.id}`) : ''}...`);
}
onEnd() {
if (--this.count > 0) {
return;
}
this.log();
}
log() {
const errors = _.flatten(this.allErrors);
const seen = new Set();
errors.map(err => {
if (!seen.has(err)) {
seen.add(err);
fancyLog(`${ansiColors.red('Error')}: ${err}`);
}
});
fancyLog(`Finished ${ansiColors.green('compilation')}${this.id ? ansiColors.blue(` ${this.id}`) : ''} with ${errors.length} errors after ${ansiColors.magenta((new Date().getTime() - this.startTime) + ' ms')}`);
const regex = /^([^(]+)\((\d+),(\d+)\): (.*)$/s;
const messages = errors
.map(err => regex.exec(err))
.filter(match => !!match)
.map(x => x)
.map(([, path, line, column, message]) => ({ path, line: parseInt(line), column: parseInt(column), message }));
try {
const logFileName = 'log' + (this.id ? `_${this.id}` : '');
fs.writeFileSync(path.join(buildLogFolder, logFileName), JSON.stringify(messages));
}
catch (err) {
//noop
}
}
log();
}
const buildLogPath = path.join(path.dirname(path.dirname(__dirname)), '.build', 'log');
const errorLogsById = new Map();
function getErrorLog(id = '') {
let errorLog = errorLogsById.get(id);
if (!errorLog) {
errorLog = new ErrorLog(id);
errorLogsById.set(id, errorLog);
}
return errorLog;
}
const buildLogFolder = path.join(path.dirname(path.dirname(__dirname)), '.build');
try {
fs.mkdirSync(path.dirname(buildLogPath));
fs.mkdirSync(buildLogFolder);
}
catch (err) {
// ignore
}
function log() {
const errors = _.flatten(allErrors);
const seen = new Set();
errors.map(err => {
if (!seen.has(err)) {
seen.add(err);
fancyLog(`${ansiColors.red('Error')}: ${err}`);
}
});
const regex = /^([^(]+)\((\d+),(\d+)\): (.*)$/;
const messages = errors
.map(err => regex.exec(err))
.filter(match => !!match)
.map(x => x)
.map(([, path, line, column, message]) => ({ path, line: parseInt(line), column: parseInt(column), message }));
try {
fs.writeFileSync(buildLogPath, JSON.stringify(messages));
}
catch (err) {
//noop
}
fancyLog(`Finished ${ansiColors.green('compilation')} with ${errors.length} errors after ${ansiColors.magenta((new Date().getTime() - startTime) + ' ms')}`);
}
function createReporter() {
function createReporter(id) {
const errorLog = getErrorLog(id);
const errors = [];
allErrors.push(errors);
errorLog.allErrors.push(errors);
const result = (err) => errors.push(err);
result.hasErrors = () => errors.length > 0;
result.end = (emitError) => {
errors.length = 0;
onStart();
errorLog.onStart();
return es.through(undefined, function () {
onEnd();
errorLog.onEnd();
if (emitError && errors.length > 0) {
if (!errors.__logged__) {
log();
errorLog.log();
}
errors.__logged__ = true;
const err = new Error(`Found ${errors.length} errors`);

View File

@@ -12,72 +12,89 @@ import * as ansiColors from 'ansi-colors';
import * as fs from 'fs';
import * as path from 'path';
const allErrors: string[][] = [];
let startTime: number | null = null;
let count = 0;
class ErrorLog {
constructor(public id: string) {
}
allErrors: string[][] = [];
startTime: number | null = null;
count = 0;
function onStart(): void {
if (count++ > 0) {
return;
onStart(): void {
if (this.count++ > 0) {
return;
}
this.startTime = new Date().getTime();
fancyLog(`Starting ${ansiColors.green('compilation')}${this.id ? ansiColors.blue(` ${this.id}`) : ''}...`);
}
startTime = new Date().getTime();
fancyLog(`Starting ${ansiColors.green('compilation')}...`);
}
onEnd(): void {
if (--this.count > 0) {
return;
}
function onEnd(): void {
if (--count > 0) {
return;
this.log();
}
log(): void {
const errors = _.flatten(this.allErrors);
const seen = new Set<string>();
errors.map(err => {
if (!seen.has(err)) {
seen.add(err);
fancyLog(`${ansiColors.red('Error')}: ${err}`);
}
});
fancyLog(`Finished ${ansiColors.green('compilation')}${this.id ? ansiColors.blue(` ${this.id}`) : ''} with ${errors.length} errors after ${ansiColors.magenta((new Date().getTime() - this.startTime!) + ' ms')}`);
const regex = /^([^(]+)\((\d+),(\d+)\): (.*)$/s;
const messages = errors
.map(err => regex.exec(err))
.filter(match => !!match)
.map(x => x as string[])
.map(([, path, line, column, message]) => ({ path, line: parseInt(line), column: parseInt(column), message }));
try {
const logFileName = 'log' + (this.id ? `_${this.id}` : '');
fs.writeFileSync(path.join(buildLogFolder, logFileName), JSON.stringify(messages));
} catch (err) {
//noop
}
}
log();
}
const buildLogPath = path.join(path.dirname(path.dirname(__dirname)), '.build', 'log');
const errorLogsById = new Map<string, ErrorLog>();
function getErrorLog(id: string = '') {
let errorLog = errorLogsById.get(id);
if (!errorLog) {
errorLog = new ErrorLog(id);
errorLogsById.set(id, errorLog);
}
return errorLog;
}
const buildLogFolder = path.join(path.dirname(path.dirname(__dirname)), '.build');
try {
fs.mkdirSync(path.dirname(buildLogPath));
fs.mkdirSync(buildLogFolder);
} catch (err) {
// ignore
}
function log(): void {
const errors = _.flatten(allErrors);
const seen = new Set<string>();
errors.map(err => {
if (!seen.has(err)) {
seen.add(err);
fancyLog(`${ansiColors.red('Error')}: ${err}`);
}
});
const regex = /^([^(]+)\((\d+),(\d+)\): (.*)$/;
const messages = errors
.map(err => regex.exec(err))
.filter(match => !!match)
.map(x => x as string[])
.map(([, path, line, column, message]) => ({ path, line: parseInt(line), column: parseInt(column), message }));
try {
fs.writeFileSync(buildLogPath, JSON.stringify(messages));
} catch (err) {
//noop
}
fancyLog(`Finished ${ansiColors.green('compilation')} with ${errors.length} errors after ${ansiColors.magenta((new Date().getTime() - startTime!) + ' ms')}`);
}
export interface IReporter {
(err: string): void;
hasErrors(): boolean;
end(emitError: boolean): NodeJS.ReadWriteStream;
}
export function createReporter(): IReporter {
export function createReporter(id?: string): IReporter {
const errorLog = getErrorLog(id);
const errors: string[] = [];
allErrors.push(errors);
errorLog.allErrors.push(errors);
const result = (err: string) => errors.push(err);
@@ -85,14 +102,14 @@ export function createReporter(): IReporter {
result.end = (emitError: boolean): NodeJS.ReadWriteStream => {
errors.length = 0;
onStart();
errorLog.onStart();
return es.through(undefined, function () {
onEnd();
errorLog.onEnd();
if (emitError && errors.length > 0) {
if (!(errors as any).__logged__) {
log();
errorLog.log();
}
(errors as any).__logged__ = true;

View File

@@ -28,6 +28,7 @@ function writeFile(filePath, contents) {
fs.writeFileSync(filePath, contents);
}
function extractEditor(options) {
var _a;
const tsConfig = JSON.parse(fs.readFileSync(path.join(options.sourcesRoot, 'tsconfig.monaco.json')).toString());
let compilerOptions;
if (tsConfig.extends) {
@@ -47,6 +48,12 @@ function extractEditor(options) {
console.log(`Running tree shaker with shakeLevel ${tss.toStringShakeLevel(options.shakeLevel)}`);
// Take the extra included .d.ts files from `tsconfig.monaco.json`
options.typings = tsConfig.include.filter(includedFile => /\.d\.ts$/.test(includedFile));
// Add extra .d.ts files from `node_modules/@types/`
if (Array.isArray((_a = options.compilerOptions) === null || _a === void 0 ? void 0 : _a.types)) {
options.compilerOptions.types.forEach((type) => {
options.typings.push(`../node_modules/@types/${type}/index.d.ts`);
});
}
let result = tss.shake(options);
for (let fileName in result) {
if (result.hasOwnProperty(fileName)) {

View File

@@ -55,6 +55,13 @@ export function extractEditor(options: tss.ITreeShakingOptions & { destRoot: str
// Take the extra included .d.ts files from `tsconfig.monaco.json`
options.typings = (<string[]>tsConfig.include).filter(includedFile => /\.d\.ts$/.test(includedFile));
// Add extra .d.ts files from `node_modules/@types/`
if (Array.isArray(options.compilerOptions?.types)) {
options.compilerOptions.types.forEach((type: string) => {
options.typings.push(`../node_modules/@types/${type}/index.d.ts`);
});
}
let result = tss.shake(options);
for (let fileName in result) {
if (result.hasOwnProperty(fileName)) {

View File

@@ -4,7 +4,7 @@
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
exports.getElectronVersion = exports.streamToPromise = exports.versionStringToNumber = exports.filter = exports.rebase = exports.getVersion = exports.ensureDir = exports.rreddir = exports.rimraf = exports.stripSourceMappingURL = exports.loadSourcemaps = exports.cleanNodeModules = exports.skipDirectories = exports.toFileUri = exports.setExecutableBit = exports.fixWin32DirectoryPermissions = exports.incremental = void 0;
exports.getElectronVersion = exports.streamToPromise = exports.versionStringToNumber = exports.filter = exports.rebase = exports.getVersion = exports.ensureDir = exports.rreddir = exports.rimraf = exports.rewriteSourceMappingURL = exports.stripSourceMappingURL = exports.loadSourcemaps = exports.cleanNodeModules = exports.skipDirectories = exports.toFileUri = exports.setExecutableBit = exports.fixWin32DirectoryPermissions = exports.incremental = void 0;
const es = require("event-stream");
const debounce = require("debounce");
const _filter = require("gulp-filter");
@@ -167,6 +167,18 @@ function stripSourceMappingURL() {
return es.duplex(input, output);
}
exports.stripSourceMappingURL = stripSourceMappingURL;
function rewriteSourceMappingURL(sourceMappingURLBase) {
const input = es.through();
const output = input
.pipe(es.mapSync(f => {
const contents = f.contents.toString('utf8');
const str = `//# sourceMappingURL=${sourceMappingURLBase}/${path.dirname(f.relative).replace(/\\/g, '/')}/$1`;
f.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, str));
return f;
}));
return es.duplex(input, output);
}
exports.rewriteSourceMappingURL = rewriteSourceMappingURL;
function rimraf(dir) {
const result = () => new Promise((c, e) => {
let retries = 0;

View File

@@ -220,6 +220,20 @@ export function stripSourceMappingURL(): NodeJS.ReadWriteStream {
return es.duplex(input, output);
}
export function rewriteSourceMappingURL(sourceMappingURLBase: string): NodeJS.ReadWriteStream {
const input = es.through();
const output = input
.pipe(es.mapSync<VinylFile, VinylFile>(f => {
const contents = (<Buffer>f.contents).toString('utf8');
const str = `//# sourceMappingURL=${sourceMappingURLBase}/${path.dirname(f.relative).replace(/\\/g, '/')}/$1`;
f.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, str));
return f;
}));
return es.duplex(input, output);
}
export function rimraf(dir: string): () => Promise<void> {
const result = () => new Promise<void>((c, e) => {
let retries = 0;

View File

@@ -7,8 +7,8 @@ let err = false;
const majorNodeVersion = parseInt(/^(\d+)\./.exec(process.versions.node)[1]);
if (majorNodeVersion < 10 || majorNodeVersion >= 13) {
console.error('\033[1;31m*** Please use node >=10 and <=12.\033[0;0m');
if (majorNodeVersion < 10 || majorNodeVersion >= 16) {
console.error('\033[1;31m*** Please use node >=10 and <=16.\033[0;0m');
err = true;
}

View File

@@ -13,6 +13,7 @@
"@types/gulp": "^4.0.5",
"@types/gulp-concat": "^0.0.32",
"@types/gulp-filter": "^3.0.32",
"@types/gulp-gzip": "^0.0.31",
"@types/gulp-json-editor": "^2.2.31",
"@types/gulp-rename": "^0.0.33",
"@types/gulp-sourcemaps": "^0.0.32",
@@ -21,7 +22,7 @@
"@types/minimatch": "^3.0.3",
"@types/minimist": "^1.2.0",
"@types/mocha": "2.2.39",
"@types/node": "^10.14.8",
"@types/node": "^12.11.7",
"@types/pump": "^1.0.1",
"@types/request": "^2.47.0",
"@types/rimraf": "^2.0.2",
@@ -31,26 +32,28 @@
"@types/underscore": "^1.8.9",
"@types/xml2js": "0.0.33",
"@typescript-eslint/experimental-utils": "~2.13.0",
"@typescript-eslint/parser": "^2.12.0",
"@typescript-eslint/parser": "^3.3.0",
"applicationinsights": "1.0.8",
"azure-storage": "^2.1.0",
"documentdb": "1.13.0",
"electron-osx-sign": "^0.4.16",
"github-releases": "^0.4.1",
"gulp-azure-storage": "^0.11.1",
"gulp-bom": "^1.0.0",
"gulp-gzip": "^1.4.2",
"gulp-sourcemaps": "^1.11.0",
"gulp-uglify": "^3.0.0",
"iconv-lite-umd": "0.6.8",
"jsonc-parser": "^2.3.0",
"mime": "^1.3.4",
"minimatch": "3.0.4",
"mime": "^1.4.1",
"minimatch": "^3.0.4",
"minimist": "^1.2.3",
"request": "^2.85.0",
"rollup": "^1.20.3",
"rollup-plugin-commonjs": "^10.1.0",
"rollup-plugin-node-resolve": "^5.2.0",
"terser": "4.3.8",
"typescript": "^4.1.0-dev.20200824",
"typescript": "^4.2.0-dev.20201119",
"vsce": "1.48.0",
"vscode-telemetry-extractor": "^1.6.0",
"xml2js": "^0.4.17"
@@ -62,6 +65,6 @@
"npmCheckJs": "tsc --noEmit"
},
"dependencies": {
"@azure/cosmos": "^3.4.0"
"@azure/cosmos": "^3.9.3"
}
}

View File

@@ -112,6 +112,7 @@ Root: {#SoftwareClassesRootKey}; Subkey: "Software\Classes\.sql\OpenWithProgids"
Root: {#SoftwareClassesRootKey}; Subkey: "Software\Classes\{#RegValueName}.sql"; ValueType: string; ValueName: ""; ValueData: "{cm:SourceFile,SQL}"; Flags: uninsdeletekey; Tasks: associatewithfiles
Root: {#SoftwareClassesRootKey}; Subkey: "Software\Classes\{#RegValueName}.sql"; ValueType: string; ValueName: "AppUserModelID"; ValueData: "{#AppUserId}"; Flags: uninsdeletekey; Tasks: associatewithfiles
Root: {#SoftwareClassesRootKey}; Subkey: "Software\Classes\{#RegValueName}.sql\DefaultIcon"; ValueType: string; ValueName: ""; ValueData: "{app}\resources\app\resources\win32\sql.ico"; Tasks: associatewithfiles
Root: {#SoftwareClassesRootKey}; Subkey: "Software\Classes\{#RegValueName}.sql\shell\open"; ValueType: string; ValueName: "Icon"; ValueData: """{app}\{#ExeBasename}.exe"""; Tasks: associatewithfiles
Root: {#SoftwareClassesRootKey}; Subkey: "Software\Classes\{#RegValueName}.sql\shell\open\command"; ValueType: string; ValueName: ""; ValueData: """{app}\{#ExeBasename}.exe"" ""%1"""; Tasks: associatewithfiles
; .ipynb

File diff suppressed because it is too large Load Diff