VS Code merge to df8fe74bd55313de0dd2303bc47a4aab0ca56b0e (#17979)

* Merge from vscode 504f934659740e9d41501cad9f162b54d7745ad9

* delete unused folders

* distro

* Bump build node version

* update chokidar

* Fix hygiene errors

* distro

* Fix extension lint issues

* Remove strict-vscode

* Add copyright header exemptions

* Bump vscode-extension-telemetry to fix webpacking issue with zone.js

* distro

* Fix failing tests (revert marked.js back to current one until we decide to update)

* Skip searchmodel test

* Fix mac build

* temp debug script loading

* Try disabling coverage

* log error too

* Revert "log error too"

This reverts commit af0183e5d4ab458fdf44b88fbfab9908d090526f.

* Revert "temp debug script loading"

This reverts commit 3d687d541c76db2c5b55626c78ae448d3c25089c.

* Add comments explaining coverage disabling

* Fix ansi_up loading issue

* Merge latest from ads

* Use newer option

* Fix compile

* add debug logging warn

* Always log stack

* log more

* undo debug

* Update to use correct base path (+cleanup)

* distro

* fix compile errors

* Remove strict-vscode

* Fix sql editors not showing

* Show db dropdown input & fix styling

* Fix more info in gallery

* Fix gallery asset requests

* Delete unused workflow

* Fix tapable resolutions for smoke test compile error

* Fix smoke compile

* Disable crash reporting

* Disable interactive

Co-authored-by: ADS Merger <karlb@microsoft.com>
Author: Charles Gagnon
Date: 2022-01-06 09:06:56 -08:00
Committed by: GitHub
Parent: fd2736b6a6
Commit: 2bc6a0cd01
2099 changed files with 79520 additions and 43813 deletions


@@ -30,7 +30,11 @@
],
// Optionally loads a cached yarn install for the repo
"postCreateCommand": ".devcontainer/cache/restore-diff.sh",
"postCreateCommand": ".devcontainer/cache/restore-diff.sh && sudo chown node:node /workspaces",
"remoteUser": "node"
"remoteUser": "node",
"hostRequirements": {
"memory": "6gb"
}
}


@@ -18,3 +18,8 @@
**/extensions/markdown-language-features/notebook-out/**
**/extensions/typescript-basics/test/colorize-fixtures/**
**/extensions/**/dist/**
**/extensions/typescript-language-features/test-workspace/**
# These files are not linted by `yarn eslint`, so we exclude them from being linted in the editor.
# This ensures that if we add new rules and they pass CI, there are also no errors in the editor.
/resources/web/code-web.js


@@ -7,7 +7,8 @@
},
"plugins": [
"@typescript-eslint",
"jsdoc"
"jsdoc",
"header"
],
"rules": {
"constructor-super": "warn",
@@ -133,7 +134,7 @@
"restrictions": [
"vs/nls",
"**/{vs,sql}/base/{common,node}/**",
"*" // node modules
"@vscode/*", "*" // node modules
]
},
{
@@ -175,7 +176,7 @@
"vs/nls",
"**/{vs,sql}/base/{common,node}/**",
"**/{vs,sql}/base/parts/*/{common,node}/**",
"*" // node modules
"@vscode/*", "*" // node modules
]
},
{
@@ -194,7 +195,7 @@
"vs/css!./**/*",
"**/{vs,sql}/base/{common,browser,node,electron-sandbox,electron-browser}/**",
"**/{vs,sql}/base/parts/*/{common,browser,node,electron-sandbox,electron-browser}/**",
"*" // node modules
"@vscode/*", "*" // node modules
]
},
{
@@ -203,7 +204,7 @@
"vs/nls",
"**/{vs,sql}/base/{common,node,electron-main}/**",
"**/{vs,sql}/base/parts/*/{common,node,electron-main}/**",
"*" // node modules
"@vscode/*", "*" // node modules
]
},
{
@@ -251,7 +252,7 @@
"**/{vs,sql}/base/{common,node}/**",
"**/{vs,sql}/base/parts/*/{common,node}/**",
"**/{vs,sql}/platform/*/{common,node}/**",
"*" // node modules
"@vscode/*", "*" // node modules
]
},
{
@@ -273,7 +274,7 @@
"**/{vs,sql}/base/{common,browser,node,electron-sandbox,electron-browser}/**",
"**/{vs,sql}/base/parts/*/{common,browser,node,electron-sandbox,electron-browser}/**",
"**/{vs,sql}/platform/*/{common,browser,node,electron-sandbox,electron-browser}/**",
"*" // node modules
"@vscode/*", "*" // node modules
]
},
{
@@ -284,8 +285,7 @@
"**/{vs,sql}/base/{common,node,electron-main}/**",
"**/{vs,sql}/base/parts/*/{common,node,electron-main}/**",
"**/{vs,sql}/platform/*/{common,node,electron-main}/**",
"**/{vs,sql}/code/**",
"*" // node modules
"@vscode/*", "*" // node modules
]
},
{
@@ -520,7 +520,7 @@
"**/{vs,sql}/workbench/{common,browser,node,electron-sandbox,electron-browser}/**",
"**/{vs,sql}/workbench/api/{common,browser,node,electron-sandbox,electron-browser}/**",
"**/{vs,sql}/workbench/services/*/{common,browser,node,electron-sandbox,electron-browser}/**",
"*" // node modules
"@vscode/*", "*" // node modules
]
},
{
@@ -535,7 +535,7 @@
"vs/workbench/contrib/files/browser/editors/fileEditorInput",
"**/{vs,sql}/workbench/services/**",
"**/{vs,sql}/workbench/test/**",
"*" // node modules
"@vscode/*", "*" // node modules
]
},
{
@@ -589,10 +589,12 @@
"vscode-oniguruma",
"iconv-lite-umd",
"jschardet",
"@vscode/vscode-languagedetection",
"@angular/*",
"rxjs/**",
"sanitize-html",
"ansi_up"
"ansi_up",
"@microsoft/applicationinsights-web"
]
},
{
@@ -605,7 +607,7 @@
"**/{vs,sql}/workbench/{common,node}/**",
"**/{vs,sql}/workbench/api/{common,node}/**",
"**/{vs,sql}/workbench/services/**/{common,node}/**",
"*" // node modules
"@vscode/*", "*" // node modules
]
},
{
@@ -636,7 +638,7 @@
"**/{vs,sql}/workbench/{common,browser,node,electron-sandbox,electron-browser}/**",
"**/{vs,sql}/workbench/api/{common,browser,node,electron-sandbox,electron-browser}/**",
"**/{vs,sql}/workbench/services/**/{common,browser,node,electron-sandbox,electron-browser}/**",
"*" // node modules
"@vscode/*", "*" // node modules
]
},
{
@@ -774,7 +776,7 @@
"**/{vs,sql}/workbench/api/{common,node}/**",
"**/{vs,sql}/workbench/services/**/{common,node}/**",
"**/{vs,sql}/workbench/contrib/**/{common,node}/**",
"*" // node modules
"@vscode/*", "*" // node modules
]
},
{
@@ -807,7 +809,7 @@
"**/{vs,sql}/workbench/api/{common,browser,node,electron-sandbox,electron-browser}/**",
"**/{vs,sql}/workbench/services/**/{common,browser,node,electron-sandbox,electron-browser}/**",
"**/{vs,sql}/workbench/contrib/**/{common,browser,node,electron-sandbox,electron-browser}/**",
"*" // node modules
"@vscode/*", "*" // node modules
]
},
{
@@ -830,7 +832,7 @@
"**/{vs,sql}/base/parts/**/{common,node}/**",
"**/{vs,sql}/platform/**/{common,node}/**",
"**/{vs,sql}/code/**/{common,node}/**",
"*" // node modules
"@vscode/*", "*" // node modules
]
},
{
@@ -842,7 +844,7 @@
"**/{vs,sql}/base/parts/**/{common,browser,node,electron-sandbox,electron-browser}/**",
"**/{vs,sql}/platform/**/{common,browser,node,electron-sandbox,electron-browser}/**",
"**/{vs,sql}/code/**/{common,browser,node,electron-sandbox,electron-browser}/**",
"*" // node modules
"@vscode/*", "*" // node modules
]
},
{
@@ -853,7 +855,7 @@
"**/{vs,sql}/base/parts/**/{common,node,electron-main}/**",
"**/{vs,sql}/platform/**/{common,node,electron-main}/**",
"**/{vs,sql}/code/**/{common,node,electron-main}/**",
"*" // node modules
"@vscode/*", "*" // node modules
]
},
{
@@ -865,8 +867,7 @@
"**/{vs,sql}/platform/**/{common,node}/**",
"**/{vs,sql}/workbench/**/{common,node}/**",
"**/{vs,sql}/server/**",
"**/{vs,sql}/code/**/{common,node}/**",
"*" // node modules
"@vscode/*", "*" // node modules
]
},
{
@@ -937,28 +938,28 @@
"target": "**/test/smoke/**",
"restrictions": [
"**/test/smoke/**",
"*" // node modules
"@vscode/*", "*" // node modules
]
},
{
"target": "**/test/automation/**",
"restrictions": [
"**/test/automation/**",
"*" // node modules
"@vscode/*", "*" // node modules
]
},
{
"target": "**/test/integration/**",
"restrictions": [
"**/test/integration/**",
"*" // node modules
"@vscode/*", "*" // node modules
]
},
{
"target": "**/test/monaco/**",
"restrictions": [
"**/test/monaco/**",
"*" // node modules
"@vscode/*", "*" // node modules
]
},
{
@@ -978,7 +979,7 @@
"target": "**/{node,electron-browser,electron-main}/**/*.test.ts",
"restrictions": [
"**/{vs,sql}/**",
"*", // node modules
"@vscode/*", "*", // node modules
"@angular/*" // {{SQL CARBON EDIT}}
]
},
@@ -986,14 +987,14 @@
"target": "**/{node,electron-browser,electron-main}/**/test/**",
"restrictions": [
"**/{vs,sql}/**",
"*" // node modules
"@vscode/*", "*" // node modules
]
},
{
"target": "**/test/{node,electron-browser,electron-main}/**",
"restrictions": [
"**/{vs,sql}/**",
"*" // node modules
"@vscode/*", "*" // node modules
]
},
{
@@ -1021,6 +1022,16 @@
"xterm*"
]
}
],
"header/header": [
2,
"block",
[
"---------------------------------------------------------------------------------------------",
" * Copyright (c) Microsoft Corporation. All rights reserved.",
" * Licensed under the Source EULA. See License.txt in the project root for license information.",
" *--------------------------------------------------------------------------------------------"
]
]
},
"overrides": [
@@ -1109,6 +1120,46 @@
}
]
}
},
{
"files": [
"src/{vs,sql}/server/*",
// {{SQL CARBON EDIT}} Ignore our own that don't use our copyright
"extensions/azuremonitor/src/prompts/**",
"extensions/azuremonitor/src/typings/findRemove.d.ts",
"extensions/kusto/src/prompts/**",
"extensions/mssql/src/hdfs/webhdfs.ts",
"extensions/mssql/src/prompts/**",
"extensions/mssql/src/typings/bufferStreamReader.d.ts",
"extensions/mssql/src/typings/findRemove.d.ts",
"extensions/notebook/resources/jupyter_config/**",
"extensions/notebook/src/intellisense/text.ts",
"extensions/notebook/src/prompts/**",
"extensions/resource-deployment/src/typings/linuxReleaseInfo.d.ts",
"src/sql/base/browser/ui/table/plugins/autoSizeColumns.plugin.ts",
"src/sql/base/browser/ui/table/plugins/cellSelectionModel.plugin.ts",
"src/sql/base/browser/ui/table/plugins/checkboxSelectColumn.plugin.ts",
"src/sql/base/browser/ui/table/plugins/headerFilter.plugin.ts",
"src/sql/base/browser/ui/table/plugins/rowDetailView.ts",
"src/sql/base/browser/ui/table/plugins/rowSelectionModel.plugin.ts",
"src/sql/workbench/services/notebook/browser/outputs/factories.ts",
"src/sql/workbench/services/notebook/browser/outputs/mimemodel.ts",
"src/sql/workbench/services/notebook/browser/outputs/registry.ts",
"src/sql/workbench/services/notebook/browser/outputs/renderMimeInterfaces.ts",
"src/sql/workbench/services/notebook/browser/outputs/sanitizer.ts",
"src/sql/workbench/contrib/notebook/browser/models/outputProcessor.ts",
"src/sql/workbench/contrib/notebook/browser/turndownPluginGfm.ts",
"src/sql/workbench/services/notebook/common/nbformat.ts",
"src/sql/workbench/services/notebook/browser/outputs/renderers.ts",
"src/sql/workbench/services/notebook/browser/outputs/tableRenderers.ts"
],
"rules": {
"header/header": [
// hygiene.js still checks that all files (even those in this directory) are MIT licensed.
"off"
]
}
}
]
}
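For reference, the "header/header" rule above expects each source file to open with the configured block comment. A minimal compliant file would look roughly like the sketch below (the file contents are illustrative only; the files listed in the override are exempt):

/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

export const example = 'any other TypeScript source in the repo must start with the header above';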

.git-blame-ignore

@@ -0,0 +1,21 @@
# https://git-scm.com/docs/git-blame#Documentation/git-blame.txt---ignore-revs-fileltfilegt
# mjbvz: Fix spacing
13f4f052582bcec3d6c6c6a70d995c9dee2cac13
# mjbvz: Add script to run build with noImplicitOverride
ae1452eea678f5266ef513f22dacebb90955d6c9
# alexdima: Revert "bump version"
537ba0ef1791c090bb18bc68d727816c0451c117
# alexdima: bump version
387a0dcb82df729e316ca2518a9ed81a75482b18
# joaomoreno: add ghooks dev dependency
0dfc06e0f9de5925de792cdf9f0e6597bb25908f
# mjbvz: organize imports
494cbbd02d67e87727ec885f98d19551aa33aad1
a3cb14be7f2cceadb17adf843675b1a59537dbbd
ee1655a82ebdfd38bf8792088a6602c69f7bbd94
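The ignore-revs file only takes effect when git blame is pointed at it; a typical local setup (shown as an example, not part of this commit, with <file> as a placeholder path) is:

# use it for a single blame invocation
git blame --ignore-revs-file .git-blame-ignore <file>
# or configure it once per clone
git config blame.ignoreRevsFile .git-blame-ignore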

.gitattributes

@@ -7,4 +7,5 @@ ThirdPartyNotices.txt eol=crlf
*.cmd eol=crlf
*.ps1 eol=lf
*.sh eol=lf
*.rtf -text
*.rtf -text
*.json linguist-language=jsonc


@@ -5,6 +5,8 @@
"greazer",
"donjayamanne",
"jilljac",
"IanMatthewHuff"
"IanMatthewHuff",
"tanhakabir",
"dynamicwebpaige"
]
}


@@ -12,73 +12,73 @@ on:
jobs:
windows:
name: Windows
runs-on: windows-latest
timeout-minutes: 30
env:
CHILD_CONCURRENCY: "1"
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@v2.2.0
name: Windows
runs-on: windows-latest
timeout-minutes: 30
env:
CHILD_CONCURRENCY: "1"
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v2
with:
node-version: 12
- uses: actions/setup-node@v2
with:
node-version: 14
- uses: actions/setup-python@v2
with:
python-version: "2.x"
- uses: actions/setup-python@v2
with:
python-version: "2.x"
# {{SQL CARBON EDIT}} Skip caching for now
# - name: Compute node modules cache key
# id: nodeModulesCacheKey
# run: echo "::set-output name=value::$(node build/azure-pipelines/common/computeNodeModulesCacheKey.js)"
# - name: Cache node_modules archive
# id: cacheNodeModules
# uses: actions/cache@v2
# with:
# path: ".build/node_modules_cache"
# key: "${{ runner.os }}-cacheNodeModulesArchive-${{ steps.nodeModulesCacheKey.outputs.value }}"
# - name: Extract node_modules archive
# if: ${{ steps.cacheNodeModules.outputs.cache-hit == 'true' }}
# run: 7z.exe x .build/node_modules_cache/cache.7z -aos
# - name: Get yarn cache directory path
# id: yarnCacheDirPath
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
# run: echo "::set-output name=dir::$(yarn cache dir)"
# - name: Cache yarn directory
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
# uses: actions/cache@v2
# with:
# path: ${{ steps.yarnCacheDirPath.outputs.dir }}
# key: ${{ runner.os }}-yarnCacheDir-${{ steps.nodeModulesCacheKey.outputs.value }}
# restore-keys: ${{ runner.os }}-yarnCacheDir-
# {{SQL CARBON EDIT}} Skip caching for now
# - name: Compute node modules cache key
# id: nodeModulesCacheKey
# run: echo "::set-output name=value::$(node build/azure-pipelines/common/computeNodeModulesCacheKey.js)"
# - name: Cache node_modules archive
# id: cacheNodeModules
# uses: actions/cache@v2
# with:
# path: ".build/node_modules_cache"
# key: "${{ runner.os }}-cacheNodeModulesArchive-${{ steps.nodeModulesCacheKey.outputs.value }}"
# - name: Extract node_modules archive
# if: ${{ steps.cacheNodeModules.outputs.cache-hit == 'true' }}
# run: 7z.exe x .build/node_modules_cache/cache.7z -aos
# - name: Get yarn cache directory path
# id: yarnCacheDirPath
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
# run: echo "::set-output name=dir::$(yarn cache dir)"
# - name: Cache yarn directory
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
# uses: actions/cache@v2
# with:
# path: ${{ steps.yarnCacheDirPath.outputs.dir }}
# key: ${{ runner.os }}-yarnCacheDir-${{ steps.nodeModulesCacheKey.outputs.value }}
# restore-keys: ${{ runner.os }}-yarnCacheDir-
- name: Execute yarn
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }} {{SQL CARBON EDIT}} Skipping caching for now
env:
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
run: yarn --frozen-lockfile --network-timeout 180000
# - name: Create node_modules archive {{SQL CARBON EDIT}} Skip caching for now
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
# run: |
# mkdir -Force .build
# node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt
# mkdir -Force .build/node_modules_cache
# 7z.exe a .build/node_modules_cache/cache.7z -mx3 `@.build/node_modules_list.txt
- name: Execute yarn
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }} {{SQL CARBON EDIT}} Skipping caching for now
env:
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
run: yarn --frozen-lockfile --network-timeout 180000
# - name: Create node_modules archive {{SQL CARBON EDIT}} Skip caching for now
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
# run: |
# mkdir -Force .build
# node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt
# mkdir -Force .build/node_modules_cache
# 7z.exe a .build/node_modules_cache/cache.7z -mx3 `@.build/node_modules_list.txt
- name: Compile and Download
run: yarn npm-run-all --max_old_space_size=4095 -lp compile "electron x64" # {{SQL CARBON EDIT}} Remove unused options playwright-install download-builtin-extensions
- name: Compile and Download
run: yarn npm-run-all --max_old_space_size=4095 -lp compile "electron x64" # {{SQL CARBON EDIT}} Remove unused options playwright-install download-builtin-extensions
- name: Run Unit Tests (Electron)
run: .\scripts\test.bat
- name: Run Unit Tests (Electron)
run: .\scripts\test.bat
# - name: Run Unit Tests (Browser) {{SQL CARBON EDIT}} disable for now
# run: yarn test-browser --browser chromium
# - name: Run Unit Tests (Browser) {{SQL CARBON EDIT}} disable for now
# run: yarn test-browser --browser chromium
# - name: Run Integration Tests (Electron) {{SQL CARBON EDIT}} disable for now
# run: .\scripts\test-integration.bat
# - name: Run Integration Tests (Electron) {{SQL CARBON EDIT}} disable for now
# run: .\scripts\test-integration.bat
linux:
name: Linux
@@ -101,7 +101,7 @@ jobs:
- uses: actions/setup-node@v2
with:
node-version: 12
node-version: 14
# {{SQL CARBON EDIT}} Skip caching for now
# - name: Compute node modules cache key
# id: nodeModulesCacheKey
@@ -111,8 +111,8 @@ jobs:
# uses: actions/cache@v2
# with:
# path: "**/node_modules"
# key: ${{ runner.os }}-cacheNodeModules13-${{ steps.nodeModulesCacheKey.outputs.value }}
# restore-keys: ${{ runner.os }}-cacheNodeModules13-
# key: ${{ runner.os }}-cacheNodeModules14-${{ steps.nodeModulesCacheKey.outputs.value }}
# restore-keys: ${{ runner.os }}-cacheNodeModules14-
# - name: Get yarn cache directory path
# id: yarnCacheDirPath
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
@@ -136,7 +136,7 @@ jobs:
- name: Run Unit Tests (Electron)
id: electron-unit-tests
run: DISPLAY=:10 ./scripts/test.sh --coverage --runGlob "**/sql/**/*.test.js" # {{SQL CARBON EDIT}} Run only our tests with coverage
run: DISPLAY=:10 ./scripts/test.sh --runGlob "**/sql/**/*.test.js" # {{SQL CARBON EDIT}} Run only our tests with coverage. Disable for now since it's currently broken --coverage
- name: Run Extension Unit Tests (Electron)
id: electron-extension-unit-tests
@@ -170,7 +170,7 @@ jobs:
- uses: actions/setup-node@v2
with:
node-version: 12
node-version: 14
# {{SQL CARBON EDIT}} Skip caching for now
# - name: Compute node modules cache key
@@ -181,8 +181,8 @@ jobs:
# uses: actions/cache@v2
# with:
# path: "**/node_modules"
# key: ${{ runner.os }}-cacheNodeModules13-${{ steps.nodeModulesCacheKey.outputs.value }}
# restore-keys: ${{ runner.os }}-cacheNodeModules13-
# key: ${{ runner.os }}-cacheNodeModules14-${{ steps.nodeModulesCacheKey.outputs.value }}
# restore-keys: ${{ runner.os }}-cacheNodeModules14-
# - name: Get yarn cache directory path
# id: yarnCacheDirPath
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
@@ -195,7 +195,7 @@ jobs:
# key: ${{ runner.os }}-yarnCacheDir-${{ steps.nodeModulesCacheKey.outputs.value }}
# restore-keys: ${{ runner.os }}-yarnCacheDir-
- name: Execute yarn
if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }} {{SQL CARBON EDIT}} Skip caching for now
env:
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
@@ -232,7 +232,7 @@ jobs:
- uses: actions/setup-node@v2
with:
node-version: 12
node-version: 14
- name: Compute node modules cache key
id: nodeModulesCacheKey
@@ -242,7 +242,8 @@ jobs:
uses: actions/cache@v2
with:
path: "**/node_modules"
key: ${{ runner.os }}-cacheNodeModules13-${{ steps.nodeModulesCacheKey.outputs.value }}
key: ${{ runner.os }}-cacheNodeModules14-${{ steps.nodeModulesCacheKey.outputs.value }}
restore-keys: ${{ runner.os }}-cacheNodeModules14-
- name: Get yarn cache directory path
id: yarnCacheDirPath
if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
@@ -272,15 +273,23 @@ jobs:
- name: Run Valid Layers Checks
run: yarn valid-layers-check
- name: Run Strict Compile Options # {{SQL CARBON EDIT}} add step
run: yarn strict-vscode
# - name: Run Monaco Editor Checks {{SQL CARBON EDIT}} Remove Monaco checks
# run: yarn monaco-compile-check
- name: Compile /build/
run: yarn --cwd build compile
- name: Run eslint
run: yarn eslint
# {{SQL CARBON EDIT}} Don't need this
# - name: Run Monaco Editor Checks
# run: yarn monaco-compile-check
# {{SQL CARBON EDIT}} Don't need this
# - name: Run vscode-dts Compile Checks
# run: yarn vscode-dts-compile-check
- name: Run Trusted Types Checks
run: yarn tsec-compile-check


@@ -0,0 +1,28 @@
name: Create Prebuild
on:
workflow_dispatch:
schedule:
- cron: '0 0 * * *'
jobs:
createPrebuild:
runs-on: ubuntu-latest
steps:
- id: create-prebuild-production
run: |
$splat = @{
ErrorAction = 'Stop'
Uri = 'https://api.github.com/vscs_internal/user/vscode-prebuilds-bot/codespaces/prebuild'
Method = 'POST'
Headers = @{
'Content-Type' = 'application/json; charset=utf-8'
'Authorization' = 'token ${{ secrets.CODESPACES_PREBUILD_PAT }}'
}
Body = @{
ref = 'main'
repository_id = 41881900
location = 'WestUs2'
} | ConvertTo-Json
}
Invoke-RestMethod @splat
shell: pwsh

.gitignore

@@ -15,3 +15,6 @@ test_data/
test-results/
yarn-error.log
*.vsix
vscode.lsif
vscode.db
/.profile-oss

.lsifrc.json

@@ -0,0 +1,6 @@
{
"project": "src/tsconfig.json",
"source": "./package.json",
"package": "package.json",
"out": "vscode.lsif"
}

.vscode/launch.json

@@ -48,7 +48,7 @@
"cascadeTerminateToConfigurations": [
"Attach to Extension Host"
],
"userDataDir": false,
"userDataDir": "${workspaceFolder}/.profile-oss",
"pauseForSourceMap": false,
"outFiles": [
"${workspaceFolder}/out/**/*.js"


@@ -7,7 +7,7 @@
{
"kind": 2,
"language": "github-issues",
"value": "$repo=repo:microsoft/vscode\n$milestone=milestone:\"June 2021\""
"value": "$repo=repo:microsoft/vscode\n$milestone=milestone:\"August 2021\""
},
{
"kind": 1,


@@ -7,7 +7,7 @@
{
"kind": 2,
"language": "github-issues",
"value": "$REPOS=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-js-debug repo:microsoft/vscode-remote-release repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-settings-sync-server repo:microsoft/vscode-emmet-helper repo:microsoft/vscode-remotehub\n\n$MILESTONE=milestone:\"May 2021\""
"value": "$REPOS=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-js-debug repo:microsoft/vscode-remote-release repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-settings-sync-server repo:microsoft/vscode-emmet-helper repo:microsoft/vscode-remotehub\n\n$MILESTONE=milestone:\"July 2021\""
},
{
"kind": 1,


@@ -7,7 +7,7 @@
{
"kind": 2,
"language": "github-issues",
"value": "$REPOS=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-js-debug repo:microsoft/vscode-remote-release repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-settings-sync-server repo:microsoft/vscode-remotehub\n\n$MILESTONE=milestone:\"May 2021\"\n\n$MINE=assignee:@me"
"value": "$REPOS=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-js-debug repo:microsoft/vscode-remote-release repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-settings-sync-server repo:microsoft/vscode-remotehub repo:microsoft/vscode-emmet-helper\n\n$MILESTONE=milestone:\"July 2021\"\n\n$MINE=assignee:@me"
},
{
"kind": 1,
@@ -62,7 +62,7 @@
{
"kind": 2,
"language": "github-issues",
"value": "$REPOS $MILESTONE $MINE is:issue is:closed label:feature-request label:verification-needed"
"value": "$REPOS $MILESTONE $MINE is:issue is:closed label:feature-request label:verification-needed -label:verified"
},
{
"kind": 1,


@@ -7,7 +7,7 @@
{
"kind": 2,
"language": "github-issues",
"value": "// list of repos we work in\n$repos=repo:microsoft/vscode repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks repo:microsoft/vscode-internalbacklog\n\n// current milestone name\n$milestone=milestone:\"June 2021\""
"value": "// list of repos we work in\n$repos=repo:microsoft/vscode repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks repo:microsoft/vscode-internalbacklog\n\n// current milestone name\n$milestone=milestone:\"August 2021\""
},
{
"kind": 1,


@@ -2,55 +2,46 @@
{
"kind": 1,
"language": "markdown",
"value": "### Bug Verification Queries\n\nBefore shipping we want to verify _all_ bugs. That means when a bug is fixed we check that the fix actually works. It's always best to start with bugs that you have filed and the proceed with bugs that have been filed from users outside the development team. ",
"editable": true
"value": "### Bug Verification Queries\n\nBefore shipping we want to verify _all_ bugs. That means when a bug is fixed we check that the fix actually works. It's always best to start with bugs that you have filed and the proceed with bugs that have been filed from users outside the development team. "
},
{
"kind": 1,
"language": "markdown",
"value": "#### Config: update list of `repos` and the `milestone`",
"editable": true
"value": "#### Config: update list of `repos` and the `milestone`"
},
{
"kind": 2,
"language": "github-issues",
"value": "$repos=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks \n$milestone=milestone:\"March 2021\"",
"editable": true
"value": "$repos=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks repo:microsoft/vscode-emmet-helper repo:microsoft/vscode-jupyter repo:microsoft/vscode-python\r\n$milestone=milestone:\"July 2021\""
},
{
"kind": 1,
"language": "markdown",
"value": "### Bugs You Filed",
"editable": true
"value": "### Bugs You Filed"
},
{
"kind": 2,
"language": "github-issues",
"value": "$repos $milestone is:closed -assignee:@me label:bug -label:verified -label:*duplicate author:@me",
"editable": false
"value": "$repos $milestone is:closed -assignee:@me label:bug -label:verified -label:*duplicate author:@me"
},
{
"kind": 1,
"language": "markdown",
"value": "### Bugs From Outside",
"editable": true
"value": "### Bugs From Outside"
},
{
"kind": 2,
"language": "github-issues",
"value": "$repos $milestone is:closed -assignee:@me label:bug -label:verified -label:*duplicate -author:@me -assignee:@me label:bug -label:verified -author:@me -author:aeschli -author:alexdima -author:alexr00 -author:bpasero -author:chrisdias -author:chrmarti -author:connor4312 -author:dbaeumer -author:deepak1556 -author:eamodio -author:egamma -author:gregvanl -author:isidorn -author:JacksonKearl -author:joaomoreno -author:jrieken -author:lramos15 -author:lszomoru -author:meganrogge -author:misolori -author:mjbvz -author:rebornix -author:RMacfarlane -author:roblourens -author:sana-ajani -author:sandy081 -author:sbatten -author:Tyriar -author:weinand -author:rzhao271 -author:kieferrm -author:TylerLeonhardt -author:bamurtaugh",
"editable": false
"value": "$repos $milestone is:closed -assignee:@me label:bug -label:verified -label:*duplicate -author:@me -assignee:@me label:bug -label:verified -author:@me -author:aeschli -author:alexdima -author:alexr00 -author:bpasero -author:chrisdias -author:chrmarti -author:connor4312 -author:dbaeumer -author:deepak1556 -author:eamodio -author:egamma -author:gregvanl -author:isidorn -author:JacksonKearl -author:joaomoreno -author:jrieken -author:lramos15 -author:lszomoru -author:meganrogge -author:misolori -author:mjbvz -author:rebornix -author:RMacfarlane -author:roblourens -author:sana-ajani -author:sandy081 -author:sbatten -author:Tyriar -author:weinand -author:rzhao271 -author:kieferrm -author:TylerLeonhardt -author:bamurtaugh -author:hediet -author:joyceerhl -author:rchiodo -author:IanMatthewHuff"
},
{
"kind": 1,
"language": "markdown",
"value": "### All",
"editable": true
"value": "### All"
},
{
"kind": 2,
"language": "github-issues",
"value": "$repos $milestone is:closed -assignee:@me label:bug -label:verified -label:*duplicate",
"editable": false
"value": "$repos $milestone is:closed -assignee:@me label:bug -label:verified -label:*duplicate"
}
]


@@ -4,6 +4,7 @@
"files.exclude": {
".git": true,
".build": true,
".profile-oss": true,
"**/.DS_Store": true,
"build/**/*.js": {
"when": "$(basename).ts"

.vscode/tasks.json

@@ -100,20 +100,6 @@
"group": "build",
"problemMatcher": []
},
{
"type": "npm",
"script": "strict-vscode-watch",
"label": "TS - Strict VSCode",
"isBackground": true,
"presentation": {
"reveal": "never"
},
"problemMatcher": {
"base": "$tsc-watch",
"owner": "typescript-vscode",
"applyTo": "allDocuments"
}
},
{
"type": "npm",
"script": "watch-webd",


@@ -1,3 +1,3 @@
disturl "https://electronjs.org/headers"
target "12.0.9"
target "13.1.8"
runtime "electron"


@@ -8,149 +8,150 @@ The original copyright notices and the licenses under which Microsoft received
such components are set forth below. Microsoft reserves all rights not
expressly granted herein, whether by implication, estoppel or otherwise.
angular: https://github.com/angular/angular
angular2-grid: https://github.com/BTMorton/angular2-grid
angular2-slickgrid: https://github.com/Microsoft/angular2-slickgrid
applicationinsights: https://github.com/Microsoft/ApplicationInsights-node.js
axios: https://github.com/axios/axios
bootstrap: https://github.com/twbs/bootstrap
chart.js: https://github.com/Timer/chartjs
chokidar: https://github.com/paulmillr/chokidar
comment-json: https://github.com/kaelzhang/node-comment-json
core-js: https://github.com/zloirock/core-js
decompress: https://github.com/kevva/decompress
emmet: https://github.com/emmetio/emmet
error-ex: https://github.com/Qix-/node-error-ex
escape-string-regexp: https://github.com/sindresorhus/escape-string-regexp
fast-plist: https://github.com/Microsoft/node-fast-plist
figures: https://github.com/sindresorhus/figures
find-remove: https://www.npmjs.com/package/find-remove
fs-extra: https://github.com/jprichardson/node-fs-extra
gc-signals: https://github.com/Microsoft/node-gc-signals
getmac: https://github.com/bevry/getmac
graceful-fs: https://github.com/isaacs/node-graceful-fs
gridstack: https://github.com/gridstack/gridstack.js
html-query-plan: https://github.com/JustinPealing/html-query-plan
http-proxy-agent: https://github.com/TooTallNate/node-https-proxy-agent
https-proxy-agent: https://github.com/TooTallNate/node-https-proxy-agent
iconv-lite: https://github.com/ashtuchkin/iconv-lite
jquery: https://github.com/jquery/jquery
jquery-ui: https://github.com/jquery/jquery-ui
jquery.event.drag: https://github.com/devongovett/jquery.event.drag
jschardet: https://github.com/aadsm/jschardet
jupyter-powershell: https://github.com/vors/jupyter-powershell
JupyterLab: https://github.com/jupyterlab/jupyterlab
keytar: https://github.com/atom/node-keytar
make-error: https://github.com/JsCommunity/make-error
mark.js: https://github.com/julmot/mark.js
minimist: https://github.com/substack/minimist
moment: https://github.com/moment/moment
native-keymap: https://github.com/Microsoft/node-native-keymap
native-watchdog: https://github.com/Microsoft/node-native-watchdog
ng2-charts: https://github.com/valor-software/ng2-charts
node-fetch: https://github.com/bitinn/node-fetch
node-pty: https://github.com/Tyriar/node-pty
nsfw: https://github.com/Axosoft/nsfw
optimist: https://github.com/substack/node-optimist
primeng: https://github.com/primefaces/primeng
process-nextick-args: https://github.com/calvinmetcalf/process-nextick-args
pty.js: https://github.com/chjj/pty.js
pyzmq: https://github.com/zeromq/pyzmq
qs: https://github.com/ljharb/qs
reflect-metadata: https://github.com/rbuckton/reflect-metadata
request: https://github.com/request/request
rxjs: https://github.com/ReactiveX/RxJS
semver: https://github.com/npm/node-semver
slickgrid: https://github.com/6pac/SlickGrid
sqltoolsservice: https://github.com/Microsoft/sqltoolsservice
svg.js: https://github.com/svgdotjs/svg.js
systemjs: https://github.com/systemjs/systemjs
temp-write: https://github.com/sindresorhus/temp-write
turndown: https://github.com/domchristie/turndown
turndown-plugin-gfm: https://github.com/domchristie/turndown-plugin-gfm
underscore: https://github.com/jashkenas/underscore
v8-profiler: https://github.com/node-inspector/v8-profiler
vscode: https://github.com/microsoft/vscode
vscode-debugprotocol: https://github.com/Microsoft/vscode-debugadapter-node
vscode-languageclient: https://github.com/Microsoft/vscode-languageserver-node
vscode-nls: https://github.com/Microsoft/vscode-nls
vscode-ripgrep: https://github.com/roblourens/vscode-ripgrep
vscode-textmate: https://github.com/Microsoft/vscode-textmate
winreg: https://github.com/fresc81/node-winreg
xmldom: https://github.com/xmldom/xmldom
xml-formatter: https://github.com/chrisbottin/xml-formatter
xterm: https://github.com/sourcelair/xterm.js
yargs: https://github.com/yargs/yargs
yauzl: https://github.com/thejoshwolfe/yauzl
zone.js: https://www.npmjs.com/package/zone
angular: https://github.com/angular/angular
angular2-grid: https://github.com/BTMorton/angular2-grid
angular2-slickgrid: https://github.com/Microsoft/angular2-slickgrid
applicationinsights: https://github.com/Microsoft/ApplicationInsights-node.js
axios: https://github.com/axios/axios
bootstrap: https://github.com/twbs/bootstrap
chart.js: https://github.com/Timer/chartjs
chokidar: https://github.com/paulmillr/chokidar
comment-json: https://github.com/kaelzhang/node-comment-json
core-js: https://github.com/zloirock/core-js
decompress: https://github.com/kevva/decompress
emmet: https://github.com/emmetio/emmet
error-ex: https://github.com/Qix-/node-error-ex
escape-string-regexp: https://github.com/sindresorhus/escape-string-regexp
fast-plist: https://github.com/Microsoft/node-fast-plist
figures: https://github.com/sindresorhus/figures
find-remove: https://www.npmjs.com/package/find-remove
fs-extra: https://github.com/jprichardson/node-fs-extra
gc-signals: https://github.com/Microsoft/node-gc-signals
getmac: https://github.com/bevry/getmac
graceful-fs: https://github.com/isaacs/node-graceful-fs
gridstack: https://github.com/gridstack/gridstack.js
html-query-plan: https://github.com/JustinPealing/html-query-plan
http-proxy-agent: https://github.com/TooTallNate/node-https-proxy-agent
https-proxy-agent: https://github.com/TooTallNate/node-https-proxy-agent
iconv-lite: https://github.com/ashtuchkin/iconv-lite
jquery: https://github.com/jquery/jquery
jquery-ui: https://github.com/jquery/jquery-ui
jquery.event.drag: https://github.com/devongovett/jquery.event.drag
jschardet: https://github.com/aadsm/jschardet
jupyter-powershell: https://github.com/vors/jupyter-powershell
JupyterLab: https://github.com/jupyterlab/jupyterlab
keytar: https://github.com/atom/node-keytar
make-error: https://github.com/JsCommunity/make-error
mark.js: https://github.com/julmot/mark.js
minimist: https://github.com/substack/minimist
moment: https://github.com/moment/moment
native-keymap: https://github.com/Microsoft/node-native-keymap
native-watchdog: https://github.com/Microsoft/node-native-watchdog
ng2-charts: https://github.com/valor-software/ng2-charts
node-fetch: https://github.com/bitinn/node-fetch
node-pty: https://github.com/Tyriar/node-pty
nsfw: https://github.com/Axosoft/nsfw
optimist: https://github.com/substack/node-optimist
primeng: https://github.com/primefaces/primeng
process-nextick-args: https://github.com/calvinmetcalf/process-nextick-args
pty.js: https://github.com/chjj/pty.js
pyzmq: https://github.com/zeromq/pyzmq
qs: https://github.com/ljharb/qs
reflect-metadata: https://github.com/rbuckton/reflect-metadata
request: https://github.com/request/request
rxjs: https://github.com/ReactiveX/RxJS
semver: https://github.com/npm/node-semver
slickgrid: https://github.com/6pac/SlickGrid
sqltoolsservice: https://github.com/Microsoft/sqltoolsservice
svg.js: https://github.com/svgdotjs/svg.js
systemjs: https://github.com/systemjs/systemjs
temp-write: https://github.com/sindresorhus/temp-write
turndown: https://github.com/domchristie/turndown
turndown-plugin-gfm: https://github.com/domchristie/turndown-plugin-gfm
underscore: https://github.com/jashkenas/underscore
v8-profiler: https://github.com/node-inspector/v8-profiler
vscode: https://github.com/microsoft/vscode
vscode-debugprotocol: https://github.com/Microsoft/vscode-debugadapter-node
vscode-languageclient: https://github.com/Microsoft/vscode-languageserver-node
vscode-nls: https://github.com/Microsoft/vscode-nls
vscode-ripgrep: https://github.com/roblourens/vscode-ripgrep
vscode-textmate: https://github.com/Microsoft/vscode-textmate
winreg: https://github.com/fresc81/node-winreg
xmldom: https://github.com/xmldom/xmldom
xml-formatter: https://github.com/chrisbottin/xml-formatter
xterm: https://github.com/sourcelair/xterm.js
yargs: https://github.com/yargs/yargs
yauzl: https://github.com/thejoshwolfe/yauzl
zone.js: https://www.npmjs.com/package/zone
Microsoft PROSE SDK: https://microsoft.github.io/prose
Microsoft PROSE SDK: https://microsoft.github.io/prose
atom/language-clojure version 0.22.7 (https://github.com/atom/language-clojure)
atom/language-coffee-script version 0.49.3 (https://github.com/atom/language-coffee-script)
atom/language-css version 0.44.4 (https://github.com/atom/language-css)
atom/language-java version 0.32.1 (https://github.com/atom/language-java)
atom/language-sass version 0.62.1 (https://github.com/atom/language-sass)
atom/language-shellscript version 0.26.0 (https://github.com/atom/language-shellscript)
atom/language-xml version 0.35.2 (https://github.com/atom/language-xml)
better-go-syntax version 1.0.0 (https://github.com/jeff-hykin/better-go-syntax/ )
Colorsublime-Themes version 0.1.0 (https://github.com/Colorsublime/Colorsublime-Themes)
daaain/Handlebars version 1.8.0 (https://github.com/daaain/Handlebars)
dart-lang/dart-syntax-highlight (https://github.com/dart-lang/dart-syntax-highlight)
davidrios/pug-tmbundle (https://github.com/davidrios/pug-tmbundle)
definitelytyped (https://github.com/DefinitelyTyped/DefinitelyTyped)
demyte/language-cshtml version 0.3.0 (https://github.com/demyte/language-cshtml)
Document Object Model version 4.0.0 (https://www.w3.org/DOM/)
dotnet/csharp-tmLanguage version 0.1.0 (https://github.com/dotnet/csharp-tmLanguage)
expand-abbreviation version 0.5.8 (https://github.com/emmetio/expand-abbreviation)
fadeevab/make.tmbundle (https://github.com/fadeevab/make.tmbundle)
freebroccolo/atom-language-swift (https://github.com/freebroccolo/atom-language-swift)
HTML 5.1 W3C Working Draft version 08 October 2015 (http://www.w3.org/TR/2015/WD-html51-20151008/)
Ikuyadeu/vscode-R version 1.3.0 (https://github.com/Ikuyadeu/vscode-R)
insane version 2.6.2 (https://github.com/bevacqua/insane)
Ionic documentation version 1.2.4 (https://github.com/ionic-team/ionic-site)
ionide/ionide-fsgrammar (https://github.com/ionide/ionide-fsgrammar)
jeff-hykin/cpp-textmate-grammar version 1.12.11 (https://github.com/jeff-hykin/cpp-textmate-grammar)
jeff-hykin/cpp-textmate-grammar version 1.15.5 (https://github.com/jeff-hykin/cpp-textmate-grammar)
js-beautify version 1.6.8 (https://github.com/beautify-web/js-beautify)
JuliaEditorSupport/atom-language-julia version 0.21.0 (https://github.com/JuliaEditorSupport/atom-language-julia)
Jxck/assert version 1.0.0 (https://github.com/Jxck/assert)
language-docker (https://github.com/moby/moby)
language-less version 0.34.2 (https://github.com/atom/language-less)
language-php version 0.46.2 (https://github.com/atom/language-php)
MagicStack/MagicPython version 1.1.1 (https://github.com/MagicStack/MagicPython)
marked version 1.1.0 (https://github.com/markedjs/marked)
mdn-data version 1.1.12 (https://github.com/mdn/data)
microsoft/TypeScript-TmLanguage version 0.0.1 (https://github.com/microsoft/TypeScript-TmLanguage)
microsoft/vscode-JSON.tmLanguage (https://github.com/microsoft/vscode-JSON.tmLanguage)
microsoft/vscode-markdown-tm-grammar version 1.0.0 (https://github.com/microsoft/vscode-markdown-tm-grammar)
microsoft/vscode-mssql version 1.9.0 (https://github.com/microsoft/vscode-mssql)
mmims/language-batchfile version 0.7.6 (https://github.com/mmims/language-batchfile)
NVIDIA/cuda-cpp-grammar (https://github.com/NVIDIA/cuda-cpp-grammar)
PowerShell/EditorSyntax version 1.0.0 (https://github.com/PowerShell/EditorSyntax)
rust-syntax version 0.4.3 (https://github.com/dustypomerleau/rust-syntax)
seti-ui version 0.1.0 (https://github.com/jesseweed/seti-ui)
shaders-tmLanguage version 0.1.0 (https://github.com/tgjones/shaders-tmLanguage)
textmate/asp.vb.net.tmbundle (https://github.com/textmate/asp.vb.net.tmbundle)
textmate/c.tmbundle (https://github.com/textmate/c.tmbundle)
textmate/diff.tmbundle (https://github.com/textmate/diff.tmbundle)
textmate/git.tmbundle (https://github.com/textmate/git.tmbundle)
textmate/groovy.tmbundle (https://github.com/textmate/groovy.tmbundle)
textmate/html.tmbundle (https://github.com/textmate/html.tmbundle)
textmate/ini.tmbundle (https://github.com/textmate/ini.tmbundle)
textmate/javascript.tmbundle (https://github.com/textmate/javascript.tmbundle)
textmate/lua.tmbundle (https://github.com/textmate/lua.tmbundle)
textmate/markdown.tmbundle (https://github.com/textmate/markdown.tmbundle)
textmate/perl.tmbundle (https://github.com/textmate/perl.tmbundle)
textmate/ruby.tmbundle (https://github.com/textmate/ruby.tmbundle)
textmate/yaml.tmbundle (https://github.com/textmate/yaml.tmbundle)
TypeScript-TmLanguage version 0.1.8 (https://github.com/microsoft/TypeScript-TmLanguage)
TypeScript-TmLanguage version 1.0.0 (https://github.com/microsoft/TypeScript-TmLanguage)
Unicode version 12.0.0 (https://home.unicode.org/)
vscode-codicons version 0.0.14 (https://github.com/microsoft/vscode-codicons)
vscode-logfile-highlighter version 2.11.0 (https://github.com/emilast/vscode-logfile-highlighter)
vscode-swift version 0.0.1 (https://github.com/owensd/vscode-swift)
Web Background Synchronization (https://github.com/WICG/background-sync)
1. atom/language-clojure version 0.22.7 (https://github.com/atom/language-clojure)
2. atom/language-coffee-script version 0.49.3 (https://github.com/atom/language-coffee-script)
3. atom/language-css version 0.44.6 (https://github.com/atom/language-css)
4. atom/language-java version 0.32.1 (https://github.com/atom/language-java)
5. atom/language-sass version 0.62.1 (https://github.com/atom/language-sass)
6. atom/language-shellscript version 0.26.0 (https://github.com/atom/language-shellscript)
7. atom/language-xml version 0.35.2 (https://github.com/atom/language-xml)
8. better-go-syntax version 1.0.0 (https://github.com/jeff-hykin/better-go-syntax/ )
9. Colorsublime-Themes version 0.1.0 (https://github.com/Colorsublime/Colorsublime-Themes)
10. daaain/Handlebars version 1.8.0 (https://github.com/daaain/Handlebars)
11. dart-lang/dart-syntax-highlight (https://github.com/dart-lang/dart-syntax-highlight)
12. davidrios/pug-tmbundle (https://github.com/davidrios/pug-tmbundle)
13. definitelytyped (https://github.com/DefinitelyTyped/DefinitelyTyped)
14. demyte/language-cshtml version 0.3.0 (https://github.com/demyte/language-cshtml)
15. Document Object Model version 4.0.0 (https://www.w3.org/DOM/)
16. dotnet/csharp-tmLanguage version 0.1.0 (https://github.com/dotnet/csharp-tmLanguage)
17. expand-abbreviation version 0.5.8 (https://github.com/emmetio/expand-abbreviation)
18. fadeevab/make.tmbundle (https://github.com/fadeevab/make.tmbundle)
19. freebroccolo/atom-language-swift (https://github.com/freebroccolo/atom-language-swift)
20. HTML 5.1 W3C Working Draft version 08 October 2015 (http://www.w3.org/TR/2015/WD-html51-20151008/)
21. Ikuyadeu/vscode-R version 2.0.0 (https://github.com/Ikuyadeu/vscode-R)
22. insane version 2.6.2 (https://github.com/bevacqua/insane)
23. Ionic documentation version 1.2.4 (https://github.com/ionic-team/ionic-site)
24. ionide/ionide-fsgrammar (https://github.com/ionide/ionide-fsgrammar)
25. James-Yu/LaTeX-Workshop version 8.19.1 (https://github.com/James-Yu/LaTeX-Workshop)
26. jeff-hykin/cpp-textmate-grammar version 1.12.11 (https://github.com/jeff-hykin/cpp-textmate-grammar)
27. jeff-hykin/cpp-textmate-grammar version 1.15.5 (https://github.com/jeff-hykin/cpp-textmate-grammar)
28. js-beautify version 1.6.8 (https://github.com/beautify-web/js-beautify)
29. JuliaEditorSupport/atom-language-julia version 0.21.1 (https://github.com/JuliaEditorSupport/atom-language-julia)
30. Jxck/assert version 1.0.0 (https://github.com/Jxck/assert)
31. language-docker (https://github.com/moby/moby)
32. language-less version 0.34.2 (https://github.com/atom/language-less)
33. language-php version 0.46.2 (https://github.com/atom/language-php)
34. MagicStack/MagicPython version 1.1.1 (https://github.com/MagicStack/MagicPython)
35. marked version 1.1.0 (https://github.com/markedjs/marked)
36. mdn-data version 1.1.12 (https://github.com/mdn/data)
37. microsoft/TypeScript-TmLanguage version 0.0.1 (https://github.com/microsoft/TypeScript-TmLanguage)
38. microsoft/vscode-JSON.tmLanguage (https://github.com/microsoft/vscode-JSON.tmLanguage)
39. microsoft/vscode-markdown-tm-grammar version 1.0.0 (https://github.com/microsoft/vscode-markdown-tm-grammar)
40. microsoft/vscode-mssql version 1.9.0 (https://github.com/microsoft/vscode-mssql)
41. mmims/language-batchfile version 0.7.6 (https://github.com/mmims/language-batchfile)
42. NVIDIA/cuda-cpp-grammar (https://github.com/NVIDIA/cuda-cpp-grammar)
43. PowerShell/EditorSyntax version 1.0.0 (https://github.com/PowerShell/EditorSyntax)
44. rust-syntax version 0.5.0 (https://github.com/dustypomerleau/rust-syntax)
45. seti-ui version 0.1.0 (https://github.com/jesseweed/seti-ui)
46. shaders-tmLanguage version 0.1.0 (https://github.com/tgjones/shaders-tmLanguage)
47. textmate/asp.vb.net.tmbundle (https://github.com/textmate/asp.vb.net.tmbundle)
48. textmate/c.tmbundle (https://github.com/textmate/c.tmbundle)
49. textmate/diff.tmbundle (https://github.com/textmate/diff.tmbundle)
50. textmate/git.tmbundle (https://github.com/textmate/git.tmbundle)
51. textmate/groovy.tmbundle (https://github.com/textmate/groovy.tmbundle)
52. textmate/html.tmbundle (https://github.com/textmate/html.tmbundle)
53. textmate/ini.tmbundle (https://github.com/textmate/ini.tmbundle)
54. textmate/javascript.tmbundle (https://github.com/textmate/javascript.tmbundle)
55. textmate/lua.tmbundle (https://github.com/textmate/lua.tmbundle)
56. textmate/markdown.tmbundle (https://github.com/textmate/markdown.tmbundle)
57. textmate/perl.tmbundle (https://github.com/textmate/perl.tmbundle)
58. textmate/ruby.tmbundle (https://github.com/textmate/ruby.tmbundle)
59. textmate/yaml.tmbundle (https://github.com/textmate/yaml.tmbundle)
60. TypeScript-TmLanguage version 0.1.8 (https://github.com/microsoft/TypeScript-TmLanguage)
61. TypeScript-TmLanguage version 1.0.0 (https://github.com/microsoft/TypeScript-TmLanguage)
62. Unicode version 12.0.0 (https://home.unicode.org/)
63. vscode-codicons version 0.0.14 (https://github.com/microsoft/vscode-codicons)
64. vscode-logfile-highlighter version 2.11.0 (https://github.com/emilast/vscode-logfile-highlighter)
65. vscode-swift version 0.0.1 (https://github.com/owensd/vscode-swift)
66. Web Background Synchronization (https://github.com/WICG/background-sync)
%% atom/language-clojure NOTICES AND INFORMATION BEGIN HERE
@@ -1465,7 +1466,7 @@ END OF make-error NOTICES AND INFORMATION
=========================================
The MIT License (MIT)
Copyright (c) 2014-2019 Julian Kühnel
Copyright (c) 2021 REditorSupport
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal


@@ -12,14 +12,14 @@ fsevents/src/**
fsevents/test/**
!fsevents/**/*.node
vscode-sqlite3/binding.gyp
vscode-sqlite3/benchmark/**
vscode-sqlite3/cloudformation/**
vscode-sqlite3/deps/**
vscode-sqlite3/test/**
vscode-sqlite3/build/**
vscode-sqlite3/src/**
!vscode-sqlite3/build/Release/*.node
@vscode/sqlite3/binding.gyp
@vscode/sqlite3/benchmark/**
@vscode/sqlite3/cloudformation/**
@vscode/sqlite3/deps/**
@vscode/sqlite3/test/**
@vscode/sqlite3/build/**
@vscode/sqlite3/src/**
!@vscode/sqlite3/build/Release/*.node
windows-mutex/binding.gyp
windows-mutex/build/**


@@ -29,3 +29,6 @@ xterm-addon-unicode11/out/**
xterm-addon-webgl/src/**
xterm-addon-webgl/out/**
# This makes sure the model is included in the package
!@vscode/vscode-languagedetection/model/**


@@ -0,0 +1,21 @@
{
"codebaseName": "vscode-client",
"ppe": false,
"notificationAliases": [
"sbatten@microsoft.com"
],
"codebaseAdmins": [
"REDMOND\\stbatt",
"REDMOND\\monacotools",
],
"instanceUrl": "https://msazure.visualstudio.com/defaultcollection",
"projectName": "One",
"areaPath": "One\\VSCode\\Client",
"iterationPath": "One",
"notifyAlways": true,
"tools": [
"BinSkim",
"CredScan",
"CodeQL"
]
}


@@ -160,7 +160,7 @@ async function main() {
blobService.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;
mooncakeBlobService.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;
console.log('Uploading blobs to Azure storage and Mooncake Azure storage...');
await retry_1.retry(() => Promise.all([
await (0, retry_1.retry)(() => Promise.all([
uploadBlob(blobService, quality, blobName, filePath, fileName),
uploadBlob(mooncakeBlobService, quality, blobName, filePath, fileName)
]));
@@ -185,7 +185,7 @@ async function main() {
console.log('Asset:', JSON.stringify(asset, null, ' '));
const client = new cosmos_1.CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT'], key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const scripts = client.database('builds').container(quality).scripts;
await retry_1.retry(() => scripts.storedProcedure('createAsset').execute('', [commit, asset, true]));
await (0, retry_1.retry)(() => scripts.storedProcedure('createAsset').execute('', [commit, asset, true]));
console.log(` Done ✔️`);
}
main().then(() => {
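The `(0, retry_1.retry)(...)` form in this and the following compiled build scripts is simply newer TypeScript output for calling an imported function, not a behavioral change: the `(0, …)` comma expression detaches the call from the `retry_1` namespace object so `this` is not bound to it. A small sketch of the source shape that produces it (the helper name comes from the build's ./retry module; the wrapper function is illustrative):

import { retry } from './retry';

async function uploadWithRetry(task: () => Promise<void>): Promise<void> {
	// newer tsc emits this call as `await (0, retry_1.retry)(task);` in the CommonJS output
	await retry(task);
}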


@@ -40,7 +40,7 @@ async function main() {
};
const client = new cosmos_1.CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT'], key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const scripts = client.database('builds').container(quality).scripts;
await retry_1.retry(() => scripts.storedProcedure('createBuild').execute('', [Object.assign(Object.assign({}, build), { _partitionKey: '' })]));
await (0, retry_1.retry)(() => scripts.storedProcedure('createBuild').execute('', [Object.assign(Object.assign({}, build), { _partitionKey: '' })]));
}
main().then(() => {
console.log('Build successfully created');


@@ -4,11 +4,9 @@
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const path = require("path");
const retry_1 = require("./retry");
const { installBrowsersWithProgressBar } = require('playwright/lib/install/installer');
const playwrightPath = path.dirname(require.resolve('playwright'));
async function install() {
await retry_1.retry(() => installBrowsersWithProgressBar(playwrightPath));
await (0, retry_1.retry)(() => installBrowsersWithProgressBar());
}
install();


@@ -1,71 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const azure = require("azure-storage");
const mime = require("mime");
const minimist = require("minimist");
const path_1 = require("path");
const fileNames = [
'fake.html',
'host.js',
'index.html',
'main.js',
'service-worker.js'
];
async function assertContainer(blobService, container) {
await new Promise((c, e) => blobService.createContainerIfNotExists(container, { publicAccessLevel: 'blob' }, err => err ? e(err) : c()));
}
async function doesBlobExist(blobService, container, blobName) {
const existsResult = await new Promise((c, e) => blobService.doesBlobExist(container, blobName, (err, r) => err ? e(err) : c(r)));
return existsResult.exists;
}
async function uploadBlob(blobService, container, blobName, file) {
const blobOptions = {
contentSettings: {
contentType: mime.lookup(file),
cacheControl: 'max-age=31536000, public'
}
};
await new Promise((c, e) => blobService.createBlockBlobFromLocalFile(container, blobName, file, blobOptions, err => err ? e(err) : c()));
}
async function publish(commit, files) {
console.log('Publishing...');
console.log('Commit:', commit);
const storageAccount = process.env['AZURE_WEBVIEW_STORAGE_ACCOUNT'];
const blobService = azure.createBlobService(storageAccount, process.env['AZURE_WEBVIEW_STORAGE_ACCESS_KEY'])
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
await assertContainer(blobService, commit);
for (const file of files) {
const blobName = path_1.basename(file);
const blobExists = await doesBlobExist(blobService, commit, blobName);
if (blobExists) {
console.log(`Blob ${commit}, ${blobName} already exists, not publishing again.`);
continue;
}
console.log('Uploading blob to Azure storage...');
await uploadBlob(blobService, commit, blobName, file);
}
console.log('Blobs successfully uploaded.');
}
function main() {
const commit = process.env['BUILD_SOURCEVERSION'];
if (!commit) {
console.warn('Skipping publish due to missing BUILD_SOURCEVERSION');
return;
}
const opts = minimist(process.argv.slice(2));
const [directory] = opts._;
const files = fileNames.map(fileName => path_1.join(directory, fileName));
publish(commit, files).catch(err => {
console.error(err);
process.exit(1);
});
}
if (process.argv.length < 3) {
console.error('Usage: node publish.js <directory>');
process.exit(-1);
}
main();


@@ -1,9 +0,0 @@
#!/usr/bin/env bash
set -e
REPO="$(pwd)"
# Publish webview contents
PACKAGEJSON="$REPO/package.json"
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")
node build/azure-pipelines/common/publish-webview.js "$REPO/src/vs/workbench/contrib/webview/browser/pre/"


@@ -1,87 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as azure from 'azure-storage';
import * as mime from 'mime';
import * as minimist from 'minimist';
import { basename, join } from 'path';
const fileNames = [
'fake.html',
'host.js',
'index.html',
'main.js',
'service-worker.js'
];
async function assertContainer(blobService: azure.BlobService, container: string): Promise<void> {
await new Promise<void>((c, e) => blobService.createContainerIfNotExists(container, { publicAccessLevel: 'blob' }, err => err ? e(err) : c()));
}
async function doesBlobExist(blobService: azure.BlobService, container: string, blobName: string): Promise<boolean | undefined> {
const existsResult = await new Promise<azure.BlobService.BlobResult>((c, e) => blobService.doesBlobExist(container, blobName, (err, r) => err ? e(err) : c(r)));
return existsResult.exists;
}
async function uploadBlob(blobService: azure.BlobService, container: string, blobName: string, file: string): Promise<void> {
const blobOptions: azure.BlobService.CreateBlockBlobRequestOptions = {
contentSettings: {
contentType: mime.lookup(file),
cacheControl: 'max-age=31536000, public'
}
};
await new Promise<void>((c, e) => blobService.createBlockBlobFromLocalFile(container, blobName, file, blobOptions, err => err ? e(err) : c()));
}
async function publish(commit: string, files: readonly string[]): Promise<void> {
console.log('Publishing...');
console.log('Commit:', commit);
const storageAccount = process.env['AZURE_WEBVIEW_STORAGE_ACCOUNT']!;
const blobService = azure.createBlobService(storageAccount, process.env['AZURE_WEBVIEW_STORAGE_ACCESS_KEY']!)
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
await assertContainer(blobService, commit);
for (const file of files) {
const blobName = basename(file);
const blobExists = await doesBlobExist(blobService, commit, blobName);
if (blobExists) {
console.log(`Blob ${commit}, ${blobName} already exists, not publishing again.`);
continue;
}
console.log('Uploading blob to Azure storage...');
await uploadBlob(blobService, commit, blobName, file);
}
console.log('Blobs successfully uploaded.');
}
function main(): void {
const commit = process.env['BUILD_SOURCEVERSION'];
if (!commit) {
console.warn('Skipping publish due to missing BUILD_SOURCEVERSION');
return;
}
const opts = minimist(process.argv.slice(2));
const [directory] = opts._;
const files = fileNames.map(fileName => join(directory, fileName));
publish(commit, files).catch(err => {
console.error(err);
process.exit(1);
});
}
if (process.argv.length < 3) {
console.error('Usage: node publish.js <directory>');
process.exit(-1);
}
main();

View File

@@ -39,7 +39,7 @@ async function main() {
}
console.log(`Releasing build ${commit}...`);
const scripts = client.database('builds').container(quality).scripts;
await retry_1.retry(() => scripts.storedProcedure('releaseBuild').execute('', [commit]));
await (0, retry_1.retry)(() => scripts.storedProcedure('releaseBuild').execute('', [commit]));
}
main().then(() => {
console.log('Build successfully released');

View File

@@ -0,0 +1,17 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const sign_1 = require("./sign");
const path = require("path");
(0, sign_1.main)([
process.env['EsrpCliDllPath'],
'windows',
process.env['ESRPPKI'],
process.env['ESRPAADUsername'],
process.env['ESRPAADPassword'],
path.dirname(process.argv[2]),
path.basename(process.argv[2])
]);

View File

@@ -0,0 +1,17 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { main } from './sign';
import * as path from 'path';
main([
process.env['EsrpCliDllPath']!,
'windows',
process.env['ESRPPKI']!,
process.env['ESRPAADUsername']!,
process.env['ESRPAADPassword']!,
path.dirname(process.argv[2]),
path.basename(process.argv[2])
]);

View File

@@ -0,0 +1,77 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.main = void 0;
const cp = require("child_process");
const fs = require("fs");
const tmp = require("tmp");
const crypto = require("crypto");
function getParams(type) {
switch (type) {
case 'windows':
return '[{"keyCode":"CP-230012","operationSetCode":"SigntoolSign","parameters":[{"parameterName":"OpusName","parameterValue":"VS Code"},{"parameterName":"OpusInfo","parameterValue":"https://code.visualstudio.com/"},{"parameterName":"Append","parameterValue":"/as"},{"parameterName":"FileDigest","parameterValue":"/fd \\"SHA256\\""},{"parameterName":"PageHash","parameterValue":"/NPH"},{"parameterName":"TimeStamp","parameterValue":"/tr \\"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\\" /td sha256"}],"toolName":"sign","toolVersion":"1.0"},{"keyCode":"CP-230012","operationSetCode":"SigntoolVerify","parameters":[{"parameterName":"VerifyAll","parameterValue":"/all"}],"toolName":"sign","toolVersion":"1.0"}]';
case 'rpm':
return '[{ "keyCode": "CP-450779-Pgp", "operationSetCode": "LinuxSign", "parameters": [], "toolName": "sign", "toolVersion": "1.0" }]';
case 'darwin-sign':
return '[{"keyCode":"CP-401337-Apple","operationSetCode":"MacAppDeveloperSign","parameters":[{"parameterName":"Hardening","parameterValue":"--options=runtime"}],"toolName":"sign","toolVersion":"1.0"}]';
case 'darwin-notarize':
return '[{"keyCode":"CP-401337-Apple","operationSetCode":"MacAppNotarize","parameters":[{"parameterName":"BundleId","parameterValue":"$(BundleIdentifier)"}],"toolName":"sign","toolVersion":"1.0"}]';
default:
throw new Error(`Sign type ${type} not found`);
}
}
function main([esrpCliPath, type, cert, username, password, folderPath, pattern]) {
tmp.setGracefulCleanup();
const patternPath = tmp.tmpNameSync();
fs.writeFileSync(patternPath, pattern);
const paramsPath = tmp.tmpNameSync();
fs.writeFileSync(paramsPath, getParams(type));
const keyFile = tmp.tmpNameSync();
const key = crypto.randomBytes(32);
const iv = crypto.randomBytes(16);
fs.writeFileSync(keyFile, JSON.stringify({ key: key.toString('hex'), iv: iv.toString('hex') }));
const clientkeyPath = tmp.tmpNameSync();
const clientkeyCypher = crypto.createCipheriv('aes-256-cbc', key, iv);
let clientkey = clientkeyCypher.update(password, 'utf8', 'hex');
clientkey += clientkeyCypher.final('hex');
fs.writeFileSync(clientkeyPath, clientkey);
const clientcertPath = tmp.tmpNameSync();
const clientcertCypher = crypto.createCipheriv('aes-256-cbc', key, iv);
let clientcert = clientcertCypher.update(cert, 'utf8', 'hex');
clientcert += clientcertCypher.final('hex');
fs.writeFileSync(clientcertPath, clientcert);
const args = [
esrpCliPath,
'vsts.sign',
'-a', username,
'-k', clientkeyPath,
'-z', clientcertPath,
'-f', folderPath,
'-p', patternPath,
'-u', 'false',
'-x', 'regularSigning',
'-b', 'input.json',
'-l', 'AzSecPack_PublisherPolicyProd.xml',
'-y', 'inlineSignParams',
'-j', paramsPath,
'-c', '9997',
'-t', '120',
'-g', '10',
'-v', 'Tls12',
'-s', 'https://api.esrp.microsoft.com/api/v1',
'-m', '0',
'-o', 'Microsoft',
'-i', 'https://www.microsoft.com',
'-n', '5',
'-r', 'true',
'-e', keyFile,
];
cp.spawnSync('dotnet', args, { stdio: 'inherit' });
}
exports.main = main;
if (require.main === module) {
main(process.argv.slice(2));
}

View File

@@ -0,0 +1,84 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as cp from 'child_process';
import * as fs from 'fs';
import * as tmp from 'tmp';
import * as crypto from 'crypto';
function getParams(type: string): string {
switch (type) {
case 'windows':
return '[{"keyCode":"CP-230012","operationSetCode":"SigntoolSign","parameters":[{"parameterName":"OpusName","parameterValue":"VS Code"},{"parameterName":"OpusInfo","parameterValue":"https://code.visualstudio.com/"},{"parameterName":"Append","parameterValue":"/as"},{"parameterName":"FileDigest","parameterValue":"/fd \\"SHA256\\""},{"parameterName":"PageHash","parameterValue":"/NPH"},{"parameterName":"TimeStamp","parameterValue":"/tr \\"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\\" /td sha256"}],"toolName":"sign","toolVersion":"1.0"},{"keyCode":"CP-230012","operationSetCode":"SigntoolVerify","parameters":[{"parameterName":"VerifyAll","parameterValue":"/all"}],"toolName":"sign","toolVersion":"1.0"}]';
case 'rpm':
return '[{ "keyCode": "CP-450779-Pgp", "operationSetCode": "LinuxSign", "parameters": [], "toolName": "sign", "toolVersion": "1.0" }]';
case 'darwin-sign':
return '[{"keyCode":"CP-401337-Apple","operationSetCode":"MacAppDeveloperSign","parameters":[{"parameterName":"Hardening","parameterValue":"--options=runtime"}],"toolName":"sign","toolVersion":"1.0"}]';
case 'darwin-notarize':
return '[{"keyCode":"CP-401337-Apple","operationSetCode":"MacAppNotarize","parameters":[{"parameterName":"BundleId","parameterValue":"$(BundleIdentifier)"}],"toolName":"sign","toolVersion":"1.0"}]';
default:
throw new Error(`Sign type ${type} not found`);
}
}
export function main([esrpCliPath, type, cert, username, password, folderPath, pattern]: string[]) {
tmp.setGracefulCleanup();
const patternPath = tmp.tmpNameSync();
fs.writeFileSync(patternPath, pattern);
const paramsPath = tmp.tmpNameSync();
fs.writeFileSync(paramsPath, getParams(type));
const keyFile = tmp.tmpNameSync();
const key = crypto.randomBytes(32);
const iv = crypto.randomBytes(16);
fs.writeFileSync(keyFile, JSON.stringify({ key: key.toString('hex'), iv: iv.toString('hex') }));
const clientkeyPath = tmp.tmpNameSync();
const clientkeyCypher = crypto.createCipheriv('aes-256-cbc', key, iv);
let clientkey = clientkeyCypher.update(password, 'utf8', 'hex');
clientkey += clientkeyCypher.final('hex');
fs.writeFileSync(clientkeyPath, clientkey);
const clientcertPath = tmp.tmpNameSync();
const clientcertCypher = crypto.createCipheriv('aes-256-cbc', key, iv);
let clientcert = clientcertCypher.update(cert, 'utf8', 'hex');
clientcert += clientcertCypher.final('hex');
fs.writeFileSync(clientcertPath, clientcert);
const args = [
esrpCliPath,
'vsts.sign',
'-a', username,
'-k', clientkeyPath,
'-z', clientcertPath,
'-f', folderPath,
'-p', patternPath,
'-u', 'false',
'-x', 'regularSigning',
'-b', 'input.json',
'-l', 'AzSecPack_PublisherPolicyProd.xml',
'-y', 'inlineSignParams',
'-j', paramsPath,
'-c', '9997',
'-t', '120',
'-g', '10',
'-v', 'Tls12',
'-s', 'https://api.esrp.microsoft.com/api/v1',
'-m', '0',
'-o', 'Microsoft',
'-i', 'https://www.microsoft.com',
'-n', '5',
'-r', 'true',
'-e', keyFile,
];
cp.spawnSync('dotnet', args, { stdio: 'inherit' });
}
if (require.main === module) {
main(process.argv.slice(2));
}
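Note: sign.ts above protects the ESRP credentials by encrypting them with AES-256-CBC and writing the random key/IV to a temp key file that is handed to the ESRP CLI via the -e flag. As a minimal sketch of that scheme only, assuming the consumer reads the hex-encoded key and IV back from that key file (illustration, not the ESRP client's actual code, which is not part of this commit):

import * as crypto from 'crypto';
import * as fs from 'fs';

// Reverse of the createCipheriv('aes-256-cbc', key, iv) step in sign.ts:
// read the hex key/iv from the key file and decrypt one credential file.
function decryptCredential(encryptedPath: string, keyFilePath: string): string {
	const { key, iv } = JSON.parse(fs.readFileSync(keyFilePath, 'utf8'));
	const decipher = crypto.createDecipheriv('aes-256-cbc', Buffer.from(key, 'hex'), Buffer.from(iv, 'hex'));
	let plain = decipher.update(fs.readFileSync(encryptedPath, 'utf8'), 'hex', 'utf8');
	plain += decipher.final('utf8');
	return plain;
}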

View File

@@ -1,39 +1,39 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "12.18.3"
versionSpec: "14.x"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 # {{SQL CARBON EDIT}} update version
inputs:
versionSpec: "1.x"
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
displayName: Restore Cache - Node Modules # {{SQL CARBON EDIT}}
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
- script: |
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
displayName: Install Dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
displayName: Save Cache - Node Modules # {{SQL CARBON EDIT}}
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- script: |
yarn electron x64
displayName: Download Electron
# - script: | {{SQL CARBON EDIT}} remove editor checks
# yarn monaco-compile-check
# displayName: Run Monaco Editor Checks
- script: |
yarn valid-layers-check
@@ -43,21 +43,21 @@ steps:
yarn compile
displayName: Compile Sources
# - script: | {{SQL CARBON EDIT}} remove step
# yarn download-builtin-extensions
# displayName: Download Built-in Extensions
- script: |
./scripts/test.sh --tfs "Unit Tests"
displayName: Run Unit Tests (Electron)
# - script: | {{SQL CARBON EDIT}} disable
# yarn test-browser --browser chromium --browser webkit --browser firefox --tfs "Browser Unit Tests"
# displayName: Run Unit Tests (Browser)
# - script: | {{SQL CARBON EDIT}} disable
# ./scripts/test-integration.sh --tfs "Integration Tests"
# displayName: Run Integration Tests (Electron)
- task: PublishPipelineArtifact@0
inputs:

View File

@@ -8,6 +8,7 @@ steps:
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
SecretsFilter: "github-distro-mixin-password,ESRP-PKI,esrp-aad-username,esrp-aad-password"
- script: |
set -e
@@ -27,12 +28,10 @@ steps:
displayName: Merge distro
- script: |
pushd build \
&& yarn \
&& npm install -g typescript \
&& tsc azure-pipelines/common/createAsset.ts \
&& popd
displayName: Restore modules for just build folder and compile it
set -e
yarn --cwd build
yarn --cwd build compile
displayName: Compile build tools
- download: current
artifact: unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive
@@ -44,28 +43,16 @@ steps:
mv $(Pipeline.Workspace)/unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive/VSCode-darwin-$(VSCODE_ARCH).zip $(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH).zip
displayName: Unzip & move
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
- task: UseDotNet@2
inputs:
ConnectedServiceName: "ESRP CodeSign"
FolderPath: "$(agent.builddirectory)"
Pattern: "VSCode-darwin-$(VSCODE_ARCH).zip"
signConfigType: inlineSignParams
inlineOperation: |
[
{
"keyCode": "CP-401337-Apple",
"operationSetCode": "MacAppDeveloperSign",
"parameters": [
{
"parameterName": "Hardening",
"parameterValue": "--options=runtime"
}
],
"toolName": "sign",
"toolVersion": "1.0"
}
]
SessionTimeout: 60
version: 2.x
- task: EsrpClientTool@1
displayName: Download ESRPClient
- script: |
set -e
node build/azure-pipelines/common/sign "$(esrpclient.toolpath)/$(esrpclient.toolname)" darwin-sign $(ESRP-PKI) $(esrp-aad-username) $(esrp-aad-password) $(agent.builddirectory) VSCode-darwin-$(VSCODE_ARCH).zip
displayName: Codesign
- script: |
@@ -75,29 +62,10 @@ steps:
echo "##vso[task.setvariable variable=BundleIdentifier]$BUNDLE_IDENTIFIER"
displayName: Export bundle identifier
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
inputs:
ConnectedServiceName: "ESRP CodeSign"
FolderPath: "$(agent.builddirectory)"
Pattern: "VSCode-darwin-$(VSCODE_ARCH).zip"
signConfigType: inlineSignParams
inlineOperation: |
[
{
"keyCode": "CP-401337-Apple",
"operationSetCode": "MacAppNotarize",
"parameters": [
{
"parameterName": "BundleId",
"parameterValue": "$(BundleIdentifier)"
}
],
"toolName": "sign",
"toolVersion": "1.0"
}
]
SessionTimeout: 60
displayName: Notarization
- script: |
set -e
node build/azure-pipelines/common/sign "$(esrpclient.toolpath)/$(esrpclient.toolname)" darwin-notarize $(ESRP-PKI) $(esrp-aad-username) $(esrp-aad-password) $(agent.builddirectory) VSCode-darwin-$(VSCODE_ARCH).zip
displayName: Notarize
- script: |
set -e

View File

@@ -8,6 +8,7 @@ steps:
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
SecretsFilter: 'github-distro-mixin-password,macos-developer-certificate,macos-developer-certificate-key,ticino-storage-key'
- task: DownloadPipelineArtifact@2
inputs:
@@ -54,7 +55,7 @@ steps:
- task: Cache@2
inputs:
key: 'nodeModules | $(Agent.OS) | .build/yarnlockhash'
key: "nodeModules | $(Agent.OS) | .build/yarnlockhash"
path: .build/node_modules_cache
cacheHitVar: NODE_MODULES_RESTORED
displayName: Restore node_modules cache
@@ -97,6 +98,7 @@ steps:
env:
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
GITHUB_TOKEN: "$(github-distro-mixin-password)"
displayName: Install dependencies
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
@@ -185,12 +187,6 @@ steps:
timeoutInMinutes: 7
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
yarn --cwd test/integration/browser compile
displayName: Compile integration tests
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
# Figure out the full absolute path of the product we just built
# including the remote server and configure the integration tests
@@ -224,17 +220,11 @@ steps:
timeoutInMinutes: 7
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
yarn --cwd test/smoke compile
displayName: Compile smoke tests
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
APP_ROOT=$(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH)
APP_NAME="`ls $APP_ROOT | head -n 1`"
yarn smoketest-no-compile --build "$APP_ROOT/$APP_NAME"
yarn smoketest-no-compile --build "$APP_ROOT/$APP_NAME" --screenshots .build/logs/smoke-tests
timeoutInMinutes: 5
displayName: Run smoke tests (Electron)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
@@ -244,7 +234,7 @@ steps:
APP_ROOT=$(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH)
APP_NAME="`ls $APP_ROOT | head -n 1`"
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-darwin" \
yarn smoketest-no-compile --build "$APP_ROOT/$APP_NAME" --remote
yarn smoketest-no-compile --build "$APP_ROOT/$APP_NAME" --remote --screenshots .build/logs/smoke-tests
timeoutInMinutes: 5
displayName: Run smoke tests (Remote)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
@@ -265,6 +255,14 @@ steps:
continueOnError: true
condition: failed()
- task: PublishPipelineArtifact@0
inputs:
artifactName: logs-macos-$(VSCODE_ARCH)-$(System.JobAttempt)
targetPath: .build/logs
displayName: "Publish Log Files"
continueOnError: true
condition: and(succeededOrFailed(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- task: PublishTestResults@2
displayName: Publish Tests Results
inputs:

View File

@@ -17,7 +17,7 @@ steps:
- task: NodeTool@0
inputs:
versionSpec: "12.13.0"
versionSpec: "14.x"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
inputs:
@@ -106,7 +106,7 @@ steps:
- script: |
set -e
./scripts/test.sh --build --coverage --reporter mocha-junit-reporter --tfs "Unit Tests"
./scripts/test.sh --build --tfs "Unit Tests" # Disable code coverage since it's currently broken --coverage
displayName: Run unit tests
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))

View File

@@ -18,6 +18,7 @@ steps:
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
SecretsFilter: 'github-distro-mixin-password'
- script: |
set -e

View File

@@ -1,7 +1,7 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "12.13.0"
versionSpec: "14.x"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
inputs:

View File

@@ -18,6 +18,7 @@ steps:
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
SecretsFilter: 'github-distro-mixin-password'
- script: |
set -e

View File

@@ -10,7 +10,7 @@ steps:
- task: NodeTool@0
inputs:
versionSpec: "12.18.3"
versionSpec: "14.x"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
inputs:
@@ -22,7 +22,7 @@ steps:
displayName: Prepare yarn cache flags
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
displayName: Restore Cache - Node Modules # {{SQL CARBON EDIT}}
inputs:
keyfile: ".yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules"
@@ -34,7 +34,7 @@ steps:
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
displayName: Save Cache - Node Modules # {{SQL CARBON EDIT}}
inputs:
keyfile: ".yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules"
@@ -49,10 +49,6 @@ steps:
yarn gulp hygiene
displayName: Run Hygiene Checks
- script: | # {{SQL CARBON EDIT}} add strict null check
yarn strict-vscode
displayName: Run Strict Null Check
# - script: | {{SQL CARBON EDIT}} remove monaco editor checks
# yarn monaco-compile-check
# displayName: Run Monaco Editor Checks

View File

@@ -12,6 +12,7 @@ steps:
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
SecretsFilter: 'github-distro-mixin-password'
- task: DownloadPipelineArtifact@2
inputs:
@@ -88,6 +89,7 @@ steps:
env:
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
GITHUB_TOKEN: "$(github-distro-mixin-password)"
displayName: Install dependencies
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))

View File

@@ -12,6 +12,7 @@ steps:
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
SecretsFilter: "github-distro-mixin-password,builds-docdb-key-readwrite,vscode-storage-key,ESRP-PKI,esrp-aad-username,esrp-aad-password"
- task: DownloadPipelineArtifact@2
inputs:
@@ -48,7 +49,7 @@ steps:
- task: Cache@2
inputs:
key: 'nodeModules | $(Agent.OS) | .build/yarnlockhash'
key: "nodeModules | $(Agent.OS) | .build/yarnlockhash"
path: .build/node_modules_cache
cacheHitVar: NODE_MODULES_RESTORED
displayName: Restore node_modules cache
@@ -66,14 +67,32 @@ steps:
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
displayName: Switch to Terrapin packages
- script: |
set -e
yarn --cwd build
yarn --cwd build compile
displayName: Compile build tools
- script: |
set -e
export npm_config_arch=$(NPM_ARCH)
export npm_config_build_from_source=true
if [ -z "$CC" ] || [ -z "$CXX" ]; then
export CC=$(which gcc-5)
export CXX=$(which g++-5)
# Download clang based on chromium revision used by vscode
curl -s https://raw.githubusercontent.com/chromium/chromium/91.0.4472.164/tools/clang/scripts/update.py | python - --output-dir=$PWD/.build/CR_Clang --host-os=linux
# Download libcxx headers and objects from upstream electron releases
DEBUG=libcxx-fetcher \
VSCODE_LIBCXX_OBJECTS_DIR=$PWD/.build/libcxx-objects \
VSCODE_LIBCXX_HEADERS_DIR=$PWD/.build/libcxx_headers \
VSCODE_LIBCXXABI_HEADERS_DIR=$PWD/.build/libcxxabi_headers \
VSCODE_ARCH="$(NPM_ARCH)" \
node build/linux/libcxx-fetcher.js
# Set compiler toolchain
export CC=$PWD/.build/CR_Clang/bin/clang
export CXX=$PWD/.build/CR_Clang/bin/clang++
export CXXFLAGS="-nostdinc++ -D_LIBCPP_HAS_NO_VENDOR_AVAILABILITY_ANNOTATIONS -isystem$PWD/.build/libcxx_headers/include -isystem$PWD/.build/libcxxabi_headers/include -fPIC -flto=thin -fsplit-lto-unit"
export LDFLAGS="-stdlib=libc++ -fuse-ld=lld -flto=thin -fsplit-lto-unit -L$PWD/.build/libcxx-objects -lc++abi"
fi
if [ "$VSCODE_ARCH" == "x64" ]; then
@@ -92,6 +111,7 @@ steps:
env:
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
GITHUB_TOKEN: "$(github-distro-mixin-password)"
displayName: Install dependencies
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
@@ -127,28 +147,33 @@ steps:
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn npm-run-all -lp "electron $(VSCODE_ARCH)" "playwright-install"
displayName: Download Electron and Playwright
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
DISPLAY=:10 ./scripts/test.sh --build --tfs "Unit Tests"
APP_ROOT=$(agent.builddirectory)/VSCode-linux-$(VSCODE_ARCH)
ELECTRON_ROOT=.build/electron
sudo chown root $APP_ROOT/chrome-sandbox
sudo chown root $ELECTRON_ROOT/chrome-sandbox
sudo chmod 4755 $APP_ROOT/chrome-sandbox
sudo chmod 4755 $ELECTRON_ROOT/chrome-sandbox
stat $APP_ROOT/chrome-sandbox
stat $ELECTRON_ROOT/chrome-sandbox
displayName: Change setuid helper binary permission
- script: |
set -e
./scripts/test.sh --build --tfs "Unit Tests"
displayName: Run unit tests (Electron)
timeoutInMinutes: 7
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
DISPLAY=:10 yarn test-browser --build --browser chromium --tfs "Browser Unit Tests"
yarn test-browser --build --browser chromium --tfs "Browser Unit Tests"
displayName: Run unit tests (Browser)
timeoutInMinutes: 7
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
yarn --cwd test/integration/browser compile
displayName: Compile integration tests
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
# Figure out the full absolute path of the product we just built
# including the remote server and configure the integration tests
@@ -159,7 +184,7 @@ steps:
INTEGRATION_TEST_APP_NAME="$APP_NAME" \
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-linux-$(VSCODE_ARCH)" \
DISPLAY=:10 ./scripts/test-integration.sh --build --tfs "Integration Tests"
./scripts/test-integration.sh --build --tfs "Integration Tests"
displayName: Run integration tests (Electron)
timeoutInMinutes: 10
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
@@ -167,7 +192,7 @@ steps:
- script: |
set -e
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-linux-$(VSCODE_ARCH)" \
DISPLAY=:10 ./resources/server/test/test-web-integration.sh --browser chromium
./resources/server/test/test-web-integration.sh --browser chromium
displayName: Run integration tests (Browser)
timeoutInMinutes: 10
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
@@ -179,19 +204,52 @@ steps:
INTEGRATION_TEST_APP_NAME="$APP_NAME" \
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-linux-$(VSCODE_ARCH)" \
DISPLAY=:10 ./resources/server/test/test-remote-integration.sh
./resources/server/test/test-remote-integration.sh
displayName: Run remote integration tests (Electron)
timeoutInMinutes: 7
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
APP_PATH=$(agent.builddirectory)/VSCode-linux-$(VSCODE_ARCH)
yarn smoketest-no-compile --build "$APP_PATH" --electronArgs="--disable-dev-shm-usage --use-gl=swiftshader" --screenshots .build/logs/smoke-tests
timeoutInMinutes: 5
displayName: Run smoke tests (Electron)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
APP_PATH=$(agent.builddirectory)/VSCode-linux-$(VSCODE_ARCH)
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-linux-$(VSCODE_ARCH)" \
yarn smoketest-no-compile --build "$APP_PATH" --remote --electronArgs="--disable-dev-shm-usage --use-gl=swiftshader" --screenshots .build/logs/smoke-tests
timeoutInMinutes: 5
displayName: Run smoke tests (Remote)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-linux-$(VSCODE_ARCH)" \
yarn smoketest-no-compile --web --headless --electronArgs="--disable-dev-shm-usage --use-gl=swiftshader"
timeoutInMinutes: 5
displayName: Run smoke tests (Browser)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- task: PublishPipelineArtifact@0
inputs:
artifactName: "crash-dump-linux-$(VSCODE_ARCH)"
artifactName: crash-dump-linux-$(VSCODE_ARCH)
targetPath: .build/crashes
displayName: "Publish Crash Reports"
continueOnError: true
condition: failed()
- task: PublishPipelineArtifact@0
inputs:
artifactName: logs-linux-$(VSCODE_ARCH)-$(System.JobAttempt)
targetPath: .build/logs
displayName: "Publish Log Files"
continueOnError: true
condition: and(succeededOrFailed(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- task: PublishTestResults@2
displayName: Publish Tests Results
inputs:
@@ -212,30 +270,25 @@ steps:
displayName: Prepare snap package
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
# needed for code signing
- task: UseDotNet@2
displayName: "Install .NET Core SDK 2.x"
inputs:
version: 2.x
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
inputs:
ConnectedServiceName: "ESRP CodeSign"
FolderPath: ".build/linux/rpm"
Pattern: "*.rpm"
signConfigType: inlineSignParams
inlineOperation: |
[
{
"keyCode": "CP-450779-Pgp",
"operationSetCode": "LinuxSign",
"parameters": [ ],
"toolName": "sign",
"toolVersion": "1.0"
}
]
SessionTimeout: 120
- task: EsrpClientTool@1
displayName: Download ESRPClient
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- script: |
set -e
yarn --cwd build
yarn --cwd build compile
displayName: Compile build tools
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- script: |
set -e
node build/azure-pipelines/common/sign "$(esrpclient.toolpath)/$(esrpclient.toolname)" rpm $(ESRP-PKI) $(esrp-aad-username) $(esrp-aad-password) .build/linux/rpm '*.rpm'
displayName: Codesign rpm
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))

View File

@@ -7,12 +7,6 @@ steps:
inputs:
versionSpec: "1.x"
- task: AzureKeyVault@1
displayName: "Azure Key Vault: Get Secrets"
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
- task: DownloadPipelineArtifact@0
displayName: "Download Pipeline Artifact"
inputs:

View File

@@ -4,7 +4,7 @@ parameters:
steps:
- task: NodeTool@0
inputs:
versionSpec: "12.13.0"
versionSpec: "14.x"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
inputs:
@@ -119,7 +119,7 @@ steps:
- script: |
set -e
DISPLAY=:10 ./scripts/test.sh --build --coverage --reporter mocha-junit-reporter --tfs "Unit Tests"
DISPLAY=:10 ./scripts/test.sh --build --tfs "Unit Tests" # Disable code coverage since it's currently broken --coverage
displayName: Run unit tests (Electron)
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))

View File

@@ -104,19 +104,35 @@ variables:
value: ${{ eq(parameters.VSCODE_STEP_ON_IT, true) }}
- name: VSCODE_BUILD_MACOS_UNIVERSAL
value: ${{ and(eq(variables['VSCODE_PUBLISH'], true), eq(parameters.VSCODE_BUILD_MACOS, true), eq(parameters.VSCODE_BUILD_MACOS_ARM64, true), eq(parameters.VSCODE_BUILD_MACOS_UNIVERSAL, true)) }}
- name: AZURE_CDN_URL
value: https://az764295.vo.msecnd.net
- name: AZURE_DOCUMENTDB_ENDPOINT
value: https://vscode.documents.azure.com:443/
- name: AZURE_STORAGE_ACCOUNT
value: ticino
- name: AZURE_STORAGE_ACCOUNT_2
value: vscode
- name: MOONCAKE_CDN_URL
value: https://vscode.cdn.azure.cn
- name: VSCODE_MIXIN_REPO
value: microsoft/vscode-distro
- name: skipComponentGovernanceDetection
value: true
resources:
containers:
- container: vscode-x64
image: vscodehub.azurecr.io/vscode-linux-build-agent:bionic-x64
endpoint: VSCodeHub
options: --user 0:0
options: --user 0:0 --cap-add SYS_ADMIN
- container: vscode-arm64
image: vscodehub.azurecr.io/vscode-linux-build-agent:stretch-arm64
endpoint: VSCodeHub
options: --user 0:0 --cap-add SYS_ADMIN
- container: vscode-armhf
image: vscodehub.azurecr.io/vscode-linux-build-agent:stretch-armhf
endpoint: VSCodeHub
options: --user 0:0 --cap-add SYS_ADMIN
- container: snapcraft
image: snapcore/snapcraft:stable
@@ -124,7 +140,7 @@ stages:
- stage: Compile
jobs:
- job: Compile
pool: compile
pool: vscode-1es
variables:
VSCODE_ARCH: x64
steps:
@@ -176,10 +192,11 @@ stages:
variables:
VSCODE_ARCH: x64
NPM_ARCH: x64
DISPLAY: ":10"
steps:
- template: linux/product-build-linux.yml
- ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_LINUX, true)) }}:
- ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_LINUX, true), ne(variables['VSCODE_PUBLISH'], 'false')) }}:
- job: LinuxSnap
dependsOn:
- Linux

View File

@@ -12,6 +12,7 @@ steps:
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
SecretsFilter: 'github-distro-mixin-password,ticino-storage-key'
- script: |
set -e
@@ -38,7 +39,7 @@ steps:
# using `genericNodeModules` instead of `nodeModules` here to avoid sharing the cache with builds running inside containers
- task: Cache@2
inputs:
key: 'genericNodeModules | $(Agent.OS) | .build/yarnlockhash'
key: "genericNodeModules | $(Agent.OS) | .build/yarnlockhash"
path: .build/node_modules_cache
cacheHitVar: NODE_MODULES_RESTORED
displayName: Restore node_modules cache
@@ -76,6 +77,7 @@ steps:
env:
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
GITHUB_TOKEN: "$(github-distro-mixin-password)"
displayName: Install dependencies
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
@@ -98,6 +100,13 @@ steps:
yarn npm-run-all -lp core-ci extensions-ci hygiene eslint valid-layers-check
displayName: Compile & Hygiene
- script: |
set -e
yarn --cwd test/smoke compile
yarn --cwd test/integration/browser compile
displayName: Compile test suites
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \
@@ -113,15 +122,7 @@ steps:
- script: |
set -e
AZURE_WEBVIEW_STORAGE_ACCESS_KEY="$(vscode-webview-storage-key)" \
./build/azure-pipelines/common/publish-webview.sh
displayName: Publish Webview
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
# we gotta tarball everything in order to preserve file permissions
- script: |
set -e
tar -czf $(Build.ArtifactStagingDirectory)/compilation.tar.gz .build out-*
tar -cz --ignore-failed-read -f $(Build.ArtifactStagingDirectory)/compilation.tar.gz .build out-* test/integration/browser/out test/smoke/out test/automation/out
displayName: Compress compilation artifact
- task: PublishPipelineArtifact@1

View File

@@ -12,6 +12,7 @@ steps:
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
SecretsFilter: 'builds-docdb-key-readwrite,github-distro-mixin-password,ticino-storage-key,vscode-storage-key,vscode-mooncake-storage-key'
- pwsh: |
. build/azure-pipelines/win32/exec.ps1
@@ -51,6 +52,7 @@ steps:
- publish: $(Pipeline.Workspace)/artifacts_processed_$(System.StageAttempt)/artifacts_processed_$(System.StageAttempt).txt
artifact: artifacts_processed_$(System.StageAttempt)
displayName: Publish what artifacts were published for this stage attempt
condition: always()
- pwsh: |
$ErrorActionPreference = 'Stop'

View File

@@ -12,6 +12,7 @@ steps:
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
SecretsFilter: 'builds-docdb-key-readwrite'
- script: |
set -e

View File

@@ -0,0 +1,243 @@
trigger: none
pr: none
parameters:
- name: ENABLE_TERRAPIN
displayName: "Enable Terrapin"
type: boolean
default: true
- name: SCAN_WINDOWS
displayName: "Scan Windows"
type: boolean
default: true
- name: SCAN_LINUX
displayName: "Scan Linux"
type: boolean
default: false
variables:
- name: ENABLE_TERRAPIN
value: ${{ eq(parameters.ENABLE_TERRAPIN, true) }}
- name: SCAN_WINDOWS
value: ${{ eq(parameters.SCAN_WINDOWS, true) }}
- name: SCAN_LINUX
value: ${{ eq(parameters.SCAN_LINUX, true) }}
- name: VSCODE_MIXIN_REPO
value: microsoft/vscode-distro
- name: skipComponentGovernanceDetection
value: true
- name: NPM_ARCH
value: x64
- name: VSCODE_ARCH
value: x64
stages:
- stage: Windows
condition: eq(variables.SCAN_WINDOWS, 'true')
pool:
vmImage: VS2017-Win2016
jobs:
- job: WindowsJob
timeoutInMinutes: 0
steps:
- task: CredScan@3
continueOnError: true
inputs:
scanFolder: '$(Build.SourcesDirectory)'
outputFormat: 'pre'
- task: NodeTool@0
inputs:
versionSpec: "14.x"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
- task: AzureKeyVault@1
displayName: "Azure Key Vault: Get Secrets"
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
SecretsFilter: "github-distro-mixin-password,ESRP-SSL-AADAuth,vscode-storage-key,builds-docdb-key-readwrite"
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
"machine github.com`nlogin vscode`npassword $(github-distro-mixin-password)" | Out-File "$env:USERPROFILE\_netrc" -Encoding ASCII
exec { git config user.email "vscode@microsoft.com" }
exec { git config user.name "VSCode" }
displayName: Prepare tooling
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { git pull --no-rebase https://github.com/$(VSCODE_MIXIN_REPO).git $(node -p "require('./package.json').distro") }
displayName: Merge distro
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { npx https://aka.ms/enablesecurefeed standAlone }
timeoutInMinutes: 5
condition: and(succeeded(), eq(variables['ENABLE_TERRAPIN'], 'true'))
displayName: Switch to Terrapin packages
- task: Semmle@1
inputs:
sourceCodeDirectory: '$(Build.SourcesDirectory)'
language: 'cpp'
buildCommandsString: 'yarn --frozen-lockfile'
querySuite: 'Required'
timeout: '1800'
ram: '16384'
addProjectDirToScanningExclusionList: true
env:
npm_config_arch: "$(NPM_ARCH)"
npm_config_build_from_source: true
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
GITHUB_TOKEN: "$(github-distro-mixin-password)"
displayName: CodeQL
- powershell: |
. build/azure-pipelines/win32/exec.ps1
. build/azure-pipelines/win32/retry.ps1
$ErrorActionPreference = "Stop"
retry { exec { yarn --frozen-lockfile } }
env:
npm_config_arch: "$(NPM_ARCH)"
npm_config_build_from_source: true
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
GITHUB_TOKEN: "$(github-distro-mixin-password)"
CHILD_CONCURRENCY: 1
displayName: Install dependencies
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn gulp "vscode-symbols-win32-$(VSCODE_ARCH)" }
displayName: Download Symbols
- task: BinSkim@4
inputs:
InputType: 'Basic'
Function: 'analyze'
TargetPattern: 'guardianGlob'
AnalyzeTargetGlob: '$(agent.builddirectory)\scanbin\**.dll;$(agent.builddirectory)\scanbin\**.exe;$(agent.builddirectory)\scanbin\**.node'
AnalyzeLocalSymbolDirectories: '$(agent.builddirectory)\scanbin\VSCode-win32-$(VSCODE_ARCH)\pdb'
- task: TSAUpload@2
inputs:
GdnPublishTsaOnboard: true
GdnPublishTsaConfigFile: '$(Build.SourcesDirectory)\build\azure-pipelines\.gdntsa'
- stage: Linux
dependsOn: []
condition: eq(variables.SCAN_LINUX, 'true')
pool:
vmImage: "Ubuntu-18.04"
jobs:
- job: LinuxJob
steps:
- task: CredScan@2
inputs:
toolMajorVersion: 'V2'
- task: NodeTool@0
inputs:
versionSpec: "14.x"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
- task: AzureKeyVault@1
displayName: "Azure Key Vault: Get Secrets"
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
SecretsFilter: "github-distro-mixin-password,ESRP-SSL-AADAuth,vscode-storage-key,builds-docdb-key-readwrite"
- script: |
set -e
cat << EOF > ~/.netrc
machine github.com
login vscode
password $(github-distro-mixin-password)
EOF
git config user.email "vscode@microsoft.com"
git config user.name "VSCode"
displayName: Prepare tooling
- script: |
set -e
git pull --no-rebase https://github.com/$(VSCODE_MIXIN_REPO).git $(node -p "require('./package.json').distro")
displayName: Merge distro
- script: |
set -e
npx https://aka.ms/enablesecurefeed standAlone
timeoutInMinutes: 5
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
displayName: Switch to Terrapin packages
- script: |
set -e
yarn --cwd build
yarn --cwd build compile
displayName: Compile build tools
- script: |
set -e
export npm_config_arch=$(NPM_ARCH)
export npm_config_build_from_source=true
if [ -z "$CC" ] || [ -z "$CXX" ]; then
# Download clang based on chromium revision used by vscode
curl -s https://raw.githubusercontent.com/chromium/chromium/91.0.4472.164/tools/clang/scripts/update.py | python - --output-dir=$PWD/.build/CR_Clang --host-os=linux
# Download libcxx headers and objects from upstream electron releases
DEBUG=libcxx-fetcher \
VSCODE_LIBCXX_OBJECTS_DIR=$PWD/.build/libcxx-objects \
VSCODE_LIBCXX_HEADERS_DIR=$PWD/.build/libcxx_headers \
VSCODE_LIBCXXABI_HEADERS_DIR=$PWD/.build/libcxxabi_headers \
VSCODE_ARCH="$(NPM_ARCH)" \
node build/linux/libcxx-fetcher.js
# Set compiler toolchain
export CC=$PWD/.build/CR_Clang/bin/clang
export CXX=$PWD/.build/CR_Clang/bin/clang++
export CXXFLAGS="-nostdinc++ -D_LIBCPP_HAS_NO_VENDOR_AVAILABILITY_ANNOTATIONS -isystem$PWD/.build/libcxx_headers/include -isystem$PWD/.build/libcxxabi_headers/include -fPIC -flto=thin -fsplit-lto-unit"
export LDFLAGS="-stdlib=libc++ -fuse-ld=lld -flto=thin -fsplit-lto-unit -L$PWD/.build/libcxx-objects -lc++abi"
fi
if [ "$VSCODE_ARCH" == "x64" ]; then
export VSCODE_REMOTE_CC=$(which gcc-4.8)
export VSCODE_REMOTE_CXX=$(which g++-4.8)
fi
for i in {1..3}; do # try 3 times, for Terrapin
yarn --frozen-lockfile && break
if [ $i -eq 3 ]; then
echo "Yarn failed too many times" >&2
exit 1
fi
echo "Yarn failed $i, trying again..."
done
env:
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
GITHUB_TOKEN: "$(github-distro-mixin-password)"
displayName: Install dependencies
- script: |
set -e
yarn gulp vscode-symbols-linux-$(VSCODE_ARCH)
displayName: Build
- task: BinSkim@3
inputs:
toolVersion: Latest
InputType: CommandLine
arguments: analyze $(agent.builddirectory)\scanbin\exe\*.* --recurse --local-symbol-directories $(agent.builddirectory)\scanbin\VSCode-linux-$(VSCODE_ARCH)\pdb
- task: TSAUpload@2
inputs:
GdnPublishTsaConfigFile: '$(Build.SourceDirectory)\build\azure-pipelines\.gdntsa'

View File

@@ -1,7 +1,7 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "12.13.0"
versionSpec: "14.x"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
inputs:
@@ -84,7 +84,7 @@ steps:
- script: |
set -e
yarn npm-run-all -lp sqllint extensions-lint strict-vscode
yarn npm-run-all -lp sqllint extensions-lint
displayName: SQL Hygiene
- script: |

View File

@@ -12,6 +12,7 @@ steps:
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
SecretsFilter: 'github-distro-mixin-password,web-storage-account,web-storage-key,ticino-storage-key'
- task: DownloadPipelineArtifact@2
inputs:
@@ -79,6 +80,7 @@ steps:
env:
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
GITHUB_TOKEN: "$(github-distro-mixin-password)"
displayName: Install dependencies
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))

View File

@@ -1,7 +1,7 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "12.13.0"
versionSpec: "14.x"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
inputs:
@@ -83,7 +83,6 @@ steps:
yarn sqllint
yarn extensions-lint
yarn gulp hygiene
yarn strict-vscode
yarn valid-layers-check
displayName: Run hygiene, eslint
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

View File

@@ -1,10 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<configuration>
<packageSources>
<clear />
<add key="ESRP" value="https://microsoft.pkgs.visualstudio.com/_packaging/ESRP/nuget/v3/index.json" />
</packageSources>
<disabledPackageSources>
<clear />
</disabledPackageSources>
</configuration>

View File

@@ -1,4 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<packages>
<package id="Microsoft.ESRPClient" version="1.2.47" />
</packages>

View File

@@ -1,7 +1,7 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "12.18.3"
versionSpec: "14.x"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 # {{SQL CARBON EDIT}} update version
inputs:

View File

@@ -2,9 +2,6 @@
$ErrorActionPreference = "Stop"
$Arch = "$env:VSCODE_ARCH"
exec { yarn gulp "vscode-win32-$Arch-archive" "vscode-win32-$Arch-system-setup" "vscode-win32-$Arch-user-setup" --sign }
$Repo = "$(pwd)"
$Root = "$Repo\.."
$SystemExe = "$Repo\.build\win32-$Arch\system-setup\VSCodeSetup.exe"

View File

@@ -17,6 +17,7 @@ steps:
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
SecretsFilter: "github-distro-mixin-password,vscode-storage-key,builds-docdb-key-readwrite,ESRP-PKI,esrp-aad-username,esrp-aad-password"
- task: DownloadPipelineArtifact@2
inputs:
@@ -53,7 +54,7 @@ steps:
- task: Cache@2
inputs:
key: 'nodeModules | $(Agent.OS) | .build/arch, .build/terrapin, .build/yarnlockhash'
key: "nodeModules | $(Agent.OS) | .build/arch, .build/terrapin, .build/yarnlockhash"
path: .build/node_modules_cache
cacheHitVar: NODE_MODULES_RESTORED
displayName: Restore node_modules cache
@@ -84,6 +85,7 @@ steps:
env:
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
GITHUB_TOKEN: "$(github-distro-mixin-password)"
displayName: Install dependencies
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
@@ -154,13 +156,6 @@ steps:
timeoutInMinutes: 7
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn --cwd test/integration/browser compile }
displayName: Compile integration tests
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
- powershell: |
# Figure out the full absolute path of the product we just built
# including the remote server and configure the integration tests
@@ -194,6 +189,41 @@ steps:
timeoutInMinutes: 7
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn --cwd test/smoke compile }
displayName: Compile smoke tests
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$AppRoot = "$(agent.builddirectory)\VSCode-win32-$(VSCODE_ARCH)"
exec { yarn smoketest-no-compile --build "$AppRoot" --screenshots .build\logs\smoke-tests }
displayName: Run smoke tests (Electron)
timeoutInMinutes: 5
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
# - powershell: |
# . build/azure-pipelines/win32/exec.ps1
# $ErrorActionPreference = "Stop"
# $AppRoot = "$(agent.builddirectory)\VSCode-win32-$(VSCODE_ARCH)"
# $env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\vscode-reh-win32-$(VSCODE_ARCH)"
# exec { yarn smoketest-no-compile --build "$AppRoot" --remote }
# displayName: Run smoke tests (Remote)
# timeoutInMinutes: 5
# condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\vscode-reh-web-win32-$(VSCODE_ARCH)"
exec { yarn smoketest-no-compile --web --browser firefox --headless }
displayName: Run smoke tests (Browser)
timeoutInMinutes: 5
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
- task: PublishPipelineArtifact@0
inputs:
artifactName: crash-dump-windows-$(VSCODE_ARCH)
@@ -202,6 +232,14 @@ steps:
continueOnError: true
condition: failed()
- task: PublishPipelineArtifact@0
inputs:
artifactName: logs-windows-$(VSCODE_ARCH)-$(System.JobAttempt)
targetPath: .build\logs
displayName: "Publish Log Files"
continueOnError: true
condition: and(succeededOrFailed(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
- task: PublishTestResults@2
displayName: Publish Tests Results
inputs:
@@ -209,84 +247,58 @@ steps:
searchFolder: "$(Build.ArtifactStagingDirectory)/test-results"
condition: and(succeededOrFailed(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
- task: UseDotNet@2
inputs:
ConnectedServiceName: "ESRP CodeSign"
FolderPath: "$(CodeSigningFolderPath)"
Pattern: "*.dll,*.exe,*.node"
signConfigType: inlineSignParams
inlineOperation: |
[
{
"keyCode": "CP-230012",
"operationSetCode": "SigntoolSign",
"parameters": [
{
"parameterName": "OpusName",
"parameterValue": "VS Code"
},
{
"parameterName": "OpusInfo",
"parameterValue": "https://code.visualstudio.com/"
},
{
"parameterName": "Append",
"parameterValue": "/as"
},
{
"parameterName": "FileDigest",
"parameterValue": "/fd \"SHA256\""
},
{
"parameterName": "PageHash",
"parameterValue": "/NPH"
},
{
"parameterName": "TimeStamp",
"parameterValue": "/tr \"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\" /td sha256"
}
],
"toolName": "sign",
"toolVersion": "1.0"
},
{
"keyCode": "CP-230012",
"operationSetCode": "SigntoolVerify",
"parameters": [
{
"parameterName": "VerifyAll",
"parameterValue": "/all"
}
],
"toolName": "sign",
"toolVersion": "1.0"
}
]
SessionTimeout: 120
version: 2.x
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- task: NuGetCommand@2
displayName: Install ESRPClient.exe
inputs:
restoreSolution: 'build\azure-pipelines\win32\ESRPClient\packages.config'
feedsToUse: config
nugetConfigPath: 'build\azure-pipelines\win32\ESRPClient\NuGet.config'
externalFeedCredentials: "ESRP Nuget"
restoreDirectory: packages
- task: EsrpClientTool@1
displayName: Download ESRPClient
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- task: ESRPImportCertTask@1
displayName: Import ESRP Request Signing Certificate
inputs:
ESRP: "ESRP CodeSign"
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn --cwd build }
exec { yarn --cwd build compile }
displayName: Compile build tools
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- task: PowerShell@2
inputs:
targetType: filePath
filePath: .\build\azure-pipelines\win32\import-esrp-auth-cert.ps1
arguments: "$(ESRP-SSL-AADAuth)"
displayName: Import ESRP Auth Certificate
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$EsrpClientTool = (gci -directory -filter EsrpClientTool_* $(Agent.RootDirectory)\_tasks | Select-Object -last 1).FullName
$EsrpCliZip = (gci -recurse -filter esrpcli.*.zip $EsrpClientTool | Select-Object -last 1).FullName
mkdir -p $(Agent.TempDirectory)\esrpcli
Expand-Archive -Path $EsrpCliZip -DestinationPath $(Agent.TempDirectory)\esrpcli
$EsrpCliDllPath = (gci -recurse -filter esrpcli.dll $(Agent.TempDirectory)\esrpcli | Select-Object -last 1).FullName
echo "##vso[task.setvariable variable=EsrpCliDllPath]$EsrpCliDllPath"
displayName: Find ESRP CLI
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { node build\azure-pipelines\common\sign $env:EsrpCliDllPath windows $(ESRP-PKI) $(esrp-aad-username) $(esrp-aad-password) $(CodeSigningFolderPath) '*.dll,*.exe,*.node' }
displayName: Codesign
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn gulp "vscode-win32-$(VSCODE_ARCH)-archive" }
displayName: Package archive
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$env:ESRPPKI = "$(ESRP-PKI)"
$env:ESRPAADUsername = "$(esrp-aad-username)"
$env:ESRPAADPassword = "$(esrp-aad-password)"
exec { yarn gulp "vscode-win32-$(VSCODE_ARCH)-system-setup" --sign }
exec { yarn gulp "vscode-win32-$(VSCODE_ARCH)-user-setup" --sign }
displayName: Package setups
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- powershell: |

View File

@@ -1,71 +0,0 @@
function Create-TmpJson($Obj) {
$FileName = [System.IO.Path]::GetTempFileName()
ConvertTo-Json -Depth 100 $Obj | Out-File -Encoding UTF8 $FileName
return $FileName
}
$Auth = Create-TmpJson @{
Version = "1.0.0"
AuthenticationType = "AAD_CERT"
ClientId = $env:ESRPClientId
AuthCert = @{
SubjectName = $env:ESRPAuthCertificateSubjectName
StoreLocation = "LocalMachine"
StoreName = "My"
SendX5c = "true"
}
RequestSigningCert = @{
SubjectName = $env:ESRPCertificateSubjectName
StoreLocation = "LocalMachine"
StoreName = "My"
}
}
$Policy = Create-TmpJson @{
Version = "1.0.0"
}
$Input = Create-TmpJson @{
Version = "1.0.0"
SignBatches = @(
@{
SourceLocationType = "UNC"
SignRequestFiles = @(
@{
SourceLocation = $args[0]
}
)
SigningInfo = @{
Operations = @(
@{
KeyCode = "CP-230012"
OperationCode = "SigntoolSign"
Parameters = @{
OpusName = "VS Code"
OpusInfo = "https://code.visualstudio.com/"
Append = "/as"
FileDigest = "/fd `"SHA256`""
PageHash = "/NPH"
TimeStamp = "/tr `"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer`" /td sha256"
}
ToolName = "sign"
ToolVersion = "1.0"
},
@{
KeyCode = "CP-230012"
OperationCode = "SigntoolVerify"
Parameters = @{
VerifyAll = "/all"
}
ToolName = "sign"
ToolVersion = "1.0"
}
)
}
}
)
}
$Output = [System.IO.Path]::GetTempFileName()
$ScriptPath = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent
& "$ScriptPath\ESRPClient\packages\Microsoft.ESRPClient.*\tools\ESRPClient.exe" Sign -a $Auth -p $Policy -i $Input -o $Output

View File

@@ -1,7 +1,7 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "12.13.0"
versionSpec: "14.x"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
inputs:

View File

@@ -1,7 +1,7 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "12.13.0"
versionSpec: "14.x"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
inputs:

View File

@@ -29,7 +29,6 @@ app.once('ready', () => {
webPreferences: {
nodeIntegration: true,
contextIsolation: false,
webviewTag: true,
enableWebSQL: false,
nativeWindowOpen: true
}

View File

@@ -4,7 +4,8 @@
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const vscode_universal_1 = require("vscode-universal");
const vscode_universal_bundler_1 = require("vscode-universal-bundler");
const cross_spawn_promise_1 = require("@malept/cross-spawn-promise");
const fs = require("fs-extra");
const path = require("path");
const plist = require("plist");
@@ -23,7 +24,7 @@ async function main() {
const outAppPath = path.join(buildDir, `VSCode-darwin-${arch}`, appName);
const productJsonPath = path.resolve(outAppPath, 'Contents', 'Resources', 'app', 'product.json');
const infoPlistPath = path.resolve(outAppPath, 'Contents', 'Info.plist');
await vscode_universal_1.makeUniversalApp({
await (0, vscode_universal_bundler_1.makeUniversalApp)({
x64AppPath,
arm64AppPath,
x64AsarPath,
@@ -50,6 +51,12 @@ async function main() {
LSRequiresNativeExecution: true
});
await fs.writeFile(infoPlistPath, plist.build(infoPlistJson), 'utf8');
// Verify if native module architecture is correct
const findOutput = await (0, cross_spawn_promise_1.spawn)('find', [outAppPath, '-name', 'keytar.node']);
const lipoOutput = await (0, cross_spawn_promise_1.spawn)('lipo', ['-archs', findOutput.replace(/\n$/, "")]);
if (lipoOutput.replace(/\n$/, "") !== 'x86_64 arm64') {
throw new Error(`Invalid arch, got : ${lipoOutput}`);
}
}
if (require.main === module) {
main().catch(err => {

View File

@@ -5,7 +5,8 @@
'use strict';
import { makeUniversalApp } from 'vscode-universal';
import { makeUniversalApp } from 'vscode-universal-bundler';
import { spawn } from '@malept/cross-spawn-promise';
import * as fs from 'fs-extra';
import * as path from 'path';
import * as plist from 'plist';
@@ -57,6 +58,13 @@ async function main() {
LSRequiresNativeExecution: true
});
await fs.writeFile(infoPlistPath, plist.build(infoPlistJson), 'utf8');
// Verify if native module architecture is correct
const findOutput = await spawn('find', [outAppPath, '-name', 'keytar.node'])
const lipoOutput = await spawn('lipo', ['-archs', findOutput.replace(/\n$/, "")]);
if (lipoOutput.replace(/\n$/, "") !== 'x86_64 arm64') {
throw new Error(`Invalid arch, got : ${lipoOutput}`)
}
}
if (require.main === module) {
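
A minimal standalone sketch of the keytar architecture check the two hunks above introduce (not part of the diff; the appRoot argument and default module name are illustrative, and `find` is assumed to return a single match, as it does above):

import { spawn } from '@malept/cross-spawn-promise';

// Throws unless the named .node binary under appRoot is a fat x86_64 + arm64 build.
async function assertUniversalNativeModule(appRoot: string, moduleFile = 'keytar.node'): Promise<void> {
	// Assumes exactly one match, mirroring the find/lipo sequence in the diff above.
	const modulePath = (await spawn('find', [appRoot, '-name', moduleFile])).replace(/\n$/, '');
	const archs = (await spawn('lipo', ['-archs', modulePath])).replace(/\n$/, '');
	if (archs !== 'x86_64 arm64') {
		throw new Error(`${moduleFile} is not universal, lipo reported: ${archs}`);
	}
}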

View File

@@ -22,6 +22,10 @@ module.exports.all = [
'!out*/**',
'!test/**/out/**',
'!**/node_modules/**',
// {{SQL CARBON EDIT}}
'!build/actions/**/*.js',
'!build/**/*'
];
module.exports.indentationFilter = [
@@ -79,7 +83,7 @@ module.exports.indentationFilter = [
'!src/typings/**/*.d.ts',
'!extensions/**/*.d.ts',
'!**/*.{svg,exe,png,bmp,jpg,scpt,bat,cmd,cur,ttf,woff,eot,md,ps1,template,yaml,yml,d.ts.recipe,ico,icns,plist}',
'!build/{lib,download,darwin}/**/*.js',
'!build/{lib,download,linux,darwin}/**/*.js',
'!build/**/*.sh',
'!build/azure-pipelines/**/*.js',
'!build/azure-pipelines/**/*.config',
@@ -91,6 +95,31 @@ module.exports.indentationFilter = [
'!extensions/markdown-language-features/notebook-out/*.js',
'!extensions/markdown-math/notebook-out/*.js',
'!extensions/simple-browser/media/*.js',
// {{SQL CARBON EDIT}} Except for our stuff
'!**/*.gif',
'!build/actions/**/*.js',
'!**/*.{xlf,lcl,docx,sql,vsix,bacpac,ipynb,jpg}',
'!extensions/mssql/sqltoolsservice/**',
'!extensions/import/flatfileimportservice/**',
'!extensions/admin-tool-ext-win/ssmsmin/**',
'!extensions/resource-deployment/notebooks/**',
'!extensions/mssql/notebooks/**',
'!extensions/azurehybridtoolkit/notebooks/**',
'!extensions/integration-tests/testData/**',
'!extensions/arc/src/controller/generated/**',
'!extensions/sql-database-projects/resources/templates/*.xml',
'!extensions/sql-database-projects/src/test/baselines/*.xml',
'!extensions/sql-database-projects/src/test/baselines/*.json',
'!extensions/sql-database-projects/src/test/baselines/*.sqlproj',
'!extensions/sql-database-projects/BuildDirectory/SystemDacpacs/**',
'!extensions/big-data-cluster/src/bigDataCluster/controller/apiGenerated.ts',
'!extensions/big-data-cluster/src/bigDataCluster/controller/clusterApiGenerated2.ts',
'!resources/linux/snap/electron-launch',
'!extensions/markdown-language-features/media/*.js',
'!extensions/simple-browser/media/*.js',
'!resources/xlf/LocProject.json',
'!build/**/*'
];
module.exports.copyrightFilter = [
@@ -113,6 +142,7 @@ module.exports.copyrightFilter = [
'!**/*.code-workspace',
'!**/*.js.map',
'!build/**/*.init',
'!build/linux/libcxx-fetcher.*',
'!resources/linux/snap/snapcraft.yaml',
'!resources/win32/bin/code.js',
'!resources/web/code-web.js',
@@ -123,6 +153,47 @@ module.exports.copyrightFilter = [
'!extensions/html-language-features/server/src/modes/typescript/*',
'!extensions/*/server/bin/*',
'!src/vs/editor/test/node/classification/typescript-test.ts',
// {{SQL CARBON EDIT}} Except for stuff in our code that doesn't use our copyright
'!extensions/azurehybridtoolkit/notebooks/**',
'!extensions/azuremonitor/src/prompts/**',
'!extensions/import/flatfileimportservice/**',
'!extensions/kusto/src/prompts/**',
'!extensions/mssql/sqltoolsservice/**',
'!extensions/mssql/src/hdfs/webhdfs.ts',
'!extensions/mssql/src/prompts/**',
'!extensions/notebook/resources/jupyter_config/**',
'!extensions/notebook/src/intellisense/text.ts',
'!extensions/notebook/src/prompts/**',
'!extensions/query-history/images/**',
'!extensions/sql/build/update-grammar.js',
'!src/sql/workbench/contrib/notebook/browser/outputs/tableRenderers.ts',
'!src/sql/workbench/contrib/notebook/common/models/url.ts',
'!src/sql/workbench/services/notebook/browser/outputs/renderMimeInterfaces.ts',
'!src/sql/workbench/contrib/notebook/browser/models/outputProcessor.ts',
'!src/sql/workbench/services/notebook/browser/outputs/mimemodel.ts',
'!src/sql/workbench/contrib/notebook/browser/cellViews/media/*.css',
'!src/sql/base/browser/ui/table/plugins/rowSelectionModel.plugin.ts',
'!src/sql/base/browser/ui/table/plugins/rowDetailView.ts',
'!src/sql/base/browser/ui/table/plugins/headerFilter.plugin.ts',
'!src/sql/base/browser/ui/table/plugins/checkboxSelectColumn.plugin.ts',
'!src/sql/base/browser/ui/table/plugins/cellSelectionModel.plugin.ts',
'!src/sql/base/browser/ui/table/plugins/autoSizeColumns.plugin.ts',
'!src/sql/workbench/services/notebook/browser/outputs/sanitizer.ts',
'!src/sql/workbench/contrib/notebook/browser/outputs/renderers.ts',
'!src/sql/workbench/services/notebook/browser/outputs/tableRenderers.ts',
'!src/sql/workbench/services/notebook/browser/outputs/registry.ts',
'!src/sql/workbench/services/notebook/browser/outputs/factories.ts',
'!src/sql/workbench/services/notebook/common/nbformat.ts',
'!extensions/markdown-language-features/media/tomorrow.css',
'!src/sql/workbench/browser/modelComponents/media/highlight.css',
'!src/sql/workbench/contrib/notebook/electron-browser/cellViews/media/highlight.css',
'!src/sql/workbench/contrib/notebook/browser/turndownPluginGfm.ts',
'!**/*.gif',
'!**/*.xlf',
'!**/*.dacpac',
'!**/*.bacpac',
'!**/*.py'
];
module.exports.jsHygieneFilter = [
@@ -137,6 +208,7 @@ module.exports.jsHygieneFilter = [
'!src/**/marked.js',
'!src/**/semver.js',
'!**/test/**',
'!build/**/*' // {{SQL CARBON EDIT}}
];
module.exports.tsHygieneFilter = [
@@ -154,4 +226,11 @@ module.exports.tsHygieneFilter = [
'!extensions/vscode-api-tests/testWorkspace2/**',
'!extensions/**/*.test.ts',
'!extensions/html-language-features/server/lib/jquery.d.ts',
// {{SQL CARBON EDIT}}
'!extensions/big-data-cluster/src/bigDataCluster/controller/apiGenerated.ts',
'!extensions/big-data-cluster/src/bigDataCluster/controller/tokenApiGenerated.ts',
'!src/vs/workbench/services/themes/common/textMateScopeMatcher.ts', // skip this because we have no plans on touching this and its not ours
'!src/vs/workbench/contrib/extensions/browser/extensionRecommendationsService.ts', // skip this because known issue
'!build/**/*'
];

View File

@@ -14,6 +14,7 @@ const compilation = require('./lib/compilation');
const compileBuildTask = task.define('compile-build',
task.series(
util.rimraf('out-build'),
util.buildWebNodePaths('out-build'),
compilation.compileTask('src', 'out-build', true)
)
);

View File

@@ -35,42 +35,42 @@ const compilations = glob.sync('**/tsconfig.json', {
ignore: ['**/out/**', '**/node_modules/**']
});
// const compilations = [
// 'configuration-editing/build/tsconfig.json',
// 'configuration-editing/tsconfig.json',
// 'css-language-features/client/tsconfig.json',
// 'css-language-features/server/tsconfig.json',
// 'debug-auto-launch/tsconfig.json',
// 'debug-server-ready/tsconfig.json',
// 'emmet/tsconfig.json',
// 'extension-editing/tsconfig.json',
// 'git/tsconfig.json',
// 'github-authentication/tsconfig.json',
// 'github/tsconfig.json',
// 'grunt/tsconfig.json',
// 'gulp/tsconfig.json',
// 'html-language-features/client/tsconfig.json',
// 'html-language-features/server/tsconfig.json',
// 'image-preview/tsconfig.json',
// 'jake/tsconfig.json',
// 'json-language-features/client/tsconfig.json',
// 'json-language-features/server/tsconfig.json',
// 'markdown-language-features/preview-src/tsconfig.json',
// 'markdown-language-features/tsconfig.json',
// 'markdown-math/tsconfig.json',
// 'merge-conflict/tsconfig.json',
// 'microsoft-authentication/tsconfig.json',
// 'npm/tsconfig.json',
// 'php-language-features/tsconfig.json',
// 'search-result/tsconfig.json',
// 'simple-browser/tsconfig.json',
// 'testing-editor-contributions/tsconfig.json',
// 'typescript-language-features/test-workspace/tsconfig.json',
// 'typescript-language-features/tsconfig.json',
// 'vscode-api-tests/tsconfig.json',
// 'vscode-colorize-tests/tsconfig.json',
// 'vscode-custom-editor-tests/tsconfig.json',
// 'vscode-notebook-tests/tsconfig.json',
// 'vscode-test-resolver/tsconfig.json'
// 'configuration-editing/build/tsconfig.json',
// 'configuration-editing/tsconfig.json',
// 'css-language-features/client/tsconfig.json',
// 'css-language-features/server/tsconfig.json',
// 'debug-auto-launch/tsconfig.json',
// 'debug-server-ready/tsconfig.json',
// 'emmet/tsconfig.json',
// 'extension-editing/tsconfig.json',
// 'git/tsconfig.json',
// 'github-authentication/tsconfig.json',
// 'github/tsconfig.json',
// 'grunt/tsconfig.json',
// 'gulp/tsconfig.json',
// 'html-language-features/client/tsconfig.json',
// 'html-language-features/server/tsconfig.json',
// 'image-preview/tsconfig.json',
// 'ipynb/tsconfig.json',
// 'jake/tsconfig.json',
// 'json-language-features/client/tsconfig.json',
// 'json-language-features/server/tsconfig.json',
// 'markdown-language-features/preview-src/tsconfig.json',
// 'markdown-language-features/tsconfig.json',
// 'markdown-math/tsconfig.json',
// 'merge-conflict/tsconfig.json',
// 'microsoft-authentication/tsconfig.json',
// 'npm/tsconfig.json',
// 'php-language-features/tsconfig.json',
// 'search-result/tsconfig.json',
// 'simple-browser/tsconfig.json',
// 'typescript-language-features/test-workspace/tsconfig.json',
// 'typescript-language-features/tsconfig.json',
// 'vscode-api-tests/tsconfig.json',
// 'vscode-colorize-tests/tsconfig.json',
// 'vscode-custom-editor-tests/tsconfig.json',
// 'vscode-notebook-tests/tsconfig.json',
// 'vscode-test-resolver/tsconfig.json'
// ];
const getBaseUrl = out => `https://sqlopsbuilds.blob.core.windows.net/sourcemaps/${commit}/${out}`;

View File

@@ -16,10 +16,10 @@ const { monacoTypecheckTask/* , monacoTypecheckWatchTask */ } = require('./gulpf
const { compileExtensionsTask, watchExtensionsTask, compileExtensionMediaTask } = require('./gulpfile.extensions');
// Fast compile for development time
const compileClientTask = task.define('compile-client', task.series(util.rimraf('out'), compilation.compileTask('src', 'out', false)));
const compileClientTask = task.define('compile-client', task.series(util.rimraf('out'), util.buildWebNodePaths('out'), compilation.compileTask('src', 'out', false)));
gulp.task(compileClientTask);
const watchClientTask = task.define('watch-client', task.series(util.rimraf('out'), compilation.watchTask('out', false)));
const watchClientTask = task.define('watch-client', task.series(util.rimraf('out'), util.buildWebNodePaths('out'), compilation.watchTask('out', false)));
gulp.task(watchClientTask);
// All

build/gulpfile.scan.js Normal file
View File

@@ -0,0 +1,104 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
const gulp = require('gulp');
const path = require('path');
const task = require('./lib/task');
const util = require('./lib/util');
const _ = require('underscore');
const electron = require('gulp-atom-electron');
const { config } = require('./lib/electron');
const filter = require('gulp-filter');
const deps = require('./lib/dependencies');
const root = path.dirname(__dirname);
const BUILD_TARGETS = [
{ platform: 'win32', arch: 'ia32' },
{ platform: 'win32', arch: 'x64' },
{ platform: 'win32', arch: 'arm64' },
{ platform: 'darwin', arch: null, opts: { stats: true } },
{ platform: 'linux', arch: 'ia32' },
{ platform: 'linux', arch: 'x64' },
{ platform: 'linux', arch: 'armhf' },
{ platform: 'linux', arch: 'arm64' },
];
BUILD_TARGETS.forEach(buildTarget => {
const dashed = (str) => (str ? `-${str}` : ``);
const platform = buildTarget.platform;
const arch = buildTarget.arch;
const destinationExe = path.join(path.dirname(root), 'scanbin', `VSCode${dashed(platform)}${dashed(arch)}`, 'bin');
const destinationPdb = path.join(path.dirname(root), 'scanbin', `VSCode${dashed(platform)}${dashed(arch)}`, 'pdb');
const tasks = [];
// removal tasks
tasks.push(util.rimraf(destinationExe), util.rimraf(destinationPdb));
// electron
tasks.push(() => electron.dest(destinationExe, _.extend({}, config, { platform, arch: arch === 'armhf' ? 'arm' : arch })));
// pdbs for windows
if (platform === 'win32') {
tasks.push(
() => electron.dest(destinationPdb, _.extend({}, config, { platform, arch: arch === 'armhf' ? 'arm' : arch, pdbs: true })),
util.rimraf(path.join(destinationExe, 'swiftshader')),
util.rimraf(path.join(destinationExe, 'd3dcompiler_47.dll')));
}
if (platform === 'linux') {
tasks.push(
() => electron.dest(destinationPdb, _.extend({}, config, { platform, arch: arch === 'armhf' ? 'arm' : arch, symbols: true }))
);
}
// node modules
tasks.push(
nodeModules(destinationExe, destinationPdb, platform)
);
const setupSymbolsTask = task.define(`vscode-symbols${dashed(platform)}${dashed(arch)}`,
task.series(...tasks)
);
gulp.task(setupSymbolsTask);
});
function nodeModules(destinationExe, destinationPdb, platform) {
const productionDependencies = deps.getProductionDependencies(root);
const dependenciesSrc = _.flatten(productionDependencies.map(d => path.relative(root, d.path)).map(d => [`${d}/**`, `!${d}/**/{test,tests}/**`]));
const exe = () => {
return gulp.src(dependenciesSrc, { base: '.', dot: true })
.pipe(filter(['**/*.node']))
.pipe(gulp.dest(destinationExe));
};
if (platform === 'win32') {
const pdb = () => {
return gulp.src(dependenciesSrc, { base: '.', dot: true })
.pipe(filter(['**/*.pdb']))
.pipe(gulp.dest(destinationPdb));
};
return gulp.parallel(exe, pdb);
}
if (platform === 'linux') {
const pdb = () => {
return gulp.src(dependenciesSrc, { base: '.', dot: true })
.pipe(filter(['**/*.sym']))
.pipe(gulp.dest(destinationPdb));
};
return gulp.parallel(exe, pdb);
}
return exe;
}
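
The new gulpfile registers one `vscode-symbols{-platform}{-arch}` task per BUILD_TARGETS entry. A minimal sketch of how the names come out (the yarn/gulp invocation is an assumption about how these tasks are usually run, not something stated in the diff):

const dashed = (str?: string | null) => (str ? `-${str}` : '');
console.log(`vscode-symbols${dashed('win32')}${dashed('x64')}`);  // vscode-symbols-win32-x64
console.log(`vscode-symbols${dashed('darwin')}${dashed(null)}`);  // vscode-symbols-darwin (the darwin target has arch: null)
// Typically invoked as something like: yarn gulp vscode-symbols-win32-x64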

View File

@@ -38,6 +38,7 @@ const vscodeEntryPoints = _.flatten([
buildfile.base,
buildfile.workerExtensionHost,
buildfile.workerNotebook,
buildfile.workerLanguageDetection,
buildfile.workbenchDesktop,
buildfile.code
]);
@@ -64,8 +65,6 @@ const vscodeResources = [
'out-build/vs/workbench/contrib/debug/**/*.json',
'out-build/vs/workbench/contrib/externalTerminal/**/*.scpt',
'out-build/vs/workbench/contrib/webview/browser/pre/*.js',
'out-build/vs/workbench/contrib/webview/electron-browser/pre/*.js',
'out-build/vs/workbench/services/extensions/worker/extensionHostWorkerMain.js',
'out-build/vs/**/markdown.css',
'out-build/vs/workbench/contrib/tasks/**/*.json',
'out-build/vs/platform/files/**/*.exe',
@@ -288,7 +287,7 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op
'**/node-pty/build/Release/*',
'**/node-pty/lib/worker/conoutSocketWorker.js',
'**/node-pty/lib/shared/conout.js',
'**/*.wasm'
'**/*.wasm',
], 'node_modules.asar'));
let all = es.merge(

View File

@@ -27,7 +27,7 @@ const zipPath = arch => path.join(zipDir(arch), `azuredatastudio-win32-${arch}.z
const setupDir = (arch, target) => path.join(repoPath, '.build', `win32-${arch}`, `${target}-setup`);
const issPath = path.join(__dirname, 'win32', 'code.iss');
const innoSetupPath = path.join(path.dirname(path.dirname(require.resolve('innosetup'))), 'bin', 'ISCC.exe');
const signPS1 = path.join(repoPath, 'build', 'azure-pipelines', 'win32', 'sign.ps1');
const signWin32Path = path.join(repoPath, 'build', 'azure-pipelines', 'common', 'sign-win32');
function packageInnoSetup(iss, options, cb) {
options = options || {};
@@ -50,7 +50,7 @@ function packageInnoSetup(iss, options, cb) {
const args = [
iss,
...defs,
`/sesrp=powershell.exe -ExecutionPolicy bypass ${signPS1} $f`
`/sesrp=node ${signWin32Path} $f`
];
cp.spawn(innoSetupPath, args, { stdio: ['ignore', 'inherit', 'inherit'] })

View File

@@ -12,221 +12,7 @@ const vfs = require('vinyl-fs');
const path = require('path');
const fs = require('fs');
const pall = require('p-all');
/**
* Hygiene works by creating cascading subsets of all our files and
* passing them through a sequence of checks. Here are the current subsets,
* named according to the checks performed on them. Each subset contains
* the following one, as described in mathematical notation:
*
* all ⊃ eol ⊇ indentation ⊃ copyright ⊃ typescript
*/
const all = [
'*',
'extensions/**/*',
'scripts/**/*',
'src/**/*',
'test/**/*',
'!test/**/out/**',
'!**/node_modules/**',
'!build/actions/**/*.js', // {{SQL CARBON EDIT}}
'!build/**/*' // {{SQL CARBON EDIT}}
];
module.exports.all = all;
const indentationFilter = [
'**',
// except specific files
'!**/ThirdPartyNotices.txt',
'!**/LICENSE.{txt,rtf}',
'!LICENSES.chromium.html',
'!**/LICENSE',
'!src/vs/nls.js',
'!src/vs/nls.build.js',
'!src/vs/css.js',
'!src/vs/css.build.js',
'!src/vs/loader.js',
'!src/vs/base/common/insane/insane.js',
'!src/vs/base/common/marked/marked.js',
'!src/vs/base/common/semver/semver.js',
'!src/vs/base/node/terminateProcess.sh',
'!src/vs/base/node/cpuUsage.sh',
'!test/unit/assert.js',
'!resources/linux/snap/electron-launch',
// except specific folders
'!test/automation/out/**',
'!test/smoke/out/**',
'!extensions/typescript-language-features/test-workspace/**',
'!extensions/vscode-api-tests/testWorkspace/**',
'!extensions/vscode-api-tests/testWorkspace2/**',
'!build/monaco/**',
'!build/win32/**',
// except multiple specific files
'!**/package.json',
'!**/yarn.lock',
'!**/yarn-error.log',
// except multiple specific folders
'!**/codicon/**',
'!**/fixtures/**',
'!**/lib/**',
'!extensions/**/out/**',
'!extensions/**/snippets/**',
'!extensions/**/syntaxes/**',
'!extensions/**/themes/**',
'!extensions/**/colorize-fixtures/**',
// except specific file types
'!src/vs/*/**/*.d.ts',
'!src/typings/**/*.d.ts',
'!extensions/**/*.d.ts',
'!**/*.{svg,exe,png,bmp,jpg,scpt,bat,cmd,cur,ttf,woff,eot,md,ps1,template,yaml,yml,d.ts.recipe,ico,icns,plist}',
'!build/{lib,download,darwin}/**/*.js',
'!build/**/*.sh',
'!build/azure-pipelines/**/*.js',
'!build/azure-pipelines/**/*.config',
'!**/Dockerfile',
'!**/Dockerfile.*',
'!**/*.Dockerfile',
'!**/*.dockerfile',
'!extensions/markdown-language-features/media/*.js',
// {{SQL CARBON EDIT}}
'!**/*.gif',
'!build/actions/**/*.js',
'!**/*.{xlf,lcl,docx,sql,vsix,bacpac,ipynb,jpg}',
'!extensions/mssql/sqltoolsservice/**',
'!extensions/import/flatfileimportservice/**',
'!extensions/admin-tool-ext-win/ssmsmin/**',
'!extensions/resource-deployment/notebooks/**',
'!extensions/mssql/notebooks/**',
'!extensions/azurehybridtoolkit/notebooks/**',
'!extensions/integration-tests/testData/**',
'!extensions/arc/src/controller/generated/**',
'!extensions/sql-database-projects/resources/templates/*.xml',
'!extensions/sql-database-projects/src/test/baselines/*.xml',
'!extensions/sql-database-projects/src/test/baselines/*.json',
'!extensions/sql-database-projects/src/test/baselines/*.sqlproj',
'!extensions/sql-database-projects/BuildDirectory/SystemDacpacs/**',
'!extensions/big-data-cluster/src/bigDataCluster/controller/apiGenerated.ts',
'!extensions/big-data-cluster/src/bigDataCluster/controller/clusterApiGenerated2.ts',
'!resources/linux/snap/electron-launch',
'!extensions/markdown-language-features/media/*.js',
'!extensions/simple-browser/media/*.js',
'!resources/xlf/LocProject.json', // {{SQL CARBON EDIT}}
'!build/**/*' // {{SQL CARBON EDIT}}
];
const copyrightFilter = [
'**',
'!**/*.desktop',
'!**/*.json',
'!**/*.html',
'!**/*.template',
'!**/*.md',
'!**/*.bat',
'!**/*.cmd',
'!**/*.ico',
'!**/*.icns',
'!**/*.xml',
'!**/*.sh',
'!**/*.txt',
'!**/*.xpm',
'!**/*.opts',
'!**/*.disabled',
'!**/*.code-workspace',
'!**/*.js.map',
'!build/**/*.init',
'!resources/linux/snap/snapcraft.yaml',
'!resources/win32/bin/code.js',
'!resources/web/code-web.js',
'!resources/completions/**',
'!extensions/configuration-editing/build/inline-allOf.ts',
'!extensions/markdown-language-features/media/highlight.css',
'!extensions/html-language-features/server/src/modes/typescript/*',
'!extensions/*/server/bin/*',
'!src/vs/editor/test/node/classification/typescript-test.ts',
'!scripts/code-web.js',
'!resources/serverless/code-web.js',
'!src/vs/editor/test/node/classification/typescript-test.ts',
// {{SQL CARBON EDIT}}
'!extensions/notebook/src/intellisense/text.ts',
'!extensions/mssql/src/hdfs/webhdfs.ts',
'!src/sql/workbench/contrib/notebook/browser/outputs/tableRenderers.ts',
'!src/sql/workbench/contrib/notebook/common/models/url.ts',
'!src/sql/workbench/services/notebook/browser/outputs/renderMimeInterfaces.ts',
'!src/sql/workbench/contrib/notebook/browser/models/outputProcessor.ts',
'!src/sql/workbench/services/notebook/browser/outputs/mimemodel.ts',
'!src/sql/workbench/contrib/notebook/browser/cellViews/media/*.css',
'!src/sql/base/browser/ui/table/plugins/rowSelectionModel.plugin.ts',
'!src/sql/base/browser/ui/table/plugins/rowDetailView.ts',
'!src/sql/base/browser/ui/table/plugins/headerFilter.plugin.ts',
'!src/sql/base/browser/ui/table/plugins/checkboxSelectColumn.plugin.ts',
'!src/sql/base/browser/ui/table/plugins/cellSelectionModel.plugin.ts',
'!src/sql/base/browser/ui/table/plugins/autoSizeColumns.plugin.ts',
'!src/sql/workbench/services/notebook/browser/outputs/sanitizer.ts',
'!src/sql/workbench/contrib/notebook/browser/outputs/renderers.ts',
'!src/sql/workbench/services/notebook/browser/outputs/registry.ts',
'!src/sql/workbench/services/notebook/browser/outputs/factories.ts',
'!src/sql/workbench/services/notebook/common/nbformat.ts',
'!extensions/markdown-language-features/media/tomorrow.css',
'!src/sql/workbench/browser/modelComponents/media/highlight.css',
'!src/sql/workbench/contrib/notebook/electron-browser/cellViews/media/highlight.css',
'!src/sql/workbench/contrib/notebook/browser/turndownPluginGfm.ts',
'!extensions/mssql/sqltoolsservice/**',
'!extensions/import/flatfileimportservice/**',
'!extensions/notebook/src/prompts/**',
'!extensions/mssql/src/prompts/**',
'!extensions/kusto/src/prompts/**',
'!extensions/notebook/resources/jupyter_config/**',
'!extensions/azurehybridtoolkit/notebooks/**',
'!extensions/query-history/images/**',
'!extensions/sql/build/update-grammar.js',
'!**/*.gif',
'!**/*.xlf',
'!**/*.dacpac',
'!**/*.bacpac',
'!**/*.py'
];
const jsHygieneFilter = [
'src/**/*.js',
'build/gulpfile.*.js',
'!src/vs/loader.js',
'!src/vs/css.js',
'!src/vs/nls.js',
'!src/vs/css.build.js',
'!src/vs/nls.build.js',
'!src/**/insane.js',
'!src/**/marked.js',
'!src/**/semver.js',
'!**/test/**',
'!build/**/*' // {{SQL CARBON EDIT}}
];
module.exports.jsHygieneFilter = jsHygieneFilter;
const tsHygieneFilter = [
'src/**/*.ts',
'test/**/*.ts',
'extensions/**/*.ts',
'!**/fixtures/**',
'!**/typings/**',
'!**/node_modules/**',
'!extensions/typescript-basics/test/colorize-fixtures/**',
'!extensions/vscode-api-tests/testWorkspace/**',
'!extensions/vscode-api-tests/testWorkspace2/**',
'!extensions/**/*.test.ts',
'!extensions/html-language-features/server/lib/jquery.d.ts',
'!extensions/big-data-cluster/src/bigDataCluster/controller/apiGenerated.ts', // {{SQL CARBON EDIT}}
'!extensions/big-data-cluster/src/bigDataCluster/controller/tokenApiGenerated.ts', // {{SQL CARBON EDIT}}
'!src/vs/workbench/services/themes/common/textMateScopeMatcher.ts', // {{SQL CARBON EDIT}} skip this because we have no plans on touching this and its not ours
'!src/vs/workbench/contrib/extensions/browser/extensionRecommendationsService.ts', // {{SQL CARBON EDIT}} skip this because known issue
'!build/**/*' // {{SQL CARBON EDIT}}
];
module.exports.tsHygieneFilter = tsHygieneFilter;
const { all, copyrightFilter, indentationFilter, jsHygieneFilter, tsHygieneFilter } = require('./filters');
const copyrightHeaderLines = [
'/*---------------------------------------------------------------------------------------------',

View File

@@ -25,7 +25,7 @@ async function downloadExtensionDetails(extension) {
const promises = [];
for (const fileName of contentFileNames) {
promises.push(new Promise(resolve => {
got_1.default(`${repositoryContentBaseUrl}/${fileName}`)
(0, got_1.default)(`${repositoryContentBaseUrl}/${fileName}`)
.then(response => {
resolve({ fileName, body: response.rawBody });
})

View File

@@ -17,7 +17,7 @@ const fancyLog = require("fancy-log");
const ansiColors = require("ansi-colors");
const os = require("os");
const watch = require('./watch');
const reporter = reporter_1.createReporter();
const reporter = (0, reporter_1.createReporter)();
function getTypeScriptCompilerOptions(src) {
const rootDir = path.join(__dirname, `../../${src}`);
let options = {};
@@ -37,6 +37,9 @@ function createCompile(src, build, emitError) {
const sourcemaps = require('gulp-sourcemaps');
const projectPath = path.join(__dirname, '../../', src, 'tsconfig.json');
const overrideOptions = Object.assign(Object.assign({}, getTypeScriptCompilerOptions(src)), { inlineSources: Boolean(build) });
if (!build) {
overrideOptions.inlineSourceMap = true;
}
const compilation = tsb.create(projectPath, overrideOptions, false, err => reporter(err));
function pipeline(token) {
const bom = require('gulp-bom');

View File

@@ -44,6 +44,9 @@ function createCompile(src: string, build: boolean, emitError?: boolean) {
const projectPath = path.join(__dirname, '../../', src, 'tsconfig.json');
const overrideOptions = { ...getTypeScriptCompilerOptions(src), inlineSources: Boolean(build) };
if (!build) {
overrideOptions.inlineSourceMap = true;
}
const compilation = tsb.create(projectPath, overrideOptions, false, err => reporter(err));

View File

@@ -11,19 +11,69 @@ const vfs = require("vinyl-fs");
const filter = require("gulp-filter");
const _ = require("underscore");
const util = require("./util");
function isDocumentSuffix(str) {
return str === 'document' || str === 'script' || str === 'file' || str === 'source code';
}
const root = path.dirname(path.dirname(__dirname));
const product = JSON.parse(fs.readFileSync(path.join(root, 'product.json'), 'utf8'));
const commit = util.getVersion(root);
const darwinCreditsTemplate = product.darwinCredits && _.template(fs.readFileSync(path.join(root, product.darwinCredits), 'utf8'));
function darwinBundleDocumentType(extensions, icon) {
/**
* Generate a `DarwinDocumentType` given a list of file extensions, an icon name, and an optional suffix or file type name.
* @param extensions A list of file extensions, such as `['bat', 'cmd']`
* @param icon A sentence-cased file type name that matches the lowercase name of a darwin icon resource.
* For example, `'HTML'` instead of `'html'`, or `'Java'` instead of `'java'`.
* This parameter is lowercased before it is used to reference an icon file.
* @param nameOrSuffix An optional suffix or a string to use as the file type. If a suffix is provided,
* it is used with the icon parameter to generate a file type string. If nothing is provided,
* `'document'` is used with the icon parameter to generate file type string.
*
* For example, if you call `darwinBundleDocumentType(..., 'HTML')`, the resulting file type is `"HTML document"`,
* and the `'html'` darwin icon is used.
*
* If you call `darwinBundleDocumentType(..., 'Javascript', 'file')`, the resulting file type is `"Javascript file"`.
* and the `'javascript'` darwin icon is used.
*
* If you call `darwinBundleDocumentType(..., 'bat', 'Windows command script')`, the file type is `"Windows command script"`,
* and the `'bat'` darwin icon is used.
*/
function darwinBundleDocumentType(extensions, icon, nameOrSuffix) {
// If given a suffix, generate a name from it. If not given anything, default to 'document'
if (isDocumentSuffix(nameOrSuffix) || !nameOrSuffix) {
nameOrSuffix = icon.charAt(0).toUpperCase() + icon.slice(1) + ' ' + (nameOrSuffix !== null && nameOrSuffix !== void 0 ? nameOrSuffix : 'document');
}
return {
name: product.nameLong + ' document',
name: nameOrSuffix,
role: 'Editor',
ostypes: ['TEXT', 'utxt', 'TUTX', '****'],
extensions: extensions,
iconFile: icon
iconFile: 'resources/darwin/' + icon + '.icns'
};
}
/**
* Generate several `DarwinDocumentType`s with unique names and a shared icon.
* @param types A map of file type names to their associated file extensions.
* @param icon A darwin icon resource to use. For example, `'HTML'` would refer to `resources/darwin/html.icns`
*
* Examples:
* ```
* darwinBundleDocumentTypes({ 'C header file': 'h', 'C source code': 'c' },'c')
* darwinBundleDocumentTypes({ 'React source code': ['jsx', 'tsx'] }, 'react')
* ```
*/
// {{SQL CARBON EDIT}} Remove unused
// function darwinBundleDocumentTypes(types: { [name: string]: string | string[] }, icon: string): DarwinDocumentType[] {
// return Object.keys(types).map((name: string): DarwinDocumentType => {
// const extensions = types[name];
// return {
// name: name,
// role: 'Editor',
// ostypes: ['TEXT', 'utxt', 'TUTX', '****'],
// extensions: Array.isArray(extensions) ? extensions : [extensions],
// iconFile: 'resources/darwin/' + icon + '.icns',
// } as DarwinDocumentType;
// });
// }
exports.config = {
version: util.getElectronVersion(),
productAppName: product.nameLong,
@@ -35,7 +85,7 @@ exports.config = {
darwinHelpBookFolder: 'VS Code HelpBook',
darwinHelpBookName: 'VS Code HelpBook',
darwinBundleDocumentTypes: [
darwinBundleDocumentType(["csv", "json", "sqlplan", "sql", "xml"], 'resources/darwin/code_file.icns'),
darwinBundleDocumentType(['csv', 'json', 'sqlplan', 'sql', 'xml'], 'code_file'),
],
darwinBundleURLTypes: [{
role: 'Viewer',

View File

@@ -12,22 +12,84 @@ import * as filter from 'gulp-filter';
import * as _ from 'underscore';
import * as util from './util';
type DarwinDocumentSuffix = 'document' | 'script' | 'file' | 'source code';
type DarwinDocumentType = {
name: string,
role: string,
ostypes: string[],
extensions: string[],
iconFile: string,
};
function isDocumentSuffix(str?: string): str is DarwinDocumentSuffix {
return str === 'document' || str === 'script' || str === 'file' || str === 'source code';
}
const root = path.dirname(path.dirname(__dirname));
const product = JSON.parse(fs.readFileSync(path.join(root, 'product.json'), 'utf8'));
const commit = util.getVersion(root);
const darwinCreditsTemplate = product.darwinCredits && _.template(fs.readFileSync(path.join(root, product.darwinCredits), 'utf8'));
function darwinBundleDocumentType(extensions: string[], icon: string) {
/**
* Generate a `DarwinDocumentType` given a list of file extensions, an icon name, and an optional suffix or file type name.
* @param extensions A list of file extensions, such as `['bat', 'cmd']`
* @param icon A sentence-cased file type name that matches the lowercase name of a darwin icon resource.
* For example, `'HTML'` instead of `'html'`, or `'Java'` instead of `'java'`.
* This parameter is lowercased before it is used to reference an icon file.
* @param nameOrSuffix An optional suffix or a string to use as the file type. If a suffix is provided,
* it is used with the icon parameter to generate a file type string. If nothing is provided,
* `'document'` is used with the icon parameter to generate file type string.
*
* For example, if you call `darwinBundleDocumentType(..., 'HTML')`, the resulting file type is `"HTML document"`,
* and the `'html'` darwin icon is used.
*
* If you call `darwinBundleDocumentType(..., 'Javascript', 'file')`, the resulting file type is `"Javascript file"`.
* and the `'javascript'` darwin icon is used.
*
* If you call `darwinBundleDocumentType(..., 'bat', 'Windows command script')`, the file type is `"Windows command script"`,
* and the `'bat'` darwin icon is used.
*/
function darwinBundleDocumentType(extensions: string[], icon: string, nameOrSuffix?: string | DarwinDocumentSuffix): DarwinDocumentType {
// If given a suffix, generate a name from it. If not given anything, default to 'document'
if (isDocumentSuffix(nameOrSuffix) || !nameOrSuffix) {
nameOrSuffix = icon.charAt(0).toUpperCase() + icon.slice(1) + ' ' + (nameOrSuffix ?? 'document');
}
return {
name: product.nameLong + ' document',
name: nameOrSuffix,
role: 'Editor',
ostypes: ['TEXT', 'utxt', 'TUTX', '****'],
extensions: extensions,
iconFile: icon
iconFile: 'resources/darwin/' + icon + '.icns'
};
}
/**
* Generate several `DarwinDocumentType`s with unique names and a shared icon.
* @param types A map of file type names to their associated file extensions.
* @param icon A darwin icon resource to use. For example, `'HTML'` would refer to `resources/darwin/html.icns`
*
* Examples:
* ```
* darwinBundleDocumentTypes({ 'C header file': 'h', 'C source code': 'c' },'c')
* darwinBundleDocumentTypes({ 'React source code': ['jsx', 'tsx'] }, 'react')
* ```
*/
// {{SQL CARBON EDIT}} Remove unused
// function darwinBundleDocumentTypes(types: { [name: string]: string | string[] }, icon: string): DarwinDocumentType[] {
// return Object.keys(types).map((name: string): DarwinDocumentType => {
// const extensions = types[name];
// return {
// name: name,
// role: 'Editor',
// ostypes: ['TEXT', 'utxt', 'TUTX', '****'],
// extensions: Array.isArray(extensions) ? extensions : [extensions],
// iconFile: 'resources/darwin/' + icon + '.icns',
// } as DarwinDocumentType;
// });
// }
export const config = {
version: util.getElectronVersion(),
productAppName: product.nameLong,
@@ -39,7 +101,7 @@ export const config = {
darwinHelpBookFolder: 'VS Code HelpBook',
darwinHelpBookName: 'VS Code HelpBook',
darwinBundleDocumentTypes: [
darwinBundleDocumentType(["csv", "json", "sqlplan", "sql", "xml"], 'resources/darwin/code_file.icns'),
darwinBundleDocumentType(['csv', 'json', 'sqlplan', 'sql', 'xml'], 'code_file'),
],
darwinBundleURLTypes: [{
role: 'Viewer',
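
Taken together, the rewritten helper resolves icons under resources/darwin and derives a display name whenever only a suffix (or nothing) is supplied. A sketch of the resulting values for a few calls; only the code_file call appears in this diff, while the html and bat calls are illustrative and mirror the doc comment:

darwinBundleDocumentType(['csv', 'json', 'sqlplan', 'sql', 'xml'], 'code_file');
// -> name: 'Code_file document', iconFile: 'resources/darwin/code_file.icns'

darwinBundleDocumentType(['html', 'htm'], 'html', 'document');
// -> name: 'Html document', iconFile: 'resources/darwin/html.icns'

darwinBundleDocumentType(['bat', 'cmd'], 'bat', 'Windows command script');
// -> name: 'Windows command script', iconFile: 'resources/darwin/bat.icns'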

View File

@@ -21,7 +21,7 @@ module.exports = new class {
const configs = context.options;
for (const config of configs) {
if (minimatch(context.getFilename(), config.target)) {
return utils_1.createImportRuleListener((node, value) => this._checkImport(context, config, node, value));
return (0, utils_1.createImportRuleListener)((node, value) => this._checkImport(context, config, node, value));
}
}
return {};
@@ -29,7 +29,7 @@ module.exports = new class {
_checkImport(context, config, node, path) {
// resolve relative paths
if (path[0] === '.') {
path = path_1.join(context.getFilename(), path);
path = (0, path_1.join)(context.getFilename(), path);
}
let restrictions;
if (typeof config.restrictions === 'string') {

View File

@@ -17,7 +17,7 @@ module.exports = new class {
};
}
create(context) {
const fileDirname = path_1.dirname(context.getFilename());
const fileDirname = (0, path_1.dirname)(context.getFilename());
const parts = fileDirname.split(/\\|\//);
const ruleArgs = context.options[0];
let config;
@@ -39,11 +39,11 @@ module.exports = new class {
// nothing
return {};
}
return utils_1.createImportRuleListener((node, path) => {
return (0, utils_1.createImportRuleListener)((node, path) => {
if (path[0] === '.') {
path = path_1.join(path_1.dirname(context.getFilename()), path);
path = (0, path_1.join)((0, path_1.dirname)(context.getFilename()), path);
}
const parts = path_1.dirname(path).split(/\\|\//);
const parts = (0, path_1.dirname)(path).split(/\\|\//);
for (let i = parts.length - 1; i >= 0; i--) {
const part = parts[i];
if (config.allowed.has(part)) {

View File

@@ -20,10 +20,10 @@ module.exports = new class NoNlsInStandaloneEditorRule {
|| /vs(\/|\\)editor(\/|\\)editor.api/.test(fileName)
|| /vs(\/|\\)editor(\/|\\)editor.main/.test(fileName)
|| /vs(\/|\\)editor(\/|\\)editor.worker/.test(fileName)) {
return utils_1.createImportRuleListener((node, path) => {
return (0, utils_1.createImportRuleListener)((node, path) => {
// resolve relative paths
if (path[0] === '.') {
path = path_1.join(context.getFilename(), path);
path = (0, path_1.join)(context.getFilename(), path);
}
if (/vs(\/|\\)nls/.test(path)) {
context.report({

View File

@@ -21,10 +21,10 @@ module.exports = new class NoNlsInStandaloneEditorRule {
// the vs/editor folder is allowed to use the standalone editor
return {};
}
return utils_1.createImportRuleListener((node, path) => {
return (0, utils_1.createImportRuleListener)((node, path) => {
// resolve relative paths
if (path[0] === '.') {
path = path_1.join(context.getFilename(), path);
path = (0, path_1.join)(context.getFilename(), path);
}
if (/vs(\/|\\)editor(\/|\\)standalone(\/|\\)/.test(path)
|| /vs(\/|\\)editor(\/|\\)common(\/|\\)standalone(\/|\\)/.test(path)

View File

@@ -15,7 +15,7 @@ module.exports = new (_a = class TranslationRemind {
};
}
create(context) {
return utils_1.createImportRuleListener((node, path) => this._checkImport(context, node, path));
return (0, utils_1.createImportRuleListener)((node, path) => this._checkImport(context, node, path));
}
_checkImport(context, node, path) {
if (path !== TranslationRemind.NLS_MODULE) {
@@ -31,7 +31,7 @@ module.exports = new (_a = class TranslationRemind {
let resourceDefined = false;
let json;
try {
json = fs_1.readFileSync('./build/lib/i18n.resources.json', 'utf8');
json = (0, fs_1.readFileSync)('./build/lib/i18n.resources.json', 'utf8');
}
catch (e) {
console.error('[translation-remind rule]: File with resources to pull from Transifex was not found. Aborting translation resource check for newly defined workbench part/service.');

View File

@@ -1,7 +1,7 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
module.exports = new class ApiVsCodeInComments {
constructor() {

View File

@@ -21,6 +21,8 @@ const fancyLog = require("fancy-log");
const ansiColors = require("ansi-colors");
const buffer = require('gulp-buffer');
const jsoncParser = require("jsonc-parser");
const dependencies_1 = require("./dependencies");
const _ = require("underscore");
const util = require('./util');
const root = path.dirname(path.dirname(__dirname));
const commit = util.getVersion(root);
@@ -145,7 +147,7 @@ function fromLocalWebpack(extensionPath, webpackConfigFileName) {
console.error(packagedDependencies);
result.emit('error', err);
});
return result.pipe(stats_1.createStatsStream(path.basename(extensionPath)));
return result.pipe((0, stats_1.createStatsStream)(path.basename(extensionPath)));
}
function fromLocalNormal(extensionPath) {
const result = es.through();
@@ -163,7 +165,7 @@ function fromLocalNormal(extensionPath) {
es.readArray(files).pipe(result);
})
.catch(err => result.emit('error', err));
return result.pipe(stats_1.createStatsStream(path.basename(extensionPath)));
return result.pipe((0, stats_1.createStatsStream)(path.basename(extensionPath)));
}
exports.fromLocalNormal = fromLocalNormal;
const baseHeaders = {
@@ -254,14 +256,30 @@ const productJson = JSON.parse(fs.readFileSync(path.join(__dirname, '../../produ
const builtInExtensions = productJson.builtInExtensions || [];
const webBuiltInExtensions = productJson.webBuiltInExtensions || [];
/**
* Loosely based on `getExtensionKind` from `src/vs/workbench/services/extensions/common/extensionsUtil.ts`
* Loosely based on `getExtensionKind` from `src/vs/workbench/services/extensions/common/extensionManifestPropertiesService.ts`
*/
function isWebExtension(manifest) {
if (Boolean(manifest.browser)) {
return true;
}
if (Boolean(manifest.main)) {
return false;
}
// neither browser nor main
if (typeof manifest.extensionKind !== 'undefined') {
const extensionKind = Array.isArray(manifest.extensionKind) ? manifest.extensionKind : [manifest.extensionKind];
return (extensionKind.indexOf('web') >= 0);
if (extensionKind.indexOf('web') >= 0) {
return true;
}
}
return (!Boolean(manifest.main) || Boolean(manifest.browser));
if (typeof manifest.contributes !== 'undefined') {
for (const id of ['debuggers', 'terminal', 'typescriptServerPlugins']) {
if (manifest.contributes.hasOwnProperty(id)) {
return false;
}
}
}
return true;
}
function packageLocalExtensionsStream(forWeb) {
const localExtensionsDescriptions = (glob.sync('extensions/*/package.json')
@@ -284,8 +302,10 @@ function packageLocalExtensionsStream(forWeb) {
result = localExtensionsStream;
}
else {
// also include shared node modules
result = es.merge(localExtensionsStream, gulp.src('extensions/node_modules/**', { base: '.' }));
// also include shared production node modules
const productionDependencies = (0, dependencies_1.getProductionDependencies)('extensions/');
const dependenciesSrc = _.flatten(productionDependencies.map(d => path.relative(root, d.path)).map(d => [`${d}/**`, `!${d}/**/{test,tests}/**`]));
result = es.merge(localExtensionsStream, gulp.src(dependenciesSrc, { base: '.' }));
}
return (result
.pipe(util2.setExecutableBit(['**/*.sh'])));
@@ -400,7 +420,7 @@ function translatePackageJSON(packageJSON, packageNLSPath) {
else if (typeof val === 'string' && val.charCodeAt(0) === CharCode_PC && val.charCodeAt(val.length - 1) === CharCode_PC) {
const translated = packageNls[val.substr(1, val.length - 2)];
if (translated) {
obj[key] = translated;
obj[key] = typeof translated === 'string' ? translated : (typeof translated.message === 'string' ? translated.message : val);
}
}
}
@@ -470,7 +490,7 @@ async function webpackExtensions(taskName, isWatch, webpackConfigLocations) {
reject();
}
else {
reporter(stats.toJson());
reporter(stats === null || stats === void 0 ? void 0 : stats.toJson());
}
});
}
@@ -481,7 +501,7 @@ async function webpackExtensions(taskName, isWatch, webpackConfigLocations) {
reject();
}
else {
reporter(stats.toJson());
reporter(stats === null || stats === void 0 ? void 0 : stats.toJson());
resolve();
}
});
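
For reference, the revised isWebExtension above decides in this order: an explicit browser entry point marks an extension as web, an explicit main rules web out, then extensionKind is consulted, then desktop-only contribution points, and anything left over is treated as web. A sketch of how a few manifests classify (the manifests are illustrative, not taken from this repo):

isWebExtension({ browser: 'dist/browser.js' });        // true  - browser entry point wins
isWebExtension({ main: 'out/extension.js' });          // false - desktop entry point only
isWebExtension({ extensionKind: ['web'] });            // true  - no entry points, declared web
isWebExtension({ contributes: { debuggers: [] } });    // false - contributes a desktop-only point
isWebExtension({ contributes: { commands: [] } });     // true  - nothing disqualifies it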

View File

@@ -21,6 +21,8 @@ import * as ansiColors from 'ansi-colors';
const buffer = require('gulp-buffer');
import * as jsoncParser from 'jsonc-parser';
import webpack = require('webpack');
import { getProductionDependencies } from './dependencies';
import _ = require('underscore');
const util = require('./util');
const root = path.dirname(path.dirname(__dirname));
const commit = util.getVersion(root);
@@ -303,19 +305,38 @@ const webBuiltInExtensions: IBuiltInExtension[] = productJson.webBuiltInExtensio
type ExtensionKind = 'ui' | 'workspace' | 'web';
interface IExtensionManifest {
main: string;
browser: string;
main?: string;
browser?: string;
extensionKind?: ExtensionKind | ExtensionKind[];
extensionPack?: string[];
extensionDependencies?: string[];
contributes?: { [id: string]: any };
}
/**
* Loosely based on `getExtensionKind` from `src/vs/workbench/services/extensions/common/extensionsUtil.ts`
* Loosely based on `getExtensionKind` from `src/vs/workbench/services/extensions/common/extensionManifestPropertiesService.ts`
*/
function isWebExtension(manifest: IExtensionManifest): boolean {
if (Boolean(manifest.browser)) {
return true;
}
if (Boolean(manifest.main)) {
return false;
}
// neither browser nor main
if (typeof manifest.extensionKind !== 'undefined') {
const extensionKind = Array.isArray(manifest.extensionKind) ? manifest.extensionKind : [manifest.extensionKind];
return (extensionKind.indexOf('web') >= 0);
if (extensionKind.indexOf('web') >= 0) {
return true;
}
}
return (!Boolean(manifest.main) || Boolean(manifest.browser));
if (typeof manifest.contributes !== 'undefined') {
for (const id of ['debuggers', 'terminal', 'typescriptServerPlugins']) {
if (manifest.contributes.hasOwnProperty(id)) {
return false;
}
}
}
return true;
}
export function packageLocalExtensionsStream(forWeb: boolean): Stream {
@@ -344,8 +365,10 @@ export function packageLocalExtensionsStream(forWeb: boolean): Stream {
if (forWeb) {
result = localExtensionsStream;
} else {
// also include shared node modules
result = es.merge(localExtensionsStream, gulp.src('extensions/node_modules/**', { base: '.' }));
// also include shared production node modules
const productionDependencies = getProductionDependencies('extensions/');
const dependenciesSrc = _.flatten(productionDependencies.map(d => path.relative(root, d.path)).map(d => [`${d}/**`, `!${d}/**/{test,tests}/**`]));
result = es.merge(localExtensionsStream, gulp.src(dependenciesSrc, { base: '.' }));
}
return (
@@ -469,8 +492,11 @@ export function packageRebuildExtensionsStream(): NodeJS.ReadWriteStream {
// {{SQL CARBON EDIT}} end
export function translatePackageJSON(packageJSON: string, packageNLSPath: string) {
interface NLSFormat {
[key: string]: string | { message: string, comment: string[] };
}
const CharCode_PC = '%'.charCodeAt(0);
const packageNls = JSON.parse(fs.readFileSync(packageNLSPath).toString());
const packageNls: NLSFormat = JSON.parse(fs.readFileSync(packageNLSPath).toString());
const translate = (obj: any) => {
for (let key in obj) {
const val = obj[key];
@@ -481,7 +507,7 @@ export function translatePackageJSON(packageJSON: string, packageNLSPath: string
} else if (typeof val === 'string' && val.charCodeAt(0) === CharCode_PC && val.charCodeAt(val.length - 1) === CharCode_PC) {
const translated = packageNls[val.substr(1, val.length - 2)];
if (translated) {
obj[key] = translated;
obj[key] = typeof translated === 'string' ? translated : (typeof translated.message === 'string' ? translated.message : val);
}
}
}
@@ -556,7 +582,7 @@ export async function webpackExtensions(taskName: string, isWatch: boolean, webp
if (err) {
reject();
} else {
reporter(stats.toJson());
reporter(stats?.toJson());
}
});
} else {
@@ -565,7 +591,7 @@ export async function webpackExtensions(taskName: string, isWatch: boolean, webp
fancyLog.error(err);
reject();
} else {
reporter(stats.toJson());
reporter(stats?.toJson());
resolve();
}
});
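
One note on the translatePackageJSON change above: package.nls.json values may now be either a plain string or an object carrying a message plus translator comments, and only the message is substituted into package.json. A sketch with illustrative keys and strings:

const packageNls = {
	'extension.displayName': 'SQL Database Projects',
	'extension.description': { message: 'Build and deploy SQL database projects', comment: ['Shown in the extension gallery'] }
};
// '%extension.displayName%' in package.json resolves to the string directly;
// '%extension.description%' resolves to the nested `message` field, matching the NLSFormat shape introduced above.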

View File

@@ -466,7 +466,7 @@ function processCoreBundleFormat(fileHeader, languages, json, emitter) {
});
}
function processNlsFiles(opts) {
return event_stream_1.through(function (file) {
return (0, event_stream_1.through)(function (file) {
let fileName = path.basename(file.path);
if (fileName === 'nls.metadata.json') {
let json = null;
@@ -524,7 +524,7 @@ function getResource(sourceFile) {
}
exports.getResource = getResource;
function createXlfFilesForCoreBundle() {
return event_stream_1.through(function (file) {
return (0, event_stream_1.through)(function (file) {
const basename = path.basename(file.path);
if (basename === 'nls.metadata.json') {
if (file.isBuffer()) {
@@ -579,7 +579,7 @@ function createXlfFilesForExtensions() {
let counter = 0;
let folderStreamEnded = false;
let folderStreamEndEmitted = false;
return event_stream_1.through(function (extensionFolder) {
return (0, event_stream_1.through)(function (extensionFolder) {
const folderStream = this;
const stat = fs.statSync(extensionFolder.path);
if (!stat.isDirectory()) {
@@ -597,7 +597,7 @@ function createXlfFilesForExtensions() {
}
return _xlf;
}
gulp.src([`.build/extensions/${extensionName}/package.nls.json`, `.build/extensions/${extensionName}/**/nls.metadata.json`], { allowEmpty: true }).pipe(event_stream_1.through(function (file) {
gulp.src([`.build/extensions/${extensionName}/package.nls.json`, `.build/extensions/${extensionName}/**/nls.metadata.json`], { allowEmpty: true }).pipe((0, event_stream_1.through)(function (file) {
if (file.isBuffer()) {
const buffer = file.contents;
const basename = path.basename(file.path);
@@ -656,7 +656,7 @@ function createXlfFilesForExtensions() {
}
exports.createXlfFilesForExtensions = createXlfFilesForExtensions;
function createXlfFilesForIsl() {
return event_stream_1.through(function (file) {
return (0, event_stream_1.through)(function (file) {
let projectName, resourceFile;
if (path.basename(file.path) === 'messages.en.isl') {
projectName = setupProject;
@@ -709,7 +709,7 @@ exports.createXlfFilesForIsl = createXlfFilesForIsl;
function pushXlfFiles(apiHostname, username, password) {
let tryGetPromises = [];
let updateCreatePromises = [];
return event_stream_1.through(function (file) {
return (0, event_stream_1.through)(function (file) {
const project = path.dirname(file.relative);
const fileName = path.basename(file.path);
const slug = fileName.substr(0, fileName.length - '.xlf'.length);
@@ -771,7 +771,7 @@ function getAllResources(project, apiHostname, username, password) {
function findObsoleteResources(apiHostname, username, password) {
let resourcesByProject = Object.create(null);
resourcesByProject[extensionsProject] = [].concat(exports.externalExtensionsWithTranslations); // clone
return event_stream_1.through(function (file) {
return (0, event_stream_1.through)(function (file) {
const project = path.dirname(file.relative);
const fileName = path.basename(file.path);
const slug = fileName.substr(0, fileName.length - '.xlf'.length);
@@ -923,7 +923,7 @@ function pullXlfFiles(apiHostname, username, password, language, resources) {
const credentials = `${username}:${password}`;
let expectedTranslationsCount = resources.length;
let translationsRetrieved = 0, called = false;
return event_stream_1.readable(function (_count, callback) {
return (0, event_stream_1.readable)(function (_count, callback) {
// Mark end of stream when all resources were retrieved
if (translationsRetrieved === expectedTranslationsCount) {
return this.emit('end');
@@ -981,7 +981,7 @@ function retrieveResource(language, resource, apiHostname, credentials) {
}
function prepareI18nFiles() {
let parsePromises = [];
return event_stream_1.through(function (xlf) {
return (0, event_stream_1.through)(function (xlf) {
let stream = this;
let parsePromise = XLF.parse(xlf.contents.toString());
parsePromises.push(parsePromise);
@@ -1026,7 +1026,7 @@ function prepareI18nPackFiles(externalExtensions, resultingTranslationPaths, pse
let mainPack = { version: exports.i18nPackVersion, contents: {} };
let extensionsPacks = {};
let errors = [];
return event_stream_1.through(function (xlf) {
return (0, event_stream_1.through)(function (xlf) {
let project = path.basename(path.dirname(path.dirname(xlf.relative)));
let resource = path.basename(xlf.relative, '.xlf');
let contents = xlf.contents.toString();
@@ -1088,7 +1088,7 @@ function prepareI18nPackFiles(externalExtensions, resultingTranslationPaths, pse
exports.prepareI18nPackFiles = prepareI18nPackFiles;
function prepareIslFiles(language, innoSetupConfig) {
let parsePromises = [];
return event_stream_1.through(function (xlf) {
return (0, event_stream_1.through)(function (xlf) {
let stream = this;
let parsePromise = XLF.parse(xlf.contents.toString());
parsePromises.push(parsePromise);

View File

@@ -46,6 +46,10 @@
"name": "vs/workbench/contrib/callHierarchy",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/typeHierarchy",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/codeActions",
"project": "vscode-workbench"
@@ -94,6 +98,10 @@
"name": "vs/workbench/contrib/issue",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/interactive",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/keybindings",
"project": "vscode-workbench"
@@ -246,6 +254,10 @@
"name": "vs/workbench/contrib/views",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/languageDetection",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/actions",
"project": "vscode-workbench"

View File

@@ -199,7 +199,7 @@ const RULES = [
]
}
];
const TS_CONFIG_PATH = path_1.join(__dirname, '../../', 'src', 'tsconfig.json');
const TS_CONFIG_PATH = (0, path_1.join)(__dirname, '../../', 'src', 'tsconfig.json');
let hasErrors = false;
function checkFile(program, sourceFile, rule) {
checkNode(sourceFile);
@@ -250,8 +250,8 @@ function checkFile(program, sourceFile, rule) {
}
function createProgram(tsconfigPath) {
const tsConfig = ts.readConfigFile(tsconfigPath, ts.sys.readFile);
const configHostParser = { fileExists: fs_1.existsSync, readDirectory: ts.sys.readDirectory, readFile: file => fs_1.readFileSync(file, 'utf8'), useCaseSensitiveFileNames: process.platform === 'linux' };
const tsConfigParsed = ts.parseJsonConfigFileContent(tsConfig.config, configHostParser, path_1.resolve(path_1.dirname(tsconfigPath)), { noEmit: true });
const configHostParser = { fileExists: fs_1.existsSync, readDirectory: ts.sys.readDirectory, readFile: file => (0, fs_1.readFileSync)(file, 'utf8'), useCaseSensitiveFileNames: process.platform === 'linux' };
const tsConfigParsed = ts.parseJsonConfigFileContent(tsConfig.config, configHostParser, (0, path_1.resolve)((0, path_1.dirname)(tsconfigPath)), { noEmit: true });
const compilerHost = ts.createCompilerHost(tsConfigParsed.options, true);
return ts.createProgram(tsConfigParsed.fileNames, tsConfigParsed.options, compilerHost);
}
@@ -261,7 +261,7 @@ function createProgram(tsconfigPath) {
const program = createProgram(TS_CONFIG_PATH);
for (const sourceFile of program.getSourceFiles()) {
for (const rule of RULES) {
if (minimatch_1.match([sourceFile.fileName], rule.target).length > 0) {
if ((0, minimatch_1.match)([sourceFile.fileName], rule.target).length > 0) {
if (!rule.skip) {
checkFile(program, sourceFile, rule);
}

View File

@@ -96,7 +96,7 @@ function modifyI18nPackFiles(existingTranslationFolder, resultingTranslationPath
let mainPack = { version: i18n.i18nPackVersion, contents: {} };
let extensionsPacks = {};
let errors = [];
return event_stream_1.through(function (xlf) {
return (0, event_stream_1.through)(function (xlf) {
let rawResource = path.basename(xlf.relative, '.xlf');
let resource = rawResource.substring(0, rawResource.lastIndexOf('.'));
let contents = xlf.contents.toString();

View File

@@ -53,8 +53,8 @@ define([], [${wrap + lines.map(l => indent + l).join(',\n') + wrap}]);`;
* Returns a stream containing the patched JavaScript and source maps.
*/
function nls() {
const input = event_stream_1.through();
const output = input.pipe(event_stream_1.through(function (f) {
const input = (0, event_stream_1.through)();
const output = input.pipe((0, event_stream_1.through)(function (f) {
if (!f.sourceMap) {
return this.emit('error', new Error(`File ${f.relative} does not have sourcemaps.`));
}
@@ -72,7 +72,7 @@ function nls() {
}
_nls.patchFiles(f, typescript).forEach(f => this.emit('data', f));
}));
return event_stream_1.duplex(input, output);
return (0, event_stream_1.duplex)(input, output);
}
exports.nls = nls;
function isImportNode(ts, node) {

View File

@@ -98,7 +98,7 @@ function toConcatStream(src, bundledFileHeader, sources, dest, fileContentMapper
return es.readArray(treatedSources)
.pipe(useSourcemaps ? util.loadSourcemaps() : es.through())
.pipe(concat(dest))
.pipe(stats_1.createStatsStream(dest));
.pipe((0, stats_1.createStatsStream)(dest));
}
function toBundleStream(src, bundledFileHeader, bundles, fileContentMapper) {
return es.merge(bundles.map(function (bundle) {
@@ -155,7 +155,7 @@ function optimizeTask(opts) {
addComment: true,
includeContent: true
}))
.pipe(opts.languages && opts.languages.length ? i18n_1.processNlsFiles({
.pipe(opts.languages && opts.languages.length ? (0, i18n_1.processNlsFiles)({
fileHeader: bundledFileHeader,
languages: opts.languages
}) : es.through())
@@ -179,7 +179,7 @@ function minifyTask(src, sourceMapBaseUrl) {
sourcemap: 'external',
outdir: '.',
platform: 'node',
target: ['node14.16'],
target: ['esnext'],
write: false
}).then(res => {
const jsFile = res.outputFiles.find(f => /\.js$/.test(f.path));

View File

@@ -256,7 +256,7 @@ export function minifyTask(src: string, sourceMapBaseUrl?: string): (cb: any) =>
sourcemap: 'external',
outdir: '.',
platform: 'node',
target: ['node14.16'],
target: ['esnext'],
write: false
}).then(res => {
const jsFile = res.outputFiles.find(f => /\.js$/.test(f.path))!;

View File

@@ -12,7 +12,7 @@ const yarn = process.platform === 'win32' ? 'yarn.cmd' : 'yarn';
const rootDir = path.resolve(__dirname, '..', '..');
function runProcess(command, args = []) {
return new Promise((resolve, reject) => {
const child = child_process_1.spawn(command, args, { cwd: rootDir, stdio: 'inherit', env: process.env });
const child = (0, child_process_1.spawn)(command, args, { cwd: rootDir, stdio: 'inherit', env: process.env });
child.on('exit', err => !err ? resolve() : process.exit(err !== null && err !== void 0 ? err : 1));
child.on('error', reject);
});

View File

@@ -4,7 +4,7 @@
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
exports.getElectronVersion = exports.streamToPromise = exports.versionStringToNumber = exports.filter = exports.rebase = exports.getVersion = exports.ensureDir = exports.rreddir = exports.rimraf = exports.rewriteSourceMappingURL = exports.stripSourceMappingURL = exports.loadSourcemaps = exports.cleanNodeModules = exports.skipDirectories = exports.toFileUri = exports.setExecutableBit = exports.fixWin32DirectoryPermissions = exports.incremental = void 0;
exports.buildWebNodePaths = exports.acquireWebNodePaths = exports.getElectronVersion = exports.streamToPromise = exports.versionStringToNumber = exports.filter = exports.rebase = exports.getVersion = exports.ensureDir = exports.rreddir = exports.rimraf = exports.rewriteSourceMappingURL = exports.stripSourceMappingURL = exports.loadSourcemaps = exports.cleanNodeModules = exports.skipDirectories = exports.toFileUri = exports.setExecutableBit = exports.fixWin32DirectoryPermissions = exports.incremental = void 0;
const es = require("event-stream");
const debounce = require("debounce");
const _filter = require("gulp-filter");
@@ -274,3 +274,51 @@ function getElectronVersion() {
return target;
}
exports.getElectronVersion = getElectronVersion;
function acquireWebNodePaths() {
var _a;
const root = path.join(__dirname, '..', '..');
const webPackageJSON = path.join(root, '/remote/web', 'package.json');
const webPackages = JSON.parse(fs.readFileSync(webPackageJSON, 'utf8')).dependencies;
const nodePaths = {};
for (const key of Object.keys(webPackages)) {
const packageJSON = path.join(root, 'node_modules', key, 'package.json');
const packageData = JSON.parse(fs.readFileSync(packageJSON, 'utf8'));
let entryPoint = typeof packageData.browser === 'string' ? packageData.browser : (_a = packageData.main) !== null && _a !== void 0 ? _a : packageData.main; // {{SQL CARBON EDIT}} Some packages (like Turndown) have objects in this field instead of the entry point, fall back to main in that case
// In rare cases a package doesn't have an entrypoint, so we assume it has a dist folder with a min.js
if (!entryPoint) {
console.warn(`No entry point for ${key} assuming dist/${key}.min.js`);
entryPoint = `dist/${key}.min.js`;
}
// Remove any starting path information so it's all relative info
if (entryPoint.startsWith('./')) {
entryPoint = entryPoint.substr(2);
}
else if (entryPoint.startsWith('/')) {
entryPoint = entryPoint.substr(1);
}
nodePaths[key] = entryPoint;
}
return nodePaths;
}
exports.acquireWebNodePaths = acquireWebNodePaths;
function buildWebNodePaths(outDir) {
const result = () => new Promise((resolve, _) => {
const root = path.join(__dirname, '..', '..');
const nodePaths = acquireWebNodePaths();
// Now we write the node paths to out/vs
const outDirectory = path.join(root, outDir, 'vs');
fs.mkdirSync(outDirectory, { recursive: true });
const headerWithGeneratedFileWarning = `/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
// This file is generated by build/npm/postinstall.js. Do not edit.`;
const fileContents = `${headerWithGeneratedFileWarning}\nself.webPackagePaths = ${JSON.stringify(nodePaths, null, 2)};`;
fs.writeFileSync(path.join(outDirectory, 'webPackagePaths.js'), fileContents, 'utf8');
resolve();
});
result.taskName = 'build-web-node-paths';
return result;
}
exports.buildWebNodePaths = buildWebNodePaths;

View File

@@ -340,3 +340,50 @@ export function getElectronVersion(): string {
const target = /^target "(.*)"$/m.exec(yarnrc)![1];
return target;
}
export function acquireWebNodePaths() {
const root = path.join(__dirname, '..', '..');
const webPackageJSON = path.join(root, '/remote/web', 'package.json');
const webPackages = JSON.parse(fs.readFileSync(webPackageJSON, 'utf8')).dependencies;
const nodePaths: { [key: string]: string } = {};
for (const key of Object.keys(webPackages)) {
const packageJSON = path.join(root, 'node_modules', key, 'package.json');
const packageData = JSON.parse(fs.readFileSync(packageJSON, 'utf8'));
let entryPoint = typeof packageData.browser === 'string' ? packageData.browser : packageData.main ?? packageData.main; // {{SQL CARBON EDIT}} Some packages (like Turndown) have objects in this field instead of the entry point, fall back to main in that case
// In rare cases a package doesn't have an entrypoint, so we assume it has a dist folder with a min.js
if (!entryPoint) {
console.warn(`No entry point for ${key} assuming dist/${key}.min.js`);
entryPoint = `dist/${key}.min.js`;
}
// Remove any starting path information so it's all relative info
if (entryPoint.startsWith('./')) {
entryPoint = entryPoint.substr(2);
} else if (entryPoint.startsWith('/')) {
entryPoint = entryPoint.substr(1);
}
nodePaths[key] = entryPoint;
}
return nodePaths;
}
export function buildWebNodePaths(outDir: string) {
const result = () => new Promise<void>((resolve, _) => {
const root = path.join(__dirname, '..', '..');
const nodePaths = acquireWebNodePaths();
// Now we write the node paths to out/vs
const outDirectory = path.join(root, outDir, 'vs');
fs.mkdirSync(outDirectory, { recursive: true });
const headerWithGeneratedFileWarning = `/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
// This file is generated by build/npm/postinstall.js. Do not edit.`;
const fileContents = `${headerWithGeneratedFileWarning}\nself.webPackagePaths = ${JSON.stringify(nodePaths, null, 2)};`;
fs.writeFileSync(path.join(outDirectory, 'webPackagePaths.js'), fileContents, 'utf8');
resolve();
});
result.taskName = 'build-web-node-paths';
return result;
}

View File
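
buildWebNodePaths writes a small generated file, <outDir>/vs/webPackagePaths.js, mapping each dependency listed in remote/web/package.json to its browser entry point (falling back to main, then to dist/<name>.min.js when no entry point is declared). A hypothetical example of the generated output; the package names and paths are illustrative only:

/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
// This file is generated by build/npm/postinstall.js. Do not edit.
self.webPackagePaths = {
  "some-package": "dist/some-package.min.js",
  "another-package": "lib/index.js"
};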

@@ -0,0 +1,61 @@
// Can be removed once https://github.com/electron/electron-rebuild/pull/703 is available.
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
exports.downloadLibcxxObjects = exports.downloadLibcxxHeaders = void 0;
const debug = require("debug");
const extract = require("extract-zip");
const fs = require("fs-extra");
const path = require("path");
const packageJSON = require("../../package.json");
const get_1 = require("@electron/get");
const d = debug('libcxx-fetcher');
async function downloadLibcxxHeaders(outDir, electronVersion, lib_name) {
if (await fs.pathExists(path.resolve(outDir, 'include')))
return;
if (!await fs.pathExists(outDir))
await fs.mkdirp(outDir);
d(`downloading ${lib_name}_headers`);
const headers = await (0, get_1.downloadArtifact)({
version: electronVersion,
isGeneric: true,
artifactName: `${lib_name}_headers.zip`,
});
d(`unpacking ${lib_name}_headers from ${headers}`);
await extract(headers, { dir: outDir });
}
exports.downloadLibcxxHeaders = downloadLibcxxHeaders;
async function downloadLibcxxObjects(outDir, electronVersion, targetArch = 'x64') {
if (await fs.pathExists(path.resolve(outDir, 'libc++.a')))
return;
if (!await fs.pathExists(outDir))
await fs.mkdirp(outDir);
d(`downloading libcxx-objects-linux-${targetArch}`);
const objects = await (0, get_1.downloadArtifact)({
version: electronVersion,
platform: 'linux',
artifactName: 'libcxx-objects',
arch: targetArch,
});
d(`unpacking libcxx-objects from ${objects}`);
await extract(objects, { dir: outDir });
}
exports.downloadLibcxxObjects = downloadLibcxxObjects;
async function main() {
const libcxxObjectsDirPath = process.env['VSCODE_LIBCXX_OBJECTS_DIR'];
const libcxxHeadersDownloadDir = process.env['VSCODE_LIBCXX_HEADERS_DIR'];
const libcxxabiHeadersDownloadDir = process.env['VSCODE_LIBCXXABI_HEADERS_DIR'];
const arch = process.env['VSCODE_ARCH'];
const electronVersion = packageJSON.devDependencies.electron;
if (!libcxxObjectsDirPath || !libcxxHeadersDownloadDir || !libcxxabiHeadersDownloadDir) {
throw new Error('Required build env not set');
}
await downloadLibcxxObjects(libcxxObjectsDirPath, electronVersion, arch);
await downloadLibcxxHeaders(libcxxHeadersDownloadDir, electronVersion, 'libcxx');
await downloadLibcxxHeaders(libcxxabiHeadersDownloadDir, electronVersion, 'libcxxabi');
}
if (require.main === module) {
main().catch(err => {
console.error(err);
process.exit(1);
});
}

View File

@@ -0,0 +1,66 @@
// Can be removed once https://github.com/electron/electron-rebuild/pull/703 is available.
'use strict';
import * as debug from 'debug';
import * as extract from 'extract-zip';
import * as fs from 'fs-extra';
import * as path from 'path';
import * as packageJSON from '../../package.json';
import { downloadArtifact } from '@electron/get';
const d = debug('libcxx-fetcher');
export async function downloadLibcxxHeaders(outDir: string, electronVersion: string, lib_name: string): Promise<void> {
if (await fs.pathExists(path.resolve(outDir, 'include'))) return;
if (!await fs.pathExists(outDir)) await fs.mkdirp(outDir);
d(`downloading ${lib_name}_headers`);
const headers = await downloadArtifact({
version: electronVersion,
isGeneric: true,
artifactName: `${lib_name}_headers.zip`,
});
d(`unpacking ${lib_name}_headers from ${headers}`);
await extract(headers, { dir: outDir });
}
export async function downloadLibcxxObjects(outDir: string, electronVersion: string, targetArch: string = 'x64'): Promise<void> {
if (await fs.pathExists(path.resolve(outDir, 'libc++.a'))) return;
if (!await fs.pathExists(outDir)) await fs.mkdirp(outDir);
d(`downloading libcxx-objects-linux-${targetArch}`);
const objects = await downloadArtifact({
version: electronVersion,
platform: 'linux',
artifactName: 'libcxx-objects',
arch: targetArch,
});
d(`unpacking libcxx-objects from ${objects}`);
await extract(objects, { dir: outDir });
}
async function main(): Promise<void> {
const libcxxObjectsDirPath = process.env['VSCODE_LIBCXX_OBJECTS_DIR'];
const libcxxHeadersDownloadDir = process.env['VSCODE_LIBCXX_HEADERS_DIR'];
const libcxxabiHeadersDownloadDir = process.env['VSCODE_LIBCXXABI_HEADERS_DIR'];
const arch = process.env['VSCODE_ARCH'];
const electronVersion = packageJSON.devDependencies.electron;
if (!libcxxObjectsDirPath || !libcxxHeadersDownloadDir || !libcxxabiHeadersDownloadDir) {
throw new Error('Required build env not set');
}
await downloadLibcxxObjects(libcxxObjectsDirPath, electronVersion, arch);
await downloadLibcxxHeaders(libcxxHeadersDownloadDir, electronVersion, 'libcxx');
await downloadLibcxxHeaders(libcxxabiHeadersDownloadDir, electronVersion, 'libcxxabi');
}
if (require.main === module) {
main().catch(err => {
console.error(err);
process.exit(1);
});
}

View File
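
The new libcxx fetcher downloads libc++ headers and prebuilt objects for the pinned Electron version via @electron/get; when run directly it reads its output directories from the VSCODE_LIBCXX_* environment variables. A minimal sketch of calling the exported helpers programmatically; the module path, directories, and version below are assumptions:

import { downloadLibcxxHeaders, downloadLibcxxObjects } from './libcxx-fetcher';

async function fetchLibcxxToolchain(): Promise<void> {
    const electronVersion = '13.5.2'; // hypothetical pinned version
    // Prebuilt libc++ objects for linux x64, then matching headers.
    await downloadLibcxxObjects('.build/libcxx-objects', electronVersion, 'x64');
    await downloadLibcxxHeaders('.build/libcxx_headers', electronVersion, 'libcxx');
    await downloadLibcxxHeaders('.build/libcxxabi_headers', electronVersion, 'libcxxabi');
}

fetchLibcxxToolchain().catch(err => {
    console.error(err);
    process.exit(1);
});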

@@ -5,6 +5,10 @@
declare let MonacoEnvironment: monaco.Environment | undefined;
interface Window {
MonacoEnvironment?: monaco.Environment | undefined;
}
declare namespace monaco {
export type Thenable<T> = PromiseLike<T>;

View File
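
The added Window member lets browser code assign MonacoEnvironment on the global object without a cast. A hedged sketch of the typical assignment; the worker path is an assumption and not part of this change:

// Assumes monaco.Environment exposes the optional getWorkerUrl hook.
window.MonacoEnvironment = {
    getWorkerUrl: (_workerId, _label) => './editor.worker.js' // hypothetical bundled worker script
};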

@@ -33,7 +33,6 @@ module.exports = {
stats: {
all: false,
modules: true,
maxModules: 0,
errors: true,
warnings: true,
// our additional options

Some files were not shown because too many files have changed in this diff.