VS Code merge to df8fe74bd55313de0dd2303bc47a4aab0ca56b0e (#17979)

* Merge from vscode 504f934659740e9d41501cad9f162b54d7745ad9

* delete unused folders

* distro

* Bump build node version

* update chokidar

* Fix hygiene errors

* distro

* Fix extension lint issues

* Remove strict-vscode

* Add copyright header exemptions

* Bump vscode-extension-telemetry to fix webpacking issue with zone.js

* distro

* Fix failing tests (revert marked.js back to current one until we decide to update)

* Skip searchmodel test

* Fix mac build

* temp debug script loading

* Try disabling coverage

* log error too

* Revert "log error too"

This reverts commit af0183e5d4ab458fdf44b88fbfab9908d090526f.

* Revert "temp debug script loading"

This reverts commit 3d687d541c76db2c5b55626c78ae448d3c25089c.

* Add comments explaining coverage disabling

* Fix ansi_up loading issue

* Merge latest from ads

* Use newer option

* Fix compile

* add debug logging warn

* Always log stack

* log more

* undo debug

* Update to use correct base path (+cleanup)

* distro

* fix compile errors

* Remove strict-vscode

* Fix sql editors not showing

* Show db dropdown input & fix styling

* Fix more info in gallery

* Fix gallery asset requests

* Delete unused workflow

* Fix tapable resolutions for smoke test compile error

* Fix smoke compile

* Disable crash reporting

* Disable interactive

Co-authored-by: ADS Merger <karlb@microsoft.com>
Authored by Charles Gagnon on 2022-01-06 09:06:56 -08:00, committed by GitHub
parent fd2736b6a6
commit 2bc6a0cd01
2099 changed files with 79520 additions and 43813 deletions

View File

@@ -30,7 +30,11 @@
 	],
 	// Optionally loads a cached yarn install for the repo
-	"postCreateCommand": ".devcontainer/cache/restore-diff.sh",
-	"remoteUser": "node"
+	"postCreateCommand": ".devcontainer/cache/restore-diff.sh && sudo chown node:node /workspaces",
+	"remoteUser": "node",
+	"hostRequirements": {
+		"memory": "6gb"
+	}
 }

View File

@@ -18,3 +18,8 @@
 **/extensions/markdown-language-features/notebook-out/**
 **/extensions/typescript-basics/test/colorize-fixtures/**
 **/extensions/**/dist/**
+**/extensions/typescript-language-features/test-workspace/**
+# These files are not linted by `yarn eslint`, so we exclude them from being linted in the editor.
+# This ensures that if we add new rules and they pass CI, there are also no errors in the editor.
+/resources/web/code-web.js

View File

@@ -7,7 +7,8 @@
 },
 "plugins": [
   "@typescript-eslint",
-  "jsdoc"
+  "jsdoc",
+  "header"
 ],
 "rules": {
   "constructor-super": "warn",
@@ -133,7 +134,7 @@
   "restrictions": [
     "vs/nls",
     "**/{vs,sql}/base/{common,node}/**",
-    "*" // node modules
+    "@vscode/*", "*" // node modules
   ]
 },
 {
@@ -175,7 +176,7 @@
     "vs/nls",
     "**/{vs,sql}/base/{common,node}/**",
     "**/{vs,sql}/base/parts/*/{common,node}/**",
-    "*" // node modules
+    "@vscode/*", "*" // node modules
   ]
 },
 {
@@ -194,7 +195,7 @@
     "vs/css!./**/*",
     "**/{vs,sql}/base/{common,browser,node,electron-sandbox,electron-browser}/**",
     "**/{vs,sql}/base/parts/*/{common,browser,node,electron-sandbox,electron-browser}/**",
-    "*" // node modules
+    "@vscode/*", "*" // node modules
   ]
 },
 {
@@ -203,7 +204,7 @@
     "vs/nls",
     "**/{vs,sql}/base/{common,node,electron-main}/**",
     "**/{vs,sql}/base/parts/*/{common,node,electron-main}/**",
-    "*" // node modules
+    "@vscode/*", "*" // node modules
   ]
 },
 {
@@ -251,7 +252,7 @@
     "**/{vs,sql}/base/{common,node}/**",
     "**/{vs,sql}/base/parts/*/{common,node}/**",
     "**/{vs,sql}/platform/*/{common,node}/**",
-    "*" // node modules
+    "@vscode/*", "*" // node modules
   ]
 },
 {
@@ -273,7 +274,7 @@
     "**/{vs,sql}/base/{common,browser,node,electron-sandbox,electron-browser}/**",
     "**/{vs,sql}/base/parts/*/{common,browser,node,electron-sandbox,electron-browser}/**",
     "**/{vs,sql}/platform/*/{common,browser,node,electron-sandbox,electron-browser}/**",
-    "*" // node modules
+    "@vscode/*", "*" // node modules
   ]
 },
 {
@@ -284,8 +285,7 @@
     "**/{vs,sql}/base/{common,node,electron-main}/**",
     "**/{vs,sql}/base/parts/*/{common,node,electron-main}/**",
     "**/{vs,sql}/platform/*/{common,node,electron-main}/**",
-    "**/{vs,sql}/code/**",
-    "*" // node modules
+    "@vscode/*", "*" // node modules
   ]
 },
 {
@@ -520,7 +520,7 @@
"**/{vs,sql}/workbench/{common,browser,node,electron-sandbox,electron-browser}/**", "**/{vs,sql}/workbench/{common,browser,node,electron-sandbox,electron-browser}/**",
"**/{vs,sql}/workbench/api/{common,browser,node,electron-sandbox,electron-browser}/**", "**/{vs,sql}/workbench/api/{common,browser,node,electron-sandbox,electron-browser}/**",
"**/{vs,sql}/workbench/services/*/{common,browser,node,electron-sandbox,electron-browser}/**", "**/{vs,sql}/workbench/services/*/{common,browser,node,electron-sandbox,electron-browser}/**",
"*" // node modules "@vscode/*", "*" // node modules
] ]
}, },
{ {
@@ -535,7 +535,7 @@
"vs/workbench/contrib/files/browser/editors/fileEditorInput", "vs/workbench/contrib/files/browser/editors/fileEditorInput",
"**/{vs,sql}/workbench/services/**", "**/{vs,sql}/workbench/services/**",
"**/{vs,sql}/workbench/test/**", "**/{vs,sql}/workbench/test/**",
"*" // node modules "@vscode/*", "*" // node modules
] ]
}, },
{ {
@@ -589,10 +589,12 @@
"vscode-oniguruma", "vscode-oniguruma",
"iconv-lite-umd", "iconv-lite-umd",
"jschardet", "jschardet",
"@vscode/vscode-languagedetection",
"@angular/*", "@angular/*",
"rxjs/**", "rxjs/**",
"sanitize-html", "sanitize-html",
"ansi_up" "ansi_up",
"@microsoft/applicationinsights-web"
] ]
}, },
{ {
@@ -605,7 +607,7 @@
"**/{vs,sql}/workbench/{common,node}/**", "**/{vs,sql}/workbench/{common,node}/**",
"**/{vs,sql}/workbench/api/{common,node}/**", "**/{vs,sql}/workbench/api/{common,node}/**",
"**/{vs,sql}/workbench/services/**/{common,node}/**", "**/{vs,sql}/workbench/services/**/{common,node}/**",
"*" // node modules "@vscode/*", "*" // node modules
] ]
}, },
{ {
@@ -636,7 +638,7 @@
"**/{vs,sql}/workbench/{common,browser,node,electron-sandbox,electron-browser}/**", "**/{vs,sql}/workbench/{common,browser,node,electron-sandbox,electron-browser}/**",
"**/{vs,sql}/workbench/api/{common,browser,node,electron-sandbox,electron-browser}/**", "**/{vs,sql}/workbench/api/{common,browser,node,electron-sandbox,electron-browser}/**",
"**/{vs,sql}/workbench/services/**/{common,browser,node,electron-sandbox,electron-browser}/**", "**/{vs,sql}/workbench/services/**/{common,browser,node,electron-sandbox,electron-browser}/**",
"*" // node modules "@vscode/*", "*" // node modules
] ]
}, },
{ {
@@ -774,7 +776,7 @@
"**/{vs,sql}/workbench/api/{common,node}/**", "**/{vs,sql}/workbench/api/{common,node}/**",
"**/{vs,sql}/workbench/services/**/{common,node}/**", "**/{vs,sql}/workbench/services/**/{common,node}/**",
"**/{vs,sql}/workbench/contrib/**/{common,node}/**", "**/{vs,sql}/workbench/contrib/**/{common,node}/**",
"*" // node modules "@vscode/*", "*" // node modules
] ]
}, },
{ {
@@ -807,7 +809,7 @@
"**/{vs,sql}/workbench/api/{common,browser,node,electron-sandbox,electron-browser}/**", "**/{vs,sql}/workbench/api/{common,browser,node,electron-sandbox,electron-browser}/**",
"**/{vs,sql}/workbench/services/**/{common,browser,node,electron-sandbox,electron-browser}/**", "**/{vs,sql}/workbench/services/**/{common,browser,node,electron-sandbox,electron-browser}/**",
"**/{vs,sql}/workbench/contrib/**/{common,browser,node,electron-sandbox,electron-browser}/**", "**/{vs,sql}/workbench/contrib/**/{common,browser,node,electron-sandbox,electron-browser}/**",
"*" // node modules "@vscode/*", "*" // node modules
] ]
}, },
{ {
@@ -830,7 +832,7 @@
"**/{vs,sql}/base/parts/**/{common,node}/**", "**/{vs,sql}/base/parts/**/{common,node}/**",
"**/{vs,sql}/platform/**/{common,node}/**", "**/{vs,sql}/platform/**/{common,node}/**",
"**/{vs,sql}/code/**/{common,node}/**", "**/{vs,sql}/code/**/{common,node}/**",
"*" // node modules "@vscode/*", "*" // node modules
] ]
}, },
{ {
@@ -842,7 +844,7 @@
"**/{vs,sql}/base/parts/**/{common,browser,node,electron-sandbox,electron-browser}/**", "**/{vs,sql}/base/parts/**/{common,browser,node,electron-sandbox,electron-browser}/**",
"**/{vs,sql}/platform/**/{common,browser,node,electron-sandbox,electron-browser}/**", "**/{vs,sql}/platform/**/{common,browser,node,electron-sandbox,electron-browser}/**",
"**/{vs,sql}/code/**/{common,browser,node,electron-sandbox,electron-browser}/**", "**/{vs,sql}/code/**/{common,browser,node,electron-sandbox,electron-browser}/**",
"*" // node modules "@vscode/*", "*" // node modules
] ]
}, },
{ {
@@ -853,7 +855,7 @@
"**/{vs,sql}/base/parts/**/{common,node,electron-main}/**", "**/{vs,sql}/base/parts/**/{common,node,electron-main}/**",
"**/{vs,sql}/platform/**/{common,node,electron-main}/**", "**/{vs,sql}/platform/**/{common,node,electron-main}/**",
"**/{vs,sql}/code/**/{common,node,electron-main}/**", "**/{vs,sql}/code/**/{common,node,electron-main}/**",
"*" // node modules "@vscode/*", "*" // node modules
] ]
}, },
{ {
@@ -865,8 +867,7 @@
"**/{vs,sql}/platform/**/{common,node}/**", "**/{vs,sql}/platform/**/{common,node}/**",
"**/{vs,sql}/workbench/**/{common,node}/**", "**/{vs,sql}/workbench/**/{common,node}/**",
"**/{vs,sql}/server/**", "**/{vs,sql}/server/**",
"**/{vs,sql}/code/**/{common,node}/**", "@vscode/*", "*" // node modules
"*" // node modules
] ]
}, },
{ {
@@ -937,28 +938,28 @@
"target": "**/test/smoke/**", "target": "**/test/smoke/**",
"restrictions": [ "restrictions": [
"**/test/smoke/**", "**/test/smoke/**",
"*" // node modules "@vscode/*", "*" // node modules
] ]
}, },
{ {
"target": "**/test/automation/**", "target": "**/test/automation/**",
"restrictions": [ "restrictions": [
"**/test/automation/**", "**/test/automation/**",
"*" // node modules "@vscode/*", "*" // node modules
] ]
}, },
{ {
"target": "**/test/integration/**", "target": "**/test/integration/**",
"restrictions": [ "restrictions": [
"**/test/integration/**", "**/test/integration/**",
"*" // node modules "@vscode/*", "*" // node modules
] ]
}, },
{ {
"target": "**/test/monaco/**", "target": "**/test/monaco/**",
"restrictions": [ "restrictions": [
"**/test/monaco/**", "**/test/monaco/**",
"*" // node modules "@vscode/*", "*" // node modules
] ]
}, },
{ {
@@ -978,7 +979,7 @@
"target": "**/{node,electron-browser,electron-main}/**/*.test.ts", "target": "**/{node,electron-browser,electron-main}/**/*.test.ts",
"restrictions": [ "restrictions": [
"**/{vs,sql}/**", "**/{vs,sql}/**",
"*", // node modules "@vscode/*", "*", // node modules
"@angular/*" // {{SQL CARBON EDIT}} "@angular/*" // {{SQL CARBON EDIT}}
] ]
}, },
@@ -986,14 +987,14 @@
"target": "**/{node,electron-browser,electron-main}/**/test/**", "target": "**/{node,electron-browser,electron-main}/**/test/**",
"restrictions": [ "restrictions": [
"**/{vs,sql}/**", "**/{vs,sql}/**",
"*" // node modules "@vscode/*", "*" // node modules
] ]
}, },
{ {
"target": "**/test/{node,electron-browser,electron-main}/**", "target": "**/test/{node,electron-browser,electron-main}/**",
"restrictions": [ "restrictions": [
"**/{vs,sql}/**", "**/{vs,sql}/**",
"*" // node modules "@vscode/*", "*" // node modules
] ]
}, },
{ {
@@ -1021,6 +1022,16 @@
"xterm*" "xterm*"
] ]
} }
],
"header/header": [
2,
"block",
[
"---------------------------------------------------------------------------------------------",
" * Copyright (c) Microsoft Corporation. All rights reserved.",
" * Licensed under the Source EULA. See License.txt in the project root for license information.",
" *--------------------------------------------------------------------------------------------"
]
] ]
}, },
"overrides": [ "overrides": [
@@ -1109,6 +1120,46 @@
 }
 ]
 }
+},
+{
+  "files": [
+    "src/{vs,sql}/server/*",
+    // {{SQL CARBON EDIT}} Ignore our own that don't use our copyright
+    "extensions/azuremonitor/src/prompts/**",
+    "extensions/azuremonitor/src/typings/findRemove.d.ts",
+    "extensions/kusto/src/prompts/**",
+    "extensions/mssql/src/hdfs/webhdfs.ts",
+    "extensions/mssql/src/prompts/**",
+    "extensions/mssql/src/typings/bufferStreamReader.d.ts",
+    "extensions/mssql/src/typings/findRemove.d.ts",
+    "extensions/notebook/resources/jupyter_config/**",
+    "extensions/notebook/src/intellisense/text.ts",
+    "extensions/notebook/src/prompts/**",
+    "extensions/resource-deployment/src/typings/linuxReleaseInfo.d.ts",
+    "src/sql/base/browser/ui/table/plugins/autoSizeColumns.plugin.ts",
+    "src/sql/base/browser/ui/table/plugins/cellSelectionModel.plugin.ts",
+    "src/sql/base/browser/ui/table/plugins/checkboxSelectColumn.plugin.ts",
+    "src/sql/base/browser/ui/table/plugins/headerFilter.plugin.ts",
+    "src/sql/base/browser/ui/table/plugins/rowDetailView.ts",
+    "src/sql/base/browser/ui/table/plugins/rowSelectionModel.plugin.ts",
+    "src/sql/workbench/services/notebook/browser/outputs/factories.ts",
+    "src/sql/workbench/services/notebook/browser/outputs/mimemodel.ts",
+    "src/sql/workbench/services/notebook/browser/outputs/registry.ts",
+    "src/sql/workbench/services/notebook/browser/outputs/renderMimeInterfaces.ts",
+    "src/sql/workbench/services/notebook/browser/outputs/sanitizer.ts",
+    "src/sql/workbench/contrib/notebook/browser/models/outputProcessor.ts",
+    "src/sql/workbench/contrib/notebook/browser/turndownPluginGfm.ts",
+    "src/sql/workbench/services/notebook/common/nbformat.ts",
+    "src/sql/workbench/services/notebook/browser/outputs/renderers.ts",
+    "src/sql/workbench/services/notebook/browser/outputs/tableRenderers.ts"
+  ],
+  "rules": {
+    "header/header": [
+      // hygiene.js still checks that all files (even those in this directory) are MIT licensed.
+      "off"
+    ]
+  }
+}
 }
 ]
 }
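For reference, the `header/header` rule registered above (severity 2, "block" style) requires each covered file to open with a block comment assembled from the configured lines, so a compliant TypeScript source file would begin roughly as follows (illustrative sketch, not part of the diff):

/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
// ...rest of the TypeScript file follows the header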

.git-blame-ignore (new file)

@@ -0,0 +1,21 @@
# https://git-scm.com/docs/git-blame#Documentation/git-blame.txt---ignore-revs-fileltfilegt
# mjbvz: Fix spacing
13f4f052582bcec3d6c6c6a70d995c9dee2cac13
# mjbvz: Add script to run build with noImplicitOverride
ae1452eea678f5266ef513f22dacebb90955d6c9
# alexdima: Revert "bump version"
537ba0ef1791c090bb18bc68d727816c0451c117
# alexdima: bump version
387a0dcb82df729e316ca2518a9ed81a75482b18
# joaomoreno: add ghooks dev dependency
0dfc06e0f9de5925de792cdf9f0e6597bb25908f
# mjbvz: organize imports
494cbbd02d67e87727ec885f98d19551aa33aad1
a3cb14be7f2cceadb17adf843675b1a59537dbbd
ee1655a82ebdfd38bf8792088a6602c69f7bbd94
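Usage note (an assumption, not part of the commit): git only consults an ignore-revs file when asked to, per the documentation linked in the file header, e.g.:

# Hypothetical local setup; git >= 2.23 supports blame.ignoreRevsFile
git config blame.ignoreRevsFile .git-blame-ignore
git blame --ignore-revs-file .git-blame-ignore <file>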

.gitattributes

@@ -7,4 +7,5 @@ ThirdPartyNotices.txt eol=crlf
 *.cmd eol=crlf
 *.ps1 eol=lf
 *.sh eol=lf
 *.rtf -text
+*.json linguist-language=jsonc

View File

@@ -5,6 +5,8 @@
"greazer", "greazer",
"donjayamanne", "donjayamanne",
"jilljac", "jilljac",
"IanMatthewHuff" "IanMatthewHuff",
"tanhakabir",
"dynamicwebpaige"
] ]
} }

View File

@@ -12,73 +12,73 @@ on:
 jobs:
   windows:
     name: Windows
     runs-on: windows-latest
     timeout-minutes: 30
     env:
       CHILD_CONCURRENCY: "1"
       GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
     steps:
-      - uses: actions/checkout@v2.2.0
+      - uses: actions/checkout@v2
       - uses: actions/setup-node@v2
         with:
-          node-version: 12
+          node-version: 14
       - uses: actions/setup-python@v2
         with:
           python-version: "2.x"
       # {{SQL CARBON EDIT}} Skip caching for now
       # - name: Compute node modules cache key
       # id: nodeModulesCacheKey
       # run: echo "::set-output name=value::$(node build/azure-pipelines/common/computeNodeModulesCacheKey.js)"
       # - name: Cache node_modules archive
       # id: cacheNodeModules
       # uses: actions/cache@v2
       # with:
       # path: ".build/node_modules_cache"
       # key: "${{ runner.os }}-cacheNodeModulesArchive-${{ steps.nodeModulesCacheKey.outputs.value }}"
       # - name: Extract node_modules archive
       # if: ${{ steps.cacheNodeModules.outputs.cache-hit == 'true' }}
       # run: 7z.exe x .build/node_modules_cache/cache.7z -aos
       # - name: Get yarn cache directory path
       # id: yarnCacheDirPath
       # if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
       # run: echo "::set-output name=dir::$(yarn cache dir)"
       # - name: Cache yarn directory
       # if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
       # uses: actions/cache@v2
       # with:
       # path: ${{ steps.yarnCacheDirPath.outputs.dir }}
       # key: ${{ runner.os }}-yarnCacheDir-${{ steps.nodeModulesCacheKey.outputs.value }}
       # restore-keys: ${{ runner.os }}-yarnCacheDir-
       - name: Execute yarn
         # if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }} {{SQL CARBON EDIT}} Skipping caching for now
         env:
           PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
           ELECTRON_SKIP_BINARY_DOWNLOAD: 1
         run: yarn --frozen-lockfile --network-timeout 180000
       # - name: Create node_modules archive {{SQL CARBON EDIT}} Skip caching for now
       # if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
       # run: |
       # mkdir -Force .build
       # node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt
       # mkdir -Force .build/node_modules_cache
       # 7z.exe a .build/node_modules_cache/cache.7z -mx3 `@.build/node_modules_list.txt
       - name: Compile and Download
         run: yarn npm-run-all --max_old_space_size=4095 -lp compile "electron x64" # {{SQL CARBON EDIT}} Remove unused options playwright-install download-builtin-extensions
       - name: Run Unit Tests (Electron)
         run: .\scripts\test.bat
       # - name: Run Unit Tests (Browser) {{SQL CARBON EDIT}} disable for now
       # run: yarn test-browser --browser chromium
       # - name: Run Integration Tests (Electron) {{SQL CARBON EDIT}} disable for now
       # run: .\scripts\test-integration.bat
   linux:
     name: Linux
@@ -101,7 +101,7 @@ jobs:
       - uses: actions/setup-node@v2
         with:
-          node-version: 12
+          node-version: 14
       # {{SQL CARBON EDIT}} Skip caching for now
       # - name: Compute node modules cache key
       # id: nodeModulesCacheKey
@@ -111,8 +111,8 @@ jobs:
       # uses: actions/cache@v2
       # with:
       # path: "**/node_modules"
-      # key: ${{ runner.os }}-cacheNodeModules13-${{ steps.nodeModulesCacheKey.outputs.value }}
-      # restore-keys: ${{ runner.os }}-cacheNodeModules13-
+      # key: ${{ runner.os }}-cacheNodeModules14-${{ steps.nodeModulesCacheKey.outputs.value }}
+      # restore-keys: ${{ runner.os }}-cacheNodeModules14-
       # - name: Get yarn cache directory path
       # id: yarnCacheDirPath
       # if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
@@ -136,7 +136,7 @@ jobs:
       - name: Run Unit Tests (Electron)
         id: electron-unit-tests
-        run: DISPLAY=:10 ./scripts/test.sh --coverage --runGlob "**/sql/**/*.test.js" # {{SQL CARBON EDIT}} Run only our tests with coverage
+        run: DISPLAY=:10 ./scripts/test.sh --runGlob "**/sql/**/*.test.js" # {{SQL CARBON EDIT}} Run only our tests with coverage. Disable for now since it's currently broken --coverage
       - name: Run Extension Unit Tests (Electron)
         id: electron-extension-unit-tests
@@ -170,7 +170,7 @@ jobs:
       - uses: actions/setup-node@v2
         with:
-          node-version: 12
+          node-version: 14
       # {{SQL CARBON EDIT}} Skip caching for now
       # - name: Compute node modules cache key
@@ -181,8 +181,8 @@ jobs:
       # uses: actions/cache@v2
       # with:
       # path: "**/node_modules"
-      # key: ${{ runner.os }}-cacheNodeModules13-${{ steps.nodeModulesCacheKey.outputs.value }}
-      # restore-keys: ${{ runner.os }}-cacheNodeModules13-
+      # key: ${{ runner.os }}-cacheNodeModules14-${{ steps.nodeModulesCacheKey.outputs.value }}
+      # restore-keys: ${{ runner.os }}-cacheNodeModules14-
       # - name: Get yarn cache directory path
       # id: yarnCacheDirPath
       # if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
@@ -195,7 +195,7 @@ jobs:
       # key: ${{ runner.os }}-yarnCacheDir-${{ steps.nodeModulesCacheKey.outputs.value }}
       # restore-keys: ${{ runner.os }}-yarnCacheDir-
       - name: Execute yarn
-        if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
+        # if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }} {{SQL CARBON EDIT}} Skip caching for now
         env:
           PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
           ELECTRON_SKIP_BINARY_DOWNLOAD: 1
@@ -232,7 +232,7 @@ jobs:
       - uses: actions/setup-node@v2
         with:
-          node-version: 12
+          node-version: 14
       - name: Compute node modules cache key
         id: nodeModulesCacheKey
@@ -242,7 +242,8 @@ jobs:
         uses: actions/cache@v2
         with:
           path: "**/node_modules"
-          key: ${{ runner.os }}-cacheNodeModules13-${{ steps.nodeModulesCacheKey.outputs.value }}
+          key: ${{ runner.os }}-cacheNodeModules14-${{ steps.nodeModulesCacheKey.outputs.value }}
+          restore-keys: ${{ runner.os }}-cacheNodeModules14-
       - name: Get yarn cache directory path
         id: yarnCacheDirPath
         if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
@@ -272,15 +273,23 @@ jobs:
       - name: Run Valid Layers Checks
         run: yarn valid-layers-check
-      - name: Run Strict Compile Options # {{SQL CARBON EDIT}} add step
-        run: yarn strict-vscode
       # - name: Run Monaco Editor Checks {{SQL CARBON EDIT}} Remove Monaco checks
       # run: yarn monaco-compile-check
       - name: Compile /build/
         run: yarn --cwd build compile
+      - name: Run eslint
+        run: yarn eslint
+      # {{SQL CARBON EDIT}} Don't need this
+      # - name: Run Monaco Editor Checks
+      # run: yarn monaco-compile-check
+      # {{SQL CARBON EDIT}} Don't need this
+      # - name: Run vscode-dts Compile Checks
+      # run: yarn vscode-dts-compile-check
       - name: Run Trusted Types Checks
         run: yarn tsec-compile-check

View File

@@ -0,0 +1,28 @@
name: Create Prebuild
on:
workflow_dispatch:
schedule:
- cron: '0 0 * * *'
jobs:
createPrebuild:
runs-on: ubuntu-latest
steps:
- id: create-prebuild-production
run: |
$splat = @{
ErrorAction = 'Stop'
Uri = 'https://api.github.com/vscs_internal/user/vscode-prebuilds-bot/codespaces/prebuild'
Method = 'POST'
Headers = @{
'Content-Type' = 'application/json; charset=utf-8'
'Authorization' = 'token ${{ secrets.CODESPACES_PREBUILD_PAT }}'
}
Body = @{
ref = 'main'
repository_id = 41881900
location = 'WestUs2'
} | ConvertTo-Json
}
Invoke-RestMethod @splat
shell: pwsh

.gitignore

@@ -15,3 +15,6 @@ test_data/
 test-results/
 yarn-error.log
 *.vsix
+vscode.lsif
+vscode.db
+/.profile-oss

.lsifrc.json (new file)

@@ -0,0 +1,6 @@
{
"project": "src/tsconfig.json",
"source": "./package.json",
"package": "package.json",
"out": "vscode.lsif"
}

.vscode/launch.json

@@ -48,7 +48,7 @@
"cascadeTerminateToConfigurations": [ "cascadeTerminateToConfigurations": [
"Attach to Extension Host" "Attach to Extension Host"
], ],
"userDataDir": false, "userDataDir": "${workspaceFolder}/.profile-oss",
"pauseForSourceMap": false, "pauseForSourceMap": false,
"outFiles": [ "outFiles": [
"${workspaceFolder}/out/**/*.js" "${workspaceFolder}/out/**/*.js"

View File

@@ -7,7 +7,7 @@
 {
   "kind": 2,
   "language": "github-issues",
-  "value": "$repo=repo:microsoft/vscode\n$milestone=milestone:\"June 2021\""
+  "value": "$repo=repo:microsoft/vscode\n$milestone=milestone:\"August 2021\""
 },
 {
   "kind": 1,

View File

@@ -7,7 +7,7 @@
 {
   "kind": 2,
   "language": "github-issues",
-  "value": "$REPOS=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-js-debug repo:microsoft/vscode-remote-release repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-settings-sync-server repo:microsoft/vscode-emmet-helper repo:microsoft/vscode-remotehub\n\n$MILESTONE=milestone:\"May 2021\""
+  "value": "$REPOS=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-js-debug repo:microsoft/vscode-remote-release repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-settings-sync-server repo:microsoft/vscode-emmet-helper repo:microsoft/vscode-remotehub\n\n$MILESTONE=milestone:\"July 2021\""
 },
 {
   "kind": 1,

View File

@@ -7,7 +7,7 @@
 {
   "kind": 2,
   "language": "github-issues",
-  "value": "$REPOS=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-js-debug repo:microsoft/vscode-remote-release repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-settings-sync-server repo:microsoft/vscode-remotehub\n\n$MILESTONE=milestone:\"May 2021\"\n\n$MINE=assignee:@me"
+  "value": "$REPOS=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-js-debug repo:microsoft/vscode-remote-release repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-settings-sync-server repo:microsoft/vscode-remotehub repo:microsoft/vscode-emmet-helper\n\n$MILESTONE=milestone:\"July 2021\"\n\n$MINE=assignee:@me"
 },
 {
   "kind": 1,
@@ -62,7 +62,7 @@
 {
   "kind": 2,
   "language": "github-issues",
-  "value": "$REPOS $MILESTONE $MINE is:issue is:closed label:feature-request label:verification-needed"
+  "value": "$REPOS $MILESTONE $MINE is:issue is:closed label:feature-request label:verification-needed -label:verified"
 },
 {
   "kind": 1,

View File

@@ -7,7 +7,7 @@
 {
   "kind": 2,
   "language": "github-issues",
-  "value": "// list of repos we work in\n$repos=repo:microsoft/vscode repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks repo:microsoft/vscode-internalbacklog\n\n// current milestone name\n$milestone=milestone:\"June 2021\""
+  "value": "// list of repos we work in\n$repos=repo:microsoft/vscode repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks repo:microsoft/vscode-internalbacklog\n\n// current milestone name\n$milestone=milestone:\"August 2021\""
 },
 {
   "kind": 1,

View File

@@ -2,55 +2,46 @@
{ {
"kind": 1, "kind": 1,
"language": "markdown", "language": "markdown",
"value": "### Bug Verification Queries\n\nBefore shipping we want to verify _all_ bugs. That means when a bug is fixed we check that the fix actually works. It's always best to start with bugs that you have filed and the proceed with bugs that have been filed from users outside the development team. ", "value": "### Bug Verification Queries\n\nBefore shipping we want to verify _all_ bugs. That means when a bug is fixed we check that the fix actually works. It's always best to start with bugs that you have filed and the proceed with bugs that have been filed from users outside the development team. "
"editable": true
}, },
{ {
"kind": 1, "kind": 1,
"language": "markdown", "language": "markdown",
"value": "#### Config: update list of `repos` and the `milestone`", "value": "#### Config: update list of `repos` and the `milestone`"
"editable": true
}, },
{ {
"kind": 2, "kind": 2,
"language": "github-issues", "language": "github-issues",
"value": "$repos=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks \n$milestone=milestone:\"March 2021\"", "value": "$repos=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks repo:microsoft/vscode-emmet-helper repo:microsoft/vscode-jupyter repo:microsoft/vscode-python\r\n$milestone=milestone:\"July 2021\""
"editable": true
}, },
{ {
"kind": 1, "kind": 1,
"language": "markdown", "language": "markdown",
"value": "### Bugs You Filed", "value": "### Bugs You Filed"
"editable": true
}, },
{ {
"kind": 2, "kind": 2,
"language": "github-issues", "language": "github-issues",
"value": "$repos $milestone is:closed -assignee:@me label:bug -label:verified -label:*duplicate author:@me", "value": "$repos $milestone is:closed -assignee:@me label:bug -label:verified -label:*duplicate author:@me"
"editable": false
}, },
{ {
"kind": 1, "kind": 1,
"language": "markdown", "language": "markdown",
"value": "### Bugs From Outside", "value": "### Bugs From Outside"
"editable": true
}, },
{ {
"kind": 2, "kind": 2,
"language": "github-issues", "language": "github-issues",
"value": "$repos $milestone is:closed -assignee:@me label:bug -label:verified -label:*duplicate -author:@me -assignee:@me label:bug -label:verified -author:@me -author:aeschli -author:alexdima -author:alexr00 -author:bpasero -author:chrisdias -author:chrmarti -author:connor4312 -author:dbaeumer -author:deepak1556 -author:eamodio -author:egamma -author:gregvanl -author:isidorn -author:JacksonKearl -author:joaomoreno -author:jrieken -author:lramos15 -author:lszomoru -author:meganrogge -author:misolori -author:mjbvz -author:rebornix -author:RMacfarlane -author:roblourens -author:sana-ajani -author:sandy081 -author:sbatten -author:Tyriar -author:weinand -author:rzhao271 -author:kieferrm -author:TylerLeonhardt -author:bamurtaugh", "value": "$repos $milestone is:closed -assignee:@me label:bug -label:verified -label:*duplicate -author:@me -assignee:@me label:bug -label:verified -author:@me -author:aeschli -author:alexdima -author:alexr00 -author:bpasero -author:chrisdias -author:chrmarti -author:connor4312 -author:dbaeumer -author:deepak1556 -author:eamodio -author:egamma -author:gregvanl -author:isidorn -author:JacksonKearl -author:joaomoreno -author:jrieken -author:lramos15 -author:lszomoru -author:meganrogge -author:misolori -author:mjbvz -author:rebornix -author:RMacfarlane -author:roblourens -author:sana-ajani -author:sandy081 -author:sbatten -author:Tyriar -author:weinand -author:rzhao271 -author:kieferrm -author:TylerLeonhardt -author:bamurtaugh -author:hediet -author:joyceerhl -author:rchiodo -author:IanMatthewHuff"
"editable": false
}, },
{ {
"kind": 1, "kind": 1,
"language": "markdown", "language": "markdown",
"value": "### All", "value": "### All"
"editable": true
}, },
{ {
"kind": 2, "kind": 2,
"language": "github-issues", "language": "github-issues",
"value": "$repos $milestone is:closed -assignee:@me label:bug -label:verified -label:*duplicate", "value": "$repos $milestone is:closed -assignee:@me label:bug -label:verified -label:*duplicate"
"editable": false
} }
] ]

View File

@@ -4,6 +4,7 @@
"files.exclude": { "files.exclude": {
".git": true, ".git": true,
".build": true, ".build": true,
".profile-oss": true,
"**/.DS_Store": true, "**/.DS_Store": true,
"build/**/*.js": { "build/**/*.js": {
"when": "$(basename).ts" "when": "$(basename).ts"

.vscode/tasks.json

@@ -100,20 +100,6 @@
"group": "build", "group": "build",
"problemMatcher": [] "problemMatcher": []
}, },
{
"type": "npm",
"script": "strict-vscode-watch",
"label": "TS - Strict VSCode",
"isBackground": true,
"presentation": {
"reveal": "never"
},
"problemMatcher": {
"base": "$tsc-watch",
"owner": "typescript-vscode",
"applyTo": "allDocuments"
}
},
{ {
"type": "npm", "type": "npm",
"script": "watch-webd", "script": "watch-webd",

View File

@@ -1,3 +1,3 @@
disturl "https://electronjs.org/headers" disturl "https://electronjs.org/headers"
target "12.0.9" target "13.1.8"
runtime "electron" runtime "electron"

View File

@@ -8,149 +8,150 @@ The original copyright notices and the licenses under which Microsoft received
such components are set forth below. Microsoft reserves all rights not such components are set forth below. Microsoft reserves all rights not
expressly granted herein, whether by implication, estoppel or otherwise. expressly granted herein, whether by implication, estoppel or otherwise.
angular: https://github.com/angular/angular angular: https://github.com/angular/angular
angular2-grid: https://github.com/BTMorton/angular2-grid angular2-grid: https://github.com/BTMorton/angular2-grid
angular2-slickgrid: https://github.com/Microsoft/angular2-slickgrid angular2-slickgrid: https://github.com/Microsoft/angular2-slickgrid
applicationinsights: https://github.com/Microsoft/ApplicationInsights-node.js applicationinsights: https://github.com/Microsoft/ApplicationInsights-node.js
axios: https://github.com/axios/axios axios: https://github.com/axios/axios
bootstrap: https://github.com/twbs/bootstrap bootstrap: https://github.com/twbs/bootstrap
chart.js: https://github.com/Timer/chartjs chart.js: https://github.com/Timer/chartjs
chokidar: https://github.com/paulmillr/chokidar chokidar: https://github.com/paulmillr/chokidar
comment-json: https://github.com/kaelzhang/node-comment-json comment-json: https://github.com/kaelzhang/node-comment-json
core-js: https://github.com/zloirock/core-js core-js: https://github.com/zloirock/core-js
decompress: https://github.com/kevva/decompress decompress: https://github.com/kevva/decompress
emmet: https://github.com/emmetio/emmet emmet: https://github.com/emmetio/emmet
error-ex: https://github.com/Qix-/node-error-ex error-ex: https://github.com/Qix-/node-error-ex
escape-string-regexp: https://github.com/sindresorhus/escape-string-regexp escape-string-regexp: https://github.com/sindresorhus/escape-string-regexp
fast-plist: https://github.com/Microsoft/node-fast-plist fast-plist: https://github.com/Microsoft/node-fast-plist
figures: https://github.com/sindresorhus/figures figures: https://github.com/sindresorhus/figures
find-remove: https://www.npmjs.com/package/find-remove find-remove: https://www.npmjs.com/package/find-remove
fs-extra: https://github.com/jprichardson/node-fs-extra fs-extra: https://github.com/jprichardson/node-fs-extra
gc-signals: https://github.com/Microsoft/node-gc-signals gc-signals: https://github.com/Microsoft/node-gc-signals
getmac: https://github.com/bevry/getmac getmac: https://github.com/bevry/getmac
graceful-fs: https://github.com/isaacs/node-graceful-fs graceful-fs: https://github.com/isaacs/node-graceful-fs
gridstack: https://github.com/gridstack/gridstack.js gridstack: https://github.com/gridstack/gridstack.js
html-query-plan: https://github.com/JustinPealing/html-query-plan html-query-plan: https://github.com/JustinPealing/html-query-plan
http-proxy-agent: https://github.com/TooTallNate/node-https-proxy-agent http-proxy-agent: https://github.com/TooTallNate/node-https-proxy-agent
https-proxy-agent: https://github.com/TooTallNate/node-https-proxy-agent https-proxy-agent: https://github.com/TooTallNate/node-https-proxy-agent
iconv-lite: https://github.com/ashtuchkin/iconv-lite iconv-lite: https://github.com/ashtuchkin/iconv-lite
jquery: https://github.com/jquery/jquery jquery: https://github.com/jquery/jquery
jquery-ui: https://github.com/jquery/jquery-ui jquery-ui: https://github.com/jquery/jquery-ui
jquery.event.drag: https://github.com/devongovett/jquery.event.drag jquery.event.drag: https://github.com/devongovett/jquery.event.drag
jschardet: https://github.com/aadsm/jschardet jschardet: https://github.com/aadsm/jschardet
jupyter-powershell: https://github.com/vors/jupyter-powershell jupyter-powershell: https://github.com/vors/jupyter-powershell
JupyterLab: https://github.com/jupyterlab/jupyterlab JupyterLab: https://github.com/jupyterlab/jupyterlab
keytar: https://github.com/atom/node-keytar keytar: https://github.com/atom/node-keytar
make-error: https://github.com/JsCommunity/make-error make-error: https://github.com/JsCommunity/make-error
mark.js: https://github.com/julmot/mark.js mark.js: https://github.com/julmot/mark.js
minimist: https://github.com/substack/minimist minimist: https://github.com/substack/minimist
moment: https://github.com/moment/moment moment: https://github.com/moment/moment
native-keymap: https://github.com/Microsoft/node-native-keymap native-keymap: https://github.com/Microsoft/node-native-keymap
native-watchdog: https://github.com/Microsoft/node-native-watchdog native-watchdog: https://github.com/Microsoft/node-native-watchdog
ng2-charts: https://github.com/valor-software/ng2-charts ng2-charts: https://github.com/valor-software/ng2-charts
node-fetch: https://github.com/bitinn/node-fetch node-fetch: https://github.com/bitinn/node-fetch
node-pty: https://github.com/Tyriar/node-pty node-pty: https://github.com/Tyriar/node-pty
nsfw: https://github.com/Axosoft/nsfw nsfw: https://github.com/Axosoft/nsfw
optimist: https://github.com/substack/node-optimist optimist: https://github.com/substack/node-optimist
primeng: https://github.com/primefaces/primeng primeng: https://github.com/primefaces/primeng
process-nextick-args: https://github.com/calvinmetcalf/process-nextick-args process-nextick-args: https://github.com/calvinmetcalf/process-nextick-args
pty.js: https://github.com/chjj/pty.js pty.js: https://github.com/chjj/pty.js
pyzmq: https://github.com/zeromq/pyzmq pyzmq: https://github.com/zeromq/pyzmq
qs: https://github.com/ljharb/qs qs: https://github.com/ljharb/qs
reflect-metadata: https://github.com/rbuckton/reflect-metadata reflect-metadata: https://github.com/rbuckton/reflect-metadata
request: https://github.com/request/request request: https://github.com/request/request
rxjs: https://github.com/ReactiveX/RxJS rxjs: https://github.com/ReactiveX/RxJS
semver: https://github.com/npm/node-semver semver: https://github.com/npm/node-semver
slickgrid: https://github.com/6pac/SlickGrid slickgrid: https://github.com/6pac/SlickGrid
sqltoolsservice: https://github.com/Microsoft/sqltoolsservice sqltoolsservice: https://github.com/Microsoft/sqltoolsservice
svg.js: https://github.com/svgdotjs/svg.js svg.js: https://github.com/svgdotjs/svg.js
systemjs: https://github.com/systemjs/systemjs systemjs: https://github.com/systemjs/systemjs
temp-write: https://github.com/sindresorhus/temp-write temp-write: https://github.com/sindresorhus/temp-write
turndown: https://github.com/domchristie/turndown turndown: https://github.com/domchristie/turndown
turndown-plugin-gfm: https://github.com/domchristie/turndown-plugin-gfm turndown-plugin-gfm: https://github.com/domchristie/turndown-plugin-gfm
underscore: https://github.com/jashkenas/underscore underscore: https://github.com/jashkenas/underscore
v8-profiler: https://github.com/node-inspector/v8-profiler v8-profiler: https://github.com/node-inspector/v8-profiler
vscode: https://github.com/microsoft/vscode vscode: https://github.com/microsoft/vscode
vscode-debugprotocol: https://github.com/Microsoft/vscode-debugadapter-node vscode-debugprotocol: https://github.com/Microsoft/vscode-debugadapter-node
vscode-languageclient: https://github.com/Microsoft/vscode-languageserver-node vscode-languageclient: https://github.com/Microsoft/vscode-languageserver-node
vscode-nls: https://github.com/Microsoft/vscode-nls vscode-nls: https://github.com/Microsoft/vscode-nls
vscode-ripgrep: https://github.com/roblourens/vscode-ripgrep vscode-ripgrep: https://github.com/roblourens/vscode-ripgrep
vscode-textmate: https://github.com/Microsoft/vscode-textmate vscode-textmate: https://github.com/Microsoft/vscode-textmate
winreg: https://github.com/fresc81/node-winreg winreg: https://github.com/fresc81/node-winreg
xmldom: https://github.com/xmldom/xmldom xmldom: https://github.com/xmldom/xmldom
xml-formatter: https://github.com/chrisbottin/xml-formatter xml-formatter: https://github.com/chrisbottin/xml-formatter
xterm: https://github.com/sourcelair/xterm.js xterm: https://github.com/sourcelair/xterm.js
yargs: https://github.com/yargs/yargs yargs: https://github.com/yargs/yargs
yauzl: https://github.com/thejoshwolfe/yauzl yauzl: https://github.com/thejoshwolfe/yauzl
zone.js: https://www.npmjs.com/package/zone zone.js: https://www.npmjs.com/package/zone
Microsoft PROSE SDK: https://microsoft.github.io/prose Microsoft PROSE SDK: https://microsoft.github.io/prose
atom/language-clojure version 0.22.7 (https://github.com/atom/language-clojure) 1. atom/language-clojure version 0.22.7 (https://github.com/atom/language-clojure)
atom/language-coffee-script version 0.49.3 (https://github.com/atom/language-coffee-script) 2. atom/language-coffee-script version 0.49.3 (https://github.com/atom/language-coffee-script)
atom/language-css version 0.44.4 (https://github.com/atom/language-css) 3. atom/language-css version 0.44.6 (https://github.com/atom/language-css)
atom/language-java version 0.32.1 (https://github.com/atom/language-java) 4. atom/language-java version 0.32.1 (https://github.com/atom/language-java)
atom/language-sass version 0.62.1 (https://github.com/atom/language-sass) 5. atom/language-sass version 0.62.1 (https://github.com/atom/language-sass)
atom/language-shellscript version 0.26.0 (https://github.com/atom/language-shellscript) 6. atom/language-shellscript version 0.26.0 (https://github.com/atom/language-shellscript)
atom/language-xml version 0.35.2 (https://github.com/atom/language-xml) 7. atom/language-xml version 0.35.2 (https://github.com/atom/language-xml)
better-go-syntax version 1.0.0 (https://github.com/jeff-hykin/better-go-syntax/ ) 8. better-go-syntax version 1.0.0 (https://github.com/jeff-hykin/better-go-syntax/ )
Colorsublime-Themes version 0.1.0 (https://github.com/Colorsublime/Colorsublime-Themes) 9. Colorsublime-Themes version 0.1.0 (https://github.com/Colorsublime/Colorsublime-Themes)
daaain/Handlebars version 1.8.0 (https://github.com/daaain/Handlebars) 10. daaain/Handlebars version 1.8.0 (https://github.com/daaain/Handlebars)
dart-lang/dart-syntax-highlight (https://github.com/dart-lang/dart-syntax-highlight) 11. dart-lang/dart-syntax-highlight (https://github.com/dart-lang/dart-syntax-highlight)
davidrios/pug-tmbundle (https://github.com/davidrios/pug-tmbundle) 12. davidrios/pug-tmbundle (https://github.com/davidrios/pug-tmbundle)
definitelytyped (https://github.com/DefinitelyTyped/DefinitelyTyped) 13. definitelytyped (https://github.com/DefinitelyTyped/DefinitelyTyped)
demyte/language-cshtml version 0.3.0 (https://github.com/demyte/language-cshtml) 14. demyte/language-cshtml version 0.3.0 (https://github.com/demyte/language-cshtml)
Document Object Model version 4.0.0 (https://www.w3.org/DOM/) 15. Document Object Model version 4.0.0 (https://www.w3.org/DOM/)
dotnet/csharp-tmLanguage version 0.1.0 (https://github.com/dotnet/csharp-tmLanguage) 16. dotnet/csharp-tmLanguage version 0.1.0 (https://github.com/dotnet/csharp-tmLanguage)
expand-abbreviation version 0.5.8 (https://github.com/emmetio/expand-abbreviation) 17. expand-abbreviation version 0.5.8 (https://github.com/emmetio/expand-abbreviation)
fadeevab/make.tmbundle (https://github.com/fadeevab/make.tmbundle) 18. fadeevab/make.tmbundle (https://github.com/fadeevab/make.tmbundle)
freebroccolo/atom-language-swift (https://github.com/freebroccolo/atom-language-swift) 19. freebroccolo/atom-language-swift (https://github.com/freebroccolo/atom-language-swift)
HTML 5.1 W3C Working Draft version 08 October 2015 (http://www.w3.org/TR/2015/WD-html51-20151008/) 20. HTML 5.1 W3C Working Draft version 08 October 2015 (http://www.w3.org/TR/2015/WD-html51-20151008/)
Ikuyadeu/vscode-R version 1.3.0 (https://github.com/Ikuyadeu/vscode-R) 21. Ikuyadeu/vscode-R version 2.0.0 (https://github.com/Ikuyadeu/vscode-R)
insane version 2.6.2 (https://github.com/bevacqua/insane) 22. insane version 2.6.2 (https://github.com/bevacqua/insane)
Ionic documentation version 1.2.4 (https://github.com/ionic-team/ionic-site) 23. Ionic documentation version 1.2.4 (https://github.com/ionic-team/ionic-site)
ionide/ionide-fsgrammar (https://github.com/ionide/ionide-fsgrammar) 24. ionide/ionide-fsgrammar (https://github.com/ionide/ionide-fsgrammar)
jeff-hykin/cpp-textmate-grammar version 1.12.11 (https://github.com/jeff-hykin/cpp-textmate-grammar) 25. James-Yu/LaTeX-Workshop version 8.19.1 (https://github.com/James-Yu/LaTeX-Workshop)
jeff-hykin/cpp-textmate-grammar version 1.15.5 (https://github.com/jeff-hykin/cpp-textmate-grammar) 26. jeff-hykin/cpp-textmate-grammar version 1.12.11 (https://github.com/jeff-hykin/cpp-textmate-grammar)
js-beautify version 1.6.8 (https://github.com/beautify-web/js-beautify) 27. jeff-hykin/cpp-textmate-grammar version 1.15.5 (https://github.com/jeff-hykin/cpp-textmate-grammar)
JuliaEditorSupport/atom-language-julia version 0.21.0 (https://github.com/JuliaEditorSupport/atom-language-julia) 28. js-beautify version 1.6.8 (https://github.com/beautify-web/js-beautify)
Jxck/assert version 1.0.0 (https://github.com/Jxck/assert) 29. JuliaEditorSupport/atom-language-julia version 0.21.1 (https://github.com/JuliaEditorSupport/atom-language-julia)
language-docker (https://github.com/moby/moby) 30. Jxck/assert version 1.0.0 (https://github.com/Jxck/assert)
language-less version 0.34.2 (https://github.com/atom/language-less) 31. language-docker (https://github.com/moby/moby)
language-php version 0.46.2 (https://github.com/atom/language-php) 32. language-less version 0.34.2 (https://github.com/atom/language-less)
MagicStack/MagicPython version 1.1.1 (https://github.com/MagicStack/MagicPython) 33. language-php version 0.46.2 (https://github.com/atom/language-php)
marked version 1.1.0 (https://github.com/markedjs/marked) 34. MagicStack/MagicPython version 1.1.1 (https://github.com/MagicStack/MagicPython)
mdn-data version 1.1.12 (https://github.com/mdn/data) 35. marked version 1.1.0 (https://github.com/markedjs/marked)
microsoft/TypeScript-TmLanguage version 0.0.1 (https://github.com/microsoft/TypeScript-TmLanguage) 36. mdn-data version 1.1.12 (https://github.com/mdn/data)
microsoft/vscode-JSON.tmLanguage (https://github.com/microsoft/vscode-JSON.tmLanguage) 37. microsoft/TypeScript-TmLanguage version 0.0.1 (https://github.com/microsoft/TypeScript-TmLanguage)
microsoft/vscode-markdown-tm-grammar version 1.0.0 (https://github.com/microsoft/vscode-markdown-tm-grammar) 38. microsoft/vscode-JSON.tmLanguage (https://github.com/microsoft/vscode-JSON.tmLanguage)
microsoft/vscode-mssql version 1.9.0 (https://github.com/microsoft/vscode-mssql) 39. microsoft/vscode-markdown-tm-grammar version 1.0.0 (https://github.com/microsoft/vscode-markdown-tm-grammar)
mmims/language-batchfile version 0.7.6 (https://github.com/mmims/language-batchfile) 40. microsoft/vscode-mssql version 1.9.0 (https://github.com/microsoft/vscode-mssql)
NVIDIA/cuda-cpp-grammar (https://github.com/NVIDIA/cuda-cpp-grammar) 41. mmims/language-batchfile version 0.7.6 (https://github.com/mmims/language-batchfile)
PowerShell/EditorSyntax version 1.0.0 (https://github.com/PowerShell/EditorSyntax) 42. NVIDIA/cuda-cpp-grammar (https://github.com/NVIDIA/cuda-cpp-grammar)
rust-syntax version 0.4.3 (https://github.com/dustypomerleau/rust-syntax) 43. PowerShell/EditorSyntax version 1.0.0 (https://github.com/PowerShell/EditorSyntax)
seti-ui version 0.1.0 (https://github.com/jesseweed/seti-ui) 44. rust-syntax version 0.5.0 (https://github.com/dustypomerleau/rust-syntax)
shaders-tmLanguage version 0.1.0 (https://github.com/tgjones/shaders-tmLanguage) 45. seti-ui version 0.1.0 (https://github.com/jesseweed/seti-ui)
textmate/asp.vb.net.tmbundle (https://github.com/textmate/asp.vb.net.tmbundle) 46. shaders-tmLanguage version 0.1.0 (https://github.com/tgjones/shaders-tmLanguage)
textmate/c.tmbundle (https://github.com/textmate/c.tmbundle) 47. textmate/asp.vb.net.tmbundle (https://github.com/textmate/asp.vb.net.tmbundle)
textmate/diff.tmbundle (https://github.com/textmate/diff.tmbundle) 48. textmate/c.tmbundle (https://github.com/textmate/c.tmbundle)
textmate/git.tmbundle (https://github.com/textmate/git.tmbundle) 49. textmate/diff.tmbundle (https://github.com/textmate/diff.tmbundle)
textmate/groovy.tmbundle (https://github.com/textmate/groovy.tmbundle) 50. textmate/git.tmbundle (https://github.com/textmate/git.tmbundle)
textmate/html.tmbundle (https://github.com/textmate/html.tmbundle) 51. textmate/groovy.tmbundle (https://github.com/textmate/groovy.tmbundle)
textmate/ini.tmbundle (https://github.com/textmate/ini.tmbundle) 52. textmate/html.tmbundle (https://github.com/textmate/html.tmbundle)
textmate/javascript.tmbundle (https://github.com/textmate/javascript.tmbundle) 53. textmate/ini.tmbundle (https://github.com/textmate/ini.tmbundle)
textmate/lua.tmbundle (https://github.com/textmate/lua.tmbundle) 54. textmate/javascript.tmbundle (https://github.com/textmate/javascript.tmbundle)
textmate/markdown.tmbundle (https://github.com/textmate/markdown.tmbundle) 55. textmate/lua.tmbundle (https://github.com/textmate/lua.tmbundle)
textmate/perl.tmbundle (https://github.com/textmate/perl.tmbundle) 56. textmate/markdown.tmbundle (https://github.com/textmate/markdown.tmbundle)
textmate/ruby.tmbundle (https://github.com/textmate/ruby.tmbundle) 57. textmate/perl.tmbundle (https://github.com/textmate/perl.tmbundle)
textmate/yaml.tmbundle (https://github.com/textmate/yaml.tmbundle) 58. textmate/ruby.tmbundle (https://github.com/textmate/ruby.tmbundle)
TypeScript-TmLanguage version 0.1.8 (https://github.com/microsoft/TypeScript-TmLanguage) 59. textmate/yaml.tmbundle (https://github.com/textmate/yaml.tmbundle)
TypeScript-TmLanguage version 1.0.0 (https://github.com/microsoft/TypeScript-TmLanguage) 60. TypeScript-TmLanguage version 0.1.8 (https://github.com/microsoft/TypeScript-TmLanguage)
Unicode version 12.0.0 (https://home.unicode.org/) 61. TypeScript-TmLanguage version 1.0.0 (https://github.com/microsoft/TypeScript-TmLanguage)
vscode-codicons version 0.0.14 (https://github.com/microsoft/vscode-codicons) 62. Unicode version 12.0.0 (https://home.unicode.org/)
vscode-logfile-highlighter version 2.11.0 (https://github.com/emilast/vscode-logfile-highlighter) 63. vscode-codicons version 0.0.14 (https://github.com/microsoft/vscode-codicons)
vscode-swift version 0.0.1 (https://github.com/owensd/vscode-swift) 64. vscode-logfile-highlighter version 2.11.0 (https://github.com/emilast/vscode-logfile-highlighter)
Web Background Synchronization (https://github.com/WICG/background-sync) 65. vscode-swift version 0.0.1 (https://github.com/owensd/vscode-swift)
66. Web Background Synchronization (https://github.com/WICG/background-sync)
%% atom/language-clojure NOTICES AND INFORMATION BEGIN HERE %% atom/language-clojure NOTICES AND INFORMATION BEGIN HERE
@@ -1465,7 +1466,7 @@ END OF make-error NOTICES AND INFORMATION
 =========================================
 The MIT License (MIT)
-Copyright (c) 2014-2019 Julian Kühnel
+Copyright (c) 2021 REditorSupport
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal

View File

@@ -12,14 +12,14 @@ fsevents/src/**
 fsevents/test/**
 !fsevents/**/*.node
-vscode-sqlite3/binding.gyp
-vscode-sqlite3/benchmark/**
-vscode-sqlite3/cloudformation/**
-vscode-sqlite3/deps/**
-vscode-sqlite3/test/**
-vscode-sqlite3/build/**
-vscode-sqlite3/src/**
-!vscode-sqlite3/build/Release/*.node
+@vscode/sqlite3/binding.gyp
+@vscode/sqlite3/benchmark/**
+@vscode/sqlite3/cloudformation/**
+@vscode/sqlite3/deps/**
+@vscode/sqlite3/test/**
+@vscode/sqlite3/build/**
+@vscode/sqlite3/src/**
+!@vscode/sqlite3/build/Release/*.node
 windows-mutex/binding.gyp
 windows-mutex/build/**

View File

@@ -29,3 +29,6 @@ xterm-addon-unicode11/out/**
 xterm-addon-webgl/src/**
 xterm-addon-webgl/out/**
+# This makes sure the model is included in the package
+!@vscode/vscode-languagedetection/model/**

View File

@@ -0,0 +1,21 @@
{
"codebaseName": "vscode-client",
"ppe": false,
"notificationAliases": [
"sbatten@microsoft.com"
],
"codebaseAdmins": [
"REDMOND\\stbatt",
"REDMOND\\monacotools",
],
"instanceUrl": "https://msazure.visualstudio.com/defaultcollection",
"projectName": "One",
"areaPath": "One\\VSCode\\Client",
"iterationPath": "One",
"notifyAlways": true,
"tools": [
"BinSkim",
"CredScan",
"CodeQL"
]
}

View File

@@ -160,7 +160,7 @@ async function main() {
 blobService.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;
 mooncakeBlobService.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;
 console.log('Uploading blobs to Azure storage and Mooncake Azure storage...');
-await retry_1.retry(() => Promise.all([
+await (0, retry_1.retry)(() => Promise.all([
   uploadBlob(blobService, quality, blobName, filePath, fileName),
   uploadBlob(mooncakeBlobService, quality, blobName, filePath, fileName)
 ]));
@@ -185,7 +185,7 @@ async function main() {
console.log('Asset:', JSON.stringify(asset, null, ' ')); console.log('Asset:', JSON.stringify(asset, null, ' '));
const client = new cosmos_1.CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT'], key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] }); const client = new cosmos_1.CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT'], key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const scripts = client.database('builds').container(quality).scripts; const scripts = client.database('builds').container(quality).scripts;
await retry_1.retry(() => scripts.storedProcedure('createAsset').execute('', [commit, asset, true])); await (0, retry_1.retry)(() => scripts.storedProcedure('createAsset').execute('', [commit, asset, true]));
console.log(` Done ✔️`); console.log(` Done ✔️`);
} }
main().then(() => { main().then(() => {
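The only change in this hunk and the similar ones below is that the compiled output now calls retry_1.retry through an indirect call, (0, retry_1.retry)(...). Newer TypeScript compilers emit calls to imported functions this way so the generated namespace object is not passed along as `this`. A minimal sketch of the source-level pattern, assuming retry is the helper exported from the repository's build/azure-pipelines/common/retry.ts:

// Illustrative TypeScript source only; the import path is an assumption.
import { retry } from './retry';

async function uploadWithRetry(task: () => Promise<void>): Promise<void> {
	// Newer tsc emits this call as: await (0, retry_1.retry)(task);
	// which keeps the retry_1 namespace object from becoming `this` inside retry.
	await retry(task);
}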
View File
@@ -40,7 +40,7 @@ async function main() {
}; };
const client = new cosmos_1.CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT'], key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] }); const client = new cosmos_1.CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT'], key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const scripts = client.database('builds').container(quality).scripts; const scripts = client.database('builds').container(quality).scripts;
await retry_1.retry(() => scripts.storedProcedure('createBuild').execute('', [Object.assign(Object.assign({}, build), { _partitionKey: '' })])); await (0, retry_1.retry)(() => scripts.storedProcedure('createBuild').execute('', [Object.assign(Object.assign({}, build), { _partitionKey: '' })]));
} }
main().then(() => { main().then(() => {
console.log('Build successfully created'); console.log('Build successfully created');
View File
@@ -4,11 +4,9 @@
* Licensed under the Source EULA. See License.txt in the project root for license information. * Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
const path = require("path");
const retry_1 = require("./retry"); const retry_1 = require("./retry");
const { installBrowsersWithProgressBar } = require('playwright/lib/install/installer'); const { installBrowsersWithProgressBar } = require('playwright/lib/install/installer');
const playwrightPath = path.dirname(require.resolve('playwright'));
async function install() { async function install() {
await retry_1.retry(() => installBrowsersWithProgressBar(playwrightPath)); await (0, retry_1.retry)(() => installBrowsersWithProgressBar());
} }
install(); install();
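These build scripts all route their network operations through the retry helper from ./retry, which this commit does not touch. Purely as a hypothetical sketch of what such a wrapper typically looks like (the real build/azure-pipelines/common/retry.ts may differ):

// Hypothetical sketch; the name, signature and backoff policy are assumptions,
// not the repository's actual implementation.
export async function retry<T>(fn: () => Promise<T>, attempts = 5): Promise<T> {
	let lastError: unknown;
	for (let i = 1; i <= attempts; i++) {
		try {
			return await fn();
		} catch (err) {
			lastError = err;
			// Wait a little longer after each failed attempt before trying again.
			await new Promise(resolve => setTimeout(resolve, i * 1000));
		}
	}
	throw lastError;
}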
View File
@@ -1,71 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const azure = require("azure-storage");
const mime = require("mime");
const minimist = require("minimist");
const path_1 = require("path");
const fileNames = [
'fake.html',
'host.js',
'index.html',
'main.js',
'service-worker.js'
];
async function assertContainer(blobService, container) {
await new Promise((c, e) => blobService.createContainerIfNotExists(container, { publicAccessLevel: 'blob' }, err => err ? e(err) : c()));
}
async function doesBlobExist(blobService, container, blobName) {
const existsResult = await new Promise((c, e) => blobService.doesBlobExist(container, blobName, (err, r) => err ? e(err) : c(r)));
return existsResult.exists;
}
async function uploadBlob(blobService, container, blobName, file) {
const blobOptions = {
contentSettings: {
contentType: mime.lookup(file),
cacheControl: 'max-age=31536000, public'
}
};
await new Promise((c, e) => blobService.createBlockBlobFromLocalFile(container, blobName, file, blobOptions, err => err ? e(err) : c()));
}
async function publish(commit, files) {
console.log('Publishing...');
console.log('Commit:', commit);
const storageAccount = process.env['AZURE_WEBVIEW_STORAGE_ACCOUNT'];
const blobService = azure.createBlobService(storageAccount, process.env['AZURE_WEBVIEW_STORAGE_ACCESS_KEY'])
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
await assertContainer(blobService, commit);
for (const file of files) {
const blobName = path_1.basename(file);
const blobExists = await doesBlobExist(blobService, commit, blobName);
if (blobExists) {
console.log(`Blob ${commit}, ${blobName} already exists, not publishing again.`);
continue;
}
console.log('Uploading blob to Azure storage...');
await uploadBlob(blobService, commit, blobName, file);
}
console.log('Blobs successfully uploaded.');
}
function main() {
const commit = process.env['BUILD_SOURCEVERSION'];
if (!commit) {
console.warn('Skipping publish due to missing BUILD_SOURCEVERSION');
return;
}
const opts = minimist(process.argv.slice(2));
const [directory] = opts._;
const files = fileNames.map(fileName => path_1.join(directory, fileName));
publish(commit, files).catch(err => {
console.error(err);
process.exit(1);
});
}
if (process.argv.length < 3) {
console.error('Usage: node publish.js <directory>');
process.exit(-1);
}
main();
View File
@@ -1,9 +0,0 @@
#!/usr/bin/env bash
set -e
REPO="$(pwd)"
# Publish webview contents
PACKAGEJSON="$REPO/package.json"
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")
node build/azure-pipelines/common/publish-webview.js "$REPO/src/vs/workbench/contrib/webview/browser/pre/"
View File
@@ -1,87 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as azure from 'azure-storage';
import * as mime from 'mime';
import * as minimist from 'minimist';
import { basename, join } from 'path';
const fileNames = [
'fake.html',
'host.js',
'index.html',
'main.js',
'service-worker.js'
];
async function assertContainer(blobService: azure.BlobService, container: string): Promise<void> {
await new Promise<void>((c, e) => blobService.createContainerIfNotExists(container, { publicAccessLevel: 'blob' }, err => err ? e(err) : c()));
}
async function doesBlobExist(blobService: azure.BlobService, container: string, blobName: string): Promise<boolean | undefined> {
const existsResult = await new Promise<azure.BlobService.BlobResult>((c, e) => blobService.doesBlobExist(container, blobName, (err, r) => err ? e(err) : c(r)));
return existsResult.exists;
}
async function uploadBlob(blobService: azure.BlobService, container: string, blobName: string, file: string): Promise<void> {
const blobOptions: azure.BlobService.CreateBlockBlobRequestOptions = {
contentSettings: {
contentType: mime.lookup(file),
cacheControl: 'max-age=31536000, public'
}
};
await new Promise<void>((c, e) => blobService.createBlockBlobFromLocalFile(container, blobName, file, blobOptions, err => err ? e(err) : c()));
}
async function publish(commit: string, files: readonly string[]): Promise<void> {
console.log('Publishing...');
console.log('Commit:', commit);
const storageAccount = process.env['AZURE_WEBVIEW_STORAGE_ACCOUNT']!;
const blobService = azure.createBlobService(storageAccount, process.env['AZURE_WEBVIEW_STORAGE_ACCESS_KEY']!)
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
await assertContainer(blobService, commit);
for (const file of files) {
const blobName = basename(file);
const blobExists = await doesBlobExist(blobService, commit, blobName);
if (blobExists) {
console.log(`Blob ${commit}, ${blobName} already exists, not publishing again.`);
continue;
}
console.log('Uploading blob to Azure storage...');
await uploadBlob(blobService, commit, blobName, file);
}
console.log('Blobs successfully uploaded.');
}
function main(): void {
const commit = process.env['BUILD_SOURCEVERSION'];
if (!commit) {
console.warn('Skipping publish due to missing BUILD_SOURCEVERSION');
return;
}
const opts = minimist(process.argv.slice(2));
const [directory] = opts._;
const files = fileNames.map(fileName => join(directory, fileName));
publish(commit, files).catch(err => {
console.error(err);
process.exit(1);
});
}
if (process.argv.length < 3) {
console.error('Usage: node publish.js <directory>');
process.exit(-1);
}
main();
View File
@@ -39,7 +39,7 @@ async function main() {
} }
console.log(`Releasing build ${commit}...`); console.log(`Releasing build ${commit}...`);
const scripts = client.database('builds').container(quality).scripts; const scripts = client.database('builds').container(quality).scripts;
await retry_1.retry(() => scripts.storedProcedure('releaseBuild').execute('', [commit])); await (0, retry_1.retry)(() => scripts.storedProcedure('releaseBuild').execute('', [commit]));
} }
main().then(() => { main().then(() => {
console.log('Build successfully released'); console.log('Build successfully released');
View File
@@ -0,0 +1,17 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const sign_1 = require("./sign");
const path = require("path");
(0, sign_1.main)([
process.env['EsrpCliDllPath'],
'windows',
process.env['ESRPPKI'],
process.env['ESRPAADUsername'],
process.env['ESRPAADPassword'],
path.dirname(process.argv[2]),
path.basename(process.argv[2])
]);
View File
@@ -0,0 +1,17 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { main } from './sign';
import * as path from 'path';
main([
process.env['EsrpCliDllPath']!,
'windows',
process.env['ESRPPKI']!,
process.env['ESRPAADUsername']!,
process.env['ESRPAADPassword']!,
path.dirname(process.argv[2]),
path.basename(process.argv[2])
]);
View File
@@ -0,0 +1,77 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.main = void 0;
const cp = require("child_process");
const fs = require("fs");
const tmp = require("tmp");
const crypto = require("crypto");
function getParams(type) {
switch (type) {
case 'windows':
return '[{"keyCode":"CP-230012","operationSetCode":"SigntoolSign","parameters":[{"parameterName":"OpusName","parameterValue":"VS Code"},{"parameterName":"OpusInfo","parameterValue":"https://code.visualstudio.com/"},{"parameterName":"Append","parameterValue":"/as"},{"parameterName":"FileDigest","parameterValue":"/fd \\"SHA256\\""},{"parameterName":"PageHash","parameterValue":"/NPH"},{"parameterName":"TimeStamp","parameterValue":"/tr \\"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\\" /td sha256"}],"toolName":"sign","toolVersion":"1.0"},{"keyCode":"CP-230012","operationSetCode":"SigntoolVerify","parameters":[{"parameterName":"VerifyAll","parameterValue":"/all"}],"toolName":"sign","toolVersion":"1.0"}]';
case 'rpm':
return '[{ "keyCode": "CP-450779-Pgp", "operationSetCode": "LinuxSign", "parameters": [], "toolName": "sign", "toolVersion": "1.0" }]';
case 'darwin-sign':
return '[{"keyCode":"CP-401337-Apple","operationSetCode":"MacAppDeveloperSign","parameters":[{"parameterName":"Hardening","parameterValue":"--options=runtime"}],"toolName":"sign","toolVersion":"1.0"}]';
case 'darwin-notarize':
return '[{"keyCode":"CP-401337-Apple","operationSetCode":"MacAppNotarize","parameters":[{"parameterName":"BundleId","parameterValue":"$(BundleIdentifier)"}],"toolName":"sign","toolVersion":"1.0"}]';
default:
throw new Error(`Sign type ${type} not found`);
}
}
function main([esrpCliPath, type, cert, username, password, folderPath, pattern]) {
tmp.setGracefulCleanup();
const patternPath = tmp.tmpNameSync();
fs.writeFileSync(patternPath, pattern);
const paramsPath = tmp.tmpNameSync();
fs.writeFileSync(paramsPath, getParams(type));
const keyFile = tmp.tmpNameSync();
const key = crypto.randomBytes(32);
const iv = crypto.randomBytes(16);
fs.writeFileSync(keyFile, JSON.stringify({ key: key.toString('hex'), iv: iv.toString('hex') }));
const clientkeyPath = tmp.tmpNameSync();
const clientkeyCypher = crypto.createCipheriv('aes-256-cbc', key, iv);
let clientkey = clientkeyCypher.update(password, 'utf8', 'hex');
clientkey += clientkeyCypher.final('hex');
fs.writeFileSync(clientkeyPath, clientkey);
const clientcertPath = tmp.tmpNameSync();
const clientcertCypher = crypto.createCipheriv('aes-256-cbc', key, iv);
let clientcert = clientcertCypher.update(cert, 'utf8', 'hex');
clientcert += clientcertCypher.final('hex');
fs.writeFileSync(clientcertPath, clientcert);
const args = [
esrpCliPath,
'vsts.sign',
'-a', username,
'-k', clientkeyPath,
'-z', clientcertPath,
'-f', folderPath,
'-p', patternPath,
'-u', 'false',
'-x', 'regularSigning',
'-b', 'input.json',
'-l', 'AzSecPack_PublisherPolicyProd.xml',
'-y', 'inlineSignParams',
'-j', paramsPath,
'-c', '9997',
'-t', '120',
'-g', '10',
'-v', 'Tls12',
'-s', 'https://api.esrp.microsoft.com/api/v1',
'-m', '0',
'-o', 'Microsoft',
'-i', 'https://www.microsoft.com',
'-n', '5',
'-r', 'true',
'-e', keyFile,
];
cp.spawnSync('dotnet', args, { stdio: 'inherit' });
}
exports.main = main;
if (require.main === module) {
main(process.argv.slice(2));
}

View File
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as cp from 'child_process';
import * as fs from 'fs';
import * as tmp from 'tmp';
import * as crypto from 'crypto';
function getParams(type: string): string {
switch (type) {
case 'windows':
return '[{"keyCode":"CP-230012","operationSetCode":"SigntoolSign","parameters":[{"parameterName":"OpusName","parameterValue":"VS Code"},{"parameterName":"OpusInfo","parameterValue":"https://code.visualstudio.com/"},{"parameterName":"Append","parameterValue":"/as"},{"parameterName":"FileDigest","parameterValue":"/fd \\"SHA256\\""},{"parameterName":"PageHash","parameterValue":"/NPH"},{"parameterName":"TimeStamp","parameterValue":"/tr \\"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\\" /td sha256"}],"toolName":"sign","toolVersion":"1.0"},{"keyCode":"CP-230012","operationSetCode":"SigntoolVerify","parameters":[{"parameterName":"VerifyAll","parameterValue":"/all"}],"toolName":"sign","toolVersion":"1.0"}]';
case 'rpm':
return '[{ "keyCode": "CP-450779-Pgp", "operationSetCode": "LinuxSign", "parameters": [], "toolName": "sign", "toolVersion": "1.0" }]';
case 'darwin-sign':
return '[{"keyCode":"CP-401337-Apple","operationSetCode":"MacAppDeveloperSign","parameters":[{"parameterName":"Hardening","parameterValue":"--options=runtime"}],"toolName":"sign","toolVersion":"1.0"}]';
case 'darwin-notarize':
return '[{"keyCode":"CP-401337-Apple","operationSetCode":"MacAppNotarize","parameters":[{"parameterName":"BundleId","parameterValue":"$(BundleIdentifier)"}],"toolName":"sign","toolVersion":"1.0"}]';
default:
throw new Error(`Sign type ${type} not found`);
}
}
export function main([esrpCliPath, type, cert, username, password, folderPath, pattern]: string[]) {
tmp.setGracefulCleanup();
const patternPath = tmp.tmpNameSync();
fs.writeFileSync(patternPath, pattern);
const paramsPath = tmp.tmpNameSync();
fs.writeFileSync(paramsPath, getParams(type));
const keyFile = tmp.tmpNameSync();
const key = crypto.randomBytes(32);
const iv = crypto.randomBytes(16);
fs.writeFileSync(keyFile, JSON.stringify({ key: key.toString('hex'), iv: iv.toString('hex') }));
const clientkeyPath = tmp.tmpNameSync();
const clientkeyCypher = crypto.createCipheriv('aes-256-cbc', key, iv);
let clientkey = clientkeyCypher.update(password, 'utf8', 'hex');
clientkey += clientkeyCypher.final('hex');
fs.writeFileSync(clientkeyPath, clientkey);
const clientcertPath = tmp.tmpNameSync();
const clientcertCypher = crypto.createCipheriv('aes-256-cbc', key, iv);
let clientcert = clientcertCypher.update(cert, 'utf8', 'hex');
clientcert += clientcertCypher.final('hex');
fs.writeFileSync(clientcertPath, clientcert);
const args = [
esrpCliPath,
'vsts.sign',
'-a', username,
'-k', clientkeyPath,
'-z', clientcertPath,
'-f', folderPath,
'-p', patternPath,
'-u', 'false',
'-x', 'regularSigning',
'-b', 'input.json',
'-l', 'AzSecPack_PublisherPolicyProd.xml',
'-y', 'inlineSignParams',
'-j', paramsPath,
'-c', '9997',
'-t', '120',
'-g', '10',
'-v', 'Tls12',
'-s', 'https://api.esrp.microsoft.com/api/v1',
'-m', '0',
'-o', 'Microsoft',
'-i', 'https://www.microsoft.com',
'-n', '5',
'-r', 'true',
'-e', keyFile,
];
cp.spawnSync('dotnet', args, { stdio: 'inherit' });
}
if (require.main === module) {
main(process.argv.slice(2));
}

View File
steps: steps:
- task: NodeTool@0 - task: NodeTool@0
inputs: inputs:
versionSpec: "12.18.3" versionSpec: "14.x"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 # {{SQL CARBON EDIT}} update version - task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 # {{SQL CARBON EDIT}} update version
inputs: inputs:
versionSpec: "1.x" versionSpec: "1.x"
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1 - task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
displayName: Restore Cache - Node Modules # {{SQL CARBON EDIT}} displayName: Restore Cache - Node Modules # {{SQL CARBON EDIT}}
inputs: inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock' keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules' targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
- script: | - script: |
CHILD_CONCURRENCY=1 yarn --frozen-lockfile CHILD_CONCURRENCY=1 yarn --frozen-lockfile
displayName: Install Dependencies displayName: Install Dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true')) condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1 - task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
displayName: Save Cache - Node Modules # {{SQL CARBON EDIT}} displayName: Save Cache - Node Modules # {{SQL CARBON EDIT}}
inputs: inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock' keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules' targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
condition: and(succeeded(), ne(variables['CacheRestored'], 'true')) condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- script: | - script: |
yarn electron x64 yarn electron x64
displayName: Download Electron displayName: Download Electron
# - script: | {{SQL CARBON EDIT}} remove editor checks # - script: | {{SQL CARBON EDIT}} remove editor checks
# yarn monaco-compile-check # yarn monaco-compile-check
# displayName: Run Monaco Editor Checks # displayName: Run Monaco Editor Checks
- script: | - script: |
yarn valid-layers-check yarn valid-layers-check
@@ -43,21 +43,21 @@ steps:
yarn compile yarn compile
displayName: Compile Sources displayName: Compile Sources
# - script: | {{SQL CARBON EDIT}} remove step # - script: | {{SQL CARBON EDIT}} remove step
# yarn download-builtin-extensions # yarn download-builtin-extensions
# displayName: Download Built-in Extensions # displayName: Download Built-in Extensions
- script: | - script: |
./scripts/test.sh --tfs "Unit Tests" ./scripts/test.sh --tfs "Unit Tests"
displayName: Run Unit Tests (Electron) displayName: Run Unit Tests (Electron)
# - script: | {{SQL CARBON EDIT}} disable # - script: | {{SQL CARBON EDIT}} disable
# yarn test-browser --browser chromium --browser webkit --browser firefox --tfs "Browser Unit Tests" # yarn test-browser --browser chromium --browser webkit --browser firefox --tfs "Browser Unit Tests"
# displayName: Run Unit Tests (Browser) # displayName: Run Unit Tests (Browser)
# - script: | {{SQL CARBON EDIT}} disable # - script: | {{SQL CARBON EDIT}} disable
# ./scripts/test-integration.sh --tfs "Integration Tests" # ./scripts/test-integration.sh --tfs "Integration Tests"
# displayName: Run Integration Tests (Electron) # displayName: Run Integration Tests (Electron)
- task: PublishPipelineArtifact@0 - task: PublishPipelineArtifact@0
inputs: inputs:
View File
@@ -8,6 +8,7 @@ steps:
inputs: inputs:
azureSubscription: "vscode-builds-subscription" azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode KeyVaultName: vscode
SecretsFilter: "github-distro-mixin-password,ESRP-PKI,esrp-aad-username,esrp-aad-password"
- script: | - script: |
set -e set -e
@@ -27,12 +28,10 @@ steps:
displayName: Merge distro displayName: Merge distro
- script: | - script: |
pushd build \ set -e
&& yarn \ yarn --cwd build
&& npm install -g typescript \ yarn --cwd build compile
&& tsc azure-pipelines/common/createAsset.ts \ displayName: Compile build tools
&& popd
displayName: Restore modules for just build folder and compile it
- download: current - download: current
artifact: unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive artifact: unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive
@@ -44,28 +43,16 @@ steps:
mv $(Pipeline.Workspace)/unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive/VSCode-darwin-$(VSCODE_ARCH).zip $(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH).zip mv $(Pipeline.Workspace)/unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive/VSCode-darwin-$(VSCODE_ARCH).zip $(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH).zip
displayName: Unzip & move displayName: Unzip & move
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1 - task: UseDotNet@2
inputs: inputs:
ConnectedServiceName: "ESRP CodeSign" version: 2.x
FolderPath: "$(agent.builddirectory)"
Pattern: "VSCode-darwin-$(VSCODE_ARCH).zip" - task: EsrpClientTool@1
signConfigType: inlineSignParams displayName: Download ESRPClient
inlineOperation: |
[ - script: |
{ set -e
"keyCode": "CP-401337-Apple", node build/azure-pipelines/common/sign "$(esrpclient.toolpath)/$(esrpclient.toolname)" darwin-sign $(ESRP-PKI) $(esrp-aad-username) $(esrp-aad-password) $(agent.builddirectory) VSCode-darwin-$(VSCODE_ARCH).zip
"operationSetCode": "MacAppDeveloperSign",
"parameters": [
{
"parameterName": "Hardening",
"parameterValue": "--options=runtime"
}
],
"toolName": "sign",
"toolVersion": "1.0"
}
]
SessionTimeout: 60
displayName: Codesign displayName: Codesign
- script: | - script: |
@@ -75,29 +62,10 @@ steps:
echo "##vso[task.setvariable variable=BundleIdentifier]$BUNDLE_IDENTIFIER" echo "##vso[task.setvariable variable=BundleIdentifier]$BUNDLE_IDENTIFIER"
displayName: Export bundle identifier displayName: Export bundle identifier
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1 - script: |
inputs: set -e
ConnectedServiceName: "ESRP CodeSign" node build/azure-pipelines/common/sign "$(esrpclient.toolpath)/$(esrpclient.toolname)" darwin-notarize $(ESRP-PKI) $(esrp-aad-username) $(esrp-aad-password) $(agent.builddirectory) VSCode-darwin-$(VSCODE_ARCH).zip
FolderPath: "$(agent.builddirectory)" displayName: Notarize
Pattern: "VSCode-darwin-$(VSCODE_ARCH).zip"
signConfigType: inlineSignParams
inlineOperation: |
[
{
"keyCode": "CP-401337-Apple",
"operationSetCode": "MacAppNotarize",
"parameters": [
{
"parameterName": "BundleId",
"parameterValue": "$(BundleIdentifier)"
}
],
"toolName": "sign",
"toolVersion": "1.0"
}
]
SessionTimeout: 60
displayName: Notarization
- script: | - script: |
set -e set -e
View File
@@ -8,6 +8,7 @@ steps:
inputs: inputs:
azureSubscription: "vscode-builds-subscription" azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode KeyVaultName: vscode
SecretsFilter: 'github-distro-mixin-password,macos-developer-certificate,macos-developer-certificate-key,ticino-storage-key'
- task: DownloadPipelineArtifact@2 - task: DownloadPipelineArtifact@2
inputs: inputs:
@@ -54,7 +55,7 @@ steps:
- task: Cache@2 - task: Cache@2
inputs: inputs:
key: 'nodeModules | $(Agent.OS) | .build/yarnlockhash' key: "nodeModules | $(Agent.OS) | .build/yarnlockhash"
path: .build/node_modules_cache path: .build/node_modules_cache
cacheHitVar: NODE_MODULES_RESTORED cacheHitVar: NODE_MODULES_RESTORED
displayName: Restore node_modules cache displayName: Restore node_modules cache
@@ -97,6 +98,7 @@ steps:
env: env:
ELECTRON_SKIP_BINARY_DOWNLOAD: 1 ELECTRON_SKIP_BINARY_DOWNLOAD: 1
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1 PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
GITHUB_TOKEN: "$(github-distro-mixin-password)"
displayName: Install dependencies displayName: Install dependencies
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
@@ -185,12 +187,6 @@ steps:
timeoutInMinutes: 7 timeoutInMinutes: 7
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false')) condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
yarn --cwd test/integration/browser compile
displayName: Compile integration tests
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: | - script: |
# Figure out the full absolute path of the product we just built # Figure out the full absolute path of the product we just built
# including the remote server and configure the integration tests # including the remote server and configure the integration tests
@@ -224,17 +220,11 @@ steps:
timeoutInMinutes: 7 timeoutInMinutes: 7
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false')) condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
yarn --cwd test/smoke compile
displayName: Compile smoke tests
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: | - script: |
set -e set -e
APP_ROOT=$(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH) APP_ROOT=$(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH)
APP_NAME="`ls $APP_ROOT | head -n 1`" APP_NAME="`ls $APP_ROOT | head -n 1`"
yarn smoketest-no-compile --build "$APP_ROOT/$APP_NAME" yarn smoketest-no-compile --build "$APP_ROOT/$APP_NAME" --screenshots .build/logs/smoke-tests
timeoutInMinutes: 5 timeoutInMinutes: 5
displayName: Run smoke tests (Electron) displayName: Run smoke tests (Electron)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false')) condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
@@ -244,7 +234,7 @@ steps:
APP_ROOT=$(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH) APP_ROOT=$(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH)
APP_NAME="`ls $APP_ROOT | head -n 1`" APP_NAME="`ls $APP_ROOT | head -n 1`"
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-darwin" \ VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-darwin" \
yarn smoketest-no-compile --build "$APP_ROOT/$APP_NAME" --remote yarn smoketest-no-compile --build "$APP_ROOT/$APP_NAME" --remote --screenshots .build/logs/smoke-tests
timeoutInMinutes: 5 timeoutInMinutes: 5
displayName: Run smoke tests (Remote) displayName: Run smoke tests (Remote)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false')) condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
@@ -265,6 +255,14 @@ steps:
continueOnError: true continueOnError: true
condition: failed() condition: failed()
- task: PublishPipelineArtifact@0
inputs:
artifactName: logs-macos-$(VSCODE_ARCH)-$(System.JobAttempt)
targetPath: .build/logs
displayName: "Publish Log Files"
continueOnError: true
condition: and(succeededOrFailed(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- task: PublishTestResults@2 - task: PublishTestResults@2
displayName: Publish Tests Results displayName: Publish Tests Results
inputs: inputs:
View File
@@ -17,7 +17,7 @@ steps:
- task: NodeTool@0 - task: NodeTool@0
inputs: inputs:
versionSpec: "12.13.0" versionSpec: "14.x"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 - task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
inputs: inputs:
@@ -106,7 +106,7 @@ steps:
- script: | - script: |
set -e set -e
./scripts/test.sh --build --coverage --reporter mocha-junit-reporter --tfs "Unit Tests" ./scripts/test.sh --build --tfs "Unit Tests" # Disable code coverage since it's currently broken --coverage
displayName: Run unit tests displayName: Run unit tests
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true')) condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
View File
@@ -18,6 +18,7 @@ steps:
inputs: inputs:
azureSubscription: "vscode-builds-subscription" azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode KeyVaultName: vscode
SecretsFilter: 'github-distro-mixin-password'
- script: | - script: |
set -e set -e
View File
@@ -1,7 +1,7 @@
steps: steps:
- task: NodeTool@0 - task: NodeTool@0
inputs: inputs:
versionSpec: "12.13.0" versionSpec: "14.x"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 - task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
inputs: inputs:
View File
@@ -18,6 +18,7 @@ steps:
inputs: inputs:
azureSubscription: "vscode-builds-subscription" azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode KeyVaultName: vscode
SecretsFilter: 'github-distro-mixin-password'
- script: | - script: |
set -e set -e
View File
@@ -10,7 +10,7 @@ steps:
- task: NodeTool@0 - task: NodeTool@0
inputs: inputs:
versionSpec: "12.18.3" versionSpec: "14.x"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 - task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
inputs: inputs:
@@ -22,7 +22,7 @@ steps:
displayName: Prepare yarn cache flags displayName: Prepare yarn cache flags
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1 - task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
displayName: Restore Cache - Node Modules # {{SQL CARBON EDIT}} displayName: Restore Cache - Node Modules # {{SQL CARBON EDIT}}
inputs: inputs:
keyfile: ".yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock" keyfile: ".yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules" targetfolder: "**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules"
@@ -34,7 +34,7 @@ steps:
condition: and(succeeded(), ne(variables['CacheRestored'], 'true')) condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1 - task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
displayName: Save Cache - Node Modules # {{SQL CARBON EDIT}} displayName: Save Cache - Node Modules # {{SQL CARBON EDIT}}
inputs: inputs:
keyfile: ".yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock" keyfile: ".yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules" targetfolder: "**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules"
@@ -49,10 +49,6 @@ steps:
yarn gulp hygiene yarn gulp hygiene
displayName: Run Hygiene Checks displayName: Run Hygiene Checks
- script: | # {{SQL CARBON EDIT}} add strict null check
yarn strict-vscode
displayName: Run Strict Null Check
# - script: | {{SQL CARBON EDIT}} remove monaco editor checks # - script: | {{SQL CARBON EDIT}} remove monaco editor checks
# yarn monaco-compile-check # yarn monaco-compile-check
# displayName: Run Monaco Editor Checks # displayName: Run Monaco Editor Checks
View File
@@ -12,6 +12,7 @@ steps:
inputs: inputs:
azureSubscription: "vscode-builds-subscription" azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode KeyVaultName: vscode
SecretsFilter: 'github-distro-mixin-password'
- task: DownloadPipelineArtifact@2 - task: DownloadPipelineArtifact@2
inputs: inputs:
@@ -88,6 +89,7 @@ steps:
env: env:
ELECTRON_SKIP_BINARY_DOWNLOAD: 1 ELECTRON_SKIP_BINARY_DOWNLOAD: 1
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1 PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
GITHUB_TOKEN: "$(github-distro-mixin-password)"
displayName: Install dependencies displayName: Install dependencies
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
View File
@@ -12,6 +12,7 @@ steps:
inputs: inputs:
azureSubscription: "vscode-builds-subscription" azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode KeyVaultName: vscode
SecretsFilter: "github-distro-mixin-password,builds-docdb-key-readwrite,vscode-storage-key,ESRP-PKI,esrp-aad-username,esrp-aad-password"
- task: DownloadPipelineArtifact@2 - task: DownloadPipelineArtifact@2
inputs: inputs:
@@ -48,7 +49,7 @@ steps:
- task: Cache@2 - task: Cache@2
inputs: inputs:
key: 'nodeModules | $(Agent.OS) | .build/yarnlockhash' key: "nodeModules | $(Agent.OS) | .build/yarnlockhash"
path: .build/node_modules_cache path: .build/node_modules_cache
cacheHitVar: NODE_MODULES_RESTORED cacheHitVar: NODE_MODULES_RESTORED
displayName: Restore node_modules cache displayName: Restore node_modules cache
@@ -66,14 +67,32 @@ steps:
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['ENABLE_TERRAPIN'], 'true')) condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
displayName: Switch to Terrapin packages displayName: Switch to Terrapin packages
- script: |
set -e
yarn --cwd build
yarn --cwd build compile
displayName: Compile build tools
- script: | - script: |
set -e set -e
export npm_config_arch=$(NPM_ARCH) export npm_config_arch=$(NPM_ARCH)
export npm_config_build_from_source=true export npm_config_build_from_source=true
if [ -z "$CC" ] || [ -z "$CXX" ]; then if [ -z "$CC" ] || [ -z "$CXX" ]; then
export CC=$(which gcc-5) # Download clang based on chromium revision used by vscode
export CXX=$(which g++-5) curl -s https://raw.githubusercontent.com/chromium/chromium/91.0.4472.164/tools/clang/scripts/update.py | python - --output-dir=$PWD/.build/CR_Clang --host-os=linux
# Download libcxx headers and objects from upstream electron releases
DEBUG=libcxx-fetcher \
VSCODE_LIBCXX_OBJECTS_DIR=$PWD/.build/libcxx-objects \
VSCODE_LIBCXX_HEADERS_DIR=$PWD/.build/libcxx_headers \
VSCODE_LIBCXXABI_HEADERS_DIR=$PWD/.build/libcxxabi_headers \
VSCODE_ARCH="$(NPM_ARCH)" \
node build/linux/libcxx-fetcher.js
# Set compiler toolchain
export CC=$PWD/.build/CR_Clang/bin/clang
export CXX=$PWD/.build/CR_Clang/bin/clang++
export CXXFLAGS="-nostdinc++ -D_LIBCPP_HAS_NO_VENDOR_AVAILABILITY_ANNOTATIONS -isystem$PWD/.build/libcxx_headers/include -isystem$PWD/.build/libcxxabi_headers/include -fPIC -flto=thin -fsplit-lto-unit"
export LDFLAGS="-stdlib=libc++ -fuse-ld=lld -flto=thin -fsplit-lto-unit -L$PWD/.build/libcxx-objects -lc++abi"
fi fi
if [ "$VSCODE_ARCH" == "x64" ]; then if [ "$VSCODE_ARCH" == "x64" ]; then
@@ -92,6 +111,7 @@ steps:
env: env:
ELECTRON_SKIP_BINARY_DOWNLOAD: 1 ELECTRON_SKIP_BINARY_DOWNLOAD: 1
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1 PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
GITHUB_TOKEN: "$(github-distro-mixin-password)"
displayName: Install dependencies displayName: Install dependencies
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
@@ -127,28 +147,33 @@ steps:
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \ VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn npm-run-all -lp "electron $(VSCODE_ARCH)" "playwright-install" yarn npm-run-all -lp "electron $(VSCODE_ARCH)" "playwright-install"
displayName: Download Electron and Playwright displayName: Download Electron and Playwright
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: | - script: |
set -e set -e
DISPLAY=:10 ./scripts/test.sh --build --tfs "Unit Tests" APP_ROOT=$(agent.builddirectory)/VSCode-linux-$(VSCODE_ARCH)
ELECTRON_ROOT=.build/electron
sudo chown root $APP_ROOT/chrome-sandbox
sudo chown root $ELECTRON_ROOT/chrome-sandbox
sudo chmod 4755 $APP_ROOT/chrome-sandbox
sudo chmod 4755 $ELECTRON_ROOT/chrome-sandbox
stat $APP_ROOT/chrome-sandbox
stat $ELECTRON_ROOT/chrome-sandbox
displayName: Change setuid helper binary permission
- script: |
set -e
./scripts/test.sh --build --tfs "Unit Tests"
displayName: Run unit tests (Electron) displayName: Run unit tests (Electron)
timeoutInMinutes: 7 timeoutInMinutes: 7
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false')) condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: | - script: |
set -e set -e
DISPLAY=:10 yarn test-browser --build --browser chromium --tfs "Browser Unit Tests" yarn test-browser --build --browser chromium --tfs "Browser Unit Tests"
displayName: Run unit tests (Browser) displayName: Run unit tests (Browser)
timeoutInMinutes: 7 timeoutInMinutes: 7
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false')) condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
yarn --cwd test/integration/browser compile
displayName: Compile integration tests
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: | - script: |
# Figure out the full absolute path of the product we just built # Figure out the full absolute path of the product we just built
# including the remote server and configure the integration tests # including the remote server and configure the integration tests
@@ -159,7 +184,7 @@ steps:
INTEGRATION_TEST_APP_NAME="$APP_NAME" \ INTEGRATION_TEST_APP_NAME="$APP_NAME" \
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \ INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-linux-$(VSCODE_ARCH)" \ VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-linux-$(VSCODE_ARCH)" \
DISPLAY=:10 ./scripts/test-integration.sh --build --tfs "Integration Tests" ./scripts/test-integration.sh --build --tfs "Integration Tests"
displayName: Run integration tests (Electron) displayName: Run integration tests (Electron)
timeoutInMinutes: 10 timeoutInMinutes: 10
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false')) condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
@@ -167,7 +192,7 @@ steps:
- script: | - script: |
set -e set -e
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-linux-$(VSCODE_ARCH)" \ VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-linux-$(VSCODE_ARCH)" \
DISPLAY=:10 ./resources/server/test/test-web-integration.sh --browser chromium ./resources/server/test/test-web-integration.sh --browser chromium
displayName: Run integration tests (Browser) displayName: Run integration tests (Browser)
timeoutInMinutes: 10 timeoutInMinutes: 10
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false')) condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
@@ -179,19 +204,52 @@ steps:
INTEGRATION_TEST_APP_NAME="$APP_NAME" \ INTEGRATION_TEST_APP_NAME="$APP_NAME" \
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \ INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-linux-$(VSCODE_ARCH)" \ VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-linux-$(VSCODE_ARCH)" \
DISPLAY=:10 ./resources/server/test/test-remote-integration.sh ./resources/server/test/test-remote-integration.sh
displayName: Run remote integration tests (Electron) displayName: Run remote integration tests (Electron)
timeoutInMinutes: 7 timeoutInMinutes: 7
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false')) condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
APP_PATH=$(agent.builddirectory)/VSCode-linux-$(VSCODE_ARCH)
yarn smoketest-no-compile --build "$APP_PATH" --electronArgs="--disable-dev-shm-usage --use-gl=swiftshader" --screenshots .build/logs/smoke-tests
timeoutInMinutes: 5
displayName: Run smoke tests (Electron)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
APP_PATH=$(agent.builddirectory)/VSCode-linux-$(VSCODE_ARCH)
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-linux-$(VSCODE_ARCH)" \
yarn smoketest-no-compile --build "$APP_PATH" --remote --electronArgs="--disable-dev-shm-usage --use-gl=swiftshader" --screenshots .build/logs/smoke-tests
timeoutInMinutes: 5
displayName: Run smoke tests (Remote)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-linux-$(VSCODE_ARCH)" \
yarn smoketest-no-compile --web --headless --electronArgs="--disable-dev-shm-usage --use-gl=swiftshader"
timeoutInMinutes: 5
displayName: Run smoke tests (Browser)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- task: PublishPipelineArtifact@0 - task: PublishPipelineArtifact@0
inputs: inputs:
artifactName: "crash-dump-linux-$(VSCODE_ARCH)" artifactName: crash-dump-linux-$(VSCODE_ARCH)
targetPath: .build/crashes targetPath: .build/crashes
displayName: "Publish Crash Reports" displayName: "Publish Crash Reports"
continueOnError: true continueOnError: true
condition: failed() condition: failed()
- task: PublishPipelineArtifact@0
inputs:
artifactName: logs-linux-$(VSCODE_ARCH)-$(System.JobAttempt)
targetPath: .build/logs
displayName: "Publish Log Files"
continueOnError: true
condition: and(succeededOrFailed(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- task: PublishTestResults@2 - task: PublishTestResults@2
displayName: Publish Tests Results displayName: Publish Tests Results
inputs: inputs:
@@ -212,30 +270,25 @@ steps:
displayName: Prepare snap package displayName: Prepare snap package
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false')) condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
# needed for code signing
- task: UseDotNet@2 - task: UseDotNet@2
displayName: "Install .NET Core SDK 2.x"
inputs: inputs:
version: 2.x version: 2.x
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false')) condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1 - task: EsrpClientTool@1
inputs: displayName: Download ESRPClient
ConnectedServiceName: "ESRP CodeSign" condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
FolderPath: ".build/linux/rpm"
Pattern: "*.rpm" - script: |
signConfigType: inlineSignParams set -e
inlineOperation: | yarn --cwd build
[ yarn --cwd build compile
{ displayName: Compile build tools
"keyCode": "CP-450779-Pgp", condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
"operationSetCode": "LinuxSign",
"parameters": [ ], - script: |
"toolName": "sign", set -e
"toolVersion": "1.0" node build/azure-pipelines/common/sign "$(esrpclient.toolpath)/$(esrpclient.toolname)" rpm $(ESRP-PKI) $(esrp-aad-username) $(esrp-aad-password) .build/linux/rpm '*.rpm'
}
]
SessionTimeout: 120
displayName: Codesign rpm displayName: Codesign rpm
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false')) condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
View File
@@ -7,12 +7,6 @@ steps:
inputs: inputs:
versionSpec: "1.x" versionSpec: "1.x"
- task: AzureKeyVault@1
displayName: "Azure Key Vault: Get Secrets"
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
- task: DownloadPipelineArtifact@0 - task: DownloadPipelineArtifact@0
displayName: "Download Pipeline Artifact" displayName: "Download Pipeline Artifact"
inputs: inputs:
View File
@@ -4,7 +4,7 @@ parameters:
steps: steps:
- task: NodeTool@0 - task: NodeTool@0
inputs: inputs:
versionSpec: "12.13.0" versionSpec: "14.x"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 - task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
inputs: inputs:
@@ -119,7 +119,7 @@ steps:
- script: | - script: |
set -e set -e
DISPLAY=:10 ./scripts/test.sh --build --coverage --reporter mocha-junit-reporter --tfs "Unit Tests" DISPLAY=:10 ./scripts/test.sh --build --tfs "Unit Tests" # Disable code coverage since it's currently broken --coverage
displayName: Run unit tests (Electron) displayName: Run unit tests (Electron)
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true')) condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
View File
@@ -104,19 +104,35 @@ variables:
value: ${{ eq(parameters.VSCODE_STEP_ON_IT, true) }} value: ${{ eq(parameters.VSCODE_STEP_ON_IT, true) }}
- name: VSCODE_BUILD_MACOS_UNIVERSAL - name: VSCODE_BUILD_MACOS_UNIVERSAL
value: ${{ and(eq(variables['VSCODE_PUBLISH'], true), eq(parameters.VSCODE_BUILD_MACOS, true), eq(parameters.VSCODE_BUILD_MACOS_ARM64, true), eq(parameters.VSCODE_BUILD_MACOS_UNIVERSAL, true)) }} value: ${{ and(eq(variables['VSCODE_PUBLISH'], true), eq(parameters.VSCODE_BUILD_MACOS, true), eq(parameters.VSCODE_BUILD_MACOS_ARM64, true), eq(parameters.VSCODE_BUILD_MACOS_UNIVERSAL, true)) }}
- name: AZURE_CDN_URL
value: https://az764295.vo.msecnd.net
- name: AZURE_DOCUMENTDB_ENDPOINT
value: https://vscode.documents.azure.com:443/
- name: AZURE_STORAGE_ACCOUNT
value: ticino
- name: AZURE_STORAGE_ACCOUNT_2
value: vscode
- name: MOONCAKE_CDN_URL
value: https://vscode.cdn.azure.cn
- name: VSCODE_MIXIN_REPO
value: microsoft/vscode-distro
- name: skipComponentGovernanceDetection
value: true
resources: resources:
containers: containers:
- container: vscode-x64 - container: vscode-x64
image: vscodehub.azurecr.io/vscode-linux-build-agent:bionic-x64 image: vscodehub.azurecr.io/vscode-linux-build-agent:bionic-x64
endpoint: VSCodeHub endpoint: VSCodeHub
options: --user 0:0 options: --user 0:0 --cap-add SYS_ADMIN
- container: vscode-arm64 - container: vscode-arm64
image: vscodehub.azurecr.io/vscode-linux-build-agent:stretch-arm64 image: vscodehub.azurecr.io/vscode-linux-build-agent:stretch-arm64
endpoint: VSCodeHub endpoint: VSCodeHub
options: --user 0:0 --cap-add SYS_ADMIN
- container: vscode-armhf - container: vscode-armhf
image: vscodehub.azurecr.io/vscode-linux-build-agent:stretch-armhf image: vscodehub.azurecr.io/vscode-linux-build-agent:stretch-armhf
endpoint: VSCodeHub endpoint: VSCodeHub
options: --user 0:0 --cap-add SYS_ADMIN
- container: snapcraft - container: snapcraft
image: snapcore/snapcraft:stable image: snapcore/snapcraft:stable
@@ -124,7 +140,7 @@ stages:
- stage: Compile - stage: Compile
jobs: jobs:
- job: Compile - job: Compile
pool: compile pool: vscode-1es
variables: variables:
VSCODE_ARCH: x64 VSCODE_ARCH: x64
steps: steps:
@@ -176,10 +192,11 @@ stages:
variables: variables:
VSCODE_ARCH: x64 VSCODE_ARCH: x64
NPM_ARCH: x64 NPM_ARCH: x64
DISPLAY: ":10"
steps: steps:
- template: linux/product-build-linux.yml - template: linux/product-build-linux.yml
- ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_LINUX, true)) }}: - ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_LINUX, true), ne(variables['VSCODE_PUBLISH'], 'false')) }}:
- job: LinuxSnap - job: LinuxSnap
dependsOn: dependsOn:
- Linux - Linux
View File
@@ -12,6 +12,7 @@ steps:
inputs: inputs:
azureSubscription: "vscode-builds-subscription" azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode KeyVaultName: vscode
SecretsFilter: 'github-distro-mixin-password,ticino-storage-key'
- script: | - script: |
set -e set -e
@@ -38,7 +39,7 @@ steps:
# using `genericNodeModules` instead of `nodeModules` here to avoid sharing the cache with builds running inside containers # using `genericNodeModules` instead of `nodeModules` here to avoid sharing the cache with builds running inside containers
- task: Cache@2 - task: Cache@2
inputs: inputs:
key: 'genericNodeModules | $(Agent.OS) | .build/yarnlockhash' key: "genericNodeModules | $(Agent.OS) | .build/yarnlockhash"
path: .build/node_modules_cache path: .build/node_modules_cache
cacheHitVar: NODE_MODULES_RESTORED cacheHitVar: NODE_MODULES_RESTORED
displayName: Restore node_modules cache displayName: Restore node_modules cache
@@ -76,6 +77,7 @@ steps:
env: env:
ELECTRON_SKIP_BINARY_DOWNLOAD: 1 ELECTRON_SKIP_BINARY_DOWNLOAD: 1
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1 PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
GITHUB_TOKEN: "$(github-distro-mixin-password)"
displayName: Install dependencies displayName: Install dependencies
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
@@ -98,6 +100,13 @@ steps:
yarn npm-run-all -lp core-ci extensions-ci hygiene eslint valid-layers-check yarn npm-run-all -lp core-ci extensions-ci hygiene eslint valid-layers-check
displayName: Compile & Hygiene displayName: Compile & Hygiene
- script: |
set -e
yarn --cwd test/smoke compile
yarn --cwd test/integration/browser compile
displayName: Compile test suites
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: | - script: |
set -e set -e
AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \ AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \
@@ -113,15 +122,7 @@ steps:
- script: | - script: |
set -e set -e
AZURE_WEBVIEW_STORAGE_ACCESS_KEY="$(vscode-webview-storage-key)" \ tar -cz --ignore-failed-read -f $(Build.ArtifactStagingDirectory)/compilation.tar.gz .build out-* test/integration/browser/out test/smoke/out test/automation/out
./build/azure-pipelines/common/publish-webview.sh
displayName: Publish Webview
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
# we gotta tarball everything in order to preserve file permissions
- script: |
set -e
tar -czf $(Build.ArtifactStagingDirectory)/compilation.tar.gz .build out-*
displayName: Compress compilation artifact displayName: Compress compilation artifact
- task: PublishPipelineArtifact@1 - task: PublishPipelineArtifact@1
View File
@@ -12,6 +12,7 @@ steps:
inputs: inputs:
azureSubscription: "vscode-builds-subscription" azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode KeyVaultName: vscode
SecretsFilter: 'builds-docdb-key-readwrite,github-distro-mixin-password,ticino-storage-key,vscode-storage-key,vscode-mooncake-storage-key'
- pwsh: | - pwsh: |
. build/azure-pipelines/win32/exec.ps1 . build/azure-pipelines/win32/exec.ps1
@@ -51,6 +52,7 @@ steps:
- publish: $(Pipeline.Workspace)/artifacts_processed_$(System.StageAttempt)/artifacts_processed_$(System.StageAttempt).txt - publish: $(Pipeline.Workspace)/artifacts_processed_$(System.StageAttempt)/artifacts_processed_$(System.StageAttempt).txt
artifact: artifacts_processed_$(System.StageAttempt) artifact: artifacts_processed_$(System.StageAttempt)
displayName: Publish what artifacts were published for this stage attempt displayName: Publish what artifacts were published for this stage attempt
condition: always()
- pwsh: | - pwsh: |
$ErrorActionPreference = 'Stop' $ErrorActionPreference = 'Stop'
View File
@@ -12,6 +12,7 @@ steps:
inputs: inputs:
azureSubscription: "vscode-builds-subscription" azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode KeyVaultName: vscode
SecretsFilter: 'builds-docdb-key-readwrite'
- script: | - script: |
set -e set -e
View File
@@ -0,0 +1,243 @@
trigger: none
pr: none
parameters:
- name: ENABLE_TERRAPIN
displayName: "Enable Terrapin"
type: boolean
default: true
- name: SCAN_WINDOWS
displayName: "Scan Windows"
type: boolean
default: true
- name: SCAN_LINUX
displayName: "Scan Linux"
type: boolean
default: false
variables:
- name: ENABLE_TERRAPIN
value: ${{ eq(parameters.ENABLE_TERRAPIN, true) }}
- name: SCAN_WINDOWS
value: ${{ eq(parameters.SCAN_WINDOWS, true) }}
- name: SCAN_LINUX
value: ${{ eq(parameters.SCAN_LINUX, true) }}
- name: VSCODE_MIXIN_REPO
value: microsoft/vscode-distro
- name: skipComponentGovernanceDetection
value: true
- name: NPM_ARCH
value: x64
- name: VSCODE_ARCH
value: x64
stages:
- stage: Windows
condition: eq(variables.SCAN_WINDOWS, 'true')
pool:
vmImage: VS2017-Win2016
jobs:
- job: WindowsJob
timeoutInMinutes: 0
steps:
- task: CredScan@3
continueOnError: true
inputs:
scanFolder: '$(Build.SourcesDirectory)'
outputFormat: 'pre'
- task: NodeTool@0
inputs:
versionSpec: "14.x"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
- task: AzureKeyVault@1
displayName: "Azure Key Vault: Get Secrets"
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
SecretsFilter: "github-distro-mixin-password,ESRP-SSL-AADAuth,vscode-storage-key,builds-docdb-key-readwrite"
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
"machine github.com`nlogin vscode`npassword $(github-distro-mixin-password)" | Out-File "$env:USERPROFILE\_netrc" -Encoding ASCII
exec { git config user.email "vscode@microsoft.com" }
exec { git config user.name "VSCode" }
displayName: Prepare tooling
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { git pull --no-rebase https://github.com/$(VSCODE_MIXIN_REPO).git $(node -p "require('./package.json').distro") }
displayName: Merge distro
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { npx https://aka.ms/enablesecurefeed standAlone }
timeoutInMinutes: 5
condition: and(succeeded(), eq(variables['ENABLE_TERRAPIN'], 'true'))
displayName: Switch to Terrapin packages
- task: Semmle@1
inputs:
sourceCodeDirectory: '$(Build.SourcesDirectory)'
language: 'cpp'
buildCommandsString: 'yarn --frozen-lockfile'
querySuite: 'Required'
timeout: '1800'
ram: '16384'
addProjectDirToScanningExclusionList: true
env:
npm_config_arch: "$(NPM_ARCH)"
npm_config_build_from_source: true
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
GITHUB_TOKEN: "$(github-distro-mixin-password)"
displayName: CodeQL
- powershell: |
. build/azure-pipelines/win32/exec.ps1
. build/azure-pipelines/win32/retry.ps1
$ErrorActionPreference = "Stop"
retry { exec { yarn --frozen-lockfile } }
env:
npm_config_arch: "$(NPM_ARCH)"
npm_config_build_from_source: true
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
GITHUB_TOKEN: "$(github-distro-mixin-password)"
CHILD_CONCURRENCY: 1
displayName: Install dependencies
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn gulp "vscode-symbols-win32-$(VSCODE_ARCH)" }
displayName: Download Symbols
- task: BinSkim@4
inputs:
InputType: 'Basic'
Function: 'analyze'
TargetPattern: 'guardianGlob'
AnalyzeTargetGlob: '$(agent.builddirectory)\scanbin\**.dll;$(agent.builddirectory)\scanbin\**.exe;$(agent.builddirectory)\scanbin\**.node'
AnalyzeLocalSymbolDirectories: '$(agent.builddirectory)\scanbin\VSCode-win32-$(VSCODE_ARCH)\pdb'
- task: TSAUpload@2
inputs:
GdnPublishTsaOnboard: true
GdnPublishTsaConfigFile: '$(Build.SourcesDirectory)\build\azure-pipelines\.gdntsa'
- stage: Linux
dependsOn: []
condition: eq(variables.SCAN_LINUX, 'true')
pool:
vmImage: "Ubuntu-18.04"
jobs:
- job: LinuxJob
steps:
- task: CredScan@2
inputs:
toolMajorVersion: 'V2'
- task: NodeTool@0
inputs:
versionSpec: "14.x"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
- task: AzureKeyVault@1
displayName: "Azure Key Vault: Get Secrets"
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
SecretsFilter: "github-distro-mixin-password,ESRP-SSL-AADAuth,vscode-storage-key,builds-docdb-key-readwrite"
- script: |
set -e
cat << EOF > ~/.netrc
machine github.com
login vscode
password $(github-distro-mixin-password)
EOF
git config user.email "vscode@microsoft.com"
git config user.name "VSCode"
displayName: Prepare tooling
- script: |
set -e
git pull --no-rebase https://github.com/$(VSCODE_MIXIN_REPO).git $(node -p "require('./package.json').distro")
displayName: Merge distro
- script: |
set -e
npx https://aka.ms/enablesecurefeed standAlone
timeoutInMinutes: 5
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
displayName: Switch to Terrapin packages
- script: |
set -e
yarn --cwd build
yarn --cwd build compile
displayName: Compile build tools
- script: |
set -e
export npm_config_arch=$(NPM_ARCH)
export npm_config_build_from_source=true
if [ -z "$CC" ] || [ -z "$CXX" ]; then
# Download clang based on chromium revision used by vscode
curl -s https://raw.githubusercontent.com/chromium/chromium/91.0.4472.164/tools/clang/scripts/update.py | python - --output-dir=$PWD/.build/CR_Clang --host-os=linux
# Download libcxx headers and objects from upstream electron releases
DEBUG=libcxx-fetcher \
VSCODE_LIBCXX_OBJECTS_DIR=$PWD/.build/libcxx-objects \
VSCODE_LIBCXX_HEADERS_DIR=$PWD/.build/libcxx_headers \
VSCODE_LIBCXXABI_HEADERS_DIR=$PWD/.build/libcxxabi_headers \
VSCODE_ARCH="$(NPM_ARCH)" \
node build/linux/libcxx-fetcher.js
# Set compiler toolchain
export CC=$PWD/.build/CR_Clang/bin/clang
export CXX=$PWD/.build/CR_Clang/bin/clang++
export CXXFLAGS="-nostdinc++ -D_LIBCPP_HAS_NO_VENDOR_AVAILABILITY_ANNOTATIONS -isystem$PWD/.build/libcxx_headers/include -isystem$PWD/.build/libcxxabi_headers/include -fPIC -flto=thin -fsplit-lto-unit"
export LDFLAGS="-stdlib=libc++ -fuse-ld=lld -flto=thin -fsplit-lto-unit -L$PWD/.build/libcxx-objects -lc++abi"
fi
if [ "$VSCODE_ARCH" == "x64" ]; then
export VSCODE_REMOTE_CC=$(which gcc-4.8)
export VSCODE_REMOTE_CXX=$(which g++-4.8)
fi
for i in {1..3}; do # try 3 times, for Terrapin
yarn --frozen-lockfile && break
if [ $i -eq 3 ]; then
echo "Yarn failed too many times" >&2
exit 1
fi
echo "Yarn failed $i, trying again..."
done
env:
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
GITHUB_TOKEN: "$(github-distro-mixin-password)"
displayName: Install dependencies
- script: |
set -e
yarn gulp vscode-symbols-linux-$(VSCODE_ARCH)
displayName: Build
- task: BinSkim@3
inputs:
toolVersion: Latest
InputType: CommandLine
arguments: analyze $(agent.builddirectory)\scanbin\exe\*.* --recurse --local-symbol-directories $(agent.builddirectory)\scanbin\VSCode-linux-$(VSCODE_ARCH)\pdb
- task: TSAUpload@2
inputs:
GdnPublishTsaConfigFile: '$(Build.SourcesDirectory)\build\azure-pipelines\.gdntsa'

View File

@@ -1,7 +1,7 @@
steps: steps:
- task: NodeTool@0 - task: NodeTool@0
inputs: inputs:
versionSpec: "12.13.0" versionSpec: "14.x"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 - task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
inputs: inputs:
@@ -84,7 +84,7 @@ steps:
- script: | - script: |
set -e set -e
yarn npm-run-all -lp sqllint extensions-lint strict-vscode yarn npm-run-all -lp sqllint extensions-lint
displayName: SQL Hygiene displayName: SQL Hygiene
- script: | - script: |

View File

@@ -12,6 +12,7 @@ steps:
inputs: inputs:
azureSubscription: "vscode-builds-subscription" azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode KeyVaultName: vscode
SecretsFilter: 'github-distro-mixin-password,web-storage-account,web-storage-key,ticino-storage-key'
- task: DownloadPipelineArtifact@2 - task: DownloadPipelineArtifact@2
inputs: inputs:
@@ -79,6 +80,7 @@ steps:
env: env:
ELECTRON_SKIP_BINARY_DOWNLOAD: 1 ELECTRON_SKIP_BINARY_DOWNLOAD: 1
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1 PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
GITHUB_TOKEN: "$(github-distro-mixin-password)"
displayName: Install dependencies displayName: Install dependencies
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))

View File

@@ -1,7 +1,7 @@
steps: steps:
- task: NodeTool@0 - task: NodeTool@0
inputs: inputs:
versionSpec: "12.13.0" versionSpec: "14.x"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 - task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
inputs: inputs:
@@ -83,7 +83,6 @@ steps:
yarn sqllint yarn sqllint
yarn extensions-lint yarn extensions-lint
yarn gulp hygiene yarn gulp hygiene
yarn strict-vscode
yarn valid-layers-check yarn valid-layers-check
displayName: Run hygiene, eslint displayName: Run hygiene, eslint
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false')) condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

View File

@@ -1,10 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<configuration>
<packageSources>
<clear />
<add key="ESRP" value="https://microsoft.pkgs.visualstudio.com/_packaging/ESRP/nuget/v3/index.json" />
</packageSources>
<disabledPackageSources>
<clear />
</disabledPackageSources>
</configuration>

View File

@@ -1,4 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<packages>
<package id="Microsoft.ESRPClient" version="1.2.47" />
</packages>

View File

@@ -1,7 +1,7 @@
steps: steps:
- task: NodeTool@0 - task: NodeTool@0
inputs: inputs:
versionSpec: "12.18.3" versionSpec: "14.x"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 # {{SQL CARBON EDIT}} update version - task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 # {{SQL CARBON EDIT}} update version
inputs: inputs:

View File

@@ -2,9 +2,6 @@
$ErrorActionPreference = "Stop" $ErrorActionPreference = "Stop"
$Arch = "$env:VSCODE_ARCH" $Arch = "$env:VSCODE_ARCH"
exec { yarn gulp "vscode-win32-$Arch-archive" "vscode-win32-$Arch-system-setup" "vscode-win32-$Arch-user-setup" --sign }
$Repo = "$(pwd)" $Repo = "$(pwd)"
$Root = "$Repo\.." $Root = "$Repo\.."
$SystemExe = "$Repo\.build\win32-$Arch\system-setup\VSCodeSetup.exe" $SystemExe = "$Repo\.build\win32-$Arch\system-setup\VSCodeSetup.exe"

View File

@@ -17,6 +17,7 @@ steps:
inputs: inputs:
azureSubscription: "vscode-builds-subscription" azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode KeyVaultName: vscode
SecretsFilter: "github-distro-mixin-password,vscode-storage-key,builds-docdb-key-readwrite,ESRP-PKI,esrp-aad-username,esrp-aad-password"
- task: DownloadPipelineArtifact@2 - task: DownloadPipelineArtifact@2
inputs: inputs:
@@ -53,7 +54,7 @@ steps:
- task: Cache@2 - task: Cache@2
inputs: inputs:
key: 'nodeModules | $(Agent.OS) | .build/arch, .build/terrapin, .build/yarnlockhash' key: "nodeModules | $(Agent.OS) | .build/arch, .build/terrapin, .build/yarnlockhash"
path: .build/node_modules_cache path: .build/node_modules_cache
cacheHitVar: NODE_MODULES_RESTORED cacheHitVar: NODE_MODULES_RESTORED
displayName: Restore node_modules cache displayName: Restore node_modules cache
@@ -84,6 +85,7 @@ steps:
env: env:
ELECTRON_SKIP_BINARY_DOWNLOAD: 1 ELECTRON_SKIP_BINARY_DOWNLOAD: 1
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1 PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
GITHUB_TOKEN: "$(github-distro-mixin-password)"
displayName: Install dependencies displayName: Install dependencies
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
@@ -154,13 +156,6 @@ steps:
timeoutInMinutes: 7 timeoutInMinutes: 7
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64')) condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn --cwd test/integration/browser compile }
displayName: Compile integration tests
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
- powershell: | - powershell: |
# Figure out the full absolute path of the product we just built # Figure out the full absolute path of the product we just built
# including the remote server and configure the integration tests # including the remote server and configure the integration tests
@@ -194,6 +189,41 @@ steps:
timeoutInMinutes: 7 timeoutInMinutes: 7
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64')) condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn --cwd test/smoke compile }
displayName: Compile smoke tests
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$AppRoot = "$(agent.builddirectory)\VSCode-win32-$(VSCODE_ARCH)"
exec { yarn smoketest-no-compile --build "$AppRoot" --screenshots .build\logs\smoke-tests }
displayName: Run smoke tests (Electron)
timeoutInMinutes: 5
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
# - powershell: |
# . build/azure-pipelines/win32/exec.ps1
# $ErrorActionPreference = "Stop"
# $AppRoot = "$(agent.builddirectory)\VSCode-win32-$(VSCODE_ARCH)"
# $env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\vscode-reh-win32-$(VSCODE_ARCH)"
# exec { yarn smoketest-no-compile --build "$AppRoot" --remote }
# displayName: Run smoke tests (Remote)
# timeoutInMinutes: 5
# condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\vscode-reh-web-win32-$(VSCODE_ARCH)"
exec { yarn smoketest-no-compile --web --browser firefox --headless }
displayName: Run smoke tests (Browser)
timeoutInMinutes: 5
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
- task: PublishPipelineArtifact@0 - task: PublishPipelineArtifact@0
inputs: inputs:
artifactName: crash-dump-windows-$(VSCODE_ARCH) artifactName: crash-dump-windows-$(VSCODE_ARCH)
@@ -202,6 +232,14 @@ steps:
continueOnError: true continueOnError: true
condition: failed() condition: failed()
- task: PublishPipelineArtifact@0
inputs:
artifactName: logs-windows-$(VSCODE_ARCH)-$(System.JobAttempt)
targetPath: .build\logs
displayName: "Publish Log Files"
continueOnError: true
condition: and(succeededOrFailed(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
- task: PublishTestResults@2 - task: PublishTestResults@2
displayName: Publish Tests Results displayName: Publish Tests Results
inputs: inputs:
@@ -209,84 +247,58 @@ steps:
searchFolder: "$(Build.ArtifactStagingDirectory)/test-results" searchFolder: "$(Build.ArtifactStagingDirectory)/test-results"
condition: and(succeededOrFailed(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64')) condition: and(succeededOrFailed(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1 - task: UseDotNet@2
inputs: inputs:
ConnectedServiceName: "ESRP CodeSign" version: 2.x
FolderPath: "$(CodeSigningFolderPath)"
Pattern: "*.dll,*.exe,*.node"
signConfigType: inlineSignParams
inlineOperation: |
[
{
"keyCode": "CP-230012",
"operationSetCode": "SigntoolSign",
"parameters": [
{
"parameterName": "OpusName",
"parameterValue": "VS Code"
},
{
"parameterName": "OpusInfo",
"parameterValue": "https://code.visualstudio.com/"
},
{
"parameterName": "Append",
"parameterValue": "/as"
},
{
"parameterName": "FileDigest",
"parameterValue": "/fd \"SHA256\""
},
{
"parameterName": "PageHash",
"parameterValue": "/NPH"
},
{
"parameterName": "TimeStamp",
"parameterValue": "/tr \"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\" /td sha256"
}
],
"toolName": "sign",
"toolVersion": "1.0"
},
{
"keyCode": "CP-230012",
"operationSetCode": "SigntoolVerify",
"parameters": [
{
"parameterName": "VerifyAll",
"parameterValue": "/all"
}
],
"toolName": "sign",
"toolVersion": "1.0"
}
]
SessionTimeout: 120
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false')) condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- task: NuGetCommand@2 - task: EsrpClientTool@1
displayName: Install ESRPClient.exe displayName: Download ESRPClient
inputs:
restoreSolution: 'build\azure-pipelines\win32\ESRPClient\packages.config'
feedsToUse: config
nugetConfigPath: 'build\azure-pipelines\win32\ESRPClient\NuGet.config'
externalFeedCredentials: "ESRP Nuget"
restoreDirectory: packages
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false')) condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- task: ESRPImportCertTask@1 - powershell: |
displayName: Import ESRP Request Signing Certificate . build/azure-pipelines/win32/exec.ps1
inputs: $ErrorActionPreference = "Stop"
ESRP: "ESRP CodeSign" exec { yarn --cwd build }
exec { yarn --cwd build compile }
displayName: Compile build tools
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false')) condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- task: PowerShell@2 - powershell: |
inputs: . build/azure-pipelines/win32/exec.ps1
targetType: filePath $ErrorActionPreference = "Stop"
filePath: .\build\azure-pipelines\win32\import-esrp-auth-cert.ps1 $EsrpClientTool = (gci -directory -filter EsrpClientTool_* $(Agent.RootDirectory)\_tasks | Select-Object -last 1).FullName
arguments: "$(ESRP-SSL-AADAuth)" $EsrpCliZip = (gci -recurse -filter esrpcli.*.zip $EsrpClientTool | Select-Object -last 1).FullName
displayName: Import ESRP Auth Certificate mkdir -p $(Agent.TempDirectory)\esrpcli
Expand-Archive -Path $EsrpCliZip -DestinationPath $(Agent.TempDirectory)\esrpcli
$EsrpCliDllPath = (gci -recurse -filter esrpcli.dll $(Agent.TempDirectory)\esrpcli | Select-Object -last 1).FullName
echo "##vso[task.setvariable variable=EsrpCliDllPath]$EsrpCliDllPath"
displayName: Find ESRP CLI
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { node build\azure-pipelines\common\sign $env:EsrpCliDllPath windows $(ESRP-PKI) $(esrp-aad-username) $(esrp-aad-password) $(CodeSigningFolderPath) '*.dll,*.exe,*.node' }
displayName: Codesign
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn gulp "vscode-win32-$(VSCODE_ARCH)-archive" }
displayName: Package archive
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$env:ESRPPKI = "$(ESRP-PKI)"
$env:ESRPAADUsername = "$(esrp-aad-username)"
$env:ESRPAADPassword = "$(esrp-aad-password)"
exec { yarn gulp "vscode-win32-$(VSCODE_ARCH)-system-setup" --sign }
exec { yarn gulp "vscode-win32-$(VSCODE_ARCH)-user-setup" --sign }
displayName: Package setups
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false')) condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- powershell: | - powershell: |

View File

@@ -1,71 +0,0 @@
function Create-TmpJson($Obj) {
$FileName = [System.IO.Path]::GetTempFileName()
ConvertTo-Json -Depth 100 $Obj | Out-File -Encoding UTF8 $FileName
return $FileName
}
$Auth = Create-TmpJson @{
Version = "1.0.0"
AuthenticationType = "AAD_CERT"
ClientId = $env:ESRPClientId
AuthCert = @{
SubjectName = $env:ESRPAuthCertificateSubjectName
StoreLocation = "LocalMachine"
StoreName = "My"
SendX5c = "true"
}
RequestSigningCert = @{
SubjectName = $env:ESRPCertificateSubjectName
StoreLocation = "LocalMachine"
StoreName = "My"
}
}
$Policy = Create-TmpJson @{
Version = "1.0.0"
}
$Input = Create-TmpJson @{
Version = "1.0.0"
SignBatches = @(
@{
SourceLocationType = "UNC"
SignRequestFiles = @(
@{
SourceLocation = $args[0]
}
)
SigningInfo = @{
Operations = @(
@{
KeyCode = "CP-230012"
OperationCode = "SigntoolSign"
Parameters = @{
OpusName = "VS Code"
OpusInfo = "https://code.visualstudio.com/"
Append = "/as"
FileDigest = "/fd `"SHA256`""
PageHash = "/NPH"
TimeStamp = "/tr `"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer`" /td sha256"
}
ToolName = "sign"
ToolVersion = "1.0"
},
@{
KeyCode = "CP-230012"
OperationCode = "SigntoolVerify"
Parameters = @{
VerifyAll = "/all"
}
ToolName = "sign"
ToolVersion = "1.0"
}
)
}
}
)
}
$Output = [System.IO.Path]::GetTempFileName()
$ScriptPath = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent
& "$ScriptPath\ESRPClient\packages\Microsoft.ESRPClient.*\tools\ESRPClient.exe" Sign -a $Auth -p $Policy -i $Input -o $Output

View File

@@ -1,7 +1,7 @@
steps: steps:
- task: NodeTool@0 - task: NodeTool@0
inputs: inputs:
versionSpec: "12.13.0" versionSpec: "14.x"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 - task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
inputs: inputs:

View File

@@ -1,7 +1,7 @@
steps: steps:
- task: NodeTool@0 - task: NodeTool@0
inputs: inputs:
versionSpec: "12.13.0" versionSpec: "14.x"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 - task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
inputs: inputs:

View File

@@ -29,7 +29,6 @@ app.once('ready', () => {
webPreferences: { webPreferences: {
nodeIntegration: true, nodeIntegration: true,
contextIsolation: false, contextIsolation: false,
webviewTag: true,
enableWebSQL: false, enableWebSQL: false,
nativeWindowOpen: true nativeWindowOpen: true
} }

View File

@@ -4,7 +4,8 @@
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
'use strict'; 'use strict';
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
const vscode_universal_1 = require("vscode-universal"); const vscode_universal_bundler_1 = require("vscode-universal-bundler");
const cross_spawn_promise_1 = require("@malept/cross-spawn-promise");
const fs = require("fs-extra"); const fs = require("fs-extra");
const path = require("path"); const path = require("path");
const plist = require("plist"); const plist = require("plist");
@@ -23,7 +24,7 @@ async function main() {
const outAppPath = path.join(buildDir, `VSCode-darwin-${arch}`, appName); const outAppPath = path.join(buildDir, `VSCode-darwin-${arch}`, appName);
const productJsonPath = path.resolve(outAppPath, 'Contents', 'Resources', 'app', 'product.json'); const productJsonPath = path.resolve(outAppPath, 'Contents', 'Resources', 'app', 'product.json');
const infoPlistPath = path.resolve(outAppPath, 'Contents', 'Info.plist'); const infoPlistPath = path.resolve(outAppPath, 'Contents', 'Info.plist');
await vscode_universal_1.makeUniversalApp({ await (0, vscode_universal_bundler_1.makeUniversalApp)({
x64AppPath, x64AppPath,
arm64AppPath, arm64AppPath,
x64AsarPath, x64AsarPath,
@@ -50,6 +51,12 @@ async function main() {
LSRequiresNativeExecution: true LSRequiresNativeExecution: true
}); });
await fs.writeFile(infoPlistPath, plist.build(infoPlistJson), 'utf8'); await fs.writeFile(infoPlistPath, plist.build(infoPlistJson), 'utf8');
// Verify if native module architecture is correct
const findOutput = await (0, cross_spawn_promise_1.spawn)('find', [outAppPath, '-name', 'keytar.node']);
const lipoOutput = await (0, cross_spawn_promise_1.spawn)('lipo', ['-archs', findOutput.replace(/\n$/, "")]);
if (lipoOutput.replace(/\n$/, "") !== 'x86_64 arm64') {
throw new Error(`Invalid arch, got : ${lipoOutput}`);
}
} }
if (require.main === module) { if (require.main === module) {
main().catch(err => { main().catch(err => {

View File

@@ -5,7 +5,8 @@
'use strict'; 'use strict';
import { makeUniversalApp } from 'vscode-universal'; import { makeUniversalApp } from 'vscode-universal-bundler';
import { spawn } from '@malept/cross-spawn-promise';
import * as fs from 'fs-extra'; import * as fs from 'fs-extra';
import * as path from 'path'; import * as path from 'path';
import * as plist from 'plist'; import * as plist from 'plist';
@@ -57,6 +58,13 @@ async function main() {
LSRequiresNativeExecution: true LSRequiresNativeExecution: true
}); });
await fs.writeFile(infoPlistPath, plist.build(infoPlistJson), 'utf8'); await fs.writeFile(infoPlistPath, plist.build(infoPlistJson), 'utf8');
// Verify if native module architecture is correct
const findOutput = await spawn('find', [outAppPath, '-name', 'keytar.node'])
const lipoOutput = await spawn('lipo', ['-archs', findOutput.replace(/\n$/, "")]);
if (lipoOutput.replace(/\n$/, "") !== 'x86_64 arm64') {
throw new Error(`Invalid arch, got : ${lipoOutput}`)
}
} }
if (require.main === module) { if (require.main === module) {
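The keytar check above hard-codes a single module. As a hedged illustration only (not part of this commit), the same pattern could be wrapped in a small helper; the name `assertUniversalNativeModule` and its error message are hypothetical:

```
import { spawn } from '@malept/cross-spawn-promise';

// Hypothetical helper mirroring the keytar.node check above: assert that a bundled
// native module is a universal (x86_64 + arm64) Mach-O binary.
async function assertUniversalNativeModule(appPath: string, moduleFile: string): Promise<void> {
	// `find` prints the matching path with a trailing newline, so trim it before reuse.
	const modulePath = (await spawn('find', [appPath, '-name', moduleFile])).trim();
	// `lipo -archs` lists the architectures contained in the binary, e.g. "x86_64 arm64".
	const archs = (await spawn('lipo', ['-archs', modulePath])).trim();
	if (archs !== 'x86_64 arm64') {
		throw new Error(`${moduleFile} is not a universal binary, got: ${archs}`);
	}
}
```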

View File

@@ -22,6 +22,10 @@ module.exports.all = [
'!out*/**', '!out*/**',
'!test/**/out/**', '!test/**/out/**',
'!**/node_modules/**', '!**/node_modules/**',
// {{SQL CARBON EDIT}}
'!build/actions/**/*.js',
'!build/**/*'
]; ];
module.exports.indentationFilter = [ module.exports.indentationFilter = [
@@ -79,7 +83,7 @@ module.exports.indentationFilter = [
'!src/typings/**/*.d.ts', '!src/typings/**/*.d.ts',
'!extensions/**/*.d.ts', '!extensions/**/*.d.ts',
'!**/*.{svg,exe,png,bmp,jpg,scpt,bat,cmd,cur,ttf,woff,eot,md,ps1,template,yaml,yml,d.ts.recipe,ico,icns,plist}', '!**/*.{svg,exe,png,bmp,jpg,scpt,bat,cmd,cur,ttf,woff,eot,md,ps1,template,yaml,yml,d.ts.recipe,ico,icns,plist}',
'!build/{lib,download,darwin}/**/*.js', '!build/{lib,download,linux,darwin}/**/*.js',
'!build/**/*.sh', '!build/**/*.sh',
'!build/azure-pipelines/**/*.js', '!build/azure-pipelines/**/*.js',
'!build/azure-pipelines/**/*.config', '!build/azure-pipelines/**/*.config',
@@ -91,6 +95,31 @@ module.exports.indentationFilter = [
'!extensions/markdown-language-features/notebook-out/*.js', '!extensions/markdown-language-features/notebook-out/*.js',
'!extensions/markdown-math/notebook-out/*.js', '!extensions/markdown-math/notebook-out/*.js',
'!extensions/simple-browser/media/*.js', '!extensions/simple-browser/media/*.js',
// {{SQL CARBON EDIT}} Except for our stuff
'!**/*.gif',
'!build/actions/**/*.js',
'!**/*.{xlf,lcl,docx,sql,vsix,bacpac,ipynb,jpg}',
'!extensions/mssql/sqltoolsservice/**',
'!extensions/import/flatfileimportservice/**',
'!extensions/admin-tool-ext-win/ssmsmin/**',
'!extensions/resource-deployment/notebooks/**',
'!extensions/mssql/notebooks/**',
'!extensions/azurehybridtoolkit/notebooks/**',
'!extensions/integration-tests/testData/**',
'!extensions/arc/src/controller/generated/**',
'!extensions/sql-database-projects/resources/templates/*.xml',
'!extensions/sql-database-projects/src/test/baselines/*.xml',
'!extensions/sql-database-projects/src/test/baselines/*.json',
'!extensions/sql-database-projects/src/test/baselines/*.sqlproj',
'!extensions/sql-database-projects/BuildDirectory/SystemDacpacs/**',
'!extensions/big-data-cluster/src/bigDataCluster/controller/apiGenerated.ts',
'!extensions/big-data-cluster/src/bigDataCluster/controller/clusterApiGenerated2.ts',
'!resources/linux/snap/electron-launch',
'!extensions/markdown-language-features/media/*.js',
'!extensions/simple-browser/media/*.js',
'!resources/xlf/LocProject.json',
'!build/**/*'
]; ];
module.exports.copyrightFilter = [ module.exports.copyrightFilter = [
@@ -113,6 +142,7 @@ module.exports.copyrightFilter = [
'!**/*.code-workspace', '!**/*.code-workspace',
'!**/*.js.map', '!**/*.js.map',
'!build/**/*.init', '!build/**/*.init',
'!build/linux/libcxx-fetcher.*',
'!resources/linux/snap/snapcraft.yaml', '!resources/linux/snap/snapcraft.yaml',
'!resources/win32/bin/code.js', '!resources/win32/bin/code.js',
'!resources/web/code-web.js', '!resources/web/code-web.js',
@@ -123,6 +153,47 @@ module.exports.copyrightFilter = [
'!extensions/html-language-features/server/src/modes/typescript/*', '!extensions/html-language-features/server/src/modes/typescript/*',
'!extensions/*/server/bin/*', '!extensions/*/server/bin/*',
'!src/vs/editor/test/node/classification/typescript-test.ts', '!src/vs/editor/test/node/classification/typescript-test.ts',
// {{SQL CARBON EDIT}} Except for stuff in our code that doesn't use our copyright
'!extensions/azurehybridtoolkit/notebooks/**',
'!extensions/azuremonitor/src/prompts/**',
'!extensions/import/flatfileimportservice/**',
'!extensions/kusto/src/prompts/**',
'!extensions/mssql/sqltoolsservice/**',
'!extensions/mssql/src/hdfs/webhdfs.ts',
'!extensions/mssql/src/prompts/**',
'!extensions/notebook/resources/jupyter_config/**',
'!extensions/notebook/src/intellisense/text.ts',
'!extensions/notebook/src/prompts/**',
'!extensions/query-history/images/**',
'!extensions/sql/build/update-grammar.js',
'!src/sql/workbench/contrib/notebook/browser/outputs/tableRenderers.ts',
'!src/sql/workbench/contrib/notebook/common/models/url.ts',
'!src/sql/workbench/services/notebook/browser/outputs/renderMimeInterfaces.ts',
'!src/sql/workbench/contrib/notebook/browser/models/outputProcessor.ts',
'!src/sql/workbench/services/notebook/browser/outputs/mimemodel.ts',
'!src/sql/workbench/contrib/notebook/browser/cellViews/media/*.css',
'!src/sql/base/browser/ui/table/plugins/rowSelectionModel.plugin.ts',
'!src/sql/base/browser/ui/table/plugins/rowDetailView.ts',
'!src/sql/base/browser/ui/table/plugins/headerFilter.plugin.ts',
'!src/sql/base/browser/ui/table/plugins/checkboxSelectColumn.plugin.ts',
'!src/sql/base/browser/ui/table/plugins/cellSelectionModel.plugin.ts',
'!src/sql/base/browser/ui/table/plugins/autoSizeColumns.plugin.ts',
'!src/sql/workbench/services/notebook/browser/outputs/sanitizer.ts',
'!src/sql/workbench/contrib/notebook/browser/outputs/renderers.ts',
'!src/sql/workbench/services/notebook/browser/outputs/tableRenderers.ts',
'!src/sql/workbench/services/notebook/browser/outputs/registry.ts',
'!src/sql/workbench/services/notebook/browser/outputs/factories.ts',
'!src/sql/workbench/services/notebook/common/nbformat.ts',
'!extensions/markdown-language-features/media/tomorrow.css',
'!src/sql/workbench/browser/modelComponents/media/highlight.css',
'!src/sql/workbench/contrib/notebook/electron-browser/cellViews/media/highlight.css',
'!src/sql/workbench/contrib/notebook/browser/turndownPluginGfm.ts',
'!**/*.gif',
'!**/*.xlf',
'!**/*.dacpac',
'!**/*.bacpac',
'!**/*.py'
]; ];
module.exports.jsHygieneFilter = [ module.exports.jsHygieneFilter = [
@@ -137,6 +208,7 @@ module.exports.jsHygieneFilter = [
'!src/**/marked.js', '!src/**/marked.js',
'!src/**/semver.js', '!src/**/semver.js',
'!**/test/**', '!**/test/**',
'!build/**/*' // {{SQL CARBON EDIT}}
]; ];
module.exports.tsHygieneFilter = [ module.exports.tsHygieneFilter = [
@@ -154,4 +226,11 @@ module.exports.tsHygieneFilter = [
'!extensions/vscode-api-tests/testWorkspace2/**', '!extensions/vscode-api-tests/testWorkspace2/**',
'!extensions/**/*.test.ts', '!extensions/**/*.test.ts',
'!extensions/html-language-features/server/lib/jquery.d.ts', '!extensions/html-language-features/server/lib/jquery.d.ts',
// {{SQL CARBON EDIT}}
'!extensions/big-data-cluster/src/bigDataCluster/controller/apiGenerated.ts',
'!extensions/big-data-cluster/src/bigDataCluster/controller/tokenApiGenerated.ts',
'!src/vs/workbench/services/themes/common/textMateScopeMatcher.ts', // skip this because we have no plans on touching this and its not ours
'!src/vs/workbench/contrib/extensions/browser/extensionRecommendationsService.ts', // skip this because known issue
'!build/**/*'
]; ];

View File

@@ -14,6 +14,7 @@ const compilation = require('./lib/compilation');
const compileBuildTask = task.define('compile-build', const compileBuildTask = task.define('compile-build',
task.series( task.series(
util.rimraf('out-build'), util.rimraf('out-build'),
util.buildWebNodePaths('out-build'),
compilation.compileTask('src', 'out-build', true) compilation.compileTask('src', 'out-build', true)
) )
); );

View File

@@ -35,42 +35,42 @@ const compilations = glob.sync('**/tsconfig.json', {
ignore: ['**/out/**', '**/node_modules/**'] ignore: ['**/out/**', '**/node_modules/**']
}); });
// const compilations = [ // const compilations = [
// 'configuration-editing/build/tsconfig.json', // 'configuration-editing/build/tsconfig.json',
// 'configuration-editing/tsconfig.json', // 'configuration-editing/tsconfig.json',
// 'css-language-features/client/tsconfig.json', // 'css-language-features/client/tsconfig.json',
// 'css-language-features/server/tsconfig.json', // 'css-language-features/server/tsconfig.json',
// 'debug-auto-launch/tsconfig.json', // 'debug-auto-launch/tsconfig.json',
// 'debug-server-ready/tsconfig.json', // 'debug-server-ready/tsconfig.json',
// 'emmet/tsconfig.json', // 'emmet/tsconfig.json',
// 'extension-editing/tsconfig.json', // 'extension-editing/tsconfig.json',
// 'git/tsconfig.json', // 'git/tsconfig.json',
// 'github-authentication/tsconfig.json', // 'github-authentication/tsconfig.json',
// 'github/tsconfig.json', // 'github/tsconfig.json',
// 'grunt/tsconfig.json', // 'grunt/tsconfig.json',
// 'gulp/tsconfig.json', // 'gulp/tsconfig.json',
// 'html-language-features/client/tsconfig.json', // 'html-language-features/client/tsconfig.json',
// 'html-language-features/server/tsconfig.json', // 'html-language-features/server/tsconfig.json',
// 'image-preview/tsconfig.json', // 'image-preview/tsconfig.json',
// 'jake/tsconfig.json', // 'ipynb/tsconfig.json',
// 'json-language-features/client/tsconfig.json', // 'jake/tsconfig.json',
// 'json-language-features/server/tsconfig.json', // 'json-language-features/client/tsconfig.json',
// 'markdown-language-features/preview-src/tsconfig.json', // 'json-language-features/server/tsconfig.json',
// 'markdown-language-features/tsconfig.json', // 'markdown-language-features/preview-src/tsconfig.json',
// 'markdown-math/tsconfig.json', // 'markdown-language-features/tsconfig.json',
// 'merge-conflict/tsconfig.json', // 'markdown-math/tsconfig.json',
// 'microsoft-authentication/tsconfig.json', // 'merge-conflict/tsconfig.json',
// 'npm/tsconfig.json', // 'microsoft-authentication/tsconfig.json',
// 'php-language-features/tsconfig.json', // 'npm/tsconfig.json',
// 'search-result/tsconfig.json', // 'php-language-features/tsconfig.json',
// 'simple-browser/tsconfig.json', // 'search-result/tsconfig.json',
// 'testing-editor-contributions/tsconfig.json', // 'simple-browser/tsconfig.json',
// 'typescript-language-features/test-workspace/tsconfig.json', // 'typescript-language-features/test-workspace/tsconfig.json',
// 'typescript-language-features/tsconfig.json', // 'typescript-language-features/tsconfig.json',
// 'vscode-api-tests/tsconfig.json', // 'vscode-api-tests/tsconfig.json',
// 'vscode-colorize-tests/tsconfig.json', // 'vscode-colorize-tests/tsconfig.json',
// 'vscode-custom-editor-tests/tsconfig.json', // 'vscode-custom-editor-tests/tsconfig.json',
// 'vscode-notebook-tests/tsconfig.json', // 'vscode-notebook-tests/tsconfig.json',
// 'vscode-test-resolver/tsconfig.json' // 'vscode-test-resolver/tsconfig.json'
// ]; // ];
const getBaseUrl = out => `https://sqlopsbuilds.blob.core.windows.net/sourcemaps/${commit}/${out}`; const getBaseUrl = out => `https://sqlopsbuilds.blob.core.windows.net/sourcemaps/${commit}/${out}`;

View File

@@ -16,10 +16,10 @@ const { monacoTypecheckTask/* , monacoTypecheckWatchTask */ } = require('./gulpf
const { compileExtensionsTask, watchExtensionsTask, compileExtensionMediaTask } = require('./gulpfile.extensions'); const { compileExtensionsTask, watchExtensionsTask, compileExtensionMediaTask } = require('./gulpfile.extensions');
// Fast compile for development time // Fast compile for development time
const compileClientTask = task.define('compile-client', task.series(util.rimraf('out'), compilation.compileTask('src', 'out', false))); const compileClientTask = task.define('compile-client', task.series(util.rimraf('out'), util.buildWebNodePaths('out'), compilation.compileTask('src', 'out', false)));
gulp.task(compileClientTask); gulp.task(compileClientTask);
const watchClientTask = task.define('watch-client', task.series(util.rimraf('out'), compilation.watchTask('out', false))); const watchClientTask = task.define('watch-client', task.series(util.rimraf('out'), util.buildWebNodePaths('out'), compilation.watchTask('out', false)));
gulp.task(watchClientTask); gulp.task(watchClientTask);
// All // All

build/gulpfile.scan.js (new file, 104 lines)
View File

@@ -0,0 +1,104 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
const gulp = require('gulp');
const path = require('path');
const task = require('./lib/task');
const util = require('./lib/util');
const _ = require('underscore');
const electron = require('gulp-atom-electron');
const { config } = require('./lib/electron');
const filter = require('gulp-filter');
const deps = require('./lib/dependencies');
const root = path.dirname(__dirname);
const BUILD_TARGETS = [
{ platform: 'win32', arch: 'ia32' },
{ platform: 'win32', arch: 'x64' },
{ platform: 'win32', arch: 'arm64' },
{ platform: 'darwin', arch: null, opts: { stats: true } },
{ platform: 'linux', arch: 'ia32' },
{ platform: 'linux', arch: 'x64' },
{ platform: 'linux', arch: 'armhf' },
{ platform: 'linux', arch: 'arm64' },
];
BUILD_TARGETS.forEach(buildTarget => {
const dashed = (str) => (str ? `-${str}` : ``);
const platform = buildTarget.platform;
const arch = buildTarget.arch;
const destinationExe = path.join(path.dirname(root), 'scanbin', `VSCode${dashed(platform)}${dashed(arch)}`, 'bin');
const destinationPdb = path.join(path.dirname(root), 'scanbin', `VSCode${dashed(platform)}${dashed(arch)}`, 'pdb');
const tasks = [];
// removal tasks
tasks.push(util.rimraf(destinationExe), util.rimraf(destinationPdb));
// electron
tasks.push(() => electron.dest(destinationExe, _.extend({}, config, { platform, arch: arch === 'armhf' ? 'arm' : arch })));
// pdbs for windows
if (platform === 'win32') {
tasks.push(
() => electron.dest(destinationPdb, _.extend({}, config, { platform, arch: arch === 'armhf' ? 'arm' : arch, pdbs: true })),
util.rimraf(path.join(destinationExe, 'swiftshader')),
util.rimraf(path.join(destinationExe, 'd3dcompiler_47.dll')));
}
if (platform === 'linux') {
tasks.push(
() => electron.dest(destinationPdb, _.extend({}, config, { platform, arch: arch === 'armhf' ? 'arm' : arch, symbols: true }))
);
}
// node modules
tasks.push(
nodeModules(destinationExe, destinationPdb, platform)
);
const setupSymbolsTask = task.define(`vscode-symbols${dashed(platform)}${dashed(arch)}`,
task.series(...tasks)
);
gulp.task(setupSymbolsTask);
});
function nodeModules(destinationExe, destinationPdb, platform) {
const productionDependencies = deps.getProductionDependencies(root);
const dependenciesSrc = _.flatten(productionDependencies.map(d => path.relative(root, d.path)).map(d => [`${d}/**`, `!${d}/**/{test,tests}/**`]));
const exe = () => {
return gulp.src(dependenciesSrc, { base: '.', dot: true })
.pipe(filter(['**/*.node']))
.pipe(gulp.dest(destinationExe));
};
if (platform === 'win32') {
const pdb = () => {
return gulp.src(dependenciesSrc, { base: '.', dot: true })
.pipe(filter(['**/*.pdb']))
.pipe(gulp.dest(destinationPdb));
};
return gulp.parallel(exe, pdb);
}
if (platform === 'linux') {
const pdb = () => {
return gulp.src(dependenciesSrc, { base: '.', dot: true })
.pipe(filter(['**/*.sym']))
.pipe(gulp.dest(destinationPdb));
};
return gulp.parallel(exe, pdb);
}
return exe;
}
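For orientation, the `BUILD_TARGETS` loop above registers one symbol-collection task per platform/arch pair. A minimal sketch of the resulting task names, reproducing the `dashed` naming scheme (illustrative only, not part of the commit):

```
// Illustrative only: reproduces the task-name scheme used by gulpfile.scan.js.
const dashed = (str: string | null) => (str ? `-${str}` : ``);
const targets: Array<{ platform: string; arch: string | null }> = [
	{ platform: 'win32', arch: 'x64' },
	{ platform: 'darwin', arch: null },
	{ platform: 'linux', arch: 'x64' },
];
// Yields: vscode-symbols-win32-x64, vscode-symbols-darwin, vscode-symbols-linux-x64
console.log(targets.map(t => `vscode-symbols${dashed(t.platform)}${dashed(t.arch)}`));
```

These are the task names the scan pipeline invokes above via `yarn gulp "vscode-symbols-win32-$(VSCODE_ARCH)"` and `yarn gulp vscode-symbols-linux-$(VSCODE_ARCH)`.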

View File

@@ -38,6 +38,7 @@ const vscodeEntryPoints = _.flatten([
buildfile.base, buildfile.base,
buildfile.workerExtensionHost, buildfile.workerExtensionHost,
buildfile.workerNotebook, buildfile.workerNotebook,
buildfile.workerLanguageDetection,
buildfile.workbenchDesktop, buildfile.workbenchDesktop,
buildfile.code buildfile.code
]); ]);
@@ -64,8 +65,6 @@ const vscodeResources = [
'out-build/vs/workbench/contrib/debug/**/*.json', 'out-build/vs/workbench/contrib/debug/**/*.json',
'out-build/vs/workbench/contrib/externalTerminal/**/*.scpt', 'out-build/vs/workbench/contrib/externalTerminal/**/*.scpt',
'out-build/vs/workbench/contrib/webview/browser/pre/*.js', 'out-build/vs/workbench/contrib/webview/browser/pre/*.js',
'out-build/vs/workbench/contrib/webview/electron-browser/pre/*.js',
'out-build/vs/workbench/services/extensions/worker/extensionHostWorkerMain.js',
'out-build/vs/**/markdown.css', 'out-build/vs/**/markdown.css',
'out-build/vs/workbench/contrib/tasks/**/*.json', 'out-build/vs/workbench/contrib/tasks/**/*.json',
'out-build/vs/platform/files/**/*.exe', 'out-build/vs/platform/files/**/*.exe',
@@ -288,7 +287,7 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op
'**/node-pty/build/Release/*', '**/node-pty/build/Release/*',
'**/node-pty/lib/worker/conoutSocketWorker.js', '**/node-pty/lib/worker/conoutSocketWorker.js',
'**/node-pty/lib/shared/conout.js', '**/node-pty/lib/shared/conout.js',
'**/*.wasm' '**/*.wasm',
], 'node_modules.asar')); ], 'node_modules.asar'));
let all = es.merge( let all = es.merge(

View File

@@ -27,7 +27,7 @@ const zipPath = arch => path.join(zipDir(arch), `azuredatastudio-win32-${arch}.z
const setupDir = (arch, target) => path.join(repoPath, '.build', `win32-${arch}`, `${target}-setup`); const setupDir = (arch, target) => path.join(repoPath, '.build', `win32-${arch}`, `${target}-setup`);
const issPath = path.join(__dirname, 'win32', 'code.iss'); const issPath = path.join(__dirname, 'win32', 'code.iss');
const innoSetupPath = path.join(path.dirname(path.dirname(require.resolve('innosetup'))), 'bin', 'ISCC.exe'); const innoSetupPath = path.join(path.dirname(path.dirname(require.resolve('innosetup'))), 'bin', 'ISCC.exe');
const signPS1 = path.join(repoPath, 'build', 'azure-pipelines', 'win32', 'sign.ps1'); const signWin32Path = path.join(repoPath, 'build', 'azure-pipelines', 'common', 'sign-win32');
function packageInnoSetup(iss, options, cb) { function packageInnoSetup(iss, options, cb) {
options = options || {}; options = options || {};
@@ -50,7 +50,7 @@ function packageInnoSetup(iss, options, cb) {
const args = [ const args = [
iss, iss,
...defs, ...defs,
`/sesrp=powershell.exe -ExecutionPolicy bypass ${signPS1} $f` `/sesrp=node ${signWin32Path} $f`
]; ];
cp.spawn(innoSetupPath, args, { stdio: ['ignore', 'inherit', 'inherit'] }) cp.spawn(innoSetupPath, args, { stdio: ['ignore', 'inherit', 'inherit'] })

View File

@@ -12,221 +12,7 @@ const vfs = require('vinyl-fs');
const path = require('path'); const path = require('path');
const fs = require('fs'); const fs = require('fs');
const pall = require('p-all'); const pall = require('p-all');
const { all, copyrightFilter, indentationFilter, jsHygieneFilter, tsHygieneFilter } = require('./filters');
/**
* Hygiene works by creating cascading subsets of all our files and
* passing them through a sequence of checks. Here are the current subsets,
* named according to the checks performed on them. Each subset contains
* the following one, as described in mathematical notation:
*
* all ⊃ eol ⊇ indentation ⊃ copyright ⊃ typescript
*/
const all = [
'*',
'extensions/**/*',
'scripts/**/*',
'src/**/*',
'test/**/*',
'!test/**/out/**',
'!**/node_modules/**',
'!build/actions/**/*.js', // {{SQL CARBON EDIT}}
'!build/**/*' // {{SQL CARBON EDIT}}
];
module.exports.all = all;
const indentationFilter = [
'**',
// except specific files
'!**/ThirdPartyNotices.txt',
'!**/LICENSE.{txt,rtf}',
'!LICENSES.chromium.html',
'!**/LICENSE',
'!src/vs/nls.js',
'!src/vs/nls.build.js',
'!src/vs/css.js',
'!src/vs/css.build.js',
'!src/vs/loader.js',
'!src/vs/base/common/insane/insane.js',
'!src/vs/base/common/marked/marked.js',
'!src/vs/base/common/semver/semver.js',
'!src/vs/base/node/terminateProcess.sh',
'!src/vs/base/node/cpuUsage.sh',
'!test/unit/assert.js',
'!resources/linux/snap/electron-launch',
// except specific folders
'!test/automation/out/**',
'!test/smoke/out/**',
'!extensions/typescript-language-features/test-workspace/**',
'!extensions/vscode-api-tests/testWorkspace/**',
'!extensions/vscode-api-tests/testWorkspace2/**',
'!build/monaco/**',
'!build/win32/**',
// except multiple specific files
'!**/package.json',
'!**/yarn.lock',
'!**/yarn-error.log',
// except multiple specific folders
'!**/codicon/**',
'!**/fixtures/**',
'!**/lib/**',
'!extensions/**/out/**',
'!extensions/**/snippets/**',
'!extensions/**/syntaxes/**',
'!extensions/**/themes/**',
'!extensions/**/colorize-fixtures/**',
// except specific file types
'!src/vs/*/**/*.d.ts',
'!src/typings/**/*.d.ts',
'!extensions/**/*.d.ts',
'!**/*.{svg,exe,png,bmp,jpg,scpt,bat,cmd,cur,ttf,woff,eot,md,ps1,template,yaml,yml,d.ts.recipe,ico,icns,plist}',
'!build/{lib,download,darwin}/**/*.js',
'!build/**/*.sh',
'!build/azure-pipelines/**/*.js',
'!build/azure-pipelines/**/*.config',
'!**/Dockerfile',
'!**/Dockerfile.*',
'!**/*.Dockerfile',
'!**/*.dockerfile',
'!extensions/markdown-language-features/media/*.js',
// {{SQL CARBON EDIT}}
'!**/*.gif',
'!build/actions/**/*.js',
'!**/*.{xlf,lcl,docx,sql,vsix,bacpac,ipynb,jpg}',
'!extensions/mssql/sqltoolsservice/**',
'!extensions/import/flatfileimportservice/**',
'!extensions/admin-tool-ext-win/ssmsmin/**',
'!extensions/resource-deployment/notebooks/**',
'!extensions/mssql/notebooks/**',
'!extensions/azurehybridtoolkit/notebooks/**',
'!extensions/integration-tests/testData/**',
'!extensions/arc/src/controller/generated/**',
'!extensions/sql-database-projects/resources/templates/*.xml',
'!extensions/sql-database-projects/src/test/baselines/*.xml',
'!extensions/sql-database-projects/src/test/baselines/*.json',
'!extensions/sql-database-projects/src/test/baselines/*.sqlproj',
'!extensions/sql-database-projects/BuildDirectory/SystemDacpacs/**',
'!extensions/big-data-cluster/src/bigDataCluster/controller/apiGenerated.ts',
'!extensions/big-data-cluster/src/bigDataCluster/controller/clusterApiGenerated2.ts',
'!resources/linux/snap/electron-launch',
'!extensions/markdown-language-features/media/*.js',
'!extensions/simple-browser/media/*.js',
'!resources/xlf/LocProject.json', // {{SQL CARBON EDIT}}
'!build/**/*' // {{SQL CARBON EDIT}}
];
const copyrightFilter = [
'**',
'!**/*.desktop',
'!**/*.json',
'!**/*.html',
'!**/*.template',
'!**/*.md',
'!**/*.bat',
'!**/*.cmd',
'!**/*.ico',
'!**/*.icns',
'!**/*.xml',
'!**/*.sh',
'!**/*.txt',
'!**/*.xpm',
'!**/*.opts',
'!**/*.disabled',
'!**/*.code-workspace',
'!**/*.js.map',
'!build/**/*.init',
'!resources/linux/snap/snapcraft.yaml',
'!resources/win32/bin/code.js',
'!resources/web/code-web.js',
'!resources/completions/**',
'!extensions/configuration-editing/build/inline-allOf.ts',
'!extensions/markdown-language-features/media/highlight.css',
'!extensions/html-language-features/server/src/modes/typescript/*',
'!extensions/*/server/bin/*',
'!src/vs/editor/test/node/classification/typescript-test.ts',
'!scripts/code-web.js',
'!resources/serverless/code-web.js',
'!src/vs/editor/test/node/classification/typescript-test.ts',
// {{SQL CARBON EDIT}}
'!extensions/notebook/src/intellisense/text.ts',
'!extensions/mssql/src/hdfs/webhdfs.ts',
'!src/sql/workbench/contrib/notebook/browser/outputs/tableRenderers.ts',
'!src/sql/workbench/contrib/notebook/common/models/url.ts',
'!src/sql/workbench/services/notebook/browser/outputs/renderMimeInterfaces.ts',
'!src/sql/workbench/contrib/notebook/browser/models/outputProcessor.ts',
'!src/sql/workbench/services/notebook/browser/outputs/mimemodel.ts',
'!src/sql/workbench/contrib/notebook/browser/cellViews/media/*.css',
'!src/sql/base/browser/ui/table/plugins/rowSelectionModel.plugin.ts',
'!src/sql/base/browser/ui/table/plugins/rowDetailView.ts',
'!src/sql/base/browser/ui/table/plugins/headerFilter.plugin.ts',
'!src/sql/base/browser/ui/table/plugins/checkboxSelectColumn.plugin.ts',
'!src/sql/base/browser/ui/table/plugins/cellSelectionModel.plugin.ts',
'!src/sql/base/browser/ui/table/plugins/autoSizeColumns.plugin.ts',
'!src/sql/workbench/services/notebook/browser/outputs/sanitizer.ts',
'!src/sql/workbench/contrib/notebook/browser/outputs/renderers.ts',
'!src/sql/workbench/services/notebook/browser/outputs/registry.ts',
'!src/sql/workbench/services/notebook/browser/outputs/factories.ts',
'!src/sql/workbench/services/notebook/common/nbformat.ts',
'!extensions/markdown-language-features/media/tomorrow.css',
'!src/sql/workbench/browser/modelComponents/media/highlight.css',
'!src/sql/workbench/contrib/notebook/electron-browser/cellViews/media/highlight.css',
'!src/sql/workbench/contrib/notebook/browser/turndownPluginGfm.ts',
'!extensions/mssql/sqltoolsservice/**',
'!extensions/import/flatfileimportservice/**',
'!extensions/notebook/src/prompts/**',
'!extensions/mssql/src/prompts/**',
'!extensions/kusto/src/prompts/**',
'!extensions/notebook/resources/jupyter_config/**',
'!extensions/azurehybridtoolkit/notebooks/**',
'!extensions/query-history/images/**',
'!extensions/sql/build/update-grammar.js',
'!**/*.gif',
'!**/*.xlf',
'!**/*.dacpac',
'!**/*.bacpac',
'!**/*.py'
];
const jsHygieneFilter = [
'src/**/*.js',
'build/gulpfile.*.js',
'!src/vs/loader.js',
'!src/vs/css.js',
'!src/vs/nls.js',
'!src/vs/css.build.js',
'!src/vs/nls.build.js',
'!src/**/insane.js',
'!src/**/marked.js',
'!src/**/semver.js',
'!**/test/**',
'!build/**/*' // {{SQL CARBON EDIT}}
];
module.exports.jsHygieneFilter = jsHygieneFilter;
const tsHygieneFilter = [
'src/**/*.ts',
'test/**/*.ts',
'extensions/**/*.ts',
'!**/fixtures/**',
'!**/typings/**',
'!**/node_modules/**',
'!extensions/typescript-basics/test/colorize-fixtures/**',
'!extensions/vscode-api-tests/testWorkspace/**',
'!extensions/vscode-api-tests/testWorkspace2/**',
'!extensions/**/*.test.ts',
'!extensions/html-language-features/server/lib/jquery.d.ts',
'!extensions/big-data-cluster/src/bigDataCluster/controller/apiGenerated.ts', // {{SQL CARBON EDIT}}
'!extensions/big-data-cluster/src/bigDataCluster/controller/tokenApiGenerated.ts', // {{SQL CARBON EDIT}}
'!src/vs/workbench/services/themes/common/textMateScopeMatcher.ts', // {{SQL CARBON EDIT}} skip this because we have no plans on touching this and its not ours
'!src/vs/workbench/contrib/extensions/browser/extensionRecommendationsService.ts', // {{SQL CARBON EDIT}} skip this because known issue
'!build/**/*' // {{SQL CARBON EDIT}}
];
module.exports.tsHygieneFilter = tsHygieneFilter;
const copyrightHeaderLines = [ const copyrightHeaderLines = [
'/*---------------------------------------------------------------------------------------------', '/*---------------------------------------------------------------------------------------------',

View File

@@ -25,7 +25,7 @@ async function downloadExtensionDetails(extension) {
const promises = []; const promises = [];
for (const fileName of contentFileNames) { for (const fileName of contentFileNames) {
promises.push(new Promise(resolve => { promises.push(new Promise(resolve => {
got_1.default(`${repositoryContentBaseUrl}/${fileName}`) (0, got_1.default)(`${repositoryContentBaseUrl}/${fileName}`)
.then(response => { .then(response => {
resolve({ fileName, body: response.rawBody }); resolve({ fileName, body: response.rawBody });
}) })

View File

@@ -17,7 +17,7 @@ const fancyLog = require("fancy-log");
const ansiColors = require("ansi-colors"); const ansiColors = require("ansi-colors");
const os = require("os"); const os = require("os");
const watch = require('./watch'); const watch = require('./watch');
const reporter = reporter_1.createReporter(); const reporter = (0, reporter_1.createReporter)();
function getTypeScriptCompilerOptions(src) { function getTypeScriptCompilerOptions(src) {
const rootDir = path.join(__dirname, `../../${src}`); const rootDir = path.join(__dirname, `../../${src}`);
let options = {}; let options = {};
@@ -37,6 +37,9 @@ function createCompile(src, build, emitError) {
const sourcemaps = require('gulp-sourcemaps'); const sourcemaps = require('gulp-sourcemaps');
const projectPath = path.join(__dirname, '../../', src, 'tsconfig.json'); const projectPath = path.join(__dirname, '../../', src, 'tsconfig.json');
const overrideOptions = Object.assign(Object.assign({}, getTypeScriptCompilerOptions(src)), { inlineSources: Boolean(build) }); const overrideOptions = Object.assign(Object.assign({}, getTypeScriptCompilerOptions(src)), { inlineSources: Boolean(build) });
if (!build) {
overrideOptions.inlineSourceMap = true;
}
const compilation = tsb.create(projectPath, overrideOptions, false, err => reporter(err)); const compilation = tsb.create(projectPath, overrideOptions, false, err => reporter(err));
function pipeline(token) { function pipeline(token) {
const bom = require('gulp-bom'); const bom = require('gulp-bom');

View File

@@ -44,6 +44,9 @@ function createCompile(src: string, build: boolean, emitError?: boolean) {
const projectPath = path.join(__dirname, '../../', src, 'tsconfig.json'); const projectPath = path.join(__dirname, '../../', src, 'tsconfig.json');
const overrideOptions = { ...getTypeScriptCompilerOptions(src), inlineSources: Boolean(build) }; const overrideOptions = { ...getTypeScriptCompilerOptions(src), inlineSources: Boolean(build) };
if (!build) {
overrideOptions.inlineSourceMap = true;
}
const compilation = tsb.create(projectPath, overrideOptions, false, err => reporter(err)); const compilation = tsb.create(projectPath, overrideOptions, false, err => reporter(err));
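The effect of the new flag is easiest to see in the options object handed to `tsb.create`. A rough sketch of the assembly logic, assuming the base options come straight from the project's tsconfig (illustrative only):

```
// Illustrative only: mirrors how createCompile now assembles its tsb override options.
function buildOverrideOptions(baseOptions: Record<string, unknown>, build: boolean) {
	const overrideOptions: Record<string, unknown> = { ...baseOptions, inlineSources: Boolean(build) };
	if (!build) {
		// New in this change: development (non-build) compiles embed source maps inline.
		overrideOptions.inlineSourceMap = true;
	}
	return overrideOptions;
}
```

So a development compile now produces `{ ...base, inlineSources: false, inlineSourceMap: true }`, while production builds are unchanged.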

View File

@@ -11,19 +11,69 @@ const vfs = require("vinyl-fs");
const filter = require("gulp-filter"); const filter = require("gulp-filter");
const _ = require("underscore"); const _ = require("underscore");
const util = require("./util"); const util = require("./util");
function isDocumentSuffix(str) {
return str === 'document' || str === 'script' || str === 'file' || str === 'source code';
}
const root = path.dirname(path.dirname(__dirname)); const root = path.dirname(path.dirname(__dirname));
const product = JSON.parse(fs.readFileSync(path.join(root, 'product.json'), 'utf8')); const product = JSON.parse(fs.readFileSync(path.join(root, 'product.json'), 'utf8'));
const commit = util.getVersion(root); const commit = util.getVersion(root);
const darwinCreditsTemplate = product.darwinCredits && _.template(fs.readFileSync(path.join(root, product.darwinCredits), 'utf8')); const darwinCreditsTemplate = product.darwinCredits && _.template(fs.readFileSync(path.join(root, product.darwinCredits), 'utf8'));
function darwinBundleDocumentType(extensions, icon) { /**
* Generate a `DarwinDocumentType` given a list of file extensions, an icon name, and an optional suffix or file type name.
* @param extensions A list of file extensions, such as `['bat', 'cmd']`
* @param icon A sentence-cased file type name that matches the lowercase name of a darwin icon resource.
* For example, `'HTML'` instead of `'html'`, or `'Java'` instead of `'java'`.
* This parameter is lowercased before it is used to reference an icon file.
* @param nameOrSuffix An optional suffix or a string to use as the file type. If a suffix is provided,
* it is used with the icon parameter to generate a file type string. If nothing is provided,
* `'document'` is used with the icon parameter to generate a file type string.
*
* For example, if you call `darwinBundleDocumentType(..., 'HTML')`, the resulting file type is `"HTML document"`,
* and the `'html'` darwin icon is used.
*
* If you call `darwinBundleDocumentType(..., 'Javascript', 'file')`, the resulting file type is `"Javascript file"`,
* and the `'javascript'` darwin icon is used.
*
* If you call `darwinBundleDocumentType(..., 'bat', 'Windows command script')`, the file type is `"Windows command script"`,
* and the `'bat'` darwin icon is used.
*/
function darwinBundleDocumentType(extensions, icon, nameOrSuffix) {
// If given a suffix, generate a name from it. If not given anything, default to 'document'
if (isDocumentSuffix(nameOrSuffix) || !nameOrSuffix) {
nameOrSuffix = icon.charAt(0).toUpperCase() + icon.slice(1) + ' ' + (nameOrSuffix !== null && nameOrSuffix !== void 0 ? nameOrSuffix : 'document');
}
return { return {
name: product.nameLong + ' document', name: nameOrSuffix,
role: 'Editor', role: 'Editor',
ostypes: ['TEXT', 'utxt', 'TUTX', '****'], ostypes: ['TEXT', 'utxt', 'TUTX', '****'],
extensions: extensions, extensions: extensions,
iconFile: icon iconFile: 'resources/darwin/' + icon + '.icns'
}; };
} }
/**
* Generate several `DarwinDocumentType`s with unique names and a shared icon.
* @param types A map of file type names to their associated file extensions.
* @param icon A darwin icon resource to use. For example, `'HTML'` would refer to `resources/darwin/html.icns`
*
* Examples:
* ```
* darwinBundleDocumentTypes({ 'C header file': 'h', 'C source code': 'c' },'c')
* darwinBundleDocumentTypes({ 'React source code': ['jsx', 'tsx'] }, 'react')
* ```
*/
// {{SQL CARBON EDIT}} Remove unused
// function darwinBundleDocumentTypes(types: { [name: string]: string | string[] }, icon: string): DarwinDocumentType[] {
// return Object.keys(types).map((name: string): DarwinDocumentType => {
// const extensions = types[name];
// return {
// name: name,
// role: 'Editor',
// ostypes: ['TEXT', 'utxt', 'TUTX', '****'],
// extensions: Array.isArray(extensions) ? extensions : [extensions],
// iconFile: 'resources/darwin/' + icon + '.icns',
// } as DarwinDocumentType;
// });
// }
exports.config = { exports.config = {
version: util.getElectronVersion(), version: util.getElectronVersion(),
productAppName: product.nameLong, productAppName: product.nameLong,
@@ -35,7 +85,7 @@ exports.config = {
darwinHelpBookFolder: 'VS Code HelpBook', darwinHelpBookFolder: 'VS Code HelpBook',
darwinHelpBookName: 'VS Code HelpBook', darwinHelpBookName: 'VS Code HelpBook',
darwinBundleDocumentTypes: [ darwinBundleDocumentTypes: [
darwinBundleDocumentType(["csv", "json", "sqlplan", "sql", "xml"], 'resources/darwin/code_file.icns'), darwinBundleDocumentType(['csv', 'json', 'sqlplan', 'sql', 'xml'], 'code_file'),
], ],
darwinBundleURLTypes: [{ darwinBundleURLTypes: [{
role: 'Viewer', role: 'Viewer',
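For the Azure Data Studio entry above, the refactored helper now derives both the display name and the icon path from the icon argument. A hedged sketch of the document type that call produces, with field values inferred from the code shown above:

```
// Inferred from the code above: the result of
// darwinBundleDocumentType(['csv', 'json', 'sqlplan', 'sql', 'xml'], 'code_file')
const adsDocumentType = {
	name: 'Code_file document',                  // icon name capitalized plus the default 'document' suffix
	role: 'Editor',
	ostypes: ['TEXT', 'utxt', 'TUTX', '****'],
	extensions: ['csv', 'json', 'sqlplan', 'sql', 'xml'],
	iconFile: 'resources/darwin/code_file.icns'  // the helper now resolves the .icns path itself
};
```

Previously the call passed the full icon path explicitly and the name was always `product.nameLong + ' document'`.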

View File

@@ -12,22 +12,84 @@ import * as filter from 'gulp-filter';
import * as _ from 'underscore'; import * as _ from 'underscore';
import * as util from './util'; import * as util from './util';
type DarwinDocumentSuffix = 'document' | 'script' | 'file' | 'source code';
type DarwinDocumentType = {
name: string,
role: string,
ostypes: string[],
extensions: string[],
iconFile: string,
};
function isDocumentSuffix(str?: string): str is DarwinDocumentSuffix {
return str === 'document' || str === 'script' || str === 'file' || str === 'source code';
}
const root = path.dirname(path.dirname(__dirname)); const root = path.dirname(path.dirname(__dirname));
const product = JSON.parse(fs.readFileSync(path.join(root, 'product.json'), 'utf8')); const product = JSON.parse(fs.readFileSync(path.join(root, 'product.json'), 'utf8'));
const commit = util.getVersion(root); const commit = util.getVersion(root);
const darwinCreditsTemplate = product.darwinCredits && _.template(fs.readFileSync(path.join(root, product.darwinCredits), 'utf8')); const darwinCreditsTemplate = product.darwinCredits && _.template(fs.readFileSync(path.join(root, product.darwinCredits), 'utf8'));
function darwinBundleDocumentType(extensions: string[], icon: string) { /**
* Generate a `DarwinDocumentType` given a list of file extensions, an icon name, and an optional suffix or file type name.
* @param extensions A list of file extensions, such as `['bat', 'cmd']`
* @param icon A sentence-cased file type name that matches the lowercase name of a darwin icon resource.
* For example, `'HTML'` instead of `'html'`, or `'Java'` instead of `'java'`.
* This parameter is lowercased before it is used to reference an icon file.
* @param nameOrSuffix An optional suffix or a string to use as the file type. If a suffix is provided,
* it is used with the icon parameter to generate a file type string. If nothing is provided,
* `'document'` is used with the icon parameter to generate a file type string.
*
* For example, if you call `darwinBundleDocumentType(..., 'HTML')`, the resulting file type is `"HTML document"`,
* and the `'html'` darwin icon is used.
*
* If you call `darwinBundleDocumentType(..., 'Javascript', 'file')`, the resulting file type is `"Javascript file"`,
* and the `'javascript'` darwin icon is used.
*
* If you call `darwinBundleDocumentType(..., 'bat', 'Windows command script')`, the file type is `"Windows command script"`,
* and the `'bat'` darwin icon is used.
*/
function darwinBundleDocumentType(extensions: string[], icon: string, nameOrSuffix?: string | DarwinDocumentSuffix): DarwinDocumentType {
// If given a suffix, generate a name from it. If not given anything, default to 'document'
if (isDocumentSuffix(nameOrSuffix) || !nameOrSuffix) {
nameOrSuffix = icon.charAt(0).toUpperCase() + icon.slice(1) + ' ' + (nameOrSuffix ?? 'document');
}
return { return {
name: product.nameLong + ' document', name: nameOrSuffix,
role: 'Editor', role: 'Editor',
ostypes: ['TEXT', 'utxt', 'TUTX', '****'], ostypes: ['TEXT', 'utxt', 'TUTX', '****'],
extensions: extensions, extensions: extensions,
iconFile: icon iconFile: 'resources/darwin/' + icon + '.icns'
}; };
} }
/**
* Generate several `DarwinDocumentType`s with unique names and a shared icon.
* @param types A map of file type names to their associated file extensions.
* @param icon A darwin icon resource to use. For example, `'HTML'` would refer to `resources/darwin/html.icns`
*
* Examples:
* ```
* darwinBundleDocumentTypes({ 'C header file': 'h', 'C source code': 'c' },'c')
* darwinBundleDocumentTypes({ 'React source code': ['jsx', 'tsx'] }, 'react')
* ```
*/
// {{SQL CARBON EDIT}} Remove unused
// function darwinBundleDocumentTypes(types: { [name: string]: string | string[] }, icon: string): DarwinDocumentType[] {
// return Object.keys(types).map((name: string): DarwinDocumentType => {
// const extensions = types[name];
// return {
// name: name,
// role: 'Editor',
// ostypes: ['TEXT', 'utxt', 'TUTX', '****'],
// extensions: Array.isArray(extensions) ? extensions : [extensions],
// iconFile: 'resources/darwin/' + icon + '.icns',
// } as DarwinDocumentType;
// });
// }
export const config = { export const config = {
version: util.getElectronVersion(), version: util.getElectronVersion(),
productAppName: product.nameLong, productAppName: product.nameLong,
@@ -39,7 +101,7 @@ export const config = {
darwinHelpBookFolder: 'VS Code HelpBook', darwinHelpBookFolder: 'VS Code HelpBook',
darwinHelpBookName: 'VS Code HelpBook', darwinHelpBookName: 'VS Code HelpBook',
darwinBundleDocumentTypes: [ darwinBundleDocumentTypes: [
darwinBundleDocumentType(["csv", "json", "sqlplan", "sql", "xml"], 'resources/darwin/code_file.icns'), darwinBundleDocumentType(['csv', 'json', 'sqlplan', 'sql', 'xml'], 'code_file'),
], ],
darwinBundleURLTypes: [{ darwinBundleURLTypes: [{
role: 'Viewer', role: 'Viewer',
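
For reference, a minimal standalone sketch of how the reworked `darwinBundleDocumentType` helper above resolves its arguments. Only the `'code_file'` call exists in this commit; the `'markdown'` call and the `sketchDocumentType` name are hypothetical illustrations.

type DarwinDocumentSuffixSketch = 'document' | 'script' | 'file' | 'source code';

function sketchDocumentType(extensions: string[], icon: string, nameOrSuffix?: string | DarwinDocumentSuffixSketch) {
	const isSuffix = nameOrSuffix === 'document' || nameOrSuffix === 'script'
		|| nameOrSuffix === 'file' || nameOrSuffix === 'source code';
	// A known suffix (or no third argument) is appended to the capitalized icon name;
	// any other string is used verbatim as the document type name.
	const name = (isSuffix || !nameOrSuffix)
		? icon.charAt(0).toUpperCase() + icon.slice(1) + ' ' + (nameOrSuffix ?? 'document')
		: nameOrSuffix;
	return { name, role: 'Editor', extensions, iconFile: `resources/darwin/${icon}.icns` };
}

sketchDocumentType(['csv', 'json', 'sqlplan', 'sql', 'xml'], 'code_file');
// -> name 'Code_file document', iconFile 'resources/darwin/code_file.icns'
sketchDocumentType(['md'], 'markdown', 'file');
// -> name 'Markdown file', iconFile 'resources/darwin/markdown.icns'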


@@ -21,7 +21,7 @@ module.exports = new class {
const configs = context.options; const configs = context.options;
for (const config of configs) { for (const config of configs) {
if (minimatch(context.getFilename(), config.target)) { if (minimatch(context.getFilename(), config.target)) {
return utils_1.createImportRuleListener((node, value) => this._checkImport(context, config, node, value)); return (0, utils_1.createImportRuleListener)((node, value) => this._checkImport(context, config, node, value));
} }
} }
return {}; return {};
@@ -29,7 +29,7 @@ module.exports = new class {
_checkImport(context, config, node, path) { _checkImport(context, config, node, path) {
// resolve relative paths // resolve relative paths
if (path[0] === '.') { if (path[0] === '.') {
path = path_1.join(context.getFilename(), path); path = (0, path_1.join)(context.getFilename(), path);
} }
let restrictions; let restrictions;
if (typeof config.restrictions === 'string') { if (typeof config.restrictions === 'string') {


@@ -17,7 +17,7 @@ module.exports = new class {
}; };
} }
create(context) { create(context) {
const fileDirname = path_1.dirname(context.getFilename()); const fileDirname = (0, path_1.dirname)(context.getFilename());
const parts = fileDirname.split(/\\|\//); const parts = fileDirname.split(/\\|\//);
const ruleArgs = context.options[0]; const ruleArgs = context.options[0];
let config; let config;
@@ -39,11 +39,11 @@ module.exports = new class {
// nothing // nothing
return {}; return {};
} }
return utils_1.createImportRuleListener((node, path) => { return (0, utils_1.createImportRuleListener)((node, path) => {
if (path[0] === '.') { if (path[0] === '.') {
path = path_1.join(path_1.dirname(context.getFilename()), path); path = (0, path_1.join)((0, path_1.dirname)(context.getFilename()), path);
} }
const parts = path_1.dirname(path).split(/\\|\//); const parts = (0, path_1.dirname)(path).split(/\\|\//);
for (let i = parts.length - 1; i >= 0; i--) { for (let i = parts.length - 1; i >= 0; i--) {
const part = parts[i]; const part = parts[i];
if (config.allowed.has(part)) { if (config.allowed.has(part)) {


@@ -20,10 +20,10 @@ module.exports = new class NoNlsInStandaloneEditorRule {
|| /vs(\/|\\)editor(\/|\\)editor.api/.test(fileName) || /vs(\/|\\)editor(\/|\\)editor.api/.test(fileName)
|| /vs(\/|\\)editor(\/|\\)editor.main/.test(fileName) || /vs(\/|\\)editor(\/|\\)editor.main/.test(fileName)
|| /vs(\/|\\)editor(\/|\\)editor.worker/.test(fileName)) { || /vs(\/|\\)editor(\/|\\)editor.worker/.test(fileName)) {
return utils_1.createImportRuleListener((node, path) => { return (0, utils_1.createImportRuleListener)((node, path) => {
// resolve relative paths // resolve relative paths
if (path[0] === '.') { if (path[0] === '.') {
path = path_1.join(context.getFilename(), path); path = (0, path_1.join)(context.getFilename(), path);
} }
if (/vs(\/|\\)nls/.test(path)) { if (/vs(\/|\\)nls/.test(path)) {
context.report({ context.report({


@@ -21,10 +21,10 @@ module.exports = new class NoNlsInStandaloneEditorRule {
// the vs/editor folder is allowed to use the standalone editor // the vs/editor folder is allowed to use the standalone editor
return {}; return {};
} }
return utils_1.createImportRuleListener((node, path) => { return (0, utils_1.createImportRuleListener)((node, path) => {
// resolve relative paths // resolve relative paths
if (path[0] === '.') { if (path[0] === '.') {
path = path_1.join(context.getFilename(), path); path = (0, path_1.join)(context.getFilename(), path);
} }
if (/vs(\/|\\)editor(\/|\\)standalone(\/|\\)/.test(path) if (/vs(\/|\\)editor(\/|\\)standalone(\/|\\)/.test(path)
|| /vs(\/|\\)editor(\/|\\)common(\/|\\)standalone(\/|\\)/.test(path) || /vs(\/|\\)editor(\/|\\)common(\/|\\)standalone(\/|\\)/.test(path)


@@ -15,7 +15,7 @@ module.exports = new (_a = class TranslationRemind {
}; };
} }
create(context) { create(context) {
return utils_1.createImportRuleListener((node, path) => this._checkImport(context, node, path)); return (0, utils_1.createImportRuleListener)((node, path) => this._checkImport(context, node, path));
} }
_checkImport(context, node, path) { _checkImport(context, node, path) {
if (path !== TranslationRemind.NLS_MODULE) { if (path !== TranslationRemind.NLS_MODULE) {
@@ -31,7 +31,7 @@ module.exports = new (_a = class TranslationRemind {
let resourceDefined = false; let resourceDefined = false;
let json; let json;
try { try {
json = fs_1.readFileSync('./build/lib/i18n.resources.json', 'utf8'); json = (0, fs_1.readFileSync)('./build/lib/i18n.resources.json', 'utf8');
} }
catch (e) { catch (e) {
console.error('[translation-remind rule]: File with resources to pull from Transifex was not found. Aborting translation resource check for newly defined workbench part/service.'); console.error('[translation-remind rule]: File with resources to pull from Transifex was not found. Aborting translation resource check for newly defined workbench part/service.');


@@ -1,7 +1,7 @@
"use strict"; "use strict";
/*--------------------------------------------------------------------------------------------- /*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved. * Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information. * Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
module.exports = new class ApiVsCodeInComments { module.exports = new class ApiVsCodeInComments {
constructor() { constructor() {


@@ -21,6 +21,8 @@ const fancyLog = require("fancy-log");
const ansiColors = require("ansi-colors"); const ansiColors = require("ansi-colors");
const buffer = require('gulp-buffer'); const buffer = require('gulp-buffer');
const jsoncParser = require("jsonc-parser"); const jsoncParser = require("jsonc-parser");
const dependencies_1 = require("./dependencies");
const _ = require("underscore");
const util = require('./util'); const util = require('./util');
const root = path.dirname(path.dirname(__dirname)); const root = path.dirname(path.dirname(__dirname));
const commit = util.getVersion(root); const commit = util.getVersion(root);
@@ -145,7 +147,7 @@ function fromLocalWebpack(extensionPath, webpackConfigFileName) {
console.error(packagedDependencies); console.error(packagedDependencies);
result.emit('error', err); result.emit('error', err);
}); });
return result.pipe(stats_1.createStatsStream(path.basename(extensionPath))); return result.pipe((0, stats_1.createStatsStream)(path.basename(extensionPath)));
} }
function fromLocalNormal(extensionPath) { function fromLocalNormal(extensionPath) {
const result = es.through(); const result = es.through();
@@ -163,7 +165,7 @@ function fromLocalNormal(extensionPath) {
es.readArray(files).pipe(result); es.readArray(files).pipe(result);
}) })
.catch(err => result.emit('error', err)); .catch(err => result.emit('error', err));
return result.pipe(stats_1.createStatsStream(path.basename(extensionPath))); return result.pipe((0, stats_1.createStatsStream)(path.basename(extensionPath)));
} }
exports.fromLocalNormal = fromLocalNormal; exports.fromLocalNormal = fromLocalNormal;
const baseHeaders = { const baseHeaders = {
@@ -254,14 +256,30 @@ const productJson = JSON.parse(fs.readFileSync(path.join(__dirname, '../../produ
const builtInExtensions = productJson.builtInExtensions || []; const builtInExtensions = productJson.builtInExtensions || [];
const webBuiltInExtensions = productJson.webBuiltInExtensions || []; const webBuiltInExtensions = productJson.webBuiltInExtensions || [];
/** /**
* Loosely based on `getExtensionKind` from `src/vs/workbench/services/extensions/common/extensionsUtil.ts` * Loosely based on `getExtensionKind` from `src/vs/workbench/services/extensions/common/extensionManifestPropertiesService.ts`
*/ */
function isWebExtension(manifest) { function isWebExtension(manifest) {
if (Boolean(manifest.browser)) {
return true;
}
if (Boolean(manifest.main)) {
return false;
}
// neither browser nor main
if (typeof manifest.extensionKind !== 'undefined') { if (typeof manifest.extensionKind !== 'undefined') {
const extensionKind = Array.isArray(manifest.extensionKind) ? manifest.extensionKind : [manifest.extensionKind]; const extensionKind = Array.isArray(manifest.extensionKind) ? manifest.extensionKind : [manifest.extensionKind];
return (extensionKind.indexOf('web') >= 0); if (extensionKind.indexOf('web') >= 0) {
return true;
}
} }
return (!Boolean(manifest.main) || Boolean(manifest.browser)); if (typeof manifest.contributes !== 'undefined') {
for (const id of ['debuggers', 'terminal', 'typescriptServerPlugins']) {
if (manifest.contributes.hasOwnProperty(id)) {
return false;
}
}
}
return true;
} }
function packageLocalExtensionsStream(forWeb) { function packageLocalExtensionsStream(forWeb) {
const localExtensionsDescriptions = (glob.sync('extensions/*/package.json') const localExtensionsDescriptions = (glob.sync('extensions/*/package.json')
@@ -284,8 +302,10 @@ function packageLocalExtensionsStream(forWeb) {
result = localExtensionsStream; result = localExtensionsStream;
} }
else { else {
// also include shared node modules // also include shared production node modules
result = es.merge(localExtensionsStream, gulp.src('extensions/node_modules/**', { base: '.' })); const productionDependencies = (0, dependencies_1.getProductionDependencies)('extensions/');
const dependenciesSrc = _.flatten(productionDependencies.map(d => path.relative(root, d.path)).map(d => [`${d}/**`, `!${d}/**/{test,tests}/**`]));
result = es.merge(localExtensionsStream, gulp.src(dependenciesSrc, { base: '.' }));
} }
return (result return (result
.pipe(util2.setExecutableBit(['**/*.sh']))); .pipe(util2.setExecutableBit(['**/*.sh'])));
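
To make the "shared production node modules" change above concrete, a rough sketch of the glob list it ends up handing to gulp.src(). The dependency paths below are invented; the real task gets them from getProductionDependencies('extensions/') and makes them root-relative with path.relative(root, d.path) before flattening with underscore.

// Illustrative, already root-relative dependency paths:
const productionDependencyPaths = [
	'extensions/node_modules/typescript',
	'extensions/node_modules/vscode-uri'
];

// Each dependency becomes an include glob plus an exclusion for test folders:
const dependenciesSrc = productionDependencyPaths
	.map(d => [`${d}/**`, `!${d}/**/{test,tests}/**`])
	.flat();
// -> ['extensions/node_modules/typescript/**',
//     '!extensions/node_modules/typescript/**/{test,tests}/**',
//     'extensions/node_modules/vscode-uri/**',
//     '!extensions/node_modules/vscode-uri/**/{test,tests}/**']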
@@ -400,7 +420,7 @@ function translatePackageJSON(packageJSON, packageNLSPath) {
else if (typeof val === 'string' && val.charCodeAt(0) === CharCode_PC && val.charCodeAt(val.length - 1) === CharCode_PC) { else if (typeof val === 'string' && val.charCodeAt(0) === CharCode_PC && val.charCodeAt(val.length - 1) === CharCode_PC) {
const translated = packageNls[val.substr(1, val.length - 2)]; const translated = packageNls[val.substr(1, val.length - 2)];
if (translated) { if (translated) {
obj[key] = translated; obj[key] = typeof translated === 'string' ? translated : (typeof translated.message === 'string' ? translated.message : val);
} }
} }
} }
@@ -470,7 +490,7 @@ async function webpackExtensions(taskName, isWatch, webpackConfigLocations) {
reject(); reject();
} }
else { else {
reporter(stats.toJson()); reporter(stats === null || stats === void 0 ? void 0 : stats.toJson());
} }
}); });
} }
@@ -481,7 +501,7 @@ async function webpackExtensions(taskName, isWatch, webpackConfigLocations) {
reject(); reject();
} }
else { else {
reporter(stats.toJson()); reporter(stats === null || stats === void 0 ? void 0 : stats.toJson());
resolve(); resolve();
} }
}); });


@@ -21,6 +21,8 @@ import * as ansiColors from 'ansi-colors';
const buffer = require('gulp-buffer'); const buffer = require('gulp-buffer');
import * as jsoncParser from 'jsonc-parser'; import * as jsoncParser from 'jsonc-parser';
import webpack = require('webpack'); import webpack = require('webpack');
import { getProductionDependencies } from './dependencies';
import _ = require('underscore');
const util = require('./util'); const util = require('./util');
const root = path.dirname(path.dirname(__dirname)); const root = path.dirname(path.dirname(__dirname));
const commit = util.getVersion(root); const commit = util.getVersion(root);
@@ -303,19 +305,38 @@ const webBuiltInExtensions: IBuiltInExtension[] = productJson.webBuiltInExtensio
type ExtensionKind = 'ui' | 'workspace' | 'web'; type ExtensionKind = 'ui' | 'workspace' | 'web';
interface IExtensionManifest { interface IExtensionManifest {
main: string; main?: string;
browser: string; browser?: string;
extensionKind?: ExtensionKind | ExtensionKind[]; extensionKind?: ExtensionKind | ExtensionKind[];
extensionPack?: string[];
extensionDependencies?: string[];
contributes?: { [id: string]: any };
} }
/** /**
* Loosely based on `getExtensionKind` from `src/vs/workbench/services/extensions/common/extensionsUtil.ts` * Loosely based on `getExtensionKind` from `src/vs/workbench/services/extensions/common/extensionManifestPropertiesService.ts`
*/ */
function isWebExtension(manifest: IExtensionManifest): boolean { function isWebExtension(manifest: IExtensionManifest): boolean {
if (Boolean(manifest.browser)) {
return true;
}
if (Boolean(manifest.main)) {
return false;
}
// neither browser nor main
if (typeof manifest.extensionKind !== 'undefined') { if (typeof manifest.extensionKind !== 'undefined') {
const extensionKind = Array.isArray(manifest.extensionKind) ? manifest.extensionKind : [manifest.extensionKind]; const extensionKind = Array.isArray(manifest.extensionKind) ? manifest.extensionKind : [manifest.extensionKind];
return (extensionKind.indexOf('web') >= 0); if (extensionKind.indexOf('web') >= 0) {
return true;
}
} }
return (!Boolean(manifest.main) || Boolean(manifest.browser)); if (typeof manifest.contributes !== 'undefined') {
for (const id of ['debuggers', 'terminal', 'typescriptServerPlugins']) {
if (manifest.contributes.hasOwnProperty(id)) {
return false;
}
}
}
return true;
} }
export function packageLocalExtensionsStream(forWeb: boolean): Stream { export function packageLocalExtensionsStream(forWeb: boolean): Stream {
@@ -344,8 +365,10 @@ export function packageLocalExtensionsStream(forWeb: boolean): Stream {
if (forWeb) { if (forWeb) {
result = localExtensionsStream; result = localExtensionsStream;
} else { } else {
// also include shared node modules // also include shared production node modules
result = es.merge(localExtensionsStream, gulp.src('extensions/node_modules/**', { base: '.' })); const productionDependencies = getProductionDependencies('extensions/');
const dependenciesSrc = _.flatten(productionDependencies.map(d => path.relative(root, d.path)).map(d => [`${d}/**`, `!${d}/**/{test,tests}/**`]));
result = es.merge(localExtensionsStream, gulp.src(dependenciesSrc, { base: '.' }));
} }
return ( return (
@@ -469,8 +492,11 @@ export function packageRebuildExtensionsStream(): NodeJS.ReadWriteStream {
// {{SQL CARBON EDIT}} end // {{SQL CARBON EDIT}} end
export function translatePackageJSON(packageJSON: string, packageNLSPath: string) { export function translatePackageJSON(packageJSON: string, packageNLSPath: string) {
interface NLSFormat {
[key: string]: string | { message: string, comment: string[] };
}
const CharCode_PC = '%'.charCodeAt(0); const CharCode_PC = '%'.charCodeAt(0);
const packageNls = JSON.parse(fs.readFileSync(packageNLSPath).toString()); const packageNls: NLSFormat = JSON.parse(fs.readFileSync(packageNLSPath).toString());
const translate = (obj: any) => { const translate = (obj: any) => {
for (let key in obj) { for (let key in obj) {
const val = obj[key]; const val = obj[key];
@@ -481,7 +507,7 @@ export function translatePackageJSON(packageJSON: string, packageNLSPath: string
} else if (typeof val === 'string' && val.charCodeAt(0) === CharCode_PC && val.charCodeAt(val.length - 1) === CharCode_PC) { } else if (typeof val === 'string' && val.charCodeAt(0) === CharCode_PC && val.charCodeAt(val.length - 1) === CharCode_PC) {
const translated = packageNls[val.substr(1, val.length - 2)]; const translated = packageNls[val.substr(1, val.length - 2)];
if (translated) { if (translated) {
obj[key] = translated; obj[key] = typeof translated === 'string' ? translated : (typeof translated.message === 'string' ? translated.message : val);
} }
} }
} }
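
A small illustration of the package NLS shape the `translatePackageJSON` change above now tolerates; the keys and strings here are made up for the example.

// Hypothetical package.nls.json contents: values are either plain strings or
// objects carrying a message plus translator comments.
const packageNls: { [key: string]: string | { message: string; comment: string[] } } = {
	'myExt.command.title': 'Run query',
	'myExt.config.description': {
		message: 'Controls whether results open in a new tab.',
		comment: ['Shown in the settings UI']
	}
};

// Mirrors the updated lookup: use the string directly, otherwise fall back to .message,
// and keep the original placeholder if neither form is usable.
function resolveNls(key: string, original: string): string {
	const translated = packageNls[key];
	if (!translated) { return original; }
	return typeof translated === 'string'
		? translated
		: (typeof translated.message === 'string' ? translated.message : original);
}

resolveNls('myExt.command.title', '%myExt.command.title%');           // 'Run query'
resolveNls('myExt.config.description', '%myExt.config.description%'); // 'Controls whether results open in a new tab.'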
@@ -556,7 +582,7 @@ export async function webpackExtensions(taskName: string, isWatch: boolean, webp
if (err) { if (err) {
reject(); reject();
} else { } else {
reporter(stats.toJson()); reporter(stats?.toJson());
} }
}); });
} else { } else {
@@ -565,7 +591,7 @@ export async function webpackExtensions(taskName: string, isWatch: boolean, webp
fancyLog.error(err); fancyLog.error(err);
reject(); reject();
} else { } else {
reporter(stats.toJson()); reporter(stats?.toJson());
resolve(); resolve();
} }
}); });
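
Stepping back to the `isWebExtension` rework earlier in this file, a rough sketch of the precedence it now applies; the sample manifests and the `sketchIsWebExtension` name are invented for illustration.

interface ManifestSketch {
	main?: string;
	browser?: string;
	extensionKind?: string | string[];
	contributes?: { [id: string]: any };
}

// Precedence as in the updated helper: an explicit browser entry point wins, an explicit
// main entry point (without browser) rules web out, then extensionKind is consulted, and
// finally node-only contribution points (debuggers, terminal, typescriptServerPlugins)
// rule web out. Purely declarative extensions default to web.
function sketchIsWebExtension(manifest: ManifestSketch): boolean {
	if (manifest.browser) { return true; }
	if (manifest.main) { return false; }
	if (typeof manifest.extensionKind !== 'undefined') {
		const kinds = Array.isArray(manifest.extensionKind) ? manifest.extensionKind : [manifest.extensionKind];
		if (kinds.indexOf('web') >= 0) { return true; }
	}
	if (typeof manifest.contributes !== 'undefined') {
		for (const id of ['debuggers', 'terminal', 'typescriptServerPlugins']) {
			if (manifest.contributes.hasOwnProperty(id)) { return false; }
		}
	}
	return true;
}

sketchIsWebExtension({ browser: './dist/browser.js', main: './dist/node.js' }); // true
sketchIsWebExtension({ main: './out/extension.js' });                           // false
sketchIsWebExtension({ contributes: { grammars: [] } });                        // true
sketchIsWebExtension({ contributes: { debuggers: [] } });                       // false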


@@ -466,7 +466,7 @@ function processCoreBundleFormat(fileHeader, languages, json, emitter) {
}); });
} }
function processNlsFiles(opts) { function processNlsFiles(opts) {
return event_stream_1.through(function (file) { return (0, event_stream_1.through)(function (file) {
let fileName = path.basename(file.path); let fileName = path.basename(file.path);
if (fileName === 'nls.metadata.json') { if (fileName === 'nls.metadata.json') {
let json = null; let json = null;
@@ -524,7 +524,7 @@ function getResource(sourceFile) {
} }
exports.getResource = getResource; exports.getResource = getResource;
function createXlfFilesForCoreBundle() { function createXlfFilesForCoreBundle() {
return event_stream_1.through(function (file) { return (0, event_stream_1.through)(function (file) {
const basename = path.basename(file.path); const basename = path.basename(file.path);
if (basename === 'nls.metadata.json') { if (basename === 'nls.metadata.json') {
if (file.isBuffer()) { if (file.isBuffer()) {
@@ -579,7 +579,7 @@ function createXlfFilesForExtensions() {
let counter = 0; let counter = 0;
let folderStreamEnded = false; let folderStreamEnded = false;
let folderStreamEndEmitted = false; let folderStreamEndEmitted = false;
return event_stream_1.through(function (extensionFolder) { return (0, event_stream_1.through)(function (extensionFolder) {
const folderStream = this; const folderStream = this;
const stat = fs.statSync(extensionFolder.path); const stat = fs.statSync(extensionFolder.path);
if (!stat.isDirectory()) { if (!stat.isDirectory()) {
@@ -597,7 +597,7 @@ function createXlfFilesForExtensions() {
} }
return _xlf; return _xlf;
} }
gulp.src([`.build/extensions/${extensionName}/package.nls.json`, `.build/extensions/${extensionName}/**/nls.metadata.json`], { allowEmpty: true }).pipe(event_stream_1.through(function (file) { gulp.src([`.build/extensions/${extensionName}/package.nls.json`, `.build/extensions/${extensionName}/**/nls.metadata.json`], { allowEmpty: true }).pipe((0, event_stream_1.through)(function (file) {
if (file.isBuffer()) { if (file.isBuffer()) {
const buffer = file.contents; const buffer = file.contents;
const basename = path.basename(file.path); const basename = path.basename(file.path);
@@ -656,7 +656,7 @@ function createXlfFilesForExtensions() {
} }
exports.createXlfFilesForExtensions = createXlfFilesForExtensions; exports.createXlfFilesForExtensions = createXlfFilesForExtensions;
function createXlfFilesForIsl() { function createXlfFilesForIsl() {
return event_stream_1.through(function (file) { return (0, event_stream_1.through)(function (file) {
let projectName, resourceFile; let projectName, resourceFile;
if (path.basename(file.path) === 'messages.en.isl') { if (path.basename(file.path) === 'messages.en.isl') {
projectName = setupProject; projectName = setupProject;
@@ -709,7 +709,7 @@ exports.createXlfFilesForIsl = createXlfFilesForIsl;
function pushXlfFiles(apiHostname, username, password) { function pushXlfFiles(apiHostname, username, password) {
let tryGetPromises = []; let tryGetPromises = [];
let updateCreatePromises = []; let updateCreatePromises = [];
return event_stream_1.through(function (file) { return (0, event_stream_1.through)(function (file) {
const project = path.dirname(file.relative); const project = path.dirname(file.relative);
const fileName = path.basename(file.path); const fileName = path.basename(file.path);
const slug = fileName.substr(0, fileName.length - '.xlf'.length); const slug = fileName.substr(0, fileName.length - '.xlf'.length);
@@ -771,7 +771,7 @@ function getAllResources(project, apiHostname, username, password) {
function findObsoleteResources(apiHostname, username, password) { function findObsoleteResources(apiHostname, username, password) {
let resourcesByProject = Object.create(null); let resourcesByProject = Object.create(null);
resourcesByProject[extensionsProject] = [].concat(exports.externalExtensionsWithTranslations); // clone resourcesByProject[extensionsProject] = [].concat(exports.externalExtensionsWithTranslations); // clone
return event_stream_1.through(function (file) { return (0, event_stream_1.through)(function (file) {
const project = path.dirname(file.relative); const project = path.dirname(file.relative);
const fileName = path.basename(file.path); const fileName = path.basename(file.path);
const slug = fileName.substr(0, fileName.length - '.xlf'.length); const slug = fileName.substr(0, fileName.length - '.xlf'.length);
@@ -923,7 +923,7 @@ function pullXlfFiles(apiHostname, username, password, language, resources) {
const credentials = `${username}:${password}`; const credentials = `${username}:${password}`;
let expectedTranslationsCount = resources.length; let expectedTranslationsCount = resources.length;
let translationsRetrieved = 0, called = false; let translationsRetrieved = 0, called = false;
return event_stream_1.readable(function (_count, callback) { return (0, event_stream_1.readable)(function (_count, callback) {
// Mark end of stream when all resources were retrieved // Mark end of stream when all resources were retrieved
if (translationsRetrieved === expectedTranslationsCount) { if (translationsRetrieved === expectedTranslationsCount) {
return this.emit('end'); return this.emit('end');
@@ -981,7 +981,7 @@ function retrieveResource(language, resource, apiHostname, credentials) {
} }
function prepareI18nFiles() { function prepareI18nFiles() {
let parsePromises = []; let parsePromises = [];
return event_stream_1.through(function (xlf) { return (0, event_stream_1.through)(function (xlf) {
let stream = this; let stream = this;
let parsePromise = XLF.parse(xlf.contents.toString()); let parsePromise = XLF.parse(xlf.contents.toString());
parsePromises.push(parsePromise); parsePromises.push(parsePromise);
@@ -1026,7 +1026,7 @@ function prepareI18nPackFiles(externalExtensions, resultingTranslationPaths, pse
let mainPack = { version: exports.i18nPackVersion, contents: {} }; let mainPack = { version: exports.i18nPackVersion, contents: {} };
let extensionsPacks = {}; let extensionsPacks = {};
let errors = []; let errors = [];
return event_stream_1.through(function (xlf) { return (0, event_stream_1.through)(function (xlf) {
let project = path.basename(path.dirname(path.dirname(xlf.relative))); let project = path.basename(path.dirname(path.dirname(xlf.relative)));
let resource = path.basename(xlf.relative, '.xlf'); let resource = path.basename(xlf.relative, '.xlf');
let contents = xlf.contents.toString(); let contents = xlf.contents.toString();
@@ -1088,7 +1088,7 @@ function prepareI18nPackFiles(externalExtensions, resultingTranslationPaths, pse
exports.prepareI18nPackFiles = prepareI18nPackFiles; exports.prepareI18nPackFiles = prepareI18nPackFiles;
function prepareIslFiles(language, innoSetupConfig) { function prepareIslFiles(language, innoSetupConfig) {
let parsePromises = []; let parsePromises = [];
return event_stream_1.through(function (xlf) { return (0, event_stream_1.through)(function (xlf) {
let stream = this; let stream = this;
let parsePromise = XLF.parse(xlf.contents.toString()); let parsePromise = XLF.parse(xlf.contents.toString());
parsePromises.push(parsePromise); parsePromises.push(parsePromise);


@@ -46,6 +46,10 @@
"name": "vs/workbench/contrib/callHierarchy", "name": "vs/workbench/contrib/callHierarchy",
"project": "vscode-workbench" "project": "vscode-workbench"
}, },
{
"name": "vs/workbench/contrib/typeHierarchy",
"project": "vscode-workbench"
},
{ {
"name": "vs/workbench/contrib/codeActions", "name": "vs/workbench/contrib/codeActions",
"project": "vscode-workbench" "project": "vscode-workbench"
@@ -94,6 +98,10 @@
"name": "vs/workbench/contrib/issue", "name": "vs/workbench/contrib/issue",
"project": "vscode-workbench" "project": "vscode-workbench"
}, },
{
"name": "vs/workbench/contrib/interactive",
"project": "vscode-workbench"
},
{ {
"name": "vs/workbench/contrib/keybindings", "name": "vs/workbench/contrib/keybindings",
"project": "vscode-workbench" "project": "vscode-workbench"
@@ -246,6 +254,10 @@
"name": "vs/workbench/contrib/views", "name": "vs/workbench/contrib/views",
"project": "vscode-workbench" "project": "vscode-workbench"
}, },
{
"name": "vs/workbench/contrib/languageDetection",
"project": "vscode-workbench"
},
{ {
"name": "vs/workbench/services/actions", "name": "vs/workbench/services/actions",
"project": "vscode-workbench" "project": "vscode-workbench"


@@ -199,7 +199,7 @@ const RULES = [
] ]
} }
]; ];
const TS_CONFIG_PATH = path_1.join(__dirname, '../../', 'src', 'tsconfig.json'); const TS_CONFIG_PATH = (0, path_1.join)(__dirname, '../../', 'src', 'tsconfig.json');
let hasErrors = false; let hasErrors = false;
function checkFile(program, sourceFile, rule) { function checkFile(program, sourceFile, rule) {
checkNode(sourceFile); checkNode(sourceFile);
@@ -250,8 +250,8 @@ function checkFile(program, sourceFile, rule) {
} }
function createProgram(tsconfigPath) { function createProgram(tsconfigPath) {
const tsConfig = ts.readConfigFile(tsconfigPath, ts.sys.readFile); const tsConfig = ts.readConfigFile(tsconfigPath, ts.sys.readFile);
const configHostParser = { fileExists: fs_1.existsSync, readDirectory: ts.sys.readDirectory, readFile: file => fs_1.readFileSync(file, 'utf8'), useCaseSensitiveFileNames: process.platform === 'linux' }; const configHostParser = { fileExists: fs_1.existsSync, readDirectory: ts.sys.readDirectory, readFile: file => (0, fs_1.readFileSync)(file, 'utf8'), useCaseSensitiveFileNames: process.platform === 'linux' };
const tsConfigParsed = ts.parseJsonConfigFileContent(tsConfig.config, configHostParser, path_1.resolve(path_1.dirname(tsconfigPath)), { noEmit: true }); const tsConfigParsed = ts.parseJsonConfigFileContent(tsConfig.config, configHostParser, (0, path_1.resolve)((0, path_1.dirname)(tsconfigPath)), { noEmit: true });
const compilerHost = ts.createCompilerHost(tsConfigParsed.options, true); const compilerHost = ts.createCompilerHost(tsConfigParsed.options, true);
return ts.createProgram(tsConfigParsed.fileNames, tsConfigParsed.options, compilerHost); return ts.createProgram(tsConfigParsed.fileNames, tsConfigParsed.options, compilerHost);
} }
@@ -261,7 +261,7 @@ function createProgram(tsconfigPath) {
const program = createProgram(TS_CONFIG_PATH); const program = createProgram(TS_CONFIG_PATH);
for (const sourceFile of program.getSourceFiles()) { for (const sourceFile of program.getSourceFiles()) {
for (const rule of RULES) { for (const rule of RULES) {
if (minimatch_1.match([sourceFile.fileName], rule.target).length > 0) { if ((0, minimatch_1.match)([sourceFile.fileName], rule.target).length > 0) {
if (!rule.skip) { if (!rule.skip) {
checkFile(program, sourceFile, rule); checkFile(program, sourceFile, rule);
} }


@@ -96,7 +96,7 @@ function modifyI18nPackFiles(existingTranslationFolder, resultingTranslationPath
let mainPack = { version: i18n.i18nPackVersion, contents: {} }; let mainPack = { version: i18n.i18nPackVersion, contents: {} };
let extensionsPacks = {}; let extensionsPacks = {};
let errors = []; let errors = [];
return event_stream_1.through(function (xlf) { return (0, event_stream_1.through)(function (xlf) {
let rawResource = path.basename(xlf.relative, '.xlf'); let rawResource = path.basename(xlf.relative, '.xlf');
let resource = rawResource.substring(0, rawResource.lastIndexOf('.')); let resource = rawResource.substring(0, rawResource.lastIndexOf('.'));
let contents = xlf.contents.toString(); let contents = xlf.contents.toString();


@@ -53,8 +53,8 @@ define([], [${wrap + lines.map(l => indent + l).join(',\n') + wrap}]);`;
* Returns a stream containing the patched JavaScript and source maps. * Returns a stream containing the patched JavaScript and source maps.
*/ */
function nls() { function nls() {
const input = event_stream_1.through(); const input = (0, event_stream_1.through)();
const output = input.pipe(event_stream_1.through(function (f) { const output = input.pipe((0, event_stream_1.through)(function (f) {
if (!f.sourceMap) { if (!f.sourceMap) {
return this.emit('error', new Error(`File ${f.relative} does not have sourcemaps.`)); return this.emit('error', new Error(`File ${f.relative} does not have sourcemaps.`));
} }
@@ -72,7 +72,7 @@ function nls() {
} }
_nls.patchFiles(f, typescript).forEach(f => this.emit('data', f)); _nls.patchFiles(f, typescript).forEach(f => this.emit('data', f));
})); }));
return event_stream_1.duplex(input, output); return (0, event_stream_1.duplex)(input, output);
} }
exports.nls = nls; exports.nls = nls;
function isImportNode(ts, node) { function isImportNode(ts, node) {


@@ -98,7 +98,7 @@ function toConcatStream(src, bundledFileHeader, sources, dest, fileContentMapper
return es.readArray(treatedSources) return es.readArray(treatedSources)
.pipe(useSourcemaps ? util.loadSourcemaps() : es.through()) .pipe(useSourcemaps ? util.loadSourcemaps() : es.through())
.pipe(concat(dest)) .pipe(concat(dest))
.pipe(stats_1.createStatsStream(dest)); .pipe((0, stats_1.createStatsStream)(dest));
} }
function toBundleStream(src, bundledFileHeader, bundles, fileContentMapper) { function toBundleStream(src, bundledFileHeader, bundles, fileContentMapper) {
return es.merge(bundles.map(function (bundle) { return es.merge(bundles.map(function (bundle) {
@@ -155,7 +155,7 @@ function optimizeTask(opts) {
addComment: true, addComment: true,
includeContent: true includeContent: true
})) }))
.pipe(opts.languages && opts.languages.length ? i18n_1.processNlsFiles({ .pipe(opts.languages && opts.languages.length ? (0, i18n_1.processNlsFiles)({
fileHeader: bundledFileHeader, fileHeader: bundledFileHeader,
languages: opts.languages languages: opts.languages
}) : es.through()) }) : es.through())
@@ -179,7 +179,7 @@ function minifyTask(src, sourceMapBaseUrl) {
sourcemap: 'external', sourcemap: 'external',
outdir: '.', outdir: '.',
platform: 'node', platform: 'node',
target: ['node14.16'], target: ['esnext'],
write: false write: false
}).then(res => { }).then(res => {
const jsFile = res.outputFiles.find(f => /\.js$/.test(f.path)); const jsFile = res.outputFiles.find(f => /\.js$/.test(f.path));


@@ -256,7 +256,7 @@ export function minifyTask(src: string, sourceMapBaseUrl?: string): (cb: any) =>
sourcemap: 'external', sourcemap: 'external',
outdir: '.', outdir: '.',
platform: 'node', platform: 'node',
target: ['node14.16'], target: ['esnext'],
write: false write: false
}).then(res => { }).then(res => {
const jsFile = res.outputFiles.find(f => /\.js$/.test(f.path))!; const jsFile = res.outputFiles.find(f => /\.js$/.test(f.path))!;


@@ -12,7 +12,7 @@ const yarn = process.platform === 'win32' ? 'yarn.cmd' : 'yarn';
const rootDir = path.resolve(__dirname, '..', '..'); const rootDir = path.resolve(__dirname, '..', '..');
function runProcess(command, args = []) { function runProcess(command, args = []) {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
const child = child_process_1.spawn(command, args, { cwd: rootDir, stdio: 'inherit', env: process.env }); const child = (0, child_process_1.spawn)(command, args, { cwd: rootDir, stdio: 'inherit', env: process.env });
child.on('exit', err => !err ? resolve() : process.exit(err !== null && err !== void 0 ? err : 1)); child.on('exit', err => !err ? resolve() : process.exit(err !== null && err !== void 0 ? err : 1));
child.on('error', reject); child.on('error', reject);
}); });


@@ -4,7 +4,7 @@
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
'use strict'; 'use strict';
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
exports.getElectronVersion = exports.streamToPromise = exports.versionStringToNumber = exports.filter = exports.rebase = exports.getVersion = exports.ensureDir = exports.rreddir = exports.rimraf = exports.rewriteSourceMappingURL = exports.stripSourceMappingURL = exports.loadSourcemaps = exports.cleanNodeModules = exports.skipDirectories = exports.toFileUri = exports.setExecutableBit = exports.fixWin32DirectoryPermissions = exports.incremental = void 0; exports.buildWebNodePaths = exports.acquireWebNodePaths = exports.getElectronVersion = exports.streamToPromise = exports.versionStringToNumber = exports.filter = exports.rebase = exports.getVersion = exports.ensureDir = exports.rreddir = exports.rimraf = exports.rewriteSourceMappingURL = exports.stripSourceMappingURL = exports.loadSourcemaps = exports.cleanNodeModules = exports.skipDirectories = exports.toFileUri = exports.setExecutableBit = exports.fixWin32DirectoryPermissions = exports.incremental = void 0;
const es = require("event-stream"); const es = require("event-stream");
const debounce = require("debounce"); const debounce = require("debounce");
const _filter = require("gulp-filter"); const _filter = require("gulp-filter");
@@ -274,3 +274,51 @@ function getElectronVersion() {
return target; return target;
} }
exports.getElectronVersion = getElectronVersion; exports.getElectronVersion = getElectronVersion;
function acquireWebNodePaths() {
var _a;
const root = path.join(__dirname, '..', '..');
const webPackageJSON = path.join(root, '/remote/web', 'package.json');
const webPackages = JSON.parse(fs.readFileSync(webPackageJSON, 'utf8')).dependencies;
const nodePaths = {};
for (const key of Object.keys(webPackages)) {
const packageJSON = path.join(root, 'node_modules', key, 'package.json');
const packageData = JSON.parse(fs.readFileSync(packageJSON, 'utf8'));
let entryPoint = typeof packageData.browser === 'string' ? packageData.browser : (_a = packageData.main) !== null && _a !== void 0 ? _a : packageData.main; // {{SQL CARBON EDIT}} Some packages (like Turndown) have objects in this field instead of the entry point, fall back to main in that case
// On rare cases a package doesn't have an entrypoint so we assume it has a dist folder with a min.js
if (!entryPoint) {
console.warn(`No entry point for ${key} assuming dist/${key}.min.js`);
entryPoint = `dist/${key}.min.js`;
}
// Remove any starting path information so it's all relative info
if (entryPoint.startsWith('./')) {
entryPoint = entryPoint.substr(2);
}
else if (entryPoint.startsWith('/')) {
entryPoint = entryPoint.substr(1);
}
nodePaths[key] = entryPoint;
}
return nodePaths;
}
exports.acquireWebNodePaths = acquireWebNodePaths;
function buildWebNodePaths(outDir) {
const result = () => new Promise((resolve, _) => {
const root = path.join(__dirname, '..', '..');
const nodePaths = acquireWebNodePaths();
// Now we write the node paths to out/vs
const outDirectory = path.join(root, outDir, 'vs');
fs.mkdirSync(outDirectory, { recursive: true });
const headerWithGeneratedFileWarning = `/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
// This file is generated by build/npm/postinstall.js. Do not edit.`;
const fileContents = `${headerWithGeneratedFileWarning}\nself.webPackagePaths = ${JSON.stringify(nodePaths, null, 2)};`;
fs.writeFileSync(path.join(outDirectory, 'webPackagePaths.js'), fileContents, 'utf8');
resolve();
});
result.taskName = 'build-web-node-paths';
return result;
}
exports.buildWebNodePaths = buildWebNodePaths;


@@ -340,3 +340,50 @@ export function getElectronVersion(): string {
const target = /^target "(.*)"$/m.exec(yarnrc)![1]; const target = /^target "(.*)"$/m.exec(yarnrc)![1];
return target; return target;
} }
export function acquireWebNodePaths() {
const root = path.join(__dirname, '..', '..');
const webPackageJSON = path.join(root, '/remote/web', 'package.json');
const webPackages = JSON.parse(fs.readFileSync(webPackageJSON, 'utf8')).dependencies;
const nodePaths: { [key: string]: string } = {};
for (const key of Object.keys(webPackages)) {
const packageJSON = path.join(root, 'node_modules', key, 'package.json');
const packageData = JSON.parse(fs.readFileSync(packageJSON, 'utf8'));
let entryPoint = typeof packageData.browser === 'string' ? packageData.browser : packageData.main ?? packageData.main; // {{SQL CARBON EDIT}} Some packages (like Turndown) have objects in this field instead of the entry point, fall back to main in that case
// On rare cases a package doesn't have an entrypoint so we assume it has a dist folder with a min.js
if (!entryPoint) {
console.warn(`No entry point for ${key} assuming dist/${key}.min.js`);
entryPoint = `dist/${key}.min.js`;
}
// Remove any starting path information so it's all relative info
if (entryPoint.startsWith('./')) {
entryPoint = entryPoint.substr(2);
} else if (entryPoint.startsWith('/')) {
entryPoint = entryPoint.substr(1);
}
nodePaths[key] = entryPoint;
}
return nodePaths;
}
export function buildWebNodePaths(outDir: string) {
const result = () => new Promise<void>((resolve, _) => {
const root = path.join(__dirname, '..', '..');
const nodePaths = acquireWebNodePaths();
// Now we write the node paths to out/vs
const outDirectory = path.join(root, outDir, 'vs');
fs.mkdirSync(outDirectory, { recursive: true });
const headerWithGeneratedFileWarning = `/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
// This file is generated by build/npm/postinstall.js. Do not edit.`;
const fileContents = `${headerWithGeneratedFileWarning}\nself.webPackagePaths = ${JSON.stringify(nodePaths, null, 2)};`;
fs.writeFileSync(path.join(outDirectory, 'webPackagePaths.js'), fileContents, 'utf8');
resolve();
});
result.taskName = 'build-web-node-paths';
return result;
}
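
A rough sketch of the per-dependency entry-point resolution `acquireWebNodePaths` applies above; the package manifests and names here are invented. `buildWebNodePaths(outDir)` then serializes the resulting map into `<outDir>/vs/webPackagePaths.js` as `self.webPackagePaths = {...}`, as shown in the diff.

function sketchEntryPoint(key: string, packageData: { browser?: unknown; main?: string }): string {
	// 'browser' is only honoured when it is a string; some packages (Turndown is the
	// example called out in the SQL CARBON EDIT) put an object there, so fall back to 'main'.
	let entryPoint = typeof packageData.browser === 'string' ? packageData.browser : packageData.main;
	if (!entryPoint) {
		// No entry point at all: assume a conventional dist/<name>.min.js.
		entryPoint = `dist/${key}.min.js`;
	}
	// Strip a leading './' or '/' so every path is relative to the package folder.
	if (entryPoint.startsWith('./')) {
		entryPoint = entryPoint.substr(2);
	} else if (entryPoint.startsWith('/')) {
		entryPoint = entryPoint.substr(1);
	}
	return entryPoint;
}

sketchEntryPoint('some-umd-lib', { browser: './lib/browser.js', main: './lib/node.js' }); // 'lib/browser.js'
sketchEntryPoint('turndown-like', { browser: { node: './lib/node.js' }, main: 'lib/turndown.js' }); // 'lib/turndown.js'
sketchEntryPoint('bare-lib', {}); // 'dist/bare-lib.min.js'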


@@ -0,0 +1,61 @@
// Can be removed once https://github.com/electron/electron-rebuild/pull/703 is available.
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
exports.downloadLibcxxObjects = exports.downloadLibcxxHeaders = void 0;
const debug = require("debug");
const extract = require("extract-zip");
const fs = require("fs-extra");
const path = require("path");
const packageJSON = require("../../package.json");
const get_1 = require("@electron/get");
const d = debug('libcxx-fetcher');
async function downloadLibcxxHeaders(outDir, electronVersion, lib_name) {
if (await fs.pathExists(path.resolve(outDir, 'include')))
return;
if (!await fs.pathExists(outDir))
await fs.mkdirp(outDir);
d(`downloading ${lib_name}_headers`);
const headers = await (0, get_1.downloadArtifact)({
version: electronVersion,
isGeneric: true,
artifactName: `${lib_name}_headers.zip`,
});
d(`unpacking ${lib_name}_headers from ${headers}`);
await extract(headers, { dir: outDir });
}
exports.downloadLibcxxHeaders = downloadLibcxxHeaders;
async function downloadLibcxxObjects(outDir, electronVersion, targetArch = 'x64') {
if (await fs.pathExists(path.resolve(outDir, 'libc++.a')))
return;
if (!await fs.pathExists(outDir))
await fs.mkdirp(outDir);
d(`downloading libcxx-objects-linux-${targetArch}`);
const objects = await (0, get_1.downloadArtifact)({
version: electronVersion,
platform: 'linux',
artifactName: 'libcxx-objects',
arch: targetArch,
});
d(`unpacking libcxx-objects from ${objects}`);
await extract(objects, { dir: outDir });
}
exports.downloadLibcxxObjects = downloadLibcxxObjects;
async function main() {
const libcxxObjectsDirPath = process.env['VSCODE_LIBCXX_OBJECTS_DIR'];
const libcxxHeadersDownloadDir = process.env['VSCODE_LIBCXX_HEADERS_DIR'];
const libcxxabiHeadersDownloadDir = process.env['VSCODE_LIBCXXABI_HEADERS_DIR'];
const arch = process.env['VSCODE_ARCH'];
const electronVersion = packageJSON.devDependencies.electron;
if (!libcxxObjectsDirPath || !libcxxHeadersDownloadDir || !libcxxabiHeadersDownloadDir) {
throw new Error('Required build env not set');
}
await downloadLibcxxObjects(libcxxObjectsDirPath, electronVersion, arch);
await downloadLibcxxHeaders(libcxxHeadersDownloadDir, electronVersion, 'libcxx');
await downloadLibcxxHeaders(libcxxabiHeadersDownloadDir, electronVersion, 'libcxxabi');
}
if (require.main === module) {
main().catch(err => {
console.error(err);
process.exit(1);
});
}


@@ -0,0 +1,66 @@
// Can be removed once https://github.com/electron/electron-rebuild/pull/703 is available.
'use strict';
import * as debug from 'debug';
import * as extract from 'extract-zip';
import * as fs from 'fs-extra';
import * as path from 'path';
import * as packageJSON from '../../package.json';
import { downloadArtifact } from '@electron/get';
const d = debug('libcxx-fetcher');
export async function downloadLibcxxHeaders(outDir: string, electronVersion: string, lib_name: string): Promise<void> {
if (await fs.pathExists(path.resolve(outDir, 'include'))) return;
if (!await fs.pathExists(outDir)) await fs.mkdirp(outDir);
d(`downloading ${lib_name}_headers`);
const headers = await downloadArtifact({
version: electronVersion,
isGeneric: true,
artifactName: `${lib_name}_headers.zip`,
});
d(`unpacking ${lib_name}_headers from ${headers}`);
await extract(headers, { dir: outDir });
}
export async function downloadLibcxxObjects(outDir: string, electronVersion: string, targetArch: string = 'x64'): Promise<void> {
if (await fs.pathExists(path.resolve(outDir, 'libc++.a'))) return;
if (!await fs.pathExists(outDir)) await fs.mkdirp(outDir);
d(`downloading libcxx-objects-linux-${targetArch}`);
const objects = await downloadArtifact({
version: electronVersion,
platform: 'linux',
artifactName: 'libcxx-objects',
arch: targetArch,
});
d(`unpacking libcxx-objects from ${objects}`);
await extract(objects, { dir: outDir });
}
async function main(): Promise<void> {
const libcxxObjectsDirPath = process.env['VSCODE_LIBCXX_OBJECTS_DIR'];
const libcxxHeadersDownloadDir = process.env['VSCODE_LIBCXX_HEADERS_DIR'];
const libcxxabiHeadersDownloadDir = process.env['VSCODE_LIBCXXABI_HEADERS_DIR'];
const arch = process.env['VSCODE_ARCH'];
const electronVersion = packageJSON.devDependencies.electron;
if (!libcxxObjectsDirPath || !libcxxHeadersDownloadDir || !libcxxabiHeadersDownloadDir) {
throw new Error('Required build env not set');
}
await downloadLibcxxObjects(libcxxObjectsDirPath, electronVersion, arch);
await downloadLibcxxHeaders(libcxxHeadersDownloadDir, electronVersion, 'libcxx');
await downloadLibcxxHeaders(libcxxabiHeadersDownloadDir, electronVersion, 'libcxxabi');
}
if (require.main === module) {
main().catch(err => {
console.error(err);
process.exit(1);
});
}
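
For completeness, a minimal sketch of driving the two exported helpers above directly rather than through the `VSCODE_LIBCXX_*` environment variables; the directories, target arch, and Electron version below are illustrative.

import { downloadLibcxxHeaders, downloadLibcxxObjects } from './libcxx-fetcher';

async function fetchLibcxx(electronVersion: string): Promise<void> {
	// Prebuilt libc++ object archives are Linux-only and keyed by target arch.
	await downloadLibcxxObjects('/tmp/libcxx-objects', electronVersion, 'arm64');
	// Header archives exist for both libc++ and libc++abi.
	await downloadLibcxxHeaders('/tmp/libcxx-headers', electronVersion, 'libcxx');
	await downloadLibcxxHeaders('/tmp/libcxxabi-headers', electronVersion, 'libcxxabi');
}

fetchLibcxx('13.5.2').catch(err => {
	console.error(err);
	process.exit(1);
});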


@@ -5,6 +5,10 @@
declare let MonacoEnvironment: monaco.Environment | undefined; declare let MonacoEnvironment: monaco.Environment | undefined;
interface Window {
MonacoEnvironment?: monaco.Environment | undefined;
}
declare namespace monaco { declare namespace monaco {
export type Thenable<T> = PromiseLike<T>; export type Thenable<T> = PromiseLike<T>;


@@ -33,7 +33,6 @@ module.exports = {
stats: { stats: {
all: false, all: false,
modules: true, modules: true,
maxModules: 0,
errors: true, errors: true,
warnings: true, warnings: true,
// our additional options // our additional options

Some files were not shown because too many files have changed in this diff.