Compare commits


11 Commits

Author | SHA1 | Message | Date

Karl Burtram | 83a4316cf8 | Fix typo in vbump (#17937) (#17938) | 2021-12-15 12:19:38 -08:00
Karl Burtram | b4c22c698d | Bump STS to rollback data type changes (#17935) (#17936) | 2021-12-15 12:11:59 -08:00
Alex Ma | b44891657c | Bump to langpack versions (#17910) (#17911) | 2021-12-14 11:07:25 -08:00
  * Bump to langpack versions
  * fixed newline
Alex Ma | f99b27807f | Langpack port for December release (#17905) | 2021-12-14 07:27:32 -08:00
  * Update to langpack for December release (#17904)
  * update to xlfs
  * update to langpack json files
  * [Loc] fix for newlines at end of package.json (#17906)
Charles Gagnon | 93ea635bac | Use standard key events for Notebook keybindings (#17880) (#17882) | 2021-12-09 13:14:52 -08:00
  * Use standard key events for Notebook keybindings
  * Add more
Barbara Valdez | 651f6fdcfc | Fix split cell undo (#17845) (#17873) | 2021-12-08 16:31:32 -08:00
  * fix split cell undo
Alan Ren | cfc8e41798 | open database dashboard (#17871) (#17872) | 2021-12-08 15:35:20 -08:00
Barbara Valdez | bcd2e7313a | Add undo/redo to convert cells (#17835) (#17867) | 2021-12-08 14:52:57 -08:00
  * add undo/redo to convert cells
Alan Ren | 61f094f797 | vbump sts (#17865) (#17868) | 2021-12-08 14:51:52 -08:00
Kim Santiago | d5a3370b65 | fix load publish profile not working (#17851) (#17862) | 2021-12-08 11:23:11 -08:00
Alan Ren | a2d5b2acd1 | fix grid actionbar (#17837) (#17854) | 2021-12-08 10:16:32 -08:00
3534 changed files with 119110 additions and 310997 deletions

View File

@@ -30,11 +30,7 @@
],
// Optionally loads a cached yarn install for the repo
"postCreateCommand": ".devcontainer/cache/restore-diff.sh && sudo chown node:node /workspaces",
"postCreateCommand": ".devcontainer/cache/restore-diff.sh",
"remoteUser": "node",
"hostRequirements": {
"memory": "6gb"
}
"remoteUser": "node"
}

View File

@@ -8,18 +8,13 @@
**/semver/**
**/test/**/*.js
**/node_modules/**
/extensions/**/out/**
/extensions/**/build/**
/extensions/big-data-cluster/src/bigDataCluster/controller/apiGenerated.ts
/extensions/big-data-cluster/src/bigDataCluster/controller/clusterApiGenerated2.ts
/extensions/markdown-language-features/media/**
/extensions/markdown-language-features/notebook-out/**
/extensions/typescript-basics/test/colorize-fixtures/**
/extensions/**/dist/**
/extensions/types
/extensions/typescript-language-features/test-workspace/**
/test/automation/out
# These files are not linted by `yarn eslint`, so we exclude them from being linted in the editor.
# This ensures that if we add new rules and they pass CI, there are also no errors in the editor.
/resources/web/code-web.js
**/vscode-api-tests/testWorkspace/**
**/vscode-api-tests/testWorkspace2/**
**/extensions/**/out/**
**/extensions/**/build/**
**/big-data-cluster/src/bigDataCluster/controller/apiGenerated.ts
**/big-data-cluster/src/bigDataCluster/controller/clusterApiGenerated2.ts
**/extensions/markdown-language-features/media/**
**/extensions/markdown-language-features/notebook-out/**
**/extensions/typescript-basics/test/colorize-fixtures/**
**/extensions/**/dist/**

View File

@@ -7,8 +7,7 @@
},
"plugins": [
"@typescript-eslint",
"jsdoc",
"header"
"jsdoc"
],
"rules": {
"constructor-super": "warn",
@@ -134,7 +133,7 @@
"restrictions": [
"vs/nls",
"**/{vs,sql}/base/{common,node}/**",
"@vscode/*", "*" // node modules
"*" // node modules
]
},
{
@@ -176,7 +175,7 @@
"vs/nls",
"**/{vs,sql}/base/{common,node}/**",
"**/{vs,sql}/base/parts/*/{common,node}/**",
"@vscode/*", "*" // node modules
"*" // node modules
]
},
{
@@ -195,7 +194,7 @@
"vs/css!./**/*",
"**/{vs,sql}/base/{common,browser,node,electron-sandbox,electron-browser}/**",
"**/{vs,sql}/base/parts/*/{common,browser,node,electron-sandbox,electron-browser}/**",
"@vscode/*", "*" // node modules
"*" // node modules
]
},
{
@@ -204,7 +203,7 @@
"vs/nls",
"**/{vs,sql}/base/{common,node,electron-main}/**",
"**/{vs,sql}/base/parts/*/{common,node,electron-main}/**",
"@vscode/*", "*" // node modules
"*" // node modules
]
},
{
@@ -212,8 +211,6 @@
"restrictions": [
"vs/nls",
"azdata",
"mssql",
"azurecore",
"**/{vs,sql}/base/common/**",
"**/{vs,sql}/base/parts/*/common/**",
"**/{vs,sql}/platform/*/common/**"
@@ -254,7 +251,7 @@
"**/{vs,sql}/base/{common,node}/**",
"**/{vs,sql}/base/parts/*/{common,node}/**",
"**/{vs,sql}/platform/*/{common,node}/**",
"@vscode/*", "*" // node modules
"*" // node modules
]
},
{
@@ -276,7 +273,7 @@
"**/{vs,sql}/base/{common,browser,node,electron-sandbox,electron-browser}/**",
"**/{vs,sql}/base/parts/*/{common,browser,node,electron-sandbox,electron-browser}/**",
"**/{vs,sql}/platform/*/{common,browser,node,electron-sandbox,electron-browser}/**",
"@vscode/*", "*" // node modules
"*" // node modules
]
},
{
@@ -287,7 +284,8 @@
"**/{vs,sql}/base/{common,node,electron-main}/**",
"**/{vs,sql}/base/parts/*/{common,node,electron-main}/**",
"**/{vs,sql}/platform/*/{common,node,electron-main}/**",
"@vscode/*", "*" // node modules
"**/{vs,sql}/code/**",
"*" // node modules
]
},
{
@@ -474,8 +472,6 @@
"restrictions": [
"vscode",
"azdata",
"mssql",
"azurecore",
"vs/nls",
"**/{vs,sql}/base/common/**",
"**/{vs,sql}/platform/*/common/**",
@@ -524,7 +520,7 @@
"**/{vs,sql}/workbench/{common,browser,node,electron-sandbox,electron-browser}/**",
"**/{vs,sql}/workbench/api/{common,browser,node,electron-sandbox,electron-browser}/**",
"**/{vs,sql}/workbench/services/*/{common,browser,node,electron-sandbox,electron-browser}/**",
"@vscode/*", "*" // node modules
"*" // node modules
]
},
{
@@ -539,7 +535,7 @@
"vs/workbench/contrib/files/browser/editors/fileEditorInput",
"**/{vs,sql}/workbench/services/**",
"**/{vs,sql}/workbench/test/**",
"@vscode/*", "*" // node modules
"*" // node modules
]
},
{
@@ -554,7 +550,6 @@
"**/{vs,sql}/workbench/common/**",
"**/{vs,sql}/workbench/services/**/common/**",
"**/{vs,sql}/workbench/api/**/common/**",
"**/{vs,sql}/workbench/contrib/**/common/**",
"vs/workbench/contrib/files/common/editors/fileEditorInput", // this should be fine, it only accesses constants from contrib
"vscode-textmate",
"vscode-oniguruma",
@@ -582,8 +577,6 @@
"vs/nls",
"vs/css!./**/*",
"azdata",
"mssql",
"azurecore",
"vscode",
"**/{vs,sql}/base/**/{common,browser,worker}/**",
"**/{vs,sql}/platform/**/{common,browser}/**",
@@ -592,17 +585,14 @@
"**/{vs,sql}/workbench/{common,browser}/**",
"**/{vs,sql}/workbench/api/{common,browser}/**",
"**/{vs,sql}/workbench/services/**/{common,browser}/**",
"**/{vs,sql}/workbench/contrib/**/common/**",
"vscode-textmate",
"vscode-oniguruma",
"iconv-lite-umd",
"jschardet",
"@vscode/vscode-languagedetection",
"@angular/*",
"rxjs/**",
"sanitize-html",
"ansi_up",
"@microsoft/applicationinsights-web"
"ansi_up"
]
},
{
@@ -615,7 +605,7 @@
"**/{vs,sql}/workbench/{common,node}/**",
"**/{vs,sql}/workbench/api/{common,node}/**",
"**/{vs,sql}/workbench/services/**/{common,node}/**",
"@vscode/*", "*" // node modules
"*" // node modules
]
},
{
@@ -646,7 +636,7 @@
"**/{vs,sql}/workbench/{common,browser,node,electron-sandbox,electron-browser}/**",
"**/{vs,sql}/workbench/api/{common,browser,node,electron-sandbox,electron-browser}/**",
"**/{vs,sql}/workbench/services/**/{common,browser,node,electron-sandbox,electron-browser}/**",
"@vscode/*", "*" // node modules
"*" // node modules
]
},
{
@@ -769,8 +759,7 @@
"vscode-textmate",
"vscode-oniguruma",
"iconv-lite-umd",
"jschardet",
"azdataGraph"
"jschardet"
]
},
{
@@ -785,7 +774,7 @@
"**/{vs,sql}/workbench/api/{common,node}/**",
"**/{vs,sql}/workbench/services/**/{common,node}/**",
"**/{vs,sql}/workbench/contrib/**/{common,node}/**",
"@vscode/*", "*" // node modules
"*" // node modules
]
},
{
@@ -818,7 +807,7 @@
"**/{vs,sql}/workbench/api/{common,browser,node,electron-sandbox,electron-browser}/**",
"**/{vs,sql}/workbench/services/**/{common,browser,node,electron-sandbox,electron-browser}/**",
"**/{vs,sql}/workbench/contrib/**/{common,browser,node,electron-sandbox,electron-browser}/**",
"@vscode/*", "*" // node modules
"*" // node modules
]
},
{
@@ -841,7 +830,7 @@
"**/{vs,sql}/base/parts/**/{common,node}/**",
"**/{vs,sql}/platform/**/{common,node}/**",
"**/{vs,sql}/code/**/{common,node}/**",
"@vscode/*", "*" // node modules
"*" // node modules
]
},
{
@@ -853,7 +842,7 @@
"**/{vs,sql}/base/parts/**/{common,browser,node,electron-sandbox,electron-browser}/**",
"**/{vs,sql}/platform/**/{common,browser,node,electron-sandbox,electron-browser}/**",
"**/{vs,sql}/code/**/{common,browser,node,electron-sandbox,electron-browser}/**",
"@vscode/*", "*" // node modules
"*" // node modules
]
},
{
@@ -864,7 +853,7 @@
"**/{vs,sql}/base/parts/**/{common,node,electron-main}/**",
"**/{vs,sql}/platform/**/{common,node,electron-main}/**",
"**/{vs,sql}/code/**/{common,node,electron-main}/**",
"@vscode/*", "*" // node modules
"*" // node modules
]
},
{
@@ -876,7 +865,8 @@
"**/{vs,sql}/platform/**/{common,node}/**",
"**/{vs,sql}/workbench/**/{common,node}/**",
"**/{vs,sql}/server/**",
"@vscode/*", "*" // node modules
"**/{vs,sql}/code/**/{common,node}/**",
"*" // node modules
]
},
{
@@ -947,28 +937,28 @@
"target": "**/test/smoke/**",
"restrictions": [
"**/test/smoke/**",
"@vscode/*", "*" // node modules
"*" // node modules
]
},
{
"target": "**/test/automation/**",
"restrictions": [
"**/test/automation/**",
"@vscode/*", "*" // node modules
"*" // node modules
]
},
{
"target": "**/test/integration/**",
"restrictions": [
"**/test/integration/**",
"@vscode/*", "*" // node modules
"*" // node modules
]
},
{
"target": "**/test/monaco/**",
"restrictions": [
"**/test/monaco/**",
"@vscode/*", "*" // node modules
"*" // node modules
]
},
{
@@ -988,7 +978,7 @@
"target": "**/{node,electron-browser,electron-main}/**/*.test.ts",
"restrictions": [
"**/{vs,sql}/**",
"@vscode/*", "*", // node modules
"*", // node modules
"@angular/*" // {{SQL CARBON EDIT}}
]
},
@@ -996,14 +986,14 @@
"target": "**/{node,electron-browser,electron-main}/**/test/**",
"restrictions": [
"**/{vs,sql}/**",
"@vscode/*", "*" // node modules
"*" // node modules
]
},
{
"target": "**/test/{node,electron-browser,electron-main}/**",
"restrictions": [
"**/{vs,sql}/**",
"@vscode/*", "*" // node modules
"*" // node modules
]
},
{
@@ -1031,16 +1021,6 @@
"xterm*"
]
}
],
"header/header": [
2,
"block",
[
"---------------------------------------------------------------------------------------------",
" * Copyright (c) Microsoft Corporation. All rights reserved.",
" * Licensed under the Source EULA. See License.txt in the project root for license information.",
" *--------------------------------------------------------------------------------------------"
]
]
},
"overrides": [
@@ -1129,47 +1109,6 @@
}
]
}
},
{
"files": [
"src/{vs,sql}/server/*",
// {{SQL CARBON EDIT}} Ignore our own that don't use our copyright
"extensions/azuremonitor/src/prompts/**",
"extensions/azuremonitor/src/typings/findRemove.d.ts",
"extensions/kusto/src/prompts/**",
"extensions/mssql/src/hdfs/webhdfs.ts",
"extensions/mssql/src/prompts/**",
"extensions/mssql/src/typings/bufferStreamReader.d.ts",
"extensions/mssql/src/typings/findRemove.d.ts",
"extensions/notebook/resources/jupyter_config/**",
"extensions/notebook/src/intellisense/text.ts",
"extensions/notebook/src/prompts/**",
"extensions/resource-deployment/src/typings/linuxReleaseInfo.d.ts",
"src/sql/base/browser/ui/table/plugins/autoSizeColumns.plugin.ts",
"src/sql/base/browser/ui/table/plugins/cellSelectionModel.plugin.ts",
"src/sql/base/browser/ui/table/plugins/checkboxSelectColumn.plugin.ts",
"src/sql/base/browser/ui/table/plugins/headerFilter.plugin.ts",
"src/sql/base/browser/ui/table/plugins/rowDetailView.ts",
"src/sql/base/browser/ui/table/plugins/rowMoveManager.plugin.ts",
"src/sql/base/browser/ui/table/plugins/rowSelectionModel.plugin.ts",
"src/sql/workbench/services/notebook/browser/outputs/factories.ts",
"src/sql/workbench/services/notebook/browser/outputs/mimemodel.ts",
"src/sql/workbench/services/notebook/browser/outputs/registry.ts",
"src/sql/workbench/services/notebook/browser/outputs/renderMimeInterfaces.ts",
"src/sql/workbench/services/notebook/browser/outputs/sanitizer.ts",
"src/sql/workbench/contrib/notebook/browser/models/outputProcessor.ts",
"src/sql/workbench/contrib/notebook/browser/turndownPluginGfm.ts",
"src/sql/workbench/services/notebook/common/nbformat.ts",
"src/sql/workbench/services/notebook/browser/outputs/renderers.ts",
"src/sql/workbench/services/notebook/browser/outputs/tableRenderers.ts"
],
"rules": {
"header/header": [
// hygiene.js still checks that all files (even those in this directory) are MIT licensed.
"off"
]
}
}
]
}

View File

@@ -16,7 +16,6 @@
{
"ignoreVoid": true
}
],
"jsdoc/check-param-names": "error"
]
}
}

View File

@@ -1,21 +0,0 @@
# https://git-scm.com/docs/git-blame#Documentation/git-blame.txt---ignore-revs-fileltfilegt
# mjbvz: Fix spacing
13f4f052582bcec3d6c6c6a70d995c9dee2cac13
# mjbvz: Add script to run build with noImplicitOverride
ae1452eea678f5266ef513f22dacebb90955d6c9
# alexdima: Revert "bump version"
537ba0ef1791c090bb18bc68d727816c0451c117
# alexdima: bump version
387a0dcb82df729e316ca2518a9ed81a75482b18
# joaomoreno: add ghooks dev dependency
0dfc06e0f9de5925de792cdf9f0e6597bb25908f
# mjbvz: organize imports
494cbbd02d67e87727ec885f98d19551aa33aad1
a3cb14be7f2cceadb17adf843675b1a59537dbbd
ee1655a82ebdfd38bf8792088a6602c69f7bbd94
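
The comment at the top of this deleted file points to Git's documentation for the ignore-revs mechanism it fed. As a reference sketch (not part of the diff), blame can be told to skip the listed formatting-only commits either per invocation or once via repository config; the source path below is only a hypothetical example:

    # skip the revisions listed in the file for a single blame run
    git blame --ignore-revs-file .git-blame-ignore-revs src/vs/editor/editor.api.ts

    # or set it once so plain `git blame` always consults the file
    git config blame.ignoreRevsFile .git-blame-ignore-revs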

3
.gitattributes vendored
View File

@@ -7,5 +7,4 @@ ThirdPartyNotices.txt eol=crlf
*.cmd eol=crlf
*.ps1 eol=lf
*.sh eol=lf
*.rtf -text
*.json linguist-language=jsonc
*.rtf -text

1
.github/CODEOWNERS vendored
View File

@@ -11,7 +11,6 @@
/extensions/query-history/ @Charles-Gagnon
/extensions/resource-deployment/ @Charles-Gagnon
/extensions/schema-compare/ @kisantia
/extensions/sql-bindings/ @vasubhog @Charles-Gagnon @lucyzhang929 @chlafreniere @MaddyDev
/extensions/sql-database-projects/ @Benjin @kisantia
/extensions/mssql/config.json @Charles-Gagnon @alanrenmsft @kburtram

View File

@@ -2,9 +2,8 @@
Needs Logs:
comment: "We need more info to debug your particular issue. If you could attach your logs to the issue (ensure no private data is in them), it would help us fix the issue much faster.
First open the Settings page, find the `Mssql: Tracing Level` setting and change that to `All` then restart ADS and repro your issue.
Next there are two types of logs to collect:
There are two types of logs to collect:
**Console Logs**
@@ -28,22 +27,6 @@ Next there are two types of logs to collect:
- This will open the log folder locally. Please zip up this folder and attach it to the issue."
# actions for Needs Logs - Azure label
Needs Logs - Azure:
comment: "We need more info to debug your Azure Active Directory issue. If you could attach your logs to the issue (ensure no private data is in them), it would help us fix the issue much faster.
- In the settings menu, find the setting titled `Azure: Logging Level` and select the `Verbose` option
- Run the process that produces your error
- Open command palette (Click **View** -> **Command Palette**)
- Run the command: **`Developer: Open Logs Folder`**
- Follow this path to find the Azure Accounts log file: `[default log folder]/exthost1/output_logging_[earliest timestamp]/#-Azure Acounts.log`
- Please attach the Azure-Accounts.log file to the issue."
# actions for Out of Scope label
Out of Scope:
comment: "Thank you for opening this suggestion! This enhancement is not planned in our

View File

@@ -5,8 +5,6 @@
"greazer",
"donjayamanne",
"jilljac",
"IanMatthewHuff",
"tanhakabir",
"dynamicwebpaige"
"IanMatthewHuff"
]
}

View File

@@ -12,73 +12,73 @@ on:
jobs:
windows:
name: Windows
runs-on: windows-2019
timeout-minutes: 30
env:
CHILD_CONCURRENCY: "1"
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@v2
name: Windows
runs-on: windows-latest
timeout-minutes: 30
env:
CHILD_CONCURRENCY: "1"
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@v2.2.0
- uses: actions/setup-node@v2
with:
node-version: 14
- uses: actions/setup-node@v2
with:
node-version: 12
- uses: actions/setup-python@v2
with:
python-version: "2.x"
- uses: actions/setup-python@v2
with:
python-version: "2.x"
# {{SQL CARBON EDIT}} Skip caching for now
# - name: Compute node modules cache key
# id: nodeModulesCacheKey
# run: echo "::set-output name=value::$(node build/azure-pipelines/common/computeNodeModulesCacheKey.js)"
# - name: Cache node_modules archive
# id: cacheNodeModules
# uses: actions/cache@v2
# with:
# path: ".build/node_modules_cache"
# key: "${{ runner.os }}-cacheNodeModulesArchive-${{ steps.nodeModulesCacheKey.outputs.value }}"
# - name: Extract node_modules archive
# if: ${{ steps.cacheNodeModules.outputs.cache-hit == 'true' }}
# run: 7z.exe x .build/node_modules_cache/cache.7z -aos
# - name: Get yarn cache directory path
# id: yarnCacheDirPath
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
# run: echo "::set-output name=dir::$(yarn cache dir)"
# - name: Cache yarn directory
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
# uses: actions/cache@v2
# with:
# path: ${{ steps.yarnCacheDirPath.outputs.dir }}
# key: ${{ runner.os }}-yarnCacheDir-${{ steps.nodeModulesCacheKey.outputs.value }}
# restore-keys: ${{ runner.os }}-yarnCacheDir-
# {{SQL CARBON EDIT}} Skip caching for now
# - name: Compute node modules cache key
# id: nodeModulesCacheKey
# run: echo "::set-output name=value::$(node build/azure-pipelines/common/computeNodeModulesCacheKey.js)"
# - name: Cache node_modules archive
# id: cacheNodeModules
# uses: actions/cache@v2
# with:
# path: ".build/node_modules_cache"
# key: "${{ runner.os }}-cacheNodeModulesArchive-${{ steps.nodeModulesCacheKey.outputs.value }}"
# - name: Extract node_modules archive
# if: ${{ steps.cacheNodeModules.outputs.cache-hit == 'true' }}
# run: 7z.exe x .build/node_modules_cache/cache.7z -aos
# - name: Get yarn cache directory path
# id: yarnCacheDirPath
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
# run: echo "::set-output name=dir::$(yarn cache dir)"
# - name: Cache yarn directory
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
# uses: actions/cache@v2
# with:
# path: ${{ steps.yarnCacheDirPath.outputs.dir }}
# key: ${{ runner.os }}-yarnCacheDir-${{ steps.nodeModulesCacheKey.outputs.value }}
# restore-keys: ${{ runner.os }}-yarnCacheDir-
- name: Execute yarn
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }} {{SQL CARBON EDIT}} Skipping caching for now
env:
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
run: yarn --frozen-lockfile --network-timeout 180000
# - name: Create node_modules archive {{SQL CARBON EDIT}} Skip caching for now
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
# run: |
# mkdir -Force .build
# node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt
# mkdir -Force .build/node_modules_cache
# 7z.exe a .build/node_modules_cache/cache.7z -mx3 `@.build/node_modules_list.txt
- name: Execute yarn
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }} {{SQL CARBON EDIT}} Skipping caching for now
env:
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
run: yarn --frozen-lockfile --network-timeout 180000
# - name: Create node_modules archive {{SQL CARBON EDIT}} Skip caching for now
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
# run: |
# mkdir -Force .build
# node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt
# mkdir -Force .build/node_modules_cache
# 7z.exe a .build/node_modules_cache/cache.7z -mx3 `@.build/node_modules_list.txt
- name: Compile and Download
run: yarn npm-run-all --max_old_space_size=4095 -lp compile "electron x64" # {{SQL CARBON EDIT}} Remove unused options playwright-install download-builtin-extensions
- name: Compile and Download
run: yarn npm-run-all --max_old_space_size=4095 -lp compile "electron x64" # {{SQL CARBON EDIT}} Remove unused options playwright-install download-builtin-extensions
- name: Run Unit Tests (Electron)
run: .\scripts\test.bat
- name: Run Unit Tests (Electron)
run: .\scripts\test.bat
# - name: Run Unit Tests (Browser) {{SQL CARBON EDIT}} disable for now
# run: yarn test-browser --browser chromium
# - name: Run Unit Tests (Browser) {{SQL CARBON EDIT}} disable for now
# run: yarn test-browser --browser chromium
# - name: Run Integration Tests (Electron) {{SQL CARBON EDIT}} disable for now
# run: .\scripts\test-integration.bat
# - name: Run Integration Tests (Electron) {{SQL CARBON EDIT}} disable for now
# run: .\scripts\test-integration.bat
linux:
name: Linux
@@ -101,7 +101,7 @@ jobs:
- uses: actions/setup-node@v2
with:
node-version: 14
node-version: 12
# {{SQL CARBON EDIT}} Skip caching for now
# - name: Compute node modules cache key
# id: nodeModulesCacheKey
@@ -111,8 +111,8 @@ jobs:
# uses: actions/cache@v2
# with:
# path: "**/node_modules"
# key: ${{ runner.os }}-cacheNodeModules14-${{ steps.nodeModulesCacheKey.outputs.value }}
# restore-keys: ${{ runner.os }}-cacheNodeModules14-
# key: ${{ runner.os }}-cacheNodeModules13-${{ steps.nodeModulesCacheKey.outputs.value }}
# restore-keys: ${{ runner.os }}-cacheNodeModules13-
# - name: Get yarn cache directory path
# id: yarnCacheDirPath
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
@@ -131,15 +131,12 @@ jobs:
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
run: yarn --frozen-lockfile --network-timeout 180000
# Don't inline source maps so that we generate code coverage for ts files
- name: Compile and Download
run: yarn npm-run-all --max_old_space_size=4095 -lp compile "electron x64" playwright-install download-builtin-extensions
env:
SQL_NO_INLINE_SOURCEMAP: 1
- name: Run Unit Tests (Electron)
id: electron-unit-tests
run: DISPLAY=:10 ./scripts/test.sh --runGlob "**/sql/**/*.test.js" --coverage
run: DISPLAY=:10 ./scripts/test.sh --coverage --runGlob "**/sql/**/*.test.js" # {{SQL CARBON EDIT}} Run only our tests with coverage
- name: Run Extension Unit Tests (Electron)
id: electron-extension-unit-tests
@@ -173,7 +170,7 @@ jobs:
- uses: actions/setup-node@v2
with:
node-version: 14
node-version: 12
# {{SQL CARBON EDIT}} Skip caching for now
# - name: Compute node modules cache key
@@ -184,8 +181,8 @@ jobs:
# uses: actions/cache@v2
# with:
# path: "**/node_modules"
# key: ${{ runner.os }}-cacheNodeModules14-${{ steps.nodeModulesCacheKey.outputs.value }}
# restore-keys: ${{ runner.os }}-cacheNodeModules14-
# key: ${{ runner.os }}-cacheNodeModules13-${{ steps.nodeModulesCacheKey.outputs.value }}
# restore-keys: ${{ runner.os }}-cacheNodeModules13-
# - name: Get yarn cache directory path
# id: yarnCacheDirPath
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
@@ -198,7 +195,7 @@ jobs:
# key: ${{ runner.os }}-yarnCacheDir-${{ steps.nodeModulesCacheKey.outputs.value }}
# restore-keys: ${{ runner.os }}-yarnCacheDir-
- name: Execute yarn
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }} {{SQL CARBON EDIT}} Skip caching for now
if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
env:
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
@@ -235,7 +232,7 @@ jobs:
- uses: actions/setup-node@v2
with:
node-version: 14
node-version: 12
- name: Compute node modules cache key
id: nodeModulesCacheKey
@@ -245,8 +242,7 @@ jobs:
uses: actions/cache@v2
with:
path: "**/node_modules"
key: ${{ runner.os }}-cacheNodeModules14-${{ steps.nodeModulesCacheKey.outputs.value }}
restore-keys: ${{ runner.os }}-cacheNodeModules14-
key: ${{ runner.os }}-cacheNodeModules13-${{ steps.nodeModulesCacheKey.outputs.value }}
- name: Get yarn cache directory path
id: yarnCacheDirPath
if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
@@ -276,23 +272,15 @@ jobs:
- name: Run Valid Layers Checks
run: yarn valid-layers-check
- name: Run Strict Compile Options # {{SQL CARBON EDIT}} add step
run: yarn strict-vscode
# - name: Run Monaco Editor Checks {{SQL CARBON EDIT}} Remove Monaco checks
# run: yarn monaco-compile-check
- name: Compile /build/
run: yarn --cwd build compile
- name: Run eslint
run: yarn eslint
# {{SQL CARBON EDIT}} Don't need this
# - name: Run Monaco Editor Checks
# run: yarn monaco-compile-check
# {{SQL CARBON EDIT}} Don't need this
# - name: Run vscode-dts Compile Checks
# run: yarn vscode-dts-compile-check
- name: Run Trusted Types Checks
run: yarn tsec-compile-check

3
.gitignore vendored
View File

@@ -15,6 +15,3 @@ test_data/
test-results/
yarn-error.log
*.vsix
vscode.lsif
vscode.db
/.profile-oss

View File

@@ -1,6 +0,0 @@
{
"project": "src/tsconfig.json",
"source": "./package.json",
"package": "package.json",
"out": "vscode.lsif"
}

27
.vscode/launch.json vendored
View File

@@ -48,7 +48,7 @@
"cascadeTerminateToConfigurations": [
"Attach to Extension Host"
],
"userDataDir": "${workspaceFolder}/.profile-oss",
"userDataDir": false,
"pauseForSourceMap": false,
"outFiles": [
"${workspaceFolder}/out/**/*.js"
@@ -294,31 +294,6 @@
"group": "5_samples"
},
"timeout": 30000
},
{
"name": "Run Sample Notebook Provider Extension",
"type": "sqlopsExtensionHost",
"request": "launch",
"windows": {
"runtimeExecutable": "${workspaceFolder}/scripts/sql.bat"
},
"osx": {
"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh"
},
"linux": {
"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh"
},
"args": [
"--extensionDevelopmentPath=${workspaceRoot}/samples/sample-notebook-provider"
],
"outFiles": [
"${workspaceRoot}/samples/sample-notebook-provider/out/**/*.js"
],
"preLaunchTask": "Watch sample-notebook-provider",
"presentation": {
"group": "5_samples"
},
"timeout": 30000
}
],
"compounds": [

View File

@@ -7,7 +7,7 @@
{
"kind": 2,
"language": "github-issues",
"value": "$repo=repo:microsoft/vscode\n$milestone=milestone:\"August 2021\""
"value": "$repo=repo:microsoft/vscode\n$milestone=milestone:\"June 2021\""
},
{
"kind": 1,

View File

@@ -7,7 +7,7 @@
{
"kind": 2,
"language": "github-issues",
"value": "$REPOS=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-js-debug repo:microsoft/vscode-remote-release repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-settings-sync-server repo:microsoft/vscode-emmet-helper repo:microsoft/vscode-remotehub\n\n$MILESTONE=milestone:\"July 2021\""
"value": "$REPOS=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-js-debug repo:microsoft/vscode-remote-release repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-settings-sync-server repo:microsoft/vscode-emmet-helper repo:microsoft/vscode-remotehub\n\n$MILESTONE=milestone:\"May 2021\""
},
{
"kind": 1,

View File

@@ -7,7 +7,7 @@
{
"kind": 2,
"language": "github-issues",
"value": "$REPOS=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-js-debug repo:microsoft/vscode-remote-release repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-settings-sync-server repo:microsoft/vscode-remotehub repo:microsoft/vscode-emmet-helper\n\n$MILESTONE=milestone:\"July 2021\"\n\n$MINE=assignee:@me"
"value": "$REPOS=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-js-debug repo:microsoft/vscode-remote-release repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-settings-sync-server repo:microsoft/vscode-remotehub\n\n$MILESTONE=milestone:\"May 2021\"\n\n$MINE=assignee:@me"
},
{
"kind": 1,
@@ -62,7 +62,7 @@
{
"kind": 2,
"language": "github-issues",
"value": "$REPOS $MILESTONE $MINE is:issue is:closed label:feature-request label:verification-needed -label:verified"
"value": "$REPOS $MILESTONE $MINE is:issue is:closed label:feature-request label:verification-needed"
},
{
"kind": 1,

View File

@@ -7,7 +7,7 @@
{
"kind": 2,
"language": "github-issues",
"value": "// list of repos we work in\n$repos=repo:microsoft/vscode repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks repo:microsoft/vscode-internalbacklog\n\n// current milestone name\n$milestone=milestone:\"August 2021\""
"value": "// list of repos we work in\n$repos=repo:microsoft/vscode repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks repo:microsoft/vscode-internalbacklog\n\n// current milestone name\n$milestone=milestone:\"June 2021\""
},
{
"kind": 1,

View File

@@ -2,46 +2,55 @@
{
"kind": 1,
"language": "markdown",
"value": "### Bug Verification Queries\n\nBefore shipping we want to verify _all_ bugs. That means when a bug is fixed we check that the fix actually works. It's always best to start with bugs that you have filed and the proceed with bugs that have been filed from users outside the development team. "
"value": "### Bug Verification Queries\n\nBefore shipping we want to verify _all_ bugs. That means when a bug is fixed we check that the fix actually works. It's always best to start with bugs that you have filed and the proceed with bugs that have been filed from users outside the development team. ",
"editable": true
},
{
"kind": 1,
"language": "markdown",
"value": "#### Config: update list of `repos` and the `milestone`"
"value": "#### Config: update list of `repos` and the `milestone`",
"editable": true
},
{
"kind": 2,
"language": "github-issues",
"value": "$repos=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks repo:microsoft/vscode-emmet-helper repo:microsoft/vscode-jupyter repo:microsoft/vscode-python\r\n$milestone=milestone:\"July 2021\""
"value": "$repos=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks \n$milestone=milestone:\"March 2021\"",
"editable": true
},
{
"kind": 1,
"language": "markdown",
"value": "### Bugs You Filed"
"value": "### Bugs You Filed",
"editable": true
},
{
"kind": 2,
"language": "github-issues",
"value": "$repos $milestone is:closed -assignee:@me label:bug -label:verified -label:*duplicate author:@me"
"value": "$repos $milestone is:closed -assignee:@me label:bug -label:verified -label:*duplicate author:@me",
"editable": false
},
{
"kind": 1,
"language": "markdown",
"value": "### Bugs From Outside"
"value": "### Bugs From Outside",
"editable": true
},
{
"kind": 2,
"language": "github-issues",
"value": "$repos $milestone is:closed -assignee:@me label:bug -label:verified -label:*duplicate -author:@me -assignee:@me label:bug -label:verified -author:@me -author:aeschli -author:alexdima -author:alexr00 -author:bpasero -author:chrisdias -author:chrmarti -author:connor4312 -author:dbaeumer -author:deepak1556 -author:eamodio -author:egamma -author:gregvanl -author:isidorn -author:JacksonKearl -author:joaomoreno -author:jrieken -author:lramos15 -author:lszomoru -author:meganrogge -author:misolori -author:mjbvz -author:rebornix -author:RMacfarlane -author:roblourens -author:sana-ajani -author:sandy081 -author:sbatten -author:Tyriar -author:weinand -author:rzhao271 -author:kieferrm -author:TylerLeonhardt -author:bamurtaugh -author:hediet -author:joyceerhl -author:rchiodo -author:IanMatthewHuff"
"value": "$repos $milestone is:closed -assignee:@me label:bug -label:verified -label:*duplicate -author:@me -assignee:@me label:bug -label:verified -author:@me -author:aeschli -author:alexdima -author:alexr00 -author:bpasero -author:chrisdias -author:chrmarti -author:connor4312 -author:dbaeumer -author:deepak1556 -author:eamodio -author:egamma -author:gregvanl -author:isidorn -author:JacksonKearl -author:joaomoreno -author:jrieken -author:lramos15 -author:lszomoru -author:meganrogge -author:misolori -author:mjbvz -author:rebornix -author:RMacfarlane -author:roblourens -author:sana-ajani -author:sandy081 -author:sbatten -author:Tyriar -author:weinand -author:rzhao271 -author:kieferrm -author:TylerLeonhardt -author:bamurtaugh",
"editable": false
},
{
"kind": 1,
"language": "markdown",
"value": "### All"
"value": "### All",
"editable": true
},
{
"kind": 2,
"language": "github-issues",
"value": "$repos $milestone is:closed -assignee:@me label:bug -label:verified -label:*duplicate"
"value": "$repos $milestone is:closed -assignee:@me label:bug -label:verified -label:*duplicate",
"editable": false
}
]

View File

@@ -4,7 +4,6 @@
"files.exclude": {
".git": true,
".build": true,
".profile-oss": true,
"**/.DS_Store": true,
"build/**/*.js": {
"when": "$(basename).ts"

26
.vscode/tasks.json vendored
View File

@@ -100,6 +100,20 @@
"group": "build",
"problemMatcher": []
},
{
"type": "npm",
"script": "strict-vscode-watch",
"label": "TS - Strict VSCode",
"isBackground": true,
"presentation": {
"reveal": "never"
},
"problemMatcher": {
"base": "$tsc-watch",
"owner": "typescript-vscode",
"applyTo": "allDocuments"
}
},
{
"type": "npm",
"script": "watch-webd",
@@ -229,18 +243,6 @@
"reveal": "never"
},
"group": "build"
},
{
"type": "npm",
"script": "watch",
"label": "Watch sample-notebook-provider",
"path": "./samples/sample-notebook-provider/package.json",
"problemMatcher": "$tsc-watch",
"isBackground": true,
"presentation": {
"reveal": "never"
},
"group": "build"
}
]
}

View File

@@ -1,3 +1,3 @@
disturl "https://electronjs.org/headers"
target "13.6.6"
target "12.0.9"
runtime "electron"

View File

@@ -1,10 +1,14 @@
# Change Log
## Version 1.36.1
* Release date: April 22, 2022
## Version 1.33.1
* Release date: Nov 4, 2021
* Release status: General Availability
## What's new in this version
- April Hotfix addressing these issues https://github.com/microsoft/azuredatastudio/milestone/88?closed=1.
## Hotfix release
- Fix for [#16535 Unable to See Saved Connections in Restricted Mode](https://github.com/microsoft/azuredatastudio/issues/17535)
- Fix for [#17579 Can't type in Notebook code cell after editing text cell](https://github.com/microsoft/azuredatastudio/issues/17579)
| Platform |
| --------------------------------------- |
@@ -16,107 +20,31 @@
| [Linux RPM][linux-rpm] |
| [Linux DEB][linux-deb] |
[win-user]: https://go.microsoft.com/fwlink/?linkid=2193235
[win-system]: https://go.microsoft.com/fwlink/?linkid=2193326
[win-zip]: https://go.microsoft.com/fwlink/?linkid=2193236
[osx-zip]: https://go.microsoft.com/fwlink/?linkid=2192971
[linux-zip]: https://go.microsoft.com/fwlink/?linkid=2193237
[linux-rpm]: https://go.microsoft.com/fwlink/?linkid=2193238
[linux-deb]: https://go.microsoft.com/fwlink/?linkid=2193327
## Version 1.36.2
* Release date: May 20, 2022
* Release status: General Availability
## What's new in this version
- Fix connectivity issue with PBI data source
- Fix query plan zoom and icon issues
- Issues fixed in this release https://github.com/microsoft/azuredatastudio/milestone/89?closed=1
## Version 1.36.1
* Release date: April 22, 2022
* Release status: General Availability
## What's new in this version
* April Hotfix addressing these issues https://github.com/microsoft/azuredatastudio/milestone/88?closed=1.
* Hotfix RCA - https://github.com/microsoft/azuredatastudio/wiki/ADS-April-2022-Hotfix-RCA
## Version 1.36.0
* Release date: April 20, 2022
* Release status: General Availability
## What's new in this version
- General Availability of the Azure SQL Migration Extension for ADS
- Support for .NET Interactive Notebooks Extension
- New Table Designer Features including support for System Versioned, Graph and Memory Optimized Tables
- Query Plan Viewer Updates including warning and parallelism icons, the option to disable tooltips and support for opening .sqlplan files
- Improvements in SQL Projects and Schema Compare
## Version 1.35.1
* Release date: March 17, 2022
* Release status: General Availability
## Hotfix release
- Fix for [Excel number format #18615](https://github.com/microsoft/azuredatastudio/issues/18615)
- Fix for [Geometry Data Type Returned as Unknown Charset in Results Grid #18630](https://github.com/microsoft/azuredatastudio/issues/18630)
## Version 1.35.0
* Release date: February 24, 2022
* Release status: General Availability
## What's new in this version
* New Features:
* Table Designer - Added functionality for creation and management of tables for SQL Servers. Built using DacFx framework
* Query Plan Viewer - Added functionality for users to view a graphic view of estimated and actual query plans without need for an extension
* Azure Arc Extension - Updated the Data Controller deployment wizard and the SQL Managed Instance - Azure Arc deployment wizard to reflect the deployment experience in Azure Portal
* Bug Fixes:
* Azure Arc Extension - SQL Managed Instance-Azure Arc is now fixed for both indirect connectivity mode and direct connectivity mode
* Notebooks - Support for keyboard navigation between cells to minimize mouse clicking
## Version 1.34.0
* Release date: December 15, 2021
* Release status: General Availability
## What's new in this version
* New Features:
* Added “Currently restoring backup file” in the migration progress details page of Azure SQL Migration extension when backup files location is Azure Storage blob container
* Enhancements to diagnostics in Azure SQL Migration extension
* Support for project build with .NET 6 in SQL Database Projects extension
* Publish to container in SQL Database Projects extension
* Undo and redo support for notebook cell-level operations
* Extension Updates:
* Azure SQL Migration
* Langpacks
* SQL Database Projects
* Bug Fixes:
* Fix for multiple database migrations when using network share as backup files location in Azure SQL Migration extension
* Fix for multiple database migrations when using blob storage containers as backup files location in Azure SQL Migration extension
* Fix to pre-populate target database names in the migration wizard in Azure SQL Migration extension
* Fix to column sorting in grids where the presence of null values could lead to unexpected results
* Fix for Python upgrades when two or more notebooks were open
## Version 1.33.1
* Release date: Nov 4, 2021
* Release status: General Availability
## Hotfix release
- Fix for [#16535 Unable to See Saved Connections in Restricted Mode](https://github.com/microsoft/azuredatastudio/issues/17535)
- Fix for [#17579 Can't type in Notebook code cell after editing text cell](https://github.com/microsoft/azuredatastudio/issues/17579)
[win-user]: https://go.microsoft.com/fwlink/?linkid=2176805
[win-system]: https://go.microsoft.com/fwlink/?linkid=2175910
[win-zip]: https://go.microsoft.com/fwlink/?linkid=2176806
[osx-zip]: https://go.microsoft.com/fwlink/?linkid=2176807
[linux-zip]: https://go.microsoft.com/fwlink/?linkid=2176505
[linux-rpm]: https://go.microsoft.com/fwlink/?linkid=2176005
[linux-deb]: https://go.microsoft.com/fwlink/?linkid=2176006
## Version 1.33.0
* Release date: October 27, 2021
* Release status: General Availability
## What's new in this version
* New Notebook Features:
* Notebook Views
* Split cell support
* Keyboard shortcuts for Markdown Toolbar Cells
* Notebook Views
* Split cell support
* Keyboard shortcuts for Markdown Toolbar Cells
* Ctrl/Cmd + B = Bold Text
* Ctrl/Cmd + I = Italicize Text
* Ctrl/Cmd + U = Underline Text
* Ctrl/Cmd + Shift + K = Add Code Block
* Ctrl/Cmd + Shift + H = Highlight Text
* Book improvements
* Book improvements
* Add a new section
* Drag and Drop
* Extension Updates:
* Import
* Langpacks
@@ -132,8 +60,8 @@
* Release date: August 18, 2021
* Release status: General Availability
* Extension Updates:
* Arc/Az CLI extensions - Azure Arc extension now uses Azure CLI instead of Azure Data CLI for deploying and interacting with Azure Arc
instances
* Arc/Az CLI extensions - Azure Arc extension now uses Azure CLI instead of Azure Data CLI for deploying and interacting with Azure Arc
instances
* Langpacks
* SQL Database Projects
* Azure Monitor

View File

@@ -131,10 +131,10 @@ Copyright (c) Microsoft Corporation. All rights reserved.
Licensed under the [Source EULA](LICENSE.txt).
[win-user]: https://go.microsoft.com/fwlink/?linkid=2193235
[win-system]: https://go.microsoft.com/fwlink/?linkid=2193326
[win-zip]: https://go.microsoft.com/fwlink/?linkid=2193236
[osx-zip]: https://go.microsoft.com/fwlink/?linkid=2192971
[linux-zip]: https://go.microsoft.com/fwlink/?linkid=2193237
[linux-rpm]: https://go.microsoft.com/fwlink/?linkid=2193238
[linux-deb]: https://go.microsoft.com/fwlink/?linkid=2193327
[win-user]: https://go.microsoft.com/fwlink/?linkid=2176805
[win-system]: https://go.microsoft.com/fwlink/?linkid=2175910
[win-zip]: https://go.microsoft.com/fwlink/?linkid=2176806
[osx-zip]: https://go.microsoft.com/fwlink/?linkid=2176807
[linux-zip]: https://go.microsoft.com/fwlink/?linkid=2176505
[linux-rpm]: https://go.microsoft.com/fwlink/?linkid=2176005
[linux-deb]: https://go.microsoft.com/fwlink/?linkid=2176006

View File

@@ -8,151 +8,149 @@ The original copyright notices and the licenses under which Microsoft received
such components are set forth below. Microsoft reserves all rights not
expressly granted herein, whether by implication, estoppel or otherwise.
angular: https://github.com/angular/angular
angular2-grid: https://github.com/BTMorton/angular2-grid
angular2-slickgrid: https://github.com/Microsoft/angular2-slickgrid
applicationinsights: https://github.com/Microsoft/ApplicationInsights-node.js
axios: https://github.com/axios/axios
bootstrap: https://github.com/twbs/bootstrap
chart.js: https://github.com/Timer/chartjs
chokidar: https://github.com/paulmillr/chokidar
comment-json: https://github.com/kaelzhang/node-comment-json
core-js: https://github.com/zloirock/core-js
decompress: https://github.com/kevva/decompress
emmet: https://github.com/emmetio/emmet
error-ex: https://github.com/Qix-/node-error-ex
escape-string-regexp: https://github.com/sindresorhus/escape-string-regexp
fast-plist: https://github.com/Microsoft/node-fast-plist
figures: https://github.com/sindresorhus/figures
find-remove: https://www.npmjs.com/package/find-remove
fs-extra: https://github.com/jprichardson/node-fs-extra
gc-signals: https://github.com/Microsoft/node-gc-signals
getmac: https://github.com/bevry/getmac
graceful-fs: https://github.com/isaacs/node-graceful-fs
gridstack: https://github.com/gridstack/gridstack.js
html-query-plan: https://github.com/JustinPealing/html-query-plan
http-proxy-agent: https://github.com/TooTallNate/node-https-proxy-agent
https-proxy-agent: https://github.com/TooTallNate/node-https-proxy-agent
iconv-lite: https://github.com/ashtuchkin/iconv-lite
jquery: https://github.com/jquery/jquery
jquery-ui: https://github.com/jquery/jquery-ui
jquery.event.drag: https://github.com/devongovett/jquery.event.drag
jschardet: https://github.com/aadsm/jschardet
jupyter-powershell: https://github.com/vors/jupyter-powershell
JupyterLab: https://github.com/jupyterlab/jupyterlab
keytar: https://github.com/atom/node-keytar
make-error: https://github.com/JsCommunity/make-error
mark.js: https://github.com/julmot/mark.js
minimist: https://github.com/substack/minimist
moment: https://github.com/moment/moment
mxgraph: https://github.com/jgraph/mxgraph
native-keymap: https://github.com/Microsoft/node-native-keymap
native-watchdog: https://github.com/Microsoft/node-native-watchdog
ng2-charts: https://github.com/valor-software/ng2-charts
node-fetch: https://github.com/bitinn/node-fetch
node-pty: https://github.com/Tyriar/node-pty
nsfw: https://github.com/Axosoft/nsfw
optimist: https://github.com/substack/node-optimist
primeng: https://github.com/primefaces/primeng
process-nextick-args: https://github.com/calvinmetcalf/process-nextick-args
pty.js: https://github.com/chjj/pty.js
pyzmq: https://github.com/zeromq/pyzmq
qs: https://github.com/ljharb/qs
reflect-metadata: https://github.com/rbuckton/reflect-metadata
request: https://github.com/request/request
rxjs: https://github.com/ReactiveX/RxJS
semver: https://github.com/npm/node-semver
slickgrid: https://github.com/6pac/SlickGrid
sqltoolsservice: https://github.com/Microsoft/sqltoolsservice
svg.js: https://github.com/svgdotjs/svg.js
systemjs: https://github.com/systemjs/systemjs
temp-write: https://github.com/sindresorhus/temp-write
turndown: https://github.com/domchristie/turndown
turndown-plugin-gfm: https://github.com/domchristie/turndown-plugin-gfm
underscore: https://github.com/jashkenas/underscore
v8-profiler: https://github.com/node-inspector/v8-profiler
vscode: https://github.com/microsoft/vscode
vscode-debugprotocol: https://github.com/Microsoft/vscode-debugadapter-node
vscode-languageclient: https://github.com/Microsoft/vscode-languageserver-node
vscode-nls: https://github.com/Microsoft/vscode-nls
vscode-ripgrep: https://github.com/roblourens/vscode-ripgrep
vscode-textmate: https://github.com/Microsoft/vscode-textmate
winreg: https://github.com/fresc81/node-winreg
xmldom: https://github.com/xmldom/xmldom
xml-formatter: https://github.com/chrisbottin/xml-formatter
xterm: https://github.com/sourcelair/xterm.js
yargs: https://github.com/yargs/yargs
yauzl: https://github.com/thejoshwolfe/yauzl
zone.js: https://www.npmjs.com/package/zone
angular: https://github.com/angular/angular
angular2-grid: https://github.com/BTMorton/angular2-grid
angular2-slickgrid: https://github.com/Microsoft/angular2-slickgrid
applicationinsights: https://github.com/Microsoft/ApplicationInsights-node.js
axios: https://github.com/axios/axios
bootstrap: https://github.com/twbs/bootstrap
chart.js: https://github.com/Timer/chartjs
chokidar: https://github.com/paulmillr/chokidar
comment-json: https://github.com/kaelzhang/node-comment-json
core-js: https://github.com/zloirock/core-js
decompress: https://github.com/kevva/decompress
emmet: https://github.com/emmetio/emmet
error-ex: https://github.com/Qix-/node-error-ex
escape-string-regexp: https://github.com/sindresorhus/escape-string-regexp
fast-plist: https://github.com/Microsoft/node-fast-plist
figures: https://github.com/sindresorhus/figures
find-remove: https://www.npmjs.com/package/find-remove
fs-extra: https://github.com/jprichardson/node-fs-extra
gc-signals: https://github.com/Microsoft/node-gc-signals
getmac: https://github.com/bevry/getmac
graceful-fs: https://github.com/isaacs/node-graceful-fs
gridstack: https://github.com/gridstack/gridstack.js
html-query-plan: https://github.com/JustinPealing/html-query-plan
http-proxy-agent: https://github.com/TooTallNate/node-https-proxy-agent
https-proxy-agent: https://github.com/TooTallNate/node-https-proxy-agent
iconv-lite: https://github.com/ashtuchkin/iconv-lite
jquery: https://github.com/jquery/jquery
jquery-ui: https://github.com/jquery/jquery-ui
jquery.event.drag: https://github.com/devongovett/jquery.event.drag
jschardet: https://github.com/aadsm/jschardet
jupyter-powershell: https://github.com/vors/jupyter-powershell
JupyterLab: https://github.com/jupyterlab/jupyterlab
keytar: https://github.com/atom/node-keytar
make-error: https://github.com/JsCommunity/make-error
mark.js: https://github.com/julmot/mark.js
minimist: https://github.com/substack/minimist
moment: https://github.com/moment/moment
native-keymap: https://github.com/Microsoft/node-native-keymap
native-watchdog: https://github.com/Microsoft/node-native-watchdog
ng2-charts: https://github.com/valor-software/ng2-charts
node-fetch: https://github.com/bitinn/node-fetch
node-pty: https://github.com/Tyriar/node-pty
nsfw: https://github.com/Axosoft/nsfw
optimist: https://github.com/substack/node-optimist
primeng: https://github.com/primefaces/primeng
process-nextick-args: https://github.com/calvinmetcalf/process-nextick-args
pty.js: https://github.com/chjj/pty.js
pyzmq: https://github.com/zeromq/pyzmq
qs: https://github.com/ljharb/qs
reflect-metadata: https://github.com/rbuckton/reflect-metadata
request: https://github.com/request/request
rxjs: https://github.com/ReactiveX/RxJS
semver: https://github.com/npm/node-semver
slickgrid: https://github.com/6pac/SlickGrid
sqltoolsservice: https://github.com/Microsoft/sqltoolsservice
svg.js: https://github.com/svgdotjs/svg.js
systemjs: https://github.com/systemjs/systemjs
temp-write: https://github.com/sindresorhus/temp-write
turndown: https://github.com/domchristie/turndown
turndown-plugin-gfm: https://github.com/domchristie/turndown-plugin-gfm
underscore: https://github.com/jashkenas/underscore
v8-profiler: https://github.com/node-inspector/v8-profiler
vscode: https://github.com/microsoft/vscode
vscode-debugprotocol: https://github.com/Microsoft/vscode-debugadapter-node
vscode-languageclient: https://github.com/Microsoft/vscode-languageserver-node
vscode-nls: https://github.com/Microsoft/vscode-nls
vscode-ripgrep: https://github.com/roblourens/vscode-ripgrep
vscode-textmate: https://github.com/Microsoft/vscode-textmate
winreg: https://github.com/fresc81/node-winreg
xmldom: https://github.com/xmldom/xmldom
xml-formatter: https://github.com/chrisbottin/xml-formatter
xterm: https://github.com/sourcelair/xterm.js
yargs: https://github.com/yargs/yargs
yauzl: https://github.com/thejoshwolfe/yauzl
zone.js: https://www.npmjs.com/package/zone
Microsoft PROSE SDK: https://microsoft.github.io/prose
Microsoft PROSE SDK: https://microsoft.github.io/prose
1. atom/language-clojure version 0.22.7 (https://github.com/atom/language-clojure)
2. atom/language-coffee-script version 0.49.3 (https://github.com/atom/language-coffee-script)
3. atom/language-css version 0.44.6 (https://github.com/atom/language-css)
4. atom/language-java version 0.32.1 (https://github.com/atom/language-java)
5. atom/language-sass version 0.62.1 (https://github.com/atom/language-sass)
6. atom/language-shellscript version 0.26.0 (https://github.com/atom/language-shellscript)
7. atom/language-xml version 0.35.2 (https://github.com/atom/language-xml)
8. better-go-syntax version 1.0.0 (https://github.com/jeff-hykin/better-go-syntax/ )
9. Colorsublime-Themes version 0.1.0 (https://github.com/Colorsublime/Colorsublime-Themes)
10. daaain/Handlebars version 1.8.0 (https://github.com/daaain/Handlebars)
11. dart-lang/dart-syntax-highlight (https://github.com/dart-lang/dart-syntax-highlight)
12. davidrios/pug-tmbundle (https://github.com/davidrios/pug-tmbundle)
13. definitelytyped (https://github.com/DefinitelyTyped/DefinitelyTyped)
14. demyte/language-cshtml version 0.3.0 (https://github.com/demyte/language-cshtml)
15. Document Object Model version 4.0.0 (https://www.w3.org/DOM/)
16. dotnet/csharp-tmLanguage version 0.1.0 (https://github.com/dotnet/csharp-tmLanguage)
17. expand-abbreviation version 0.5.8 (https://github.com/emmetio/expand-abbreviation)
18. fadeevab/make.tmbundle (https://github.com/fadeevab/make.tmbundle)
19. freebroccolo/atom-language-swift (https://github.com/freebroccolo/atom-language-swift)
20. HTML 5.1 W3C Working Draft version 08 October 2015 (http://www.w3.org/TR/2015/WD-html51-20151008/)
21. Ikuyadeu/vscode-R version 2.0.0 (https://github.com/Ikuyadeu/vscode-R)
22. insane version 2.6.2 (https://github.com/bevacqua/insane)
23. Ionic documentation version 1.2.4 (https://github.com/ionic-team/ionic-site)
24. ionide/ionide-fsgrammar (https://github.com/ionide/ionide-fsgrammar)
25. James-Yu/LaTeX-Workshop version 8.19.1 (https://github.com/James-Yu/LaTeX-Workshop)
26. jeff-hykin/cpp-textmate-grammar version 1.12.11 (https://github.com/jeff-hykin/cpp-textmate-grammar)
27. jeff-hykin/cpp-textmate-grammar version 1.15.5 (https://github.com/jeff-hykin/cpp-textmate-grammar)
28. js-beautify version 1.6.8 (https://github.com/beautify-web/js-beautify)
29. JuliaEditorSupport/atom-language-julia version 0.21.1 (https://github.com/JuliaEditorSupport/atom-language-julia)
30. Jxck/assert version 1.0.0 (https://github.com/Jxck/assert)
31. language-docker (https://github.com/moby/moby)
32. language-less version 0.34.2 (https://github.com/atom/language-less)
33. language-php version 0.46.2 (https://github.com/atom/language-php)
34. MagicStack/MagicPython version 1.1.1 (https://github.com/MagicStack/MagicPython)
35. marked version 1.1.0 (https://github.com/markedjs/marked)
36. mdn-data version 1.1.12 (https://github.com/mdn/data)
37. microsoft/TypeScript-TmLanguage version 0.0.1 (https://github.com/microsoft/TypeScript-TmLanguage)
38. microsoft/vscode-JSON.tmLanguage (https://github.com/microsoft/vscode-JSON.tmLanguage)
39. microsoft/vscode-markdown-tm-grammar version 1.0.0 (https://github.com/microsoft/vscode-markdown-tm-grammar)
40. microsoft/vscode-mssql version 1.9.0 (https://github.com/microsoft/vscode-mssql)
41. mmims/language-batchfile version 0.7.6 (https://github.com/mmims/language-batchfile)
42. NVIDIA/cuda-cpp-grammar (https://github.com/NVIDIA/cuda-cpp-grammar)
43. PowerShell/EditorSyntax version 1.0.0 (https://github.com/PowerShell/EditorSyntax)
44. rust-syntax version 0.5.0 (https://github.com/dustypomerleau/rust-syntax)
45. seti-ui version 0.1.0 (https://github.com/jesseweed/seti-ui)
46. shaders-tmLanguage version 0.1.0 (https://github.com/tgjones/shaders-tmLanguage)
47. textmate/asp.vb.net.tmbundle (https://github.com/textmate/asp.vb.net.tmbundle)
48. textmate/c.tmbundle (https://github.com/textmate/c.tmbundle)
49. textmate/diff.tmbundle (https://github.com/textmate/diff.tmbundle)
50. textmate/git.tmbundle (https://github.com/textmate/git.tmbundle)
51. textmate/groovy.tmbundle (https://github.com/textmate/groovy.tmbundle)
52. textmate/html.tmbundle (https://github.com/textmate/html.tmbundle)
53. textmate/ini.tmbundle (https://github.com/textmate/ini.tmbundle)
54. textmate/javascript.tmbundle (https://github.com/textmate/javascript.tmbundle)
55. textmate/lua.tmbundle (https://github.com/textmate/lua.tmbundle)
56. textmate/markdown.tmbundle (https://github.com/textmate/markdown.tmbundle)
57. textmate/perl.tmbundle (https://github.com/textmate/perl.tmbundle)
58. textmate/ruby.tmbundle (https://github.com/textmate/ruby.tmbundle)
59. textmate/yaml.tmbundle (https://github.com/textmate/yaml.tmbundle)
60. TypeScript-TmLanguage version 0.1.8 (https://github.com/microsoft/TypeScript-TmLanguage)
61. TypeScript-TmLanguage version 1.0.0 (https://github.com/microsoft/TypeScript-TmLanguage)
62. Unicode version 12.0.0 (https://home.unicode.org/)
63. vscode-codicons version 0.0.14 (https://github.com/microsoft/vscode-codicons)
64. vscode-logfile-highlighter version 2.11.0 (https://github.com/emilast/vscode-logfile-highlighter)
65. vscode-swift version 0.0.1 (https://github.com/owensd/vscode-swift)
66. Web Background Synchronization (https://github.com/WICG/background-sync)
atom/language-clojure version 0.22.7 (https://github.com/atom/language-clojure)
atom/language-coffee-script version 0.49.3 (https://github.com/atom/language-coffee-script)
atom/language-css version 0.44.4 (https://github.com/atom/language-css)
atom/language-java version 0.32.1 (https://github.com/atom/language-java)
atom/language-sass version 0.62.1 (https://github.com/atom/language-sass)
atom/language-shellscript version 0.26.0 (https://github.com/atom/language-shellscript)
atom/language-xml version 0.35.2 (https://github.com/atom/language-xml)
better-go-syntax version 1.0.0 (https://github.com/jeff-hykin/better-go-syntax/ )
Colorsublime-Themes version 0.1.0 (https://github.com/Colorsublime/Colorsublime-Themes)
daaain/Handlebars version 1.8.0 (https://github.com/daaain/Handlebars)
dart-lang/dart-syntax-highlight (https://github.com/dart-lang/dart-syntax-highlight)
davidrios/pug-tmbundle (https://github.com/davidrios/pug-tmbundle)
definitelytyped (https://github.com/DefinitelyTyped/DefinitelyTyped)
demyte/language-cshtml version 0.3.0 (https://github.com/demyte/language-cshtml)
Document Object Model version 4.0.0 (https://www.w3.org/DOM/)
dotnet/csharp-tmLanguage version 0.1.0 (https://github.com/dotnet/csharp-tmLanguage)
expand-abbreviation version 0.5.8 (https://github.com/emmetio/expand-abbreviation)
fadeevab/make.tmbundle (https://github.com/fadeevab/make.tmbundle)
freebroccolo/atom-language-swift (https://github.com/freebroccolo/atom-language-swift)
HTML 5.1 W3C Working Draft version 08 October 2015 (http://www.w3.org/TR/2015/WD-html51-20151008/)
Ikuyadeu/vscode-R version 1.3.0 (https://github.com/Ikuyadeu/vscode-R)
insane version 2.6.2 (https://github.com/bevacqua/insane)
Ionic documentation version 1.2.4 (https://github.com/ionic-team/ionic-site)
ionide/ionide-fsgrammar (https://github.com/ionide/ionide-fsgrammar)
jeff-hykin/cpp-textmate-grammar version 1.12.11 (https://github.com/jeff-hykin/cpp-textmate-grammar)
jeff-hykin/cpp-textmate-grammar version 1.15.5 (https://github.com/jeff-hykin/cpp-textmate-grammar)
js-beautify version 1.6.8 (https://github.com/beautify-web/js-beautify)
JuliaEditorSupport/atom-language-julia version 0.21.0 (https://github.com/JuliaEditorSupport/atom-language-julia)
Jxck/assert version 1.0.0 (https://github.com/Jxck/assert)
language-docker (https://github.com/moby/moby)
language-less version 0.34.2 (https://github.com/atom/language-less)
language-php version 0.46.2 (https://github.com/atom/language-php)
MagicStack/MagicPython version 1.1.1 (https://github.com/MagicStack/MagicPython)
marked version 1.1.0 (https://github.com/markedjs/marked)
mdn-data version 1.1.12 (https://github.com/mdn/data)
microsoft/TypeScript-TmLanguage version 0.0.1 (https://github.com/microsoft/TypeScript-TmLanguage)
microsoft/vscode-JSON.tmLanguage (https://github.com/microsoft/vscode-JSON.tmLanguage)
microsoft/vscode-markdown-tm-grammar version 1.0.0 (https://github.com/microsoft/vscode-markdown-tm-grammar)
microsoft/vscode-mssql version 1.9.0 (https://github.com/microsoft/vscode-mssql)
mmims/language-batchfile version 0.7.6 (https://github.com/mmims/language-batchfile)
NVIDIA/cuda-cpp-grammar (https://github.com/NVIDIA/cuda-cpp-grammar)
PowerShell/EditorSyntax version 1.0.0 (https://github.com/PowerShell/EditorSyntax)
rust-syntax version 0.4.3 (https://github.com/dustypomerleau/rust-syntax)
seti-ui version 0.1.0 (https://github.com/jesseweed/seti-ui)
shaders-tmLanguage version 0.1.0 (https://github.com/tgjones/shaders-tmLanguage)
textmate/asp.vb.net.tmbundle (https://github.com/textmate/asp.vb.net.tmbundle)
textmate/c.tmbundle (https://github.com/textmate/c.tmbundle)
textmate/diff.tmbundle (https://github.com/textmate/diff.tmbundle)
textmate/git.tmbundle (https://github.com/textmate/git.tmbundle)
textmate/groovy.tmbundle (https://github.com/textmate/groovy.tmbundle)
textmate/html.tmbundle (https://github.com/textmate/html.tmbundle)
textmate/ini.tmbundle (https://github.com/textmate/ini.tmbundle)
textmate/javascript.tmbundle (https://github.com/textmate/javascript.tmbundle)
textmate/lua.tmbundle (https://github.com/textmate/lua.tmbundle)
textmate/markdown.tmbundle (https://github.com/textmate/markdown.tmbundle)
textmate/perl.tmbundle (https://github.com/textmate/perl.tmbundle)
textmate/ruby.tmbundle (https://github.com/textmate/ruby.tmbundle)
textmate/yaml.tmbundle (https://github.com/textmate/yaml.tmbundle)
TypeScript-TmLanguage version 0.1.8 (https://github.com/microsoft/TypeScript-TmLanguage)
TypeScript-TmLanguage version 1.0.0 (https://github.com/microsoft/TypeScript-TmLanguage)
Unicode version 12.0.0 (https://home.unicode.org/)
vscode-codicons version 0.0.14 (https://github.com/microsoft/vscode-codicons)
vscode-logfile-highlighter version 2.11.0 (https://github.com/emilast/vscode-logfile-highlighter)
vscode-swift version 0.0.1 (https://github.com/owensd/vscode-swift)
Web Background Synchronization (https://github.com/WICG/background-sync)
%% atom/language-clojure NOTICES AND INFORMATION BEGIN HERE
@@ -1467,7 +1465,7 @@ END OF make-error NOTICES AND INFORMATION
=========================================
The MIT License (MIT)
Copyright (c) 2021 REditorSupport
Copyright (c) 2014-2019 Julian Kühnel
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
@@ -1539,193 +1537,6 @@ OTHER DEALINGS IN THE SOFTWARE.
=========================================
END OF moment NOTICES AND INFORMATION
%% mxgraph NOTICES AND INFORMATION BEGIN HERE
=========================================
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
(e) Neither the Work nor Derivative Works may be used or form any
part of a larger work that integrates or is supposed to be
integrated with a product or service owned or marketed by an
Atlassian entity, including its successors and assignees in title.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
//SEIBERT/MEDIA GmbH, Wiesbaden, Germany is the exclusive licensee of
JGraph for software products based on this codebase within the Atlassian
ecosystem of products.
=========================================
END OF mxgraph NOTICES AND INFORMATION
%% native-keymap NOTICES AND INFORMATION BEGIN HERE
=========================================
Copyright (c) Microsoft Corporation

View File

@@ -12,14 +12,14 @@ fsevents/src/**
fsevents/test/**
!fsevents/**/*.node
@vscode/sqlite3/binding.gyp
@vscode/sqlite3/benchmark/**
@vscode/sqlite3/cloudformation/**
@vscode/sqlite3/deps/**
@vscode/sqlite3/test/**
@vscode/sqlite3/build/**
@vscode/sqlite3/src/**
!@vscode/sqlite3/build/Release/*.node
vscode-sqlite3/binding.gyp
vscode-sqlite3/benchmark/**
vscode-sqlite3/cloudformation/**
vscode-sqlite3/deps/**
vscode-sqlite3/test/**
vscode-sqlite3/build/**
vscode-sqlite3/src/**
!vscode-sqlite3/build/Release/*.node
windows-mutex/binding.gyp
windows-mutex/build/**

View File

@@ -29,6 +29,3 @@ xterm-addon-unicode11/out/**
xterm-addon-webgl/src/**
xterm-addon-webgl/out/**
# This makes sure the model is included in the package
!@vscode/vscode-languagedetection/model/**

View File

@@ -0,0 +1,17 @@
{
"env": {
"commonjs": true,
"es6": true,
"node": true
},
"extends": "eslint:recommended",
"globals": {
"Atomics": "readonly",
"SharedArrayBuffer": "readonly"
},
"parserOptions": {
"ecmaVersion": 2018
},
"rules": {
}
}

2
build/actions/.gitignore vendored Normal file
View File

@@ -0,0 +1,2 @@
node_modules
*.js.map

View File

@@ -1,6 +1,6 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
export const inlineSuggestCommitId = 'editor.action.inlineSuggest.commit';
Object.defineProperty(exports, "__esModule", { value: true });

96
build/actions/api/api.ts Normal file
View File

@@ -0,0 +1,96 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
export interface GitHub {
query(query: Query): AsyncIterableIterator<GitHubIssue[]>
hasWriteAccess(user: User): Promise<boolean>
repoHasLabel(label: string): Promise<boolean>
createLabel(label: string, color: string, description: string): Promise<void>
deleteLabel(label: string): Promise<void>
readConfig(path: string): Promise<any>
createIssue(owner: string, repo: string, title: string, body: string): Promise<void>
releaseContainsCommit(release: string, commit: string): Promise<boolean>
}
export interface GitHubIssue extends GitHub {
getIssue(): Promise<Issue>
postComment(body: string): Promise<void>
deleteComment(id: number): Promise<void>
getComments(last?: boolean): AsyncIterableIterator<Comment[]>
closeIssue(): Promise<void>
lockIssue(): Promise<void>
setMilestone(milestoneId: number): Promise<void>
addLabel(label: string): Promise<void>
removeLabel(label: string): Promise<void>
addAssignee(assignee: string): Promise<void>
getClosingInfo(): Promise<{ hash: string | undefined; timestamp: number } | undefined>
}
type SortVar =
| 'comments'
| 'reactions'
| 'reactions-+1'
| 'reactions--1'
| 'reactions-smile'
| 'reactions-thinking_face'
| 'reactions-heart'
| 'reactions-tada'
| 'interactions'
| 'created'
| 'updated'
type SortOrder = 'asc' | 'desc'
export type Reactions = {
'+1': number
'-1': number
laugh: number
hooray: number
confused: number
heart: number
rocket: number
eyes: number
}
export interface User {
name: string
isGitHubApp?: boolean
}
export interface Comment {
author: User
body: string
id: number
timestamp: number
}
export interface Issue {
author: User
body: string
title: string
labels: string[]
open: boolean
locked: boolean
number: number
numComments: number
reactions: Reactions
milestoneId: number | null
assignee?: string
createdAt: number
updatedAt: number
closedAt?: number
}
export interface Query {
q: string
sort?: SortVar
order?: SortOrder
}
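The interfaces above are the full surface the rest of the actions code programs against. As a rough, hypothetical sketch only (not part of this change), a consumer that walks a query page by page and labels open, uncommented issues could look like the following; the labelStale name, the query string and the label value are invented for illustration.
import { GitHub, Query } from './api'
async function labelStale(github: GitHub, label: string): Promise<void> {
	// query string and label are illustrative values, not taken from the diff
	const query: Query = { q: 'is:open is:issue comments:0', sort: 'created', order: 'asc' }
	for await (const page of github.query(query)) {
		for (const issue of page) {
			const data = await issue.getIssue()
			if (data.open && data.numComments === 0) {
				await issue.addLabel(label)
			}
		}
	}
}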

View File

@@ -0,0 +1,293 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const core_1 = require("@actions/core");
const github_1 = require("@actions/github");
const child_process_1 = require("child_process");
const utils_1 = require("../utils/utils");
class OctoKit {
constructor(token, params, options = { readonly: false }) {
this.token = token;
this.params = params;
this.options = options;
// when in readonly mode, record labels just-created so as to not throw unnecessary errors
this.mockLabels = new Set();
this.writeAccessCache = {};
this.octokit = new github_1.GitHub(token);
}
async *query(query) {
const q = query.q + ` repo:${this.params.owner}/${this.params.repo}`;
console.log(`Querying for ${q}:`);
const options = this.octokit.search.issuesAndPullRequests.endpoint.merge({
...query,
q,
per_page: 100,
headers: { Accept: 'application/vnd.github.squirrel-girl-preview+json' },
});
let pageNum = 0;
const timeout = async () => {
if (pageNum < 2) {
/* pass */
}
else if (pageNum < 4) {
await new Promise((resolve) => setTimeout(resolve, 3000));
}
else {
await new Promise((resolve) => setTimeout(resolve, 30000));
}
};
for await (const pageResponse of this.octokit.paginate.iterator(options)) {
await timeout();
await utils_1.logRateLimit(this.token);
const page = pageResponse.data;
console.log(`Page ${++pageNum}: ${page.map(({ number }) => number).join(' ')}`);
yield page.map((issue) => new OctoKitIssue(this.token, this.params, this.octokitIssueToIssue(issue)));
}
}
async createIssue(owner, repo, title, body) {
core_1.debug(`Creating issue \`${title}\` on ${owner}/${repo}`);
if (!this.options.readonly)
await this.octokit.issues.create({ owner, repo, title, body });
}
octokitIssueToIssue(issue) {
var _a, _b, _c, _d, _e, _f;
return {
author: { name: issue.user.login, isGitHubApp: issue.user.type === 'Bot' },
body: issue.body,
number: issue.number,
title: issue.title,
labels: issue.labels.map((label) => label.name),
open: issue.state === 'open',
locked: issue.locked,
numComments: issue.comments,
reactions: issue.reactions,
assignee: (_b = (_a = issue.assignee) === null || _a === void 0 ? void 0 : _a.login) !== null && _b !== void 0 ? _b : (_d = (_c = issue.assignees) === null || _c === void 0 ? void 0 : _c[0]) === null || _d === void 0 ? void 0 : _d.login,
milestoneId: (_f = (_e = issue.milestone) === null || _e === void 0 ? void 0 : _e.number) !== null && _f !== void 0 ? _f : null,
createdAt: +new Date(issue.created_at),
updatedAt: +new Date(issue.updated_at),
closedAt: issue.closed_at ? +new Date(issue.closed_at) : undefined,
};
}
async hasWriteAccess(user) {
if (user.name in this.writeAccessCache) {
core_1.debug('Got permissions from cache for ' + user);
return this.writeAccessCache[user.name];
}
core_1.debug('Fetching permissions for ' + user);
const permissions = (await this.octokit.repos.getCollaboratorPermissionLevel({
...this.params,
username: user.name,
})).data.permission;
return (this.writeAccessCache[user.name] = permissions === 'admin' || permissions === 'write');
}
async repoHasLabel(name) {
try {
await this.octokit.issues.getLabel({ ...this.params, name });
return true;
}
catch (err) {
if (err.status === 404) {
return this.options.readonly && this.mockLabels.has(name);
}
throw err;
}
}
async createLabel(name, color, description) {
core_1.debug('Creating label ' + name);
if (!this.options.readonly)
await this.octokit.issues.createLabel({ ...this.params, color, description, name });
else
this.mockLabels.add(name);
}
async deleteLabel(name) {
core_1.debug('Deleting label ' + name);
try {
if (!this.options.readonly)
await this.octokit.issues.deleteLabel({ ...this.params, name });
}
catch (err) {
if (err.status === 404) {
return;
}
throw err;
}
}
async readConfig(path) {
core_1.debug('Reading config at ' + path);
const repoPath = `.github/${path}.json`;
const data = (await this.octokit.repos.getContents({ ...this.params, path: repoPath })).data;
if ('type' in data && data.type === 'file') {
if (data.encoding === 'base64' && data.content) {
return JSON.parse(Buffer.from(data.content, 'base64').toString('utf-8'));
}
throw Error(`Could not read contents "${data.content}" in encoding "${data.encoding}"`);
}
throw Error('Found directory at config path when expecting file' + JSON.stringify(data));
}
async releaseContainsCommit(release, commit) {
if (utils_1.getInput('commitReleasedDebuggingOverride')) {
return true;
}
return new Promise((resolve, reject) => child_process_1.exec(`git -C ./repo merge-base --is-ancestor ${commit} ${release}`, (err) => !err || err.code === 1 ? resolve(!err) : reject(err)));
}
}
exports.OctoKit = OctoKit;
class OctoKitIssue extends OctoKit {
constructor(token, params, issueData, options = { readonly: false }) {
super(token, params, options);
this.params = params;
this.issueData = issueData;
}
async addAssignee(assignee) {
core_1.debug('Adding assignee ' + assignee + ' to ' + this.issueData.number);
if (!this.options.readonly) {
await this.octokit.issues.addAssignees({
...this.params,
issue_number: this.issueData.number,
assignees: [assignee],
});
}
}
async closeIssue() {
core_1.debug('Closing issue ' + this.issueData.number);
if (!this.options.readonly)
await this.octokit.issues.update({
...this.params,
issue_number: this.issueData.number,
state: 'closed',
});
}
async lockIssue() {
core_1.debug('Locking issue ' + this.issueData.number);
if (!this.options.readonly)
await this.octokit.issues.lock({ ...this.params, issue_number: this.issueData.number });
}
async getIssue() {
if (isIssue(this.issueData)) {
core_1.debug('Got issue data from query result ' + this.issueData.number);
return this.issueData;
}
console.log('Fetching issue ' + this.issueData.number);
const issue = (await this.octokit.issues.get({
...this.params,
issue_number: this.issueData.number,
mediaType: { previews: ['squirrel-girl'] },
})).data;
return (this.issueData = this.octokitIssueToIssue(issue));
}
async postComment(body) {
core_1.debug(`Posting comment ${body} on ${this.issueData.number}`);
if (!this.options.readonly)
await this.octokit.issues.createComment({
...this.params,
issue_number: this.issueData.number,
body,
});
}
async deleteComment(id) {
core_1.debug(`Deleting comment ${id} on ${this.issueData.number}`);
if (!this.options.readonly)
await this.octokit.issues.deleteComment({
owner: this.params.owner,
repo: this.params.repo,
comment_id: id,
});
}
async setMilestone(milestoneId) {
core_1.debug(`Setting milestone for ${this.issueData.number} to ${milestoneId}`);
if (!this.options.readonly)
await this.octokit.issues.update({
...this.params,
issue_number: this.issueData.number,
milestone: milestoneId,
});
}
async *getComments(last) {
core_1.debug('Fetching comments for ' + this.issueData.number);
const response = this.octokit.paginate.iterator(this.octokit.issues.listComments.endpoint.merge({
...this.params,
issue_number: this.issueData.number,
per_page: 100,
...(last ? { per_page: 1, page: (await this.getIssue()).numComments } : {}),
}));
for await (const page of response) {
yield page.data.map((comment) => ({
author: { name: comment.user.login, isGitHubApp: comment.user.type === 'Bot' },
body: comment.body,
id: comment.id,
timestamp: +new Date(comment.created_at),
}));
}
}
async addLabel(name) {
core_1.debug(`Adding label ${name} to ${this.issueData.number}`);
if (!(await this.repoHasLabel(name))) {
throw Error(`Action could not execute because label ${name} is not defined.`);
}
if (!this.options.readonly)
await this.octokit.issues.addLabels({
...this.params,
issue_number: this.issueData.number,
labels: [name],
});
}
async removeLabel(name) {
core_1.debug(`Removing label ${name} from ${this.issueData.number}`);
try {
if (!this.options.readonly)
await this.octokit.issues.removeLabel({
...this.params,
issue_number: this.issueData.number,
name,
});
}
catch (err) {
if (err.status === 404) {
console.log(`Label ${name} not found on issue`);
return;
}
throw err;
}
}
async getClosingInfo() {
var _a;
if ((await this.getIssue()).open) {
return;
}
const options = this.octokit.issues.listEventsForTimeline.endpoint.merge({
...this.params,
issue_number: this.issueData.number,
});
let closingCommit;
for await (const event of this.octokit.paginate.iterator(options)) {
const timelineEvents = event.data;
for (const timelineEvent of timelineEvents) {
if (timelineEvent.event === 'closed') {
closingCommit = {
hash: (_a = timelineEvent.commit_id) !== null && _a !== void 0 ? _a : undefined,
timestamp: +new Date(timelineEvent.created_at),
};
}
}
}
console.log(`Got ${closingCommit} as closing commit of ${this.issueData.number}`);
return closingCommit;
}
}
exports.OctoKitIssue = OctoKitIssue;
function isIssue(object) {
const isIssue = 'author' in object &&
'body' in object &&
'title' in object &&
'labels' in object &&
'open' in object &&
'locked' in object &&
'number' in object &&
'numComments' in object &&
'reactions' in object &&
'milestoneId' in object;
return isIssue;
}

View File

@@ -0,0 +1,336 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { debug } from '@actions/core'
import { GitHub as GitHubAPI } from '@actions/github'
import { Octokit } from '@octokit/rest'
import { exec } from 'child_process'
import { getInput, logRateLimit } from '../utils/utils'
import { Comment, GitHub, GitHubIssue, Issue, Query, User } from './api'
export class OctoKit implements GitHub {
protected octokit: GitHubAPI
// when in readonly mode, record labels just-created so as to not throw unnecessary errors
protected mockLabels: Set<string> = new Set()
constructor(
private token: string,
protected params: { repo: string; owner: string },
protected options: { readonly: boolean } = { readonly: false },
) {
this.octokit = new GitHubAPI(token)
}
async *query(query: Query): AsyncIterableIterator<GitHubIssue[]> {
const q = query.q + ` repo:${this.params.owner}/${this.params.repo}`
console.log(`Querying for ${q}:`)
const options = this.octokit.search.issuesAndPullRequests.endpoint.merge({
...query,
q,
per_page: 100,
headers: { Accept: 'application/vnd.github.squirrel-girl-preview+json' },
})
let pageNum = 0
const timeout = async () => {
if (pageNum < 2) {
/* pass */
} else if (pageNum < 4) {
await new Promise((resolve) => setTimeout(resolve, 3000))
} else {
await new Promise((resolve) => setTimeout(resolve, 30000))
}
}
for await (const pageResponse of this.octokit.paginate.iterator(options)) {
await timeout()
await logRateLimit(this.token)
const page: Array<Octokit.SearchIssuesAndPullRequestsResponseItemsItem> = pageResponse.data
console.log(`Page ${++pageNum}: ${page.map(({ number }) => number).join(' ')}`)
yield page.map(
(issue) => new OctoKitIssue(this.token, this.params, this.octokitIssueToIssue(issue)),
)
}
}
async createIssue(owner: string, repo: string, title: string, body: string): Promise<void> {
debug(`Creating issue \`${title}\` on ${owner}/${repo}`)
if (!this.options.readonly) await this.octokit.issues.create({ owner, repo, title, body })
}
protected octokitIssueToIssue(
issue: Octokit.IssuesGetResponse | Octokit.SearchIssuesAndPullRequestsResponseItemsItem,
): Issue {
return {
author: { name: issue.user.login, isGitHubApp: issue.user.type === 'Bot' },
body: issue.body,
number: issue.number,
title: issue.title,
labels: (issue.labels as Octokit.IssuesGetLabelResponse[]).map((label) => label.name),
open: issue.state === 'open',
locked: (issue as any).locked,
numComments: issue.comments,
reactions: (issue as any).reactions,
assignee: issue.assignee?.login ?? (issue as any).assignees?.[0]?.login,
milestoneId: issue.milestone?.number ?? null,
createdAt: +new Date(issue.created_at),
updatedAt: +new Date(issue.updated_at),
closedAt: issue.closed_at ? +new Date((issue.closed_at as unknown) as string) : undefined,
}
}
private writeAccessCache: Record<string, boolean> = {}
async hasWriteAccess(user: User): Promise<boolean> {
if (user.name in this.writeAccessCache) {
debug('Got permissions from cache for ' + user)
return this.writeAccessCache[user.name]
}
debug('Fetching permissions for ' + user)
const permissions = (
await this.octokit.repos.getCollaboratorPermissionLevel({
...this.params,
username: user.name,
})
).data.permission
return (this.writeAccessCache[user.name] = permissions === 'admin' || permissions === 'write')
}
async repoHasLabel(name: string): Promise<boolean> {
try {
await this.octokit.issues.getLabel({ ...this.params, name })
return true
} catch (err) {
if (err.status === 404) {
return this.options.readonly && this.mockLabels.has(name)
}
throw err
}
}
async createLabel(name: string, color: string, description: string): Promise<void> {
debug('Creating label ' + name)
if (!this.options.readonly)
await this.octokit.issues.createLabel({ ...this.params, color, description, name })
else this.mockLabels.add(name)
}
async deleteLabel(name: string): Promise<void> {
debug('Deleting label ' + name)
try {
if (!this.options.readonly) await this.octokit.issues.deleteLabel({ ...this.params, name })
} catch (err) {
if (err.status === 404) {
return
}
throw err
}
}
async readConfig(path: string): Promise<any> {
debug('Reading config at ' + path)
const repoPath = `.github/${path}.json`
const data = (await this.octokit.repos.getContents({ ...this.params, path: repoPath })).data
if ('type' in data && data.type === 'file') {
if (data.encoding === 'base64' && data.content) {
return JSON.parse(Buffer.from(data.content, 'base64').toString('utf-8'))
}
throw Error(`Could not read contents "${data.content}" in encoding "${data.encoding}"`)
}
throw Error('Found directory at config path when expecting file' + JSON.stringify(data))
}
async releaseContainsCommit(release: string, commit: string): Promise<boolean> {
if (getInput('commitReleasedDebuggingOverride')) {
return true
}
return new Promise((resolve, reject) =>
exec(`git -C ./repo merge-base --is-ancestor ${commit} ${release}`, (err) =>
!err || err.code === 1 ? resolve(!err) : reject(err),
),
)
}
}
export class OctoKitIssue extends OctoKit implements GitHubIssue {
constructor(
token: string,
protected params: { repo: string; owner: string },
private issueData: { number: number } | Issue,
options: { readonly: boolean } = { readonly: false },
) {
super(token, params, options)
}
async addAssignee(assignee: string): Promise<void> {
debug('Adding assignee ' + assignee + ' to ' + this.issueData.number)
if (!this.options.readonly) {
await this.octokit.issues.addAssignees({
...this.params,
issue_number: this.issueData.number,
assignees: [assignee],
})
}
}
async closeIssue(): Promise<void> {
debug('Closing issue ' + this.issueData.number)
if (!this.options.readonly)
await this.octokit.issues.update({
...this.params,
issue_number: this.issueData.number,
state: 'closed',
})
}
async lockIssue(): Promise<void> {
debug('Locking issue ' + this.issueData.number)
if (!this.options.readonly)
await this.octokit.issues.lock({ ...this.params, issue_number: this.issueData.number })
}
async getIssue(): Promise<Issue> {
if (isIssue(this.issueData)) {
debug('Got issue data from query result ' + this.issueData.number)
return this.issueData
}
console.log('Fetching issue ' + this.issueData.number)
const issue = (
await this.octokit.issues.get({
...this.params,
issue_number: this.issueData.number,
mediaType: { previews: ['squirrel-girl'] },
})
).data
return (this.issueData = this.octokitIssueToIssue(issue))
}
async postComment(body: string): Promise<void> {
debug(`Posting comment ${body} on ${this.issueData.number}`)
if (!this.options.readonly)
await this.octokit.issues.createComment({
...this.params,
issue_number: this.issueData.number,
body,
})
}
async deleteComment(id: number): Promise<void> {
debug(`Deleting comment ${id} on ${this.issueData.number}`)
if (!this.options.readonly)
await this.octokit.issues.deleteComment({
owner: this.params.owner,
repo: this.params.repo,
comment_id: id,
})
}
async setMilestone(milestoneId: number) {
debug(`Setting milestone for ${this.issueData.number} to ${milestoneId}`)
if (!this.options.readonly)
await this.octokit.issues.update({
...this.params,
issue_number: this.issueData.number,
milestone: milestoneId,
})
}
async *getComments(last?: boolean): AsyncIterableIterator<Comment[]> {
debug('Fetching comments for ' + this.issueData.number)
const response = this.octokit.paginate.iterator(
this.octokit.issues.listComments.endpoint.merge({
...this.params,
issue_number: this.issueData.number,
per_page: 100,
...(last ? { per_page: 1, page: (await this.getIssue()).numComments } : {}),
}),
)
for await (const page of response) {
yield (page.data as Octokit.IssuesListCommentsResponseItem[]).map((comment) => ({
author: { name: comment.user.login, isGitHubApp: comment.user.type === 'Bot' },
body: comment.body,
id: comment.id,
timestamp: +new Date(comment.created_at),
}))
}
}
async addLabel(name: string): Promise<void> {
debug(`Adding label ${name} to ${this.issueData.number}`)
if (!(await this.repoHasLabel(name))) {
throw Error(`Action could not execute because label ${name} is not defined.`)
}
if (!this.options.readonly)
await this.octokit.issues.addLabels({
...this.params,
issue_number: this.issueData.number,
labels: [name],
})
}
async removeLabel(name: string): Promise<void> {
debug(`Removing label ${name} from ${this.issueData.number}`)
try {
if (!this.options.readonly)
await this.octokit.issues.removeLabel({
...this.params,
issue_number: this.issueData.number,
name,
})
} catch (err) {
if (err.status === 404) {
console.log(`Label ${name} not found on issue`)
return
}
throw err
}
}
async getClosingInfo(): Promise<{ hash: string | undefined; timestamp: number } | undefined> {
if ((await this.getIssue()).open) {
return
}
const options = this.octokit.issues.listEventsForTimeline.endpoint.merge({
...this.params,
issue_number: this.issueData.number,
})
let closingCommit: { hash: string | undefined; timestamp: number } | undefined
for await (const event of this.octokit.paginate.iterator(options)) {
const timelineEvents = event.data as Octokit.IssuesListEventsForTimelineResponseItem[]
for (const timelineEvent of timelineEvents) {
if (timelineEvent.event === 'closed') {
closingCommit = {
hash: timelineEvent.commit_id ?? undefined,
timestamp: +new Date(timelineEvent.created_at),
}
}
}
}
console.log(`Got ${closingCommit} as closing commit of ${this.issueData.number}`)
return closingCommit
}
}
function isIssue(object: any): object is Issue {
const isIssue =
'author' in object &&
'body' in object &&
'title' in object &&
'labels' in object &&
'open' in object &&
'locked' in object &&
'number' in object &&
'numComments' in object &&
'reactions' in object &&
'milestoneId' in object
return isIssue
}
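One design point worth calling out in the class above is the readonly option: every mutating call (createLabel, addLabel, postComment, closeIssue, and so on) short-circuits while reads still hit the API, which allows a dry run of an action. A minimal, hypothetical sketch, assuming placeholder owner/repo/issue values:
import { OctoKitIssue } from './octokit'
async function dryRun(token: string): Promise<void> {
	// owner, repo and issue number are placeholders for illustration
	const issue = new OctoKitIssue(
		token,
		{ owner: 'some-org', repo: 'some-repo' },
		{ number: 1 },
		{ readonly: true },
	)
	console.log((await issue.getIssue()).title) // reads behave normally
	await issue.postComment('triage dry run') // only logged, not posted, while readonly
	await issue.closeIssue() // likewise skipped
}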

View File

@@ -0,0 +1,123 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
class Testbed {
constructor(config) {
var _a, _b, _c, _d, _e;
this.config = {
globalLabels: (_a = config === null || config === void 0 ? void 0 : config.globalLabels) !== null && _a !== void 0 ? _a : [],
configs: (_b = config === null || config === void 0 ? void 0 : config.configs) !== null && _b !== void 0 ? _b : {},
writers: (_c = config === null || config === void 0 ? void 0 : config.writers) !== null && _c !== void 0 ? _c : [],
releasedCommits: (_d = config === null || config === void 0 ? void 0 : config.releasedCommits) !== null && _d !== void 0 ? _d : [],
queryRunner: (_e = config === null || config === void 0 ? void 0 : config.queryRunner) !== null && _e !== void 0 ? _e : async function* () {
yield [];
},
};
}
async *query(query) {
for await (const page of this.config.queryRunner(query)) {
yield page.map((issue) => issue instanceof TestbedIssue ? issue : new TestbedIssue(this.config, issue));
}
}
async createIssue(_owner, _repo, _title, _body) {
// pass...
}
async readConfig(path) {
return JSON.parse(JSON.stringify(this.config.configs[path]));
}
async hasWriteAccess(user) {
return this.config.writers.includes(user.name);
}
async repoHasLabel(label) {
return this.config.globalLabels.includes(label);
}
async createLabel(label, _color, _description) {
this.config.globalLabels.push(label);
}
async deleteLabel(labelToDelete) {
this.config.globalLabels = this.config.globalLabels.filter((label) => label !== labelToDelete);
}
async releaseContainsCommit(_release, commit) {
return this.config.releasedCommits.includes(commit);
}
}
exports.Testbed = Testbed;
class TestbedIssue extends Testbed {
constructor(globalConfig, issueConfig) {
var _a, _b, _c;
super(globalConfig);
issueConfig = issueConfig !== null && issueConfig !== void 0 ? issueConfig : {};
issueConfig.comments = (_a = issueConfig === null || issueConfig === void 0 ? void 0 : issueConfig.comments) !== null && _a !== void 0 ? _a : [];
issueConfig.labels = (_b = issueConfig === null || issueConfig === void 0 ? void 0 : issueConfig.labels) !== null && _b !== void 0 ? _b : [];
issueConfig.issue = {
author: { name: 'JacksonKearl' },
body: 'issue body',
locked: false,
numComments: ((_c = issueConfig === null || issueConfig === void 0 ? void 0 : issueConfig.comments) === null || _c === void 0 ? void 0 : _c.length) || 0,
number: 1,
open: true,
title: 'issue title',
assignee: undefined,
reactions: {
'+1': 0,
'-1': 0,
confused: 0,
eyes: 0,
heart: 0,
hooray: 0,
laugh: 0,
rocket: 0,
},
closedAt: undefined,
createdAt: +new Date(),
updatedAt: +new Date(),
...issueConfig.issue,
};
this.issueConfig = issueConfig;
}
async addAssignee(assignee) {
this.issueConfig.issue.assignee = assignee;
}
async setMilestone(milestoneId) {
this.issueConfig.issue.milestoneId = milestoneId;
}
async getIssue() {
const labels = [...this.issueConfig.labels];
return { ...this.issueConfig.issue, labels };
}
async postComment(body, author) {
this.issueConfig.comments.push({
author: { name: author !== null && author !== void 0 ? author : 'bot' },
body,
id: Math.random(),
timestamp: +new Date(),
});
}
async deleteComment(id) {
this.issueConfig.comments = this.issueConfig.comments.filter((comment) => comment.id !== id);
}
async *getComments(last) {
yield last
? [this.issueConfig.comments[this.issueConfig.comments.length - 1]]
: this.issueConfig.comments;
}
async addLabel(label) {
this.issueConfig.labels.push(label);
}
async removeLabel(labelToDelete) {
this.issueConfig.labels = this.issueConfig.labels.filter((label) => label !== labelToDelete);
}
async closeIssue() {
this.issueConfig.issue.open = false;
}
async lockIssue() {
this.issueConfig.issue.locked = true;
}
async getClosingInfo() {
return this.issueConfig.closingCommit;
}
}
exports.TestbedIssue = TestbedIssue;

View File

@@ -0,0 +1,170 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { Comment, GitHub, GitHubIssue, Issue, Query, User } from './api'
type TestbedConfig = {
globalLabels: string[]
configs: Record<string, any>
writers: string[]
releasedCommits: string[]
queryRunner: (query: Query) => AsyncIterableIterator<(TestbedIssueConstructorArgs | TestbedIssue)[]>
}
export type TestbedConstructorArgs = Partial<TestbedConfig>
export class Testbed implements GitHub {
public config: TestbedConfig
constructor(config?: TestbedConstructorArgs) {
this.config = {
globalLabels: config?.globalLabels ?? [],
configs: config?.configs ?? {},
writers: config?.writers ?? [],
releasedCommits: config?.releasedCommits ?? [],
queryRunner:
config?.queryRunner ??
async function* () {
yield []
},
}
}
async *query(query: Query): AsyncIterableIterator<GitHubIssue[]> {
for await (const page of this.config.queryRunner(query)) {
yield page.map((issue) =>
issue instanceof TestbedIssue ? issue : new TestbedIssue(this.config, issue),
)
}
}
async createIssue(_owner: string, _repo: string, _title: string, _body: string): Promise<void> {
// pass...
}
async readConfig(path: string): Promise<any> {
return JSON.parse(JSON.stringify(this.config.configs[path]))
}
async hasWriteAccess(user: User): Promise<boolean> {
return this.config.writers.includes(user.name)
}
async repoHasLabel(label: string): Promise<boolean> {
return this.config.globalLabels.includes(label)
}
async createLabel(label: string, _color: string, _description: string): Promise<void> {
this.config.globalLabels.push(label)
}
async deleteLabel(labelToDelete: string): Promise<void> {
this.config.globalLabels = this.config.globalLabels.filter((label) => label !== labelToDelete)
}
async releaseContainsCommit(_release: string, commit: string): Promise<boolean> {
return this.config.releasedCommits.includes(commit)
}
}
type TestbedIssueConfig = {
issue: Omit<Issue, 'labels'>
comments: Comment[]
labels: string[]
closingCommit: { hash: string | undefined; timestamp: number } | undefined
}
export type TestbedIssueConstructorArgs = Partial<Omit<TestbedIssueConfig, 'issue'>> & {
issue?: Partial<Omit<Issue, 'labels'>>
}
export class TestbedIssue extends Testbed implements GitHubIssue {
public issueConfig: TestbedIssueConfig
constructor(globalConfig?: TestbedConstructorArgs, issueConfig?: TestbedIssueConstructorArgs) {
super(globalConfig)
issueConfig = issueConfig ?? {}
issueConfig.comments = issueConfig?.comments ?? []
issueConfig.labels = issueConfig?.labels ?? []
issueConfig.issue = {
author: { name: 'JacksonKearl' },
body: 'issue body',
locked: false,
numComments: issueConfig?.comments?.length || 0,
number: 1,
open: true,
title: 'issue title',
assignee: undefined,
reactions: {
'+1': 0,
'-1': 0,
confused: 0,
eyes: 0,
heart: 0,
hooray: 0,
laugh: 0,
rocket: 0,
},
closedAt: undefined,
createdAt: +new Date(),
updatedAt: +new Date(),
...issueConfig.issue,
}
this.issueConfig = issueConfig as TestbedIssueConfig
}
async addAssignee(assignee: string): Promise<void> {
this.issueConfig.issue.assignee = assignee
}
async setMilestone(milestoneId: number): Promise<void> {
this.issueConfig.issue.milestoneId = milestoneId
}
async getIssue(): Promise<Issue> {
const labels = [...this.issueConfig.labels]
return { ...this.issueConfig.issue, labels }
}
async postComment(body: string, author?: string): Promise<void> {
this.issueConfig.comments.push({
author: { name: author ?? 'bot' },
body,
id: Math.random(),
timestamp: +new Date(),
})
}
async deleteComment(id: number): Promise<void> {
this.issueConfig.comments = this.issueConfig.comments.filter((comment) => comment.id !== id)
}
async *getComments(last?: boolean): AsyncIterableIterator<Comment[]> {
yield last
? [this.issueConfig.comments[this.issueConfig.comments.length - 1]]
: this.issueConfig.comments
}
async addLabel(label: string): Promise<void> {
this.issueConfig.labels.push(label)
}
async removeLabel(labelToDelete: string): Promise<void> {
this.issueConfig.labels = this.issueConfig.labels.filter((label) => label !== labelToDelete)
}
async closeIssue(): Promise<void> {
this.issueConfig.issue.open = false
}
async lockIssue(): Promise<void> {
this.issueConfig.issue.locked = true
}
async getClosingInfo(): Promise<{ hash: string | undefined; timestamp: number } | undefined> {
return this.issueConfig.closingCommit
}
}
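Testbed and TestbedIssue above give the actions an in-memory stand-in for GitHub, so triage logic can be unit-tested without network access. A hedged sketch of such a test, assuming Node's built-in assert module (the package.json later in this change wires up mocha with ts-node, so a body like this would normally sit inside an it(...) block):
import * as assert from 'assert'
import { TestbedIssue } from './testbed'
async function labelRoundTrip(): Promise<void> {
	// the label name and issue title are invented test data
	const issue = new TestbedIssue({ globalLabels: ['bug'] }, { issue: { title: 'crash on launch' } })
	assert.strictEqual(await issue.repoHasLabel('bug'), true)
	await issue.addLabel('bug')
	const data = await issue.getIssue()
	assert.deepStrictEqual(data.labels, ['bug'])
	assert.strictEqual(data.title, 'crash on launch')
}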

View File

@@ -0,0 +1,12 @@
name: 'PR Labeler'
description: 'Automatically add a Label to a PR'
inputs:
token:
description: GitHub token with issue, comment, and label read/write permissions
default: ${{ github.token }}
label:
description: GitHub label to add to the PR
required: true
runs:
using: 'node12'
main: 'index.js'

View File

@@ -0,0 +1,22 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const core = require("@actions/core");
const github_1 = require("@actions/github");
const octokit_1 = require("../api/octokit");
const utils_1 = require("../utils/utils");
const token = utils_1.getRequiredInput('token');
const label = utils_1.getRequiredInput('label');
async function main() {
const pr = new octokit_1.OctoKitIssue(token, github_1.context.repo, { number: github_1.context.issue.number });
await pr.addLabel(label);
}
main()
.then(() => utils_1.logRateLimit(token))
.catch(async (error) => {
core.setFailed(error.message);
await utils_1.logErrorToIssue(error.message, true, token);
});

View File

@@ -0,0 +1,26 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as core from '@actions/core'
import { context } from '@actions/github'
import { OctoKitIssue } from '../api/octokit'
import { getRequiredInput, logErrorToIssue, logRateLimit } from '../utils/utils'
const token = getRequiredInput('token');
const label = getRequiredInput('label');
async function main() {
const pr = new OctoKitIssue(token, context.repo, { number: context.issue.number });
await pr.addLabel(label);
}
main()
.then(() => logRateLimit(token))
.catch(async (error) => {
core.setFailed(error.message)
await logErrorToIssue(error.message, true, token)
})

View File

@@ -0,0 +1,24 @@
{
"name": "github-actions",
"version": "1.0.0",
"description": "GitHub Actions",
"scripts": {
"test": "mocha -r ts-node/register **/*.test.ts",
"build": "tsc -p ./tsconfig.json",
"lint": "eslint -c .eslintrc --fix --ext .ts .",
"watch-typecheck": "tsc --watch"
},
"repository": {
"type": "git",
"url": "git+https://github.com/microsoft/azuredatastudio.git"
},
"keywords": [],
"author": "",
"dependencies": {
"@actions/core": "^1.2.6",
"@actions/github": "^2.1.1",
"axios": "^0.21.4",
"ts-node": "^8.6.2",
"typescript": "^3.8.3"
}
}

View File

@@ -0,0 +1,19 @@
{
"compilerOptions": {
"target": "es2019",
"strict": true,
"module": "commonjs",
"moduleResolution": "node",
"removeComments": false,
"resolveJsonModule": true,
"lib": [
"es2020"
],
},
"include": [
"./**/*.ts"
],
"exclude": [
"node_modules"
]
}

View File

@@ -0,0 +1,72 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const core = require("@actions/core");
const github_1 = require("@actions/github");
const axios_1 = require("axios");
const octokit_1 = require("../api/octokit");
exports.getInput = (name) => core.getInput(name) || undefined;
exports.getRequiredInput = (name) => core.getInput(name, { required: true });
exports.normalizeIssue = (issue) => {
const { body, title } = issue;
const isBug = body.includes('bug_report_template') || /Issue Type:.*Bug.*/.test(body);
const isFeatureRequest = body.includes('feature_request_template') || /Issue Type:.*Feature Request.*/.test(body);
const cleanse = (str) => str
.toLowerCase()
.replace(/<!--.*?-->/gu, '')
.replace(/.* version: .*/gu, '')
.replace(/issue type: .*/gu, '')
.replace(/<details>(.|\s)*?<\/details>/gu, '')
.replace(/vs ?code/gu, '')
.replace(/we have written.*please paste./gu, '')
.replace(/steps to reproduce:/gu, '')
.replace(/does this issue occur when all extensions are disabled.*/gu, '')
.replace(/```(.|\s)*?```/gu, '')
.replace(/!?\[.*?\]\(.*?\)/gu, '')
.replace(/\s+/gu, ' ');
return {
body: cleanse(body),
title: cleanse(title),
issueType: isBug ? 'bug' : isFeatureRequest ? 'feature_request' : 'unknown',
};
};
exports.loadLatestRelease = async (quality) => (await axios_1.default.get(`https://vscode-update.azurewebsites.net/api/update/darwin/${quality}/latest`)).data;
exports.daysAgoToTimestamp = (days) => +new Date(Date.now() - days * 24 * 60 * 60 * 1000);
exports.daysAgoToHumanReadbleDate = (days) => new Date(Date.now() - days * 24 * 60 * 60 * 1000).toISOString().replace(/\.\d{3}\w$/, '');
exports.logRateLimit = async (token) => {
const usageData = (await new github_1.GitHub(token).rateLimit.get()).data.resources;
['core', 'graphql', 'search'].forEach(async (category) => {
const usage = 1 - usageData[category].remaining / usageData[category].limit;
const message = `Usage at ${usage} for ${category}`;
if (usage > 0) {
console.log(message);
}
if (usage > 0.5) {
await exports.logErrorToIssue(message, false, token);
}
});
};
exports.logErrorToIssue = async (message, ping, token) => {
// Attempt to wait out abuse detection timeout if present
await new Promise((resolve) => setTimeout(resolve, 10000));
const dest = github_1.context.repo.repo === 'vscode-internalbacklog'
? { repo: 'vscode-internalbacklog', issue: 974 }
: { repo: 'vscode', issue: 93814 };
return new octokit_1.OctoKitIssue(token, { owner: 'Microsoft', repo: dest.repo }, { number: dest.issue })
.postComment(`
Workflow: ${github_1.context.workflow}
Error: ${message}
Issue: ${ping ? `${github_1.context.repo.owner}/${github_1.context.repo.repo}#` : ''}${github_1.context.issue.number}
Repo: ${github_1.context.repo.owner}/${github_1.context.repo.repo}
<!-- Context:
${JSON.stringify(github_1.context, null, 2).replace(/<!--/gu, '<@--').replace(/-->/gu, '--@>')}
-->
`);
};

View File

@@ -0,0 +1,95 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as core from '@actions/core'
import { context, GitHub } from '@actions/github'
import axios from 'axios'
import { OctoKitIssue } from '../api/octokit'
import { Issue } from '../api/api'
export const getInput = (name: string) => core.getInput(name) || undefined
export const getRequiredInput = (name: string) => core.getInput(name, { required: true })
export const normalizeIssue = (
issue: Issue,
): { body: string; title: string; issueType: 'bug' | 'feature_request' | 'unknown' } => {
const { body, title } = issue
const isBug = body.includes('bug_report_template') || /Issue Type:.*Bug.*/.test(body)
const isFeatureRequest =
body.includes('feature_request_template') || /Issue Type:.*Feature Request.*/.test(body)
const cleanse = (str: string) =>
str
.toLowerCase()
.replace(/<!--.*?-->/gu, '')
.replace(/.* version: .*/gu, '')
.replace(/issue type: .*/gu, '')
.replace(/<details>(.|\s)*?<\/details>/gu, '')
.replace(/vs ?code/gu, '')
.replace(/we have written.*please paste./gu, '')
.replace(/steps to reproduce:/gu, '')
.replace(/does this issue occur when all extensions are disabled.*/gu, '')
.replace(/```(.|\s)*?```/gu, '')
.replace(/!?\[.*?\]\(.*?\)/gu, '')
.replace(/\s+/gu, ' ')
return {
body: cleanse(body),
title: cleanse(title),
issueType: isBug ? 'bug' : isFeatureRequest ? 'feature_request' : 'unknown',
}
}
export interface Release {
productVersion: string
timestamp: number
version: string
}
export const loadLatestRelease = async (quality: 'stable' | 'insider'): Promise<Release | undefined> =>
(await axios.get(`https://vscode-update.azurewebsites.net/api/update/darwin/${quality}/latest`)).data
export const daysAgoToTimestamp = (days: number): number => +new Date(Date.now() - days * 24 * 60 * 60 * 1000)
export const daysAgoToHumanReadbleDate = (days: number) =>
new Date(Date.now() - days * 24 * 60 * 60 * 1000).toISOString().replace(/\.\d{3}\w$/, '')
export const logRateLimit = async (token: string) => {
const usageData = (await new GitHub(token).rateLimit.get()).data.resources
;(['core', 'graphql', 'search'] as const).forEach(async (category) => {
const usage = 1 - usageData[category].remaining / usageData[category].limit
const message = `Usage at ${usage} for ${category}`
if (usage > 0) {
console.log(message)
}
if (usage > 0.5) {
await logErrorToIssue(message, false, token)
}
})
}
export const logErrorToIssue = async (message: string, ping: boolean, token: string): Promise<void> => {
// Attempt to wait out abuse detection timeout if present
await new Promise((resolve) => setTimeout(resolve, 10000))
const dest =
context.repo.repo === 'vscode-internalbacklog'
? { repo: 'vscode-internalbacklog', issue: 974 }
: { repo: 'vscode', issue: 93814 }
return new OctoKitIssue(token, { owner: 'Microsoft', repo: dest.repo }, { number: dest.issue })
.postComment(`
Workflow: ${context.workflow}
Error: ${message}
Issue: ${ping ? `${context.repo.owner}/${context.repo.repo}#` : ''}${context.issue.number}
Repo: ${context.repo.owner}/${context.repo.repo}
<!-- Context:
${JSON.stringify(context, null, 2).replace(/<!--/gu, '<@--').replace(/-->/gu, '--@>')}
-->
`)
}
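To make the cleansing behaviour of normalizeIssue above concrete, here is a small, hypothetical usage sketch; the issue literal is made up, and the import paths assume the snippet sits next to utils.ts:
import { Issue } from '../api/api'
import { normalizeIssue } from './utils'
const sample: Issue = {
	author: { name: 'someone' },
	body: 'Issue Type: Bug\nSteps to Reproduce:\n1. open any editor tab',
	title: 'Editor crashes on launch',
	labels: [],
	open: true,
	locked: false,
	number: 123,
	numComments: 0,
	reactions: { '+1': 0, '-1': 0, laugh: 0, hooray: 0, confused: 0, heart: 0, rocket: 0, eyes: 0 },
	milestoneId: null,
	createdAt: Date.now(),
	updatedAt: Date.now(),
}
// template boilerplate such as 'Issue Type:' and 'Steps to Reproduce:' is stripped from the body,
// and the issue is classified as a bug or feature request
console.log(normalizeIssue(sample).issueType) // 'bug'
console.log(normalizeIssue(sample).title) // 'editor crashes on launch'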

421
build/actions/yarn.lock Normal file
View File

@@ -0,0 +1,421 @@
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
# yarn lockfile v1
"@actions/core@^1.2.6":
version "1.2.6"
resolved "https://registry.yarnpkg.com/@actions/core/-/core-1.2.6.tgz#a78d49f41a4def18e88ce47c2cac615d5694bf09"
integrity sha512-ZQYitnqiyBc3D+k7LsgSBmMDVkOVidaagDG7j3fOym77jNunWRuYx7VSHa9GNfFZh+zh61xsCjRj4JxMZlDqTA==
"@actions/github@^2.1.1":
version "2.1.1"
resolved "https://registry.yarnpkg.com/@actions/github/-/github-2.1.1.tgz#bcabedff598196d953f58ba750d5e75549a75142"
integrity sha512-kAgTGUx7yf5KQCndVeHSwCNZuDBvPyxm5xKTswW2lofugeuC1AZX73nUUVDNaysnM9aKFMHv9YCdVJbg7syEyA==
dependencies:
"@actions/http-client" "^1.0.3"
"@octokit/graphql" "^4.3.1"
"@octokit/rest" "^16.43.1"
"@actions/http-client@^1.0.3":
version "1.0.8"
resolved "https://registry.yarnpkg.com/@actions/http-client/-/http-client-1.0.8.tgz#8bd76e8eca89dc8bcf619aa128eba85f7a39af45"
integrity sha512-G4JjJ6f9Hb3Zvejj+ewLLKLf99ZC+9v+yCxoYf9vSyH+WkzPLB2LuUtRMGNkooMqdugGBFStIKXOuvH1W+EctA==
dependencies:
tunnel "0.0.6"
"@octokit/auth-token@^2.4.0":
version "2.4.0"
resolved "https://registry.yarnpkg.com/@octokit/auth-token/-/auth-token-2.4.0.tgz#b64178975218b99e4dfe948253f0673cbbb59d9f"
integrity sha512-eoOVMjILna7FVQf96iWc3+ZtE/ZT6y8ob8ZzcqKY1ibSQCnu4O/B7pJvzMx5cyZ/RjAff6DAdEb0O0Cjcxidkg==
dependencies:
"@octokit/types" "^2.0.0"
"@octokit/endpoint@^6.0.1":
version "6.0.1"
resolved "https://registry.yarnpkg.com/@octokit/endpoint/-/endpoint-6.0.1.tgz#16d5c0e7a83e3a644d1ddbe8cded6c3d038d31d7"
integrity sha512-pOPHaSz57SFT/m3R5P8MUu4wLPszokn5pXcB/pzavLTQf2jbU+6iayTvzaY6/BiotuRS0qyEUkx3QglT4U958A==
dependencies:
"@octokit/types" "^2.11.1"
is-plain-object "^3.0.0"
universal-user-agent "^5.0.0"
"@octokit/graphql@^4.3.1":
version "4.3.1"
resolved "https://registry.yarnpkg.com/@octokit/graphql/-/graphql-4.3.1.tgz#9ee840e04ed2906c7d6763807632de84cdecf418"
integrity sha512-hCdTjfvrK+ilU2keAdqNBWOk+gm1kai1ZcdjRfB30oA3/T6n53UVJb7w0L5cR3/rhU91xT3HSqCd+qbvH06yxA==
dependencies:
"@octokit/request" "^5.3.0"
"@octokit/types" "^2.0.0"
universal-user-agent "^4.0.0"
"@octokit/plugin-paginate-rest@^1.1.1":
version "1.1.2"
resolved "https://registry.yarnpkg.com/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-1.1.2.tgz#004170acf8c2be535aba26727867d692f7b488fc"
integrity sha512-jbsSoi5Q1pj63sC16XIUboklNw+8tL9VOnJsWycWYR78TKss5PVpIPb1TUUcMQ+bBh7cY579cVAWmf5qG+dw+Q==
dependencies:
"@octokit/types" "^2.0.1"
"@octokit/plugin-request-log@^1.0.0":
version "1.0.0"
resolved "https://registry.yarnpkg.com/@octokit/plugin-request-log/-/plugin-request-log-1.0.0.tgz#eef87a431300f6148c39a7f75f8cfeb218b2547e"
integrity sha512-ywoxP68aOT3zHCLgWZgwUJatiENeHE7xJzYjfz8WI0goynp96wETBF+d95b8g/uL4QmS6owPVlaxiz3wyMAzcw==
"@octokit/plugin-rest-endpoint-methods@2.4.0":
version "2.4.0"
resolved "https://registry.yarnpkg.com/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-2.4.0.tgz#3288ecf5481f68c494dd0602fc15407a59faf61e"
integrity sha512-EZi/AWhtkdfAYi01obpX0DF7U6b1VRr30QNQ5xSFPITMdLSfhcBqjamE3F+sKcxPbD7eZuMHu3Qkk2V+JGxBDQ==
dependencies:
"@octokit/types" "^2.0.1"
deprecation "^2.3.1"
"@octokit/request-error@^1.0.2":
version "1.2.1"
resolved "https://registry.yarnpkg.com/@octokit/request-error/-/request-error-1.2.1.tgz#ede0714c773f32347576c25649dc013ae6b31801"
integrity sha512-+6yDyk1EES6WK+l3viRDElw96MvwfJxCt45GvmjDUKWjYIb3PJZQkq3i46TwGwoPD4h8NmTrENmtyA1FwbmhRA==
dependencies:
"@octokit/types" "^2.0.0"
deprecation "^2.0.0"
once "^1.4.0"
"@octokit/request-error@^2.0.0":
version "2.0.0"
resolved "https://registry.yarnpkg.com/@octokit/request-error/-/request-error-2.0.0.tgz#94ca7293373654400fbb2995f377f9473e00834b"
integrity sha512-rtYicB4Absc60rUv74Rjpzek84UbVHGHJRu4fNVlZ1mCcyUPPuzFfG9Rn6sjHrd95DEsmjSt1Axlc699ZlbDkw==
dependencies:
"@octokit/types" "^2.0.0"
deprecation "^2.0.0"
once "^1.4.0"
"@octokit/request@^5.2.0", "@octokit/request@^5.3.0":
version "5.4.2"
resolved "https://registry.yarnpkg.com/@octokit/request/-/request-5.4.2.tgz#74f8e5bbd39dc738a1b127629791f8ad1b3193ee"
integrity sha512-zKdnGuQ2TQ2vFk9VU8awFT4+EYf92Z/v3OlzRaSh4RIP0H6cvW1BFPXq4XYvNez+TPQjqN+0uSkCYnMFFhcFrw==
dependencies:
"@octokit/endpoint" "^6.0.1"
"@octokit/request-error" "^2.0.0"
"@octokit/types" "^2.11.1"
deprecation "^2.0.0"
is-plain-object "^3.0.0"
node-fetch "^2.3.0"
once "^1.4.0"
universal-user-agent "^5.0.0"
"@octokit/rest@^16.43.1":
version "16.43.1"
resolved "https://registry.yarnpkg.com/@octokit/rest/-/rest-16.43.1.tgz#3b11e7d1b1ac2bbeeb23b08a17df0b20947eda6b"
integrity sha512-gfFKwRT/wFxq5qlNjnW2dh+qh74XgTQ2B179UX5K1HYCluioWj8Ndbgqw2PVqa1NnVJkGHp2ovMpVn/DImlmkw==
dependencies:
"@octokit/auth-token" "^2.4.0"
"@octokit/plugin-paginate-rest" "^1.1.1"
"@octokit/plugin-request-log" "^1.0.0"
"@octokit/plugin-rest-endpoint-methods" "2.4.0"
"@octokit/request" "^5.2.0"
"@octokit/request-error" "^1.0.2"
atob-lite "^2.0.0"
before-after-hook "^2.0.0"
btoa-lite "^1.0.0"
deprecation "^2.0.0"
lodash.get "^4.4.2"
lodash.set "^4.3.2"
lodash.uniq "^4.5.0"
octokit-pagination-methods "^1.1.0"
once "^1.4.0"
universal-user-agent "^4.0.0"
"@octokit/types@^2.0.0", "@octokit/types@^2.0.1", "@octokit/types@^2.11.1":
version "2.12.1"
resolved "https://registry.yarnpkg.com/@octokit/types/-/types-2.12.1.tgz#4a26b4a85ec121043d3b0745b5798f9d8fd968ca"
integrity sha512-LRLR1tjbcCfAmUElvTmMvLEzstpx6Xt/aQVTg2xvd+kHA2Ekp1eWl5t+gU7bcwjXHYEAzh4hH4WH+kS3vh+wRw==
dependencies:
"@types/node" ">= 8"
"@types/node@>= 8":
version "13.13.2"
resolved "https://registry.yarnpkg.com/@types/node/-/node-13.13.2.tgz#160d82623610db590a64e8ca81784e11117e5a54"
integrity sha512-LB2R1Oyhpg8gu4SON/mfforE525+Hi/M1ineICEDftqNVTyFg1aRIeGuTvXAoWHc4nbrFncWtJgMmoyRvuGh7A==
arg@^4.1.0:
version "4.1.3"
resolved "https://registry.yarnpkg.com/arg/-/arg-4.1.3.tgz#269fc7ad5b8e42cb63c896d5666017261c144089"
integrity sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==
atob-lite@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/atob-lite/-/atob-lite-2.0.0.tgz#0fef5ad46f1bd7a8502c65727f0367d5ee43d696"
integrity sha1-D+9a1G8b16hQLGVyfwNn1e5D1pY=
axios@^0.21.4:
version "0.21.4"
resolved "https://registry.yarnpkg.com/axios/-/axios-0.21.4.tgz#c67b90dc0568e5c1cf2b0b858c43ba28e2eda575"
integrity sha512-ut5vewkiu8jjGBdqpM44XxjuCjq9LAKeHVmoVfHVzy8eHgxxq8SbAVQNovDA8mVi05kP0Ea/n/UzcSHcTJQfNg==
dependencies:
follow-redirects "^1.14.0"
before-after-hook@^2.0.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/before-after-hook/-/before-after-hook-2.1.0.tgz#b6c03487f44e24200dd30ca5e6a1979c5d2fb635"
integrity sha512-IWIbu7pMqyw3EAJHzzHbWa85b6oud/yfKYg5rqB5hNE8CeMi3nX+2C2sj0HswfblST86hpVEOAb9x34NZd6P7A==
btoa-lite@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/btoa-lite/-/btoa-lite-1.0.0.tgz#337766da15801210fdd956c22e9c6891ab9d0337"
integrity sha1-M3dm2hWAEhD92VbCLpxokaudAzc=
buffer-from@^1.0.0:
version "1.1.1"
resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.1.tgz#32713bc028f75c02fdb710d7c7bcec1f2c6070ef"
integrity sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A==
cross-spawn@^6.0.0:
version "6.0.5"
resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-6.0.5.tgz#4a5ec7c64dfae22c3a14124dbacdee846d80cbc4"
integrity sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==
dependencies:
nice-try "^1.0.4"
path-key "^2.0.1"
semver "^5.5.0"
shebang-command "^1.2.0"
which "^1.2.9"
deprecation@^2.0.0, deprecation@^2.3.1:
version "2.3.1"
resolved "https://registry.yarnpkg.com/deprecation/-/deprecation-2.3.1.tgz#6368cbdb40abf3373b525ac87e4a260c3a700919"
integrity sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==
diff@^4.0.1:
version "4.0.2"
resolved "https://registry.yarnpkg.com/diff/-/diff-4.0.2.tgz#60f3aecb89d5fae520c11aa19efc2bb982aade7d"
integrity sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==
end-of-stream@^1.1.0:
version "1.4.4"
resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0"
integrity sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==
dependencies:
once "^1.4.0"
execa@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/execa/-/execa-1.0.0.tgz#c6236a5bb4df6d6f15e88e7f017798216749ddd8"
integrity sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA==
dependencies:
cross-spawn "^6.0.0"
get-stream "^4.0.0"
is-stream "^1.1.0"
npm-run-path "^2.0.0"
p-finally "^1.0.0"
signal-exit "^3.0.0"
strip-eof "^1.0.0"
follow-redirects@^1.14.0:
version "1.14.3"
resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.14.3.tgz#6ada78118d8d24caee595595accdc0ac6abd022e"
integrity sha512-3MkHxknWMUtb23apkgz/83fDoe+y+qr0TdgacGIA7bew+QLBo3vdgEN2xEsuXNivpFy4CyDhBBZnNZOtalmenw==
get-stream@^4.0.0:
version "4.1.0"
resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-4.1.0.tgz#c1b255575f3dc21d59bfc79cd3d2b46b1c3a54b5"
integrity sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w==
dependencies:
pump "^3.0.0"
is-plain-object@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-3.0.0.tgz#47bfc5da1b5d50d64110806c199359482e75a928"
integrity sha512-tZIpofR+P05k8Aocp7UI/2UTa9lTJSebCXpFFoR9aibpokDj/uXBsJ8luUu0tTVYKkMU6URDUuOfJZ7koewXvg==
dependencies:
isobject "^4.0.0"
is-stream@^1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44"
integrity sha1-EtSj3U5o4Lec6428hBc66A2RykQ=
isexe@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10"
integrity sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=
isobject@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/isobject/-/isobject-4.0.0.tgz#3f1c9155e73b192022a80819bacd0343711697b0"
integrity sha512-S/2fF5wH8SJA/kmwr6HYhK/RI/OkhD84k8ntalo0iJjZikgq1XFvR5M8NPT1x5F7fBwCG3qHfnzeP/Vh/ZxCUA==
lodash.get@^4.4.2:
version "4.4.2"
resolved "https://registry.yarnpkg.com/lodash.get/-/lodash.get-4.4.2.tgz#2d177f652fa31e939b4438d5341499dfa3825e99"
integrity sha1-LRd/ZS+jHpObRDjVNBSZ36OCXpk=
lodash.set@^4.3.2:
version "4.3.2"
resolved "https://registry.yarnpkg.com/lodash.set/-/lodash.set-4.3.2.tgz#d8757b1da807dde24816b0d6a84bea1a76230b23"
integrity sha1-2HV7HagH3eJIFrDWqEvqGnYjCyM=
lodash.uniq@^4.5.0:
version "4.5.0"
resolved "https://registry.yarnpkg.com/lodash.uniq/-/lodash.uniq-4.5.0.tgz#d0225373aeb652adc1bc82e4945339a842754773"
integrity sha1-0CJTc662Uq3BvILklFM5qEJ1R3M=
macos-release@^2.2.0:
version "2.3.0"
resolved "https://registry.yarnpkg.com/macos-release/-/macos-release-2.3.0.tgz#eb1930b036c0800adebccd5f17bc4c12de8bb71f"
integrity sha512-OHhSbtcviqMPt7yfw5ef5aghS2jzFVKEFyCJndQt2YpSQ9qRVSEv2axSJI1paVThEu+FFGs584h/1YhxjVqajA==
make-error@^1.1.1:
version "1.3.6"
resolved "https://registry.yarnpkg.com/make-error/-/make-error-1.3.6.tgz#2eb2e37ea9b67c4891f684a1394799af484cf7a2"
integrity sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==
nice-try@^1.0.4:
version "1.0.5"
resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366"
integrity sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==
node-fetch@^2.3.0:
version "2.6.1"
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.1.tgz#045bd323631f76ed2e2b55573394416b639a0052"
integrity sha512-V4aYg89jEoVRxRb2fJdAg8FHvI7cEyYdVAh94HH0UIK8oJxUfkjlDQN9RbMx+bEjP7+ggMiFRprSti032Oipxw==
npm-run-path@^2.0.0:
version "2.0.2"
resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-2.0.2.tgz#35a9232dfa35d7067b4cb2ddf2357b1871536c5f"
integrity sha1-NakjLfo11wZ7TLLd8jV7GHFTbF8=
dependencies:
path-key "^2.0.0"
octokit-pagination-methods@^1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/octokit-pagination-methods/-/octokit-pagination-methods-1.1.0.tgz#cf472edc9d551055f9ef73f6e42b4dbb4c80bea4"
integrity sha512-fZ4qZdQ2nxJvtcasX7Ghl+WlWS/d9IgnBIwFZXVNNZUmzpno91SX5bc5vuxiuKoCtK78XxGGNuSCrDC7xYB3OQ==
once@^1.3.1, once@^1.4.0:
version "1.4.0"
resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1"
integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E=
dependencies:
wrappy "1"
os-name@^3.1.0:
version "3.1.0"
resolved "https://registry.yarnpkg.com/os-name/-/os-name-3.1.0.tgz#dec19d966296e1cd62d701a5a66ee1ddeae70801"
integrity sha512-h8L+8aNjNcMpo/mAIBPn5PXCM16iyPGjHNWo6U1YO8sJTMHtEtyczI6QJnLoplswm6goopQkqc7OAnjhWcugVg==
dependencies:
macos-release "^2.2.0"
windows-release "^3.1.0"
p-finally@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae"
integrity sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4=
path-key@^2.0.0, path-key@^2.0.1:
version "2.0.1"
resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40"
integrity sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A=
pump@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64"
integrity sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==
dependencies:
end-of-stream "^1.1.0"
once "^1.3.1"
semver@^5.5.0:
version "5.7.1"
resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7"
integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==
shebang-command@^1.2.0:
version "1.2.0"
resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-1.2.0.tgz#44aac65b695b03398968c39f363fee5deafdf1ea"
integrity sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=
dependencies:
shebang-regex "^1.0.0"
shebang-regex@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-1.0.0.tgz#da42f49740c0b42db2ca9728571cb190c98efea3"
integrity sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=
signal-exit@^3.0.0:
version "3.0.3"
resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.3.tgz#a1410c2edd8f077b08b4e253c8eacfcaf057461c"
integrity sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA==
source-map-support@^0.5.17:
version "0.5.19"
resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.19.tgz#a98b62f86dcaf4f67399648c085291ab9e8fed61"
integrity sha512-Wonm7zOCIJzBGQdB+thsPar0kYuCIzYvxZwlBa87yi/Mdjv7Tip2cyVbLj5o0cFPN4EVkuTwb3GDDyUx2DGnGw==
dependencies:
buffer-from "^1.0.0"
source-map "^0.6.0"
source-map@^0.6.0:
version "0.6.1"
resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263"
integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==
strip-eof@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/strip-eof/-/strip-eof-1.0.0.tgz#bb43ff5598a6eb05d89b59fcd129c983313606bf"
integrity sha1-u0P/VZim6wXYm1n80SnJgzE2Br8=
ts-node@^8.6.2:
version "8.9.0"
resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-8.9.0.tgz#d7bf7272dcbecd3a2aa18bd0b96c7d2f270c15d4"
integrity sha512-rwkXfOs9zmoHrV8xE++dmNd6ZIS+nmHHCxcV53ekGJrxFLMbp+pizpPS07ARvhwneCIECPppOwbZHvw9sQtU4w==
dependencies:
arg "^4.1.0"
diff "^4.0.1"
make-error "^1.1.1"
source-map-support "^0.5.17"
yn "3.1.1"
tunnel@0.0.6:
version "0.0.6"
resolved "https://registry.yarnpkg.com/tunnel/-/tunnel-0.0.6.tgz#72f1314b34a5b192db012324df2cc587ca47f92c"
integrity sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==
typescript@^3.8.3:
version "3.8.3"
resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.8.3.tgz#409eb8544ea0335711205869ec458ab109ee1061"
integrity sha512-MYlEfn5VrLNsgudQTVJeNaQFUAI7DkhnOjdpAp4T+ku1TfQClewlbSuTVHiA+8skNBgaf02TL/kLOvig4y3G8w==
universal-user-agent@^4.0.0:
version "4.0.1"
resolved "https://registry.yarnpkg.com/universal-user-agent/-/universal-user-agent-4.0.1.tgz#fd8d6cb773a679a709e967ef8288a31fcc03e557"
integrity sha512-LnST3ebHwVL2aNe4mejI9IQh2HfZ1RLo8Io2HugSif8ekzD1TlWpHpColOB/eh8JHMLkGH3Akqf040I+4ylNxg==
dependencies:
os-name "^3.1.0"
universal-user-agent@^5.0.0:
version "5.0.0"
resolved "https://registry.yarnpkg.com/universal-user-agent/-/universal-user-agent-5.0.0.tgz#a3182aa758069bf0e79952570ca757de3579c1d9"
integrity sha512-B5TPtzZleXyPrUMKCpEHFmVhMN6EhmJYjG5PQna9s7mXeSqGTLap4OpqLl5FCEFUI3UBmllkETwKf/db66Y54Q==
dependencies:
os-name "^3.1.0"
which@^1.2.9:
version "1.3.1"
resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a"
integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==
dependencies:
isexe "^2.0.0"
windows-release@^3.1.0:
version "3.3.0"
resolved "https://registry.yarnpkg.com/windows-release/-/windows-release-3.3.0.tgz#dce167e9f8be733f21c849ebd4d03fe66b29b9f0"
integrity sha512-2HetyTg1Y+R+rUgrKeUEhAG/ZuOmTrI1NBb3ZyAGQMYmOJjBBPe4MTodghRkmLJZHwkuPi02anbeGP+Zf401LQ==
dependencies:
execa "^1.0.0"
wrappy@1:
version "1.0.2"
resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f"
integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=
yn@3.1.1:
version "3.1.1"
resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.1.tgz#1e87401a09d767c1d5eab26a6e4c185182d2eb50"
integrity sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==

View File

@@ -1,21 +0,0 @@
{
"codebaseName": "vscode-client",
"ppe": false,
"notificationAliases": [
"sbatten@microsoft.com"
],
"codebaseAdmins": [
"REDMOND\\stbatt",
"REDMOND\\monacotools",
],
"instanceUrl": "https://msazure.visualstudio.com/defaultcollection",
"projectName": "One",
"areaPath": "One\\VSCode\\Client",
"iterationPath": "One",
"notifyAlways": true,
"tools": [
"BinSkim",
"CredScan",
"CodeQL"
]
}

View File

@@ -160,7 +160,7 @@ async function main() {
blobService.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;
mooncakeBlobService.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;
console.log('Uploading blobs to Azure storage and Mooncake Azure storage...');
await (0, retry_1.retry)(() => Promise.all([
await retry_1.retry(() => Promise.all([
uploadBlob(blobService, quality, blobName, filePath, fileName),
uploadBlob(mooncakeBlobService, quality, blobName, filePath, fileName)
]));
@@ -185,7 +185,7 @@ async function main() {
console.log('Asset:', JSON.stringify(asset, null, ' '));
const client = new cosmos_1.CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT'], key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const scripts = client.database('builds').container(quality).scripts;
await (0, retry_1.retry)(() => scripts.storedProcedure('createAsset').execute('', [commit, asset, true]));
await retry_1.retry(() => scripts.storedProcedure('createAsset').execute('', [commit, asset, true]));
console.log(` Done ✔️`);
}
main().then(() => {

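Both hunks above simply wrap a promise-returning function in retry(...). The helper itself (build/azure-pipelines/common/retry.ts) is not part of this diff; the following is only a minimal sketch of a compatible implementation, with the attempt count and backoff chosen arbitrarily:

export async function retry<T>(fn: () => Promise<T>): Promise<T> {
	let lastError: unknown;
	for (let attempt = 1; attempt <= 5; attempt++) {
		try {
			return await fn();
		} catch (err) {
			lastError = err;
			// Simple linear backoff between attempts.
			await new Promise(resolve => setTimeout(resolve, attempt * 1000));
		}
	}
	throw lastError;
}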
View File

@@ -40,7 +40,7 @@ async function main() {
};
const client = new cosmos_1.CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT'], key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const scripts = client.database('builds').container(quality).scripts;
await (0, retry_1.retry)(() => scripts.storedProcedure('createBuild').execute('', [Object.assign(Object.assign({}, build), { _partitionKey: '' })]));
await retry_1.retry(() => scripts.storedProcedure('createBuild').execute('', [Object.assign(Object.assign({}, build), { _partitionKey: '' })]));
}
main().then(() => {
console.log('Build successfully created');

View File

@@ -4,9 +4,11 @@
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const path = require("path");
const retry_1 = require("./retry");
const { installDefaultBrowsersForNpmInstall } = require('playwright-core/lib/utils/registry');
const { installBrowsersWithProgressBar } = require('playwright/lib/install/installer');
const playwrightPath = path.dirname(require.resolve('playwright'));
async function install() {
await (0, retry_1.retry)(() => installDefaultBrowsersForNpmInstall());
await retry_1.retry(() => installBrowsersWithProgressBar(playwrightPath));
}
install();
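The snippet above is compiled JavaScript; a source-form equivalent of the incoming version might look like the following. The file name and the relative import of retry are assumptions, while the playwright-core internal path is taken verbatim from the compiled output:

// installPlaywright.ts (hypothetical source form)
import { retry } from './retry';
const { installDefaultBrowsersForNpmInstall } = require('playwright-core/lib/utils/registry');

async function install(): Promise<void> {
	await retry(() => installDefaultBrowsersForNpmInstall());
}

install();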

View File

@@ -0,0 +1,71 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const azure = require("azure-storage");
const mime = require("mime");
const minimist = require("minimist");
const path_1 = require("path");
const fileNames = [
'fake.html',
'host.js',
'index.html',
'main.js',
'service-worker.js'
];
async function assertContainer(blobService, container) {
await new Promise((c, e) => blobService.createContainerIfNotExists(container, { publicAccessLevel: 'blob' }, err => err ? e(err) : c()));
}
async function doesBlobExist(blobService, container, blobName) {
const existsResult = await new Promise((c, e) => blobService.doesBlobExist(container, blobName, (err, r) => err ? e(err) : c(r)));
return existsResult.exists;
}
async function uploadBlob(blobService, container, blobName, file) {
const blobOptions = {
contentSettings: {
contentType: mime.lookup(file),
cacheControl: 'max-age=31536000, public'
}
};
await new Promise((c, e) => blobService.createBlockBlobFromLocalFile(container, blobName, file, blobOptions, err => err ? e(err) : c()));
}
async function publish(commit, files) {
console.log('Publishing...');
console.log('Commit:', commit);
const storageAccount = process.env['AZURE_WEBVIEW_STORAGE_ACCOUNT'];
const blobService = azure.createBlobService(storageAccount, process.env['AZURE_WEBVIEW_STORAGE_ACCESS_KEY'])
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
await assertContainer(blobService, commit);
for (const file of files) {
const blobName = path_1.basename(file);
const blobExists = await doesBlobExist(blobService, commit, blobName);
if (blobExists) {
console.log(`Blob ${commit}, ${blobName} already exists, not publishing again.`);
continue;
}
console.log('Uploading blob to Azure storage...');
await uploadBlob(blobService, commit, blobName, file);
}
console.log('Blobs successfully uploaded.');
}
function main() {
const commit = process.env['BUILD_SOURCEVERSION'];
if (!commit) {
console.warn('Skipping publish due to missing BUILD_SOURCEVERSION');
return;
}
const opts = minimist(process.argv.slice(2));
const [directory] = opts._;
const files = fileNames.map(fileName => path_1.join(directory, fileName));
publish(commit, files).catch(err => {
console.error(err);
process.exit(1);
});
}
if (process.argv.length < 3) {
console.error('Usage: node publish.js <directory>');
process.exit(-1);
}
main();

View File

@@ -0,0 +1,9 @@
#!/usr/bin/env bash
set -e
REPO="$(pwd)"
# Publish webview contents
PACKAGEJSON="$REPO/package.json"
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")
node build/azure-pipelines/common/publish-webview.js "$REPO/src/vs/workbench/contrib/webview/browser/pre/"

View File

@@ -0,0 +1,87 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as azure from 'azure-storage';
import * as mime from 'mime';
import * as minimist from 'minimist';
import { basename, join } from 'path';
const fileNames = [
'fake.html',
'host.js',
'index.html',
'main.js',
'service-worker.js'
];
async function assertContainer(blobService: azure.BlobService, container: string): Promise<void> {
await new Promise<void>((c, e) => blobService.createContainerIfNotExists(container, { publicAccessLevel: 'blob' }, err => err ? e(err) : c()));
}
async function doesBlobExist(blobService: azure.BlobService, container: string, blobName: string): Promise<boolean | undefined> {
const existsResult = await new Promise<azure.BlobService.BlobResult>((c, e) => blobService.doesBlobExist(container, blobName, (err, r) => err ? e(err) : c(r)));
return existsResult.exists;
}
async function uploadBlob(blobService: azure.BlobService, container: string, blobName: string, file: string): Promise<void> {
const blobOptions: azure.BlobService.CreateBlockBlobRequestOptions = {
contentSettings: {
contentType: mime.lookup(file),
cacheControl: 'max-age=31536000, public'
}
};
await new Promise<void>((c, e) => blobService.createBlockBlobFromLocalFile(container, blobName, file, blobOptions, err => err ? e(err) : c()));
}
async function publish(commit: string, files: readonly string[]): Promise<void> {
console.log('Publishing...');
console.log('Commit:', commit);
const storageAccount = process.env['AZURE_WEBVIEW_STORAGE_ACCOUNT']!;
const blobService = azure.createBlobService(storageAccount, process.env['AZURE_WEBVIEW_STORAGE_ACCESS_KEY']!)
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
await assertContainer(blobService, commit);
for (const file of files) {
const blobName = basename(file);
const blobExists = await doesBlobExist(blobService, commit, blobName);
if (blobExists) {
console.log(`Blob ${commit}, ${blobName} already exists, not publishing again.`);
continue;
}
console.log('Uploading blob to Azure storage...');
await uploadBlob(blobService, commit, blobName, file);
}
console.log('Blobs successfully uploaded.');
}
function main(): void {
const commit = process.env['BUILD_SOURCEVERSION'];
if (!commit) {
console.warn('Skipping publish due to missing BUILD_SOURCEVERSION');
return;
}
const opts = minimist(process.argv.slice(2));
const [directory] = opts._;
const files = fileNames.map(fileName => join(directory, fileName));
publish(commit, files).catch(err => {
console.error(err);
process.exit(1);
});
}
if (process.argv.length < 3) {
console.error('Usage: node publish.js <directory>');
process.exit(-1);
}
main();
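Given the layout used by publish() above (one public container per commit, blobs named after each file's basename), a consumer could resolve the published resources as sketched below. The host name follows the standard Azure Blob Storage URL shape; treat it as an assumption, since no consumer code appears in this diff:

const storageAccount = process.env['AZURE_WEBVIEW_STORAGE_ACCOUNT'];

function webviewResourceUrl(commit: string, fileName: string): string {
	// Container name is the commit, blob name is the file basename, public access level is 'blob'.
	return `https://${storageAccount}.blob.core.windows.net/${commit}/${fileName}`;
}

// e.g. webviewResourceUrl(process.env['BUILD_SOURCEVERSION']!, 'index.html')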

View File

@@ -39,7 +39,7 @@ async function main() {
}
console.log(`Releasing build ${commit}...`);
const scripts = client.database('builds').container(quality).scripts;
await (0, retry_1.retry)(() => scripts.storedProcedure('releaseBuild').execute('', [commit]));
await retry_1.retry(() => scripts.storedProcedure('releaseBuild').execute('', [commit]));
}
main().then(() => {
console.log('Build successfully released');

View File

@@ -1,17 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const sign_1 = require("./sign");
const path = require("path");
(0, sign_1.main)([
process.env['EsrpCliDllPath'],
'windows',
process.env['ESRPPKI'],
process.env['ESRPAADUsername'],
process.env['ESRPAADPassword'],
path.dirname(process.argv[2]),
path.basename(process.argv[2])
]);

View File

@@ -1,17 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { main } from './sign';
import * as path from 'path';
main([
process.env['EsrpCliDllPath']!,
'windows',
process.env['ESRPPKI']!,
process.env['ESRPAADUsername']!,
process.env['ESRPAADPassword']!,
path.dirname(process.argv[2]),
path.basename(process.argv[2])
]);
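This wrapper takes a single file path argument and splits it into the folderPath/pattern pair expected by main in ./sign. For instance (the path below is a placeholder, not a real pipeline value):

import * as path from 'path';

const target = '.build/win32-x64/VSCodeSetup.exe'; // placeholder artifact path
console.log(path.dirname(target));  // '.build/win32-x64'  -> folderPath
console.log(path.basename(target)); // 'VSCodeSetup.exe'   -> pattern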

View File

@@ -1,77 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.main = void 0;
const cp = require("child_process");
const fs = require("fs");
const tmp = require("tmp");
const crypto = require("crypto");
function getParams(type) {
switch (type) {
case 'windows':
return '[{"keyCode":"CP-230012","operationSetCode":"SigntoolSign","parameters":[{"parameterName":"OpusName","parameterValue":"VS Code"},{"parameterName":"OpusInfo","parameterValue":"https://code.visualstudio.com/"},{"parameterName":"Append","parameterValue":"/as"},{"parameterName":"FileDigest","parameterValue":"/fd \\"SHA256\\""},{"parameterName":"PageHash","parameterValue":"/NPH"},{"parameterName":"TimeStamp","parameterValue":"/tr \\"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\\" /td sha256"}],"toolName":"sign","toolVersion":"1.0"},{"keyCode":"CP-230012","operationSetCode":"SigntoolVerify","parameters":[{"parameterName":"VerifyAll","parameterValue":"/all"}],"toolName":"sign","toolVersion":"1.0"}]';
case 'rpm':
return '[{ "keyCode": "CP-450779-Pgp", "operationSetCode": "LinuxSign", "parameters": [], "toolName": "sign", "toolVersion": "1.0" }]';
case 'darwin-sign':
return '[{"keyCode":"CP-401337-Apple","operationSetCode":"MacAppDeveloperSign","parameters":[{"parameterName":"Hardening","parameterValue":"--options=runtime"}],"toolName":"sign","toolVersion":"1.0"}]';
case 'darwin-notarize':
return '[{"keyCode":"CP-401337-Apple","operationSetCode":"MacAppNotarize","parameters":[{"parameterName":"BundleId","parameterValue":"$(BundleIdentifier)"}],"toolName":"sign","toolVersion":"1.0"}]';
default:
throw new Error(`Sign type ${type} not found`);
}
}
function main([esrpCliPath, type, cert, username, password, folderPath, pattern]) {
tmp.setGracefulCleanup();
const patternPath = tmp.tmpNameSync();
fs.writeFileSync(patternPath, pattern);
const paramsPath = tmp.tmpNameSync();
fs.writeFileSync(paramsPath, getParams(type));
const keyFile = tmp.tmpNameSync();
const key = crypto.randomBytes(32);
const iv = crypto.randomBytes(16);
fs.writeFileSync(keyFile, JSON.stringify({ key: key.toString('hex'), iv: iv.toString('hex') }));
const clientkeyPath = tmp.tmpNameSync();
const clientkeyCypher = crypto.createCipheriv('aes-256-cbc', key, iv);
let clientkey = clientkeyCypher.update(password, 'utf8', 'hex');
clientkey += clientkeyCypher.final('hex');
fs.writeFileSync(clientkeyPath, clientkey);
const clientcertPath = tmp.tmpNameSync();
const clientcertCypher = crypto.createCipheriv('aes-256-cbc', key, iv);
let clientcert = clientcertCypher.update(cert, 'utf8', 'hex');
clientcert += clientcertCypher.final('hex');
fs.writeFileSync(clientcertPath, clientcert);
const args = [
esrpCliPath,
'vsts.sign',
'-a', username,
'-k', clientkeyPath,
'-z', clientcertPath,
'-f', folderPath,
'-p', patternPath,
'-u', 'false',
'-x', 'regularSigning',
'-b', 'input.json',
'-l', 'AzSecPack_PublisherPolicyProd.xml',
'-y', 'inlineSignParams',
'-j', paramsPath,
'-c', '9997',
'-t', '120',
'-g', '10',
'-v', 'Tls12',
'-s', 'https://api.esrp.microsoft.com/api/v1',
'-m', '0',
'-o', 'Microsoft',
'-i', 'https://www.microsoft.com',
'-n', '5',
'-r', 'true',
'-e', keyFile,
];
cp.spawnSync('dotnet', args, { stdio: 'inherit' });
}
exports.main = main;
if (require.main === module) {
main(process.argv.slice(2));
}

View File

@@ -1,84 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as cp from 'child_process';
import * as fs from 'fs';
import * as tmp from 'tmp';
import * as crypto from 'crypto';
function getParams(type: string): string {
switch (type) {
case 'windows':
return '[{"keyCode":"CP-230012","operationSetCode":"SigntoolSign","parameters":[{"parameterName":"OpusName","parameterValue":"VS Code"},{"parameterName":"OpusInfo","parameterValue":"https://code.visualstudio.com/"},{"parameterName":"Append","parameterValue":"/as"},{"parameterName":"FileDigest","parameterValue":"/fd \\"SHA256\\""},{"parameterName":"PageHash","parameterValue":"/NPH"},{"parameterName":"TimeStamp","parameterValue":"/tr \\"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\\" /td sha256"}],"toolName":"sign","toolVersion":"1.0"},{"keyCode":"CP-230012","operationSetCode":"SigntoolVerify","parameters":[{"parameterName":"VerifyAll","parameterValue":"/all"}],"toolName":"sign","toolVersion":"1.0"}]';
case 'rpm':
return '[{ "keyCode": "CP-450779-Pgp", "operationSetCode": "LinuxSign", "parameters": [], "toolName": "sign", "toolVersion": "1.0" }]';
case 'darwin-sign':
return '[{"keyCode":"CP-401337-Apple","operationSetCode":"MacAppDeveloperSign","parameters":[{"parameterName":"Hardening","parameterValue":"--options=runtime"}],"toolName":"sign","toolVersion":"1.0"}]';
case 'darwin-notarize':
return '[{"keyCode":"CP-401337-Apple","operationSetCode":"MacAppNotarize","parameters":[{"parameterName":"BundleId","parameterValue":"$(BundleIdentifier)"}],"toolName":"sign","toolVersion":"1.0"}]';
default:
throw new Error(`Sign type ${type} not found`);
}
}
export function main([esrpCliPath, type, cert, username, password, folderPath, pattern]: string[]) {
tmp.setGracefulCleanup();
const patternPath = tmp.tmpNameSync();
fs.writeFileSync(patternPath, pattern);
const paramsPath = tmp.tmpNameSync();
fs.writeFileSync(paramsPath, getParams(type));
const keyFile = tmp.tmpNameSync();
const key = crypto.randomBytes(32);
const iv = crypto.randomBytes(16);
fs.writeFileSync(keyFile, JSON.stringify({ key: key.toString('hex'), iv: iv.toString('hex') }));
const clientkeyPath = tmp.tmpNameSync();
const clientkeyCypher = crypto.createCipheriv('aes-256-cbc', key, iv);
let clientkey = clientkeyCypher.update(password, 'utf8', 'hex');
clientkey += clientkeyCypher.final('hex');
fs.writeFileSync(clientkeyPath, clientkey);
const clientcertPath = tmp.tmpNameSync();
const clientcertCypher = crypto.createCipheriv('aes-256-cbc', key, iv);
let clientcert = clientcertCypher.update(cert, 'utf8', 'hex');
clientcert += clientcertCypher.final('hex');
fs.writeFileSync(clientcertPath, clientcert);
const args = [
esrpCliPath,
'vsts.sign',
'-a', username,
'-k', clientkeyPath,
'-z', clientcertPath,
'-f', folderPath,
'-p', patternPath,
'-u', 'false',
'-x', 'regularSigning',
'-b', 'input.json',
'-l', 'AzSecPack_PublisherPolicyProd.xml',
'-y', 'inlineSignParams',
'-j', paramsPath,
'-c', '9997',
'-t', '120',
'-g', '10',
'-v', 'Tls12',
'-s', 'https://api.esrp.microsoft.com/api/v1',
'-m', '0',
'-o', 'Microsoft',
'-i', 'https://www.microsoft.com',
'-n', '5',
'-r', 'true',
'-e', keyFile,
];
cp.spawnSync('dotnet', args, { stdio: 'inherit' });
}
if (require.main === module) {
main(process.argv.slice(2));
}
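The pipeline steps later in this diff invoke this script as node build/azure-pipelines/common/sign <esrpCliPath> <type> <cert> <username> <password> <folderPath> <pattern>. Calling it programmatically is just main([...]) with the same argument order; every value below is a placeholder, not a real secret, tool path, or artifact name:

import { main } from './sign';

main([
	'/agent/_tool/esrpclient/EsrpClient.dll', // esrpCliPath (hypothetical location of the ESRP client)
	'darwin-sign',                            // one of: windows | rpm | darwin-sign | darwin-notarize
	'<pki-certificate>',                      // cert
	'<aad-username>',                         // username
	'<aad-password>',                         // password
	'/agent/_work/1',                         // folderPath containing the artifact
	'VSCode-darwin-x64.zip'                   // pattern of files to sign
]);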

View File

@@ -1,39 +1,39 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "14.x"
versionSpec: "12.18.3"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 # {{SQL CARBON EDIT}} update version
inputs:
versionSpec: "1.x"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 # {{SQL CARBON EDIT}} update version
inputs:
versionSpec: "1.x"
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
displayName: Restore Cache - Node Modules # {{SQL CARBON EDIT}}
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
displayName: Restore Cache - Node Modules # {{SQL CARBON EDIT}}
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
- script: |
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
displayName: Install Dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
displayName: Save Cache - Node Modules # {{SQL CARBON EDIT}}
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
displayName: Save Cache - Node Modules # {{SQL CARBON EDIT}}
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- script: |
yarn electron x64
displayName: Download Electron
- script: |
yarn electron x64
displayName: Download Electron
# - script: | {{SQL CARBON EDIT}} remove editor checks
# yarn monaco-compile-check
# displayName: Run Monaco Editor Checks
# - script: | {{SQL CARBON EDIT}} remove editor checks
# yarn monaco-compile-check
# displayName: Run Monaco Editor Checks
- script: |
yarn valid-layers-check
@@ -43,21 +43,21 @@ steps:
yarn compile
displayName: Compile Sources
# - script: | {{SQL CARBON EDIT}} remove step
# yarn download-builtin-extensions
# displayName: Download Built-in Extensions
# - script: | {{SQL CARBON EDIT}} remove step
# yarn download-builtin-extensions
# displayName: Download Built-in Extensions
- script: |
./scripts/test.sh --tfs "Unit Tests"
displayName: Run Unit Tests (Electron)
# - script: | {{SQL CARBON EDIT}} disable
# yarn test-browser --browser chromium --browser webkit --browser firefox --tfs "Browser Unit Tests"
# displayName: Run Unit Tests (Browser)
# - script: | {{SQL CARBON EDIT}} disable
# yarn test-browser --browser chromium --browser webkit --browser firefox --tfs "Browser Unit Tests"
# displayName: Run Unit Tests (Browser)
# - script: | {{SQL CARBON EDIT}} disable
# ./scripts/test-integration.sh --tfs "Integration Tests"
# displayName: Run Integration Tests (Electron)
# - script: | {{SQL CARBON EDIT}} disable
# ./scripts/test-integration.sh --tfs "Integration Tests"
# displayName: Run Integration Tests (Electron)
- task: PublishPipelineArtifact@0
inputs:

View File

@@ -8,7 +8,6 @@ steps:
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
SecretsFilter: "github-distro-mixin-password,ESRP-PKI,esrp-aad-username,esrp-aad-password"
- script: |
set -e
@@ -28,10 +27,12 @@ steps:
displayName: Merge distro
- script: |
set -e
yarn --cwd build
yarn --cwd build compile
displayName: Compile build tools
pushd build \
&& yarn \
&& npm install -g typescript \
&& tsc azure-pipelines/common/createAsset.ts \
&& popd
displayName: Restore modules for just build folder and compile it
- download: current
artifact: unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive
@@ -43,16 +44,28 @@ steps:
mv $(Pipeline.Workspace)/unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive/VSCode-darwin-$(VSCODE_ARCH).zip $(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH).zip
displayName: Unzip & move
- task: UseDotNet@2
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
inputs:
version: 2.x
- task: EsrpClientTool@1
displayName: Download ESRPClient
- script: |
set -e
node build/azure-pipelines/common/sign "$(esrpclient.toolpath)/$(esrpclient.toolname)" darwin-sign $(ESRP-PKI) $(esrp-aad-username) $(esrp-aad-password) $(agent.builddirectory) VSCode-darwin-$(VSCODE_ARCH).zip
ConnectedServiceName: "ESRP CodeSign"
FolderPath: "$(agent.builddirectory)"
Pattern: "VSCode-darwin-$(VSCODE_ARCH).zip"
signConfigType: inlineSignParams
inlineOperation: |
[
{
"keyCode": "CP-401337-Apple",
"operationSetCode": "MacAppDeveloperSign",
"parameters": [
{
"parameterName": "Hardening",
"parameterValue": "--options=runtime"
}
],
"toolName": "sign",
"toolVersion": "1.0"
}
]
SessionTimeout: 60
displayName: Codesign
- script: |
@@ -62,10 +75,29 @@ steps:
echo "##vso[task.setvariable variable=BundleIdentifier]$BUNDLE_IDENTIFIER"
displayName: Export bundle identifier
- script: |
set -e
node build/azure-pipelines/common/sign "$(esrpclient.toolpath)/$(esrpclient.toolname)" darwin-notarize $(ESRP-PKI) $(esrp-aad-username) $(esrp-aad-password) $(agent.builddirectory) VSCode-darwin-$(VSCODE_ARCH).zip
displayName: Notarize
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
inputs:
ConnectedServiceName: "ESRP CodeSign"
FolderPath: "$(agent.builddirectory)"
Pattern: "VSCode-darwin-$(VSCODE_ARCH).zip"
signConfigType: inlineSignParams
inlineOperation: |
[
{
"keyCode": "CP-401337-Apple",
"operationSetCode": "MacAppNotarize",
"parameters": [
{
"parameterName": "BundleId",
"parameterValue": "$(BundleIdentifier)"
}
],
"toolName": "sign",
"toolVersion": "1.0"
}
]
SessionTimeout: 60
displayName: Notarization
- script: |
set -e

View File

@@ -8,7 +8,6 @@ steps:
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
SecretsFilter: 'github-distro-mixin-password,macos-developer-certificate,macos-developer-certificate-key,ticino-storage-key'
- task: DownloadPipelineArtifact@2
inputs:
@@ -55,7 +54,7 @@ steps:
- task: Cache@2
inputs:
key: "nodeModules | $(Agent.OS) | .build/yarnlockhash"
key: 'nodeModules | $(Agent.OS) | .build/yarnlockhash'
path: .build/node_modules_cache
cacheHitVar: NODE_MODULES_RESTORED
displayName: Restore node_modules cache
@@ -98,7 +97,6 @@ steps:
env:
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
GITHUB_TOKEN: "$(github-distro-mixin-password)"
displayName: Install dependencies
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
@@ -187,6 +185,12 @@ steps:
timeoutInMinutes: 7
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
yarn --cwd test/integration/browser compile
displayName: Compile integration tests
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
# Figure out the full absolute path of the product we just built
# including the remote server and configure the integration tests
@@ -220,11 +224,17 @@ steps:
timeoutInMinutes: 7
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
yarn --cwd test/smoke compile
displayName: Compile smoke tests
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
APP_ROOT=$(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH)
APP_NAME="`ls $APP_ROOT | head -n 1`"
yarn smoketest-no-compile --build "$APP_ROOT/$APP_NAME" --screenshots .build/logs/smoke-tests
yarn smoketest-no-compile --build "$APP_ROOT/$APP_NAME"
timeoutInMinutes: 5
displayName: Run smoke tests (Electron)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
@@ -234,7 +244,7 @@ steps:
APP_ROOT=$(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH)
APP_NAME="`ls $APP_ROOT | head -n 1`"
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-darwin" \
yarn smoketest-no-compile --build "$APP_ROOT/$APP_NAME" --remote --screenshots .build/logs/smoke-tests
yarn smoketest-no-compile --build "$APP_ROOT/$APP_NAME" --remote
timeoutInMinutes: 5
displayName: Run smoke tests (Remote)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
@@ -255,14 +265,6 @@ steps:
continueOnError: true
condition: failed()
- task: PublishPipelineArtifact@0
inputs:
artifactName: logs-macos-$(VSCODE_ARCH)-$(System.JobAttempt)
targetPath: .build/logs
displayName: "Publish Log Files"
continueOnError: true
condition: and(succeededOrFailed(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- task: PublishTestResults@2
displayName: Publish Tests Results
inputs:

View File

@@ -17,13 +17,6 @@ steps:
mv azuredatastudio-darwin-unsigned.zip azuredatastudio-darwin.zip
displayName: 'Rename the file'
- task: UseDotNet@2
displayName: 'Install .NET Core sdk for signing'
inputs:
packageType: sdk
version: 2.1.x
installationPath: $(Agent.ToolsDirectory)/dotnet
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
displayName: 'ESRP CodeSigning'
inputs:

View File

@@ -17,7 +17,7 @@ steps:
- task: NodeTool@0
inputs:
versionSpec: "14.x"
versionSpec: "12.13.0"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
inputs:
@@ -106,7 +106,7 @@ steps:
- script: |
set -e
./scripts/test.sh --build --tfs "Unit Tests" --coverage
./scripts/test.sh --build --coverage --reporter mocha-junit-reporter --tfs "Unit Tests"
displayName: Run unit tests
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
@@ -128,24 +128,14 @@ steps:
yarn gulp compile-extensions
displayName: Compile Extensions
# Per https://developercommunity.visualstudio.com/t/variablesexpressions-dont-work-with-continueonerro/1187733 we can't use variables
# in continueOnError directly so instead make two copies of the task and only run one or the other based on the SMOKE_FAIL_ON_ERROR value
- script: |
set -e
APP_ROOT=$(agent.builddirectory)/azuredatastudio-darwin-x64
APP_NAME="`ls $APP_ROOT | head -n 1`"
yarn smoketest --build "$APP_ROOT/$APP_NAME" --screenshots "$(build.artifactstagingdirectory)/smokeshots" --log "$(build.artifactstagingdirectory)/logs/darwin/smoke.log" --extensionsDir "$(build.sourcesdirectory)/extensions"
displayName: Run smoke tests (Electron) (Continue on Error)
displayName: Run smoke tests (Electron)
continueOnError: true
condition: and(succeeded(), and(eq(variables['RUN_TESTS'], 'true'), ne(variables['SMOKE_FAIL_ON_ERROR'], 'true')))
- script: |
set -e
APP_ROOT=$(agent.builddirectory)/azuredatastudio-darwin-x64
APP_NAME="`ls $APP_ROOT | head -n 1`"
yarn smoketest --build "$APP_ROOT/$APP_NAME" --screenshots "$(build.artifactstagingdirectory)/smokeshots" --log "$(build.artifactstagingdirectory)/logs/darwin/smoke.log" --extensionsDir "$(build.sourcesdirectory)/extensions"
displayName: Run smoke tests (Electron) (Fail on Error)
condition: and(succeeded(), and(eq(variables['RUN_TESTS'], 'true'), eq(variables['SMOKE_FAIL_ON_ERROR'], 'true')))
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
# - script: |
# set -e

View File

@@ -18,7 +18,6 @@ steps:
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
SecretsFilter: 'github-distro-mixin-password'
- script: |
set -e

View File

@@ -1,5 +1,5 @@
#Download base image ubuntu 22.04
FROM mcr.microsoft.com/mirror/docker/library/ubuntu:22.04
#Download base image ubuntu 21.04
FROM ubuntu:21.04
ENV TZ=America/Los_Angeles
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone

View File

@@ -1,7 +1,7 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "14.x"
versionSpec: "12.13.0"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
inputs:

View File

@@ -18,7 +18,6 @@ steps:
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
SecretsFilter: 'github-distro-mixin-password'
- script: |
set -e

View File

@@ -1,17 +1,12 @@
#Download base image ubuntu 18.04
FROM mcr.microsoft.com/mirror/docker/library/ubuntu:18.04
#Adding apt repos for g++-4.9
RUN echo "deb http://dk.archive.ubuntu.com/ubuntu/ xenial main" >> /etc/apt/sources.list
RUN echo "deb http://dk.archive.ubuntu.com/ubuntu/ xenial universe" >> /etc/apt/sources.list
FROM ubuntu:18.04
# Update Software repository
RUN apt-get update && apt-get upgrade -y
RUN apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0 \
libkrb5-dev git apt-transport-https ca-certificates curl gnupg-agent software-properties-common \
libnss3 libasound2 make gcc libx11-dev fakeroot rpm libgconf-2-4 libunwind8 g++-4.9 python-dev \
libgbm-dev
libnss3 libasound2 make gcc libx11-dev fakeroot rpm libgconf-2-4 libunwind8 g++-4.9
RUN rm /usr/bin/gcc
RUN rm /usr/bin/g++

View File

@@ -10,7 +10,7 @@ steps:
- task: NodeTool@0
inputs:
versionSpec: "14.x"
versionSpec: "12.18.3"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
inputs:
@@ -22,7 +22,7 @@ steps:
displayName: Prepare yarn cache flags
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
displayName: Restore Cache - Node Modules # {{SQL CARBON EDIT}}
displayName: Restore Cache - Node Modules # {{SQL CARBON EDIT}}
inputs:
keyfile: ".yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules"
@@ -34,7 +34,7 @@ steps:
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
displayName: Save Cache - Node Modules # {{SQL CARBON EDIT}}
displayName: Save Cache - Node Modules # {{SQL CARBON EDIT}}
inputs:
keyfile: ".yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules"
@@ -49,6 +49,10 @@ steps:
yarn gulp hygiene
displayName: Run Hygiene Checks
- script: | # {{SQL CARBON EDIT}} add strict null check
yarn strict-vscode
displayName: Run Strict Null Check
# - script: | {{SQL CARBON EDIT}} remove monaco editor checks
# yarn monaco-compile-check
# displayName: Run Monaco Editor Checks

View File

@@ -12,7 +12,6 @@ steps:
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
SecretsFilter: 'github-distro-mixin-password'
- task: DownloadPipelineArtifact@2
inputs:
@@ -89,7 +88,6 @@ steps:
env:
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
GITHUB_TOKEN: "$(github-distro-mixin-password)"
displayName: Install dependencies
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))

View File

@@ -12,7 +12,6 @@ steps:
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
SecretsFilter: "github-distro-mixin-password,builds-docdb-key-readwrite,vscode-storage-key,ESRP-PKI,esrp-aad-username,esrp-aad-password"
- task: DownloadPipelineArtifact@2
inputs:
@@ -49,7 +48,7 @@ steps:
- task: Cache@2
inputs:
key: "nodeModules | $(Agent.OS) | .build/yarnlockhash"
key: 'nodeModules | $(Agent.OS) | .build/yarnlockhash'
path: .build/node_modules_cache
cacheHitVar: NODE_MODULES_RESTORED
displayName: Restore node_modules cache
@@ -67,32 +66,14 @@ steps:
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
displayName: Switch to Terrapin packages
- script: |
set -e
yarn --cwd build
yarn --cwd build compile
displayName: Compile build tools
- script: |
set -e
export npm_config_arch=$(NPM_ARCH)
export npm_config_build_from_source=true
if [ -z "$CC" ] || [ -z "$CXX" ]; then
# Download clang based on chromium revision used by vscode
curl -s https://raw.githubusercontent.com/chromium/chromium/91.0.4472.164/tools/clang/scripts/update.py | python - --output-dir=$PWD/.build/CR_Clang --host-os=linux
# Download libcxx headers and objects from upstream electron releases
DEBUG=libcxx-fetcher \
VSCODE_LIBCXX_OBJECTS_DIR=$PWD/.build/libcxx-objects \
VSCODE_LIBCXX_HEADERS_DIR=$PWD/.build/libcxx_headers \
VSCODE_LIBCXXABI_HEADERS_DIR=$PWD/.build/libcxxabi_headers \
VSCODE_ARCH="$(NPM_ARCH)" \
node build/linux/libcxx-fetcher.js
# Set compiler toolchain
export CC=$PWD/.build/CR_Clang/bin/clang
export CXX=$PWD/.build/CR_Clang/bin/clang++
export CXXFLAGS="-nostdinc++ -D_LIBCPP_HAS_NO_VENDOR_AVAILABILITY_ANNOTATIONS -isystem$PWD/.build/libcxx_headers/include -isystem$PWD/.build/libcxxabi_headers/include -fPIC -flto=thin -fsplit-lto-unit"
export LDFLAGS="-stdlib=libc++ -fuse-ld=lld -flto=thin -fsplit-lto-unit -L$PWD/.build/libcxx-objects -lc++abi"
export CC=$(which gcc-5)
export CXX=$(which g++-5)
fi
if [ "$VSCODE_ARCH" == "x64" ]; then
@@ -111,7 +92,6 @@ steps:
env:
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
GITHUB_TOKEN: "$(github-distro-mixin-password)"
displayName: Install dependencies
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
@@ -147,33 +127,28 @@ steps:
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn npm-run-all -lp "electron $(VSCODE_ARCH)" "playwright-install"
displayName: Download Electron and Playwright
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
APP_ROOT=$(agent.builddirectory)/VSCode-linux-$(VSCODE_ARCH)
ELECTRON_ROOT=.build/electron
sudo chown root $APP_ROOT/chrome-sandbox
sudo chown root $ELECTRON_ROOT/chrome-sandbox
sudo chmod 4755 $APP_ROOT/chrome-sandbox
sudo chmod 4755 $ELECTRON_ROOT/chrome-sandbox
stat $APP_ROOT/chrome-sandbox
stat $ELECTRON_ROOT/chrome-sandbox
displayName: Change setuid helper binary permission
- script: |
set -e
./scripts/test.sh --build --tfs "Unit Tests"
DISPLAY=:10 ./scripts/test.sh --build --tfs "Unit Tests"
displayName: Run unit tests (Electron)
timeoutInMinutes: 7
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
yarn test-browser --build --browser chromium --tfs "Browser Unit Tests"
DISPLAY=:10 yarn test-browser --build --browser chromium --tfs "Browser Unit Tests"
displayName: Run unit tests (Browser)
timeoutInMinutes: 7
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
yarn --cwd test/integration/browser compile
displayName: Compile integration tests
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
# Figure out the full absolute path of the product we just built
# including the remote server and configure the integration tests
@@ -184,7 +159,7 @@ steps:
INTEGRATION_TEST_APP_NAME="$APP_NAME" \
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-linux-$(VSCODE_ARCH)" \
./scripts/test-integration.sh --build --tfs "Integration Tests"
DISPLAY=:10 ./scripts/test-integration.sh --build --tfs "Integration Tests"
displayName: Run integration tests (Electron)
timeoutInMinutes: 10
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
@@ -192,7 +167,7 @@ steps:
- script: |
set -e
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-linux-$(VSCODE_ARCH)" \
./resources/server/test/test-web-integration.sh --browser chromium
DISPLAY=:10 ./resources/server/test/test-web-integration.sh --browser chromium
displayName: Run integration tests (Browser)
timeoutInMinutes: 10
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
@@ -204,52 +179,19 @@ steps:
INTEGRATION_TEST_APP_NAME="$APP_NAME" \
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-linux-$(VSCODE_ARCH)" \
./resources/server/test/test-remote-integration.sh
DISPLAY=:10 ./resources/server/test/test-remote-integration.sh
displayName: Run remote integration tests (Electron)
timeoutInMinutes: 7
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
APP_PATH=$(agent.builddirectory)/VSCode-linux-$(VSCODE_ARCH)
yarn smoketest-no-compile --build "$APP_PATH" --electronArgs="--disable-dev-shm-usage --use-gl=swiftshader" --screenshots .build/logs/smoke-tests
timeoutInMinutes: 5
displayName: Run smoke tests (Electron)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
APP_PATH=$(agent.builddirectory)/VSCode-linux-$(VSCODE_ARCH)
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-linux-$(VSCODE_ARCH)" \
yarn smoketest-no-compile --build "$APP_PATH" --remote --electronArgs="--disable-dev-shm-usage --use-gl=swiftshader" --screenshots .build/logs/smoke-tests
timeoutInMinutes: 5
displayName: Run smoke tests (Remote)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-linux-$(VSCODE_ARCH)" \
yarn smoketest-no-compile --web --headless --electronArgs="--disable-dev-shm-usage --use-gl=swiftshader"
timeoutInMinutes: 5
displayName: Run smoke tests (Browser)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- task: PublishPipelineArtifact@0
inputs:
artifactName: crash-dump-linux-$(VSCODE_ARCH)
artifactName: "crash-dump-linux-$(VSCODE_ARCH)"
targetPath: .build/crashes
displayName: "Publish Crash Reports"
continueOnError: true
condition: failed()
- task: PublishPipelineArtifact@0
inputs:
artifactName: logs-linux-$(VSCODE_ARCH)-$(System.JobAttempt)
targetPath: .build/logs
displayName: "Publish Log Files"
continueOnError: true
condition: and(succeededOrFailed(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- task: PublishTestResults@2
displayName: Publish Tests Results
inputs:
@@ -270,25 +212,30 @@ steps:
displayName: Prepare snap package
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
# needed for code signing
- task: UseDotNet@2
displayName: "Install .NET Core SDK 2.x"
inputs:
version: 2.x
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- task: EsrpClientTool@1
displayName: Download ESRPClient
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- script: |
set -e
yarn --cwd build
yarn --cwd build compile
displayName: Compile build tools
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- script: |
set -e
node build/azure-pipelines/common/sign "$(esrpclient.toolpath)/$(esrpclient.toolname)" rpm $(ESRP-PKI) $(esrp-aad-username) $(esrp-aad-password) .build/linux/rpm '*.rpm'
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
inputs:
ConnectedServiceName: "ESRP CodeSign"
FolderPath: ".build/linux/rpm"
Pattern: "*.rpm"
signConfigType: inlineSignParams
inlineOperation: |
[
{
"keyCode": "CP-450779-Pgp",
"operationSetCode": "LinuxSign",
"parameters": [ ],
"toolName": "sign",
"toolVersion": "1.0"
}
]
SessionTimeout: 120
displayName: Codesign rpm
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))

View File

@@ -7,6 +7,12 @@ steps:
inputs:
versionSpec: "1.x"
- task: AzureKeyVault@1
displayName: "Azure Key Vault: Get Secrets"
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
- task: DownloadPipelineArtifact@0
displayName: "Download Pipeline Artifact"
inputs:

View File

@@ -4,7 +4,7 @@ parameters:
steps:
- task: NodeTool@0
inputs:
versionSpec: "14.x"
versionSpec: "12.13.0"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
inputs:
@@ -119,7 +119,7 @@ steps:
- script: |
set -e
DISPLAY=:10 ./scripts/test.sh --build --tfs "Unit Tests" --coverage
DISPLAY=:10 ./scripts/test.sh --build --coverage --reporter mocha-junit-reporter --tfs "Unit Tests"
displayName: Run unit tests (Electron)
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))

View File

@@ -104,35 +104,19 @@ variables:
value: ${{ eq(parameters.VSCODE_STEP_ON_IT, true) }}
- name: VSCODE_BUILD_MACOS_UNIVERSAL
value: ${{ and(eq(variables['VSCODE_PUBLISH'], true), eq(parameters.VSCODE_BUILD_MACOS, true), eq(parameters.VSCODE_BUILD_MACOS_ARM64, true), eq(parameters.VSCODE_BUILD_MACOS_UNIVERSAL, true)) }}
- name: AZURE_CDN_URL
value: https://az764295.vo.msecnd.net
- name: AZURE_DOCUMENTDB_ENDPOINT
value: https://vscode.documents.azure.com:443/
- name: AZURE_STORAGE_ACCOUNT
value: ticino
- name: AZURE_STORAGE_ACCOUNT_2
value: vscode
- name: MOONCAKE_CDN_URL
value: https://vscode.cdn.azure.cn
- name: VSCODE_MIXIN_REPO
value: microsoft/vscode-distro
- name: skipComponentGovernanceDetection
value: true
resources:
containers:
- container: vscode-x64
image: vscodehub.azurecr.io/vscode-linux-build-agent:bionic-x64
endpoint: VSCodeHub
options: --user 0:0 --cap-add SYS_ADMIN
options: --user 0:0
- container: vscode-arm64
image: vscodehub.azurecr.io/vscode-linux-build-agent:stretch-arm64
endpoint: VSCodeHub
options: --user 0:0 --cap-add SYS_ADMIN
- container: vscode-armhf
image: vscodehub.azurecr.io/vscode-linux-build-agent:stretch-armhf
endpoint: VSCodeHub
options: --user 0:0 --cap-add SYS_ADMIN
- container: snapcraft
image: snapcore/snapcraft:stable
@@ -140,7 +124,7 @@ stages:
- stage: Compile
jobs:
- job: Compile
pool: vscode-1es
pool: compile
variables:
VSCODE_ARCH: x64
steps:
@@ -192,11 +176,10 @@ stages:
variables:
VSCODE_ARCH: x64
NPM_ARCH: x64
DISPLAY: ":10"
steps:
- template: linux/product-build-linux.yml
- ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_LINUX, true), ne(variables['VSCODE_PUBLISH'], 'false')) }}:
- ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_LINUX, true)) }}:
- job: LinuxSnap
dependsOn:
- Linux

View File

@@ -12,7 +12,6 @@ steps:
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
SecretsFilter: 'github-distro-mixin-password,ticino-storage-key'
- script: |
set -e
@@ -39,7 +38,7 @@ steps:
# using `genericNodeModules` instead of `nodeModules` here to avoid sharing the cache with builds running inside containers
- task: Cache@2
inputs:
key: "genericNodeModules | $(Agent.OS) | .build/yarnlockhash"
key: 'genericNodeModules | $(Agent.OS) | .build/yarnlockhash'
path: .build/node_modules_cache
cacheHitVar: NODE_MODULES_RESTORED
displayName: Restore node_modules cache
@@ -77,7 +76,6 @@ steps:
env:
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
GITHUB_TOKEN: "$(github-distro-mixin-password)"
displayName: Install dependencies
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
@@ -100,13 +98,6 @@ steps:
yarn npm-run-all -lp core-ci extensions-ci hygiene eslint valid-layers-check
displayName: Compile & Hygiene
- script: |
set -e
yarn --cwd test/smoke compile
yarn --cwd test/integration/browser compile
displayName: Compile test suites
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \
@@ -122,7 +113,15 @@ steps:
- script: |
set -e
tar -cz --ignore-failed-read -f $(Build.ArtifactStagingDirectory)/compilation.tar.gz .build out-* test/integration/browser/out test/smoke/out test/automation/out
AZURE_WEBVIEW_STORAGE_ACCESS_KEY="$(vscode-webview-storage-key)" \
./build/azure-pipelines/common/publish-webview.sh
displayName: Publish Webview
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
# we have to tarball everything in order to preserve file permissions
- script: |
set -e
tar -czf $(Build.ArtifactStagingDirectory)/compilation.tar.gz .build out-*
displayName: Compress compilation artifact
- task: PublishPipelineArtifact@1

View File

@@ -12,7 +12,6 @@ steps:
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
SecretsFilter: 'builds-docdb-key-readwrite,github-distro-mixin-password,ticino-storage-key,vscode-storage-key,vscode-mooncake-storage-key'
- pwsh: |
. build/azure-pipelines/win32/exec.ps1
@@ -52,7 +51,6 @@ steps:
- publish: $(Pipeline.Workspace)/artifacts_processed_$(System.StageAttempt)/artifacts_processed_$(System.StageAttempt).txt
artifact: artifacts_processed_$(System.StageAttempt)
displayName: Publish what artifacts were published for this stage attempt
condition: always()
- pwsh: |
$ErrorActionPreference = 'Stop'

View File

@@ -12,7 +12,6 @@ steps:
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
SecretsFilter: 'builds-docdb-key-readwrite'
- script: |
set -e

View File

@@ -6,9 +6,6 @@ trigger:
pr: none
pool:
vmImage: ubuntu-latest
steps:
- task: NodeTool@0
inputs:
@@ -47,21 +44,18 @@ steps:
git config --global user.name "Azure Data Studio"
git clone https://$(GITHUB_TOKEN)@$(REPO) --depth=1
node build/azure-pipelines/publish-types/update-types.js
TAG_VERSION=$(git describe --tags `git rev-list --tags --max-count=1`)
cd DefinitelyTyped
# Sync up to latest from the DT repo
git remote add upstream https://github.com/DefinitelyTyped/DefinitelyTyped.git
git fetch upstream
git reset --hard upstream/master
git push --force
git merge upstream/master
git push origin
# Update and format the typings file
cd ..
node build/azure-pipelines/publish-types/update-types.js
TAG_VERSION=$(git describe --tags `git rev-list --tags --max-count=1`)
# Create and push the branch
cd DefinitelyTyped
git diff --color | cat
git add -A
git status

View File

@@ -1,243 +0,0 @@
trigger: none
pr: none
parameters:
- name: ENABLE_TERRAPIN
displayName: "Enable Terrapin"
type: boolean
default: true
- name: SCAN_WINDOWS
displayName: "Scan Windows"
type: boolean
default: true
- name: SCAN_LINUX
displayName: "Scan Linux"
type: boolean
default: false
variables:
- name: ENABLE_TERRAPIN
value: ${{ eq(parameters.ENABLE_TERRAPIN, true) }}
- name: SCAN_WINDOWS
value: ${{ eq(parameters.SCAN_WINDOWS, true) }}
- name: SCAN_LINUX
value: ${{ eq(parameters.SCAN_LINUX, true) }}
- name: VSCODE_MIXIN_REPO
value: microsoft/vscode-distro
- name: skipComponentGovernanceDetection
value: true
- name: NPM_ARCH
value: x64
- name: VSCODE_ARCH
value: x64
stages:
- stage: Windows
condition: eq(variables.SCAN_WINDOWS, 'true')
pool:
vmImage: VS2017-Win2016
jobs:
- job: WindowsJob
timeoutInMinutes: 0
steps:
- task: CredScan@3
continueOnError: true
inputs:
scanFolder: '$(Build.SourcesDirectory)'
outputFormat: 'pre'
- task: NodeTool@0
inputs:
versionSpec: "14.x"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
- task: AzureKeyVault@1
displayName: "Azure Key Vault: Get Secrets"
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
SecretsFilter: "github-distro-mixin-password,ESRP-SSL-AADAuth,vscode-storage-key,builds-docdb-key-readwrite"
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
"machine github.com`nlogin vscode`npassword $(github-distro-mixin-password)" | Out-File "$env:USERPROFILE\_netrc" -Encoding ASCII
exec { git config user.email "vscode@microsoft.com" }
exec { git config user.name "VSCode" }
displayName: Prepare tooling
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { git pull --no-rebase https://github.com/$(VSCODE_MIXIN_REPO).git $(node -p "require('./package.json').distro") }
displayName: Merge distro
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { npx https://aka.ms/enablesecurefeed standAlone }
timeoutInMinutes: 5
condition: and(succeeded(), eq(variables['ENABLE_TERRAPIN'], 'true'))
displayName: Switch to Terrapin packages
- task: Semmle@1
inputs:
sourceCodeDirectory: '$(Build.SourcesDirectory)'
language: 'cpp'
buildCommandsString: 'yarn --frozen-lockfile'
querySuite: 'Required'
timeout: '1800'
ram: '16384'
addProjectDirToScanningExclusionList: true
env:
npm_config_arch: "$(NPM_ARCH)"
npm_config_build_from_source: true
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
GITHUB_TOKEN: "$(github-distro-mixin-password)"
displayName: CodeQL
- powershell: |
. build/azure-pipelines/win32/exec.ps1
. build/azure-pipelines/win32/retry.ps1
$ErrorActionPreference = "Stop"
retry { exec { yarn --frozen-lockfile } }
env:
npm_config_arch: "$(NPM_ARCH)"
npm_config_build_from_source: true
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
GITHUB_TOKEN: "$(github-distro-mixin-password)"
CHILD_CONCURRENCY: 1
displayName: Install dependencies
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn gulp "vscode-symbols-win32-$(VSCODE_ARCH)" }
displayName: Download Symbols
- task: BinSkim@4
inputs:
InputType: 'Basic'
Function: 'analyze'
TargetPattern: 'guardianGlob'
AnalyzeTargetGlob: '$(agent.builddirectory)\scanbin\**.dll;$(agent.builddirectory)\scanbin\**.exe;$(agent.builddirectory)\scanbin\**.node'
AnalyzeLocalSymbolDirectories: '$(agent.builddirectory)\scanbin\VSCode-win32-$(VSCODE_ARCH)\pdb'
- task: TSAUpload@2
inputs:
GdnPublishTsaOnboard: true
GdnPublishTsaConfigFile: '$(Build.SourcesDirectory)\build\azure-pipelines\.gdntsa'
- stage: Linux
dependsOn: []
condition: eq(variables.SCAN_LINUX, 'true')
pool:
vmImage: "Ubuntu-18.04"
jobs:
- job: LinuxJob
steps:
- task: CredScan@2
inputs:
toolMajorVersion: 'V2'
- task: NodeTool@0
inputs:
versionSpec: "14.x"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
- task: AzureKeyVault@1
displayName: "Azure Key Vault: Get Secrets"
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
SecretsFilter: "github-distro-mixin-password,ESRP-SSL-AADAuth,vscode-storage-key,builds-docdb-key-readwrite"
- script: |
set -e
cat << EOF > ~/.netrc
machine github.com
login vscode
password $(github-distro-mixin-password)
EOF
git config user.email "vscode@microsoft.com"
git config user.name "VSCode"
displayName: Prepare tooling
- script: |
set -e
git pull --no-rebase https://github.com/$(VSCODE_MIXIN_REPO).git $(node -p "require('./package.json').distro")
displayName: Merge distro
- script: |
set -e
npx https://aka.ms/enablesecurefeed standAlone
timeoutInMinutes: 5
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
displayName: Switch to Terrapin packages
- script: |
set -e
yarn --cwd build
yarn --cwd build compile
displayName: Compile build tools
- script: |
set -e
export npm_config_arch=$(NPM_ARCH)
export npm_config_build_from_source=true
if [ -z "$CC" ] || [ -z "$CXX" ]; then
# Download clang based on chromium revision used by vscode
curl -s https://raw.githubusercontent.com/chromium/chromium/91.0.4472.164/tools/clang/scripts/update.py | python - --output-dir=$PWD/.build/CR_Clang --host-os=linux
# Download libcxx headers and objects from upstream electron releases
DEBUG=libcxx-fetcher \
VSCODE_LIBCXX_OBJECTS_DIR=$PWD/.build/libcxx-objects \
VSCODE_LIBCXX_HEADERS_DIR=$PWD/.build/libcxx_headers \
VSCODE_LIBCXXABI_HEADERS_DIR=$PWD/.build/libcxxabi_headers \
VSCODE_ARCH="$(NPM_ARCH)" \
node build/linux/libcxx-fetcher.js
# Set compiler toolchain
export CC=$PWD/.build/CR_Clang/bin/clang
export CXX=$PWD/.build/CR_Clang/bin/clang++
export CXXFLAGS="-nostdinc++ -D_LIBCPP_HAS_NO_VENDOR_AVAILABILITY_ANNOTATIONS -isystem$PWD/.build/libcxx_headers/include -isystem$PWD/.build/libcxxabi_headers/include -fPIC -flto=thin -fsplit-lto-unit"
export LDFLAGS="-stdlib=libc++ -fuse-ld=lld -flto=thin -fsplit-lto-unit -L$PWD/.build/libcxx-objects -lc++abi"
fi
if [ "$VSCODE_ARCH" == "x64" ]; then
export VSCODE_REMOTE_CC=$(which gcc-4.8)
export VSCODE_REMOTE_CXX=$(which g++-4.8)
fi
for i in {1..3}; do # try 3 times, for Terrapin
yarn --frozen-lockfile && break
if [ $i -eq 3 ]; then
echo "Yarn failed too many times" >&2
exit 1
fi
echo "Yarn failed $i, trying again..."
done
env:
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
GITHUB_TOKEN: "$(github-distro-mixin-password)"
displayName: Install dependencies
- script: |
set -e
yarn gulp vscode-symbols-linux-$(VSCODE_ARCH)
displayName: Build
- task: BinSkim@3
inputs:
toolVersion: Latest
InputType: CommandLine
arguments: analyze $(agent.builddirectory)\scanbin\exe\*.* --recurse --local-symbol-directories $(agent.builddirectory)\scanbin\VSCode-linux-$(VSCODE_ARCH)\pdb
- task: TSAUpload@2
inputs:
GdnPublishTsaConfigFile: '$(Build.SourceDirectory)\build\azure-pipelines\.gdntsa'

View File

@@ -1,7 +1,7 @@
resources:
containers:
- container: linux-x64
image: sqltoolscontainers.azurecr.io/linux-build-agent:6
image: sqltoolscontainers.azurecr.io/linux-build-agent:3
endpoint: SqlToolsContainers
jobs:
@@ -47,7 +47,7 @@ jobs:
steps:
- template: linux/sql-product-build-linux.yml
parameters:
extensionsToUnitTest: ["admin-tool-ext-win", "agent", "azcli", "azurecore", "cms", "dacpac", "data-workspace", "import", "machine-learning", "notebook", "resource-deployment", "schema-compare", "sql-bindings", "sql-database-projects"]
extensionsToUnitTest: ["admin-tool-ext-win", "agent", "azcli", "azurecore", "cms", "dacpac", "data-workspace", "import", "machine-learning", "notebook", "resource-deployment", "schema-compare", "sql-database-projects"]
timeoutInMinutes: 90
- job: Windows
@@ -60,17 +60,16 @@ jobs:
- template: win32/sql-product-build-win32.yml
timeoutInMinutes: 90
# disable due to invalid machine pool (karlb 3/9/2022)
# - job: Windows_Test
# condition: and(succeeded(), eq(variables['VSCODE_BUILD_WIN32'], 'true'))
# pool:
# name: mssqltools
# dependsOn:
# - Linux
# - Windows
# steps:
# - template: win32/sql-product-test-win32.yml
# timeoutInMinutes: 90
- job: Windows_Test
condition: and(succeeded(), eq(variables['VSCODE_BUILD_WIN32'], 'true'))
pool:
name: mssqltools
dependsOn:
- Linux
- Windows
steps:
- template: win32/sql-product-test-win32.yml
timeoutInMinutes: 90
- job: Release
condition: and(succeeded(), or(eq(variables['VSCODE_RELEASE'], 'true'), and(eq(variables['VSCODE_QUALITY'], 'insider'), eq(variables['Build.Reason'], 'Schedule'))))
@@ -80,8 +79,7 @@ jobs:
- macOS
- Linux
- Windows
# disable due to invalid machine pool (karlb 3/9/2022)
# - Windows_Test
- Windows_Test
- macOS_Signing
steps:
- template: sql-release.yml

View File

@@ -1,7 +1,7 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "14.x"
versionSpec: "12.13.0"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
inputs:
@@ -77,16 +77,14 @@ steps:
node build/azure-pipelines/mixin
displayName: Mix in quality
# Run these separately to avoid OOM errors on pipeline machines
- script: |
set -e
yarn npm-run-all -lp core-ci extensions-ci
yarn npm-run-all -lp hygiene eslint valid-layers-check
yarn npm-run-all -lp core-ci extensions-ci hygiene eslint valid-layers-check
displayName: Compile & Hygiene
- script: |
set -e
yarn npm-run-all -lp sqllint extensions-lint
yarn npm-run-all -lp sqllint extensions-lint strict-vscode
displayName: SQL Hygiene
- script: |
@@ -95,7 +93,6 @@ steps:
AZURE_STORAGE_ACCESS_KEY="$(sourcemap-storage-key)" \
node build/azure-pipelines/upload-sourcemaps
displayName: Upload sourcemaps
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- script: |
set -e

View File

@@ -1,5 +1,5 @@
#Download base image ubuntu 22.04
FROM mcr.microsoft.com/mirror/docker/library/ubuntu:22.04
#Download base image ubuntu 21.04
FROM ubuntu:21.04
ENV TZ=America/Los_Angeles
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone

View File

@@ -12,7 +12,6 @@ steps:
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
SecretsFilter: 'github-distro-mixin-password,web-storage-account,web-storage-key,ticino-storage-key'
- task: DownloadPipelineArtifact@2
inputs:
@@ -80,7 +79,6 @@ steps:
env:
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
GITHUB_TOKEN: "$(github-distro-mixin-password)"
displayName: Install dependencies
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))

View File

@@ -1,7 +1,7 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "14.x"
versionSpec: "12.13.0"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
inputs:
@@ -83,6 +83,7 @@ steps:
yarn sqllint
yarn extensions-lint
yarn gulp hygiene
yarn strict-vscode
yarn valid-layers-check
displayName: Run hygiene, eslint
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
@@ -104,26 +105,14 @@ steps:
yarn gulp compile-extensions
displayName: Compile Extensions
# disable smoke tests (karlb 3/2/2022)
# # Per https://developercommunity.visualstudio.com/t/variablesexpressions-dont-work-with-continueonerro/1187733 we can't use variables
# # in continueOnError directly so instead make two copies of the task and only run one or the other based on the SMOKE_FAIL_ON_ERROR value
# - script: |
# set -e
# node ./node_modules/playwright/install.js
# APP_ROOT=$(Agent.BuildDirectory)/vscode-reh-web-linux-x64
# xvfb-run yarn smoketest --build "$(Agent.BuildDirectory)/vscode-reh-web-linux-x64" --web --headless --screenshots "$(Build.ArtifactStagingDirectory)/smokeshots" --log "$(Build.ArtifactStagingDirectory)/logs/web/smoke.log"
# displayName: Run smoke tests (Browser) (Continue on Error)
# continueOnError: true
# condition: and(succeeded(), and(eq(variables['RUN_TESTS'], 'true'), ne(variables['SMOKE_FAIL_ON_ERROR'], 'true')))
# disable smoke tests (karlb 3/2/2022)
# - script: |
# set -e
# node ./node_modules/playwright/install.js
# APP_ROOT=$(Agent.BuildDirectory)/vscode-reh-web-linux-x64
# xvfb-run yarn smoketest --build "$(Agent.BuildDirectory)/vscode-reh-web-linux-x64" --web --headless --screenshots "$(Build.ArtifactStagingDirectory)/smokeshots" --log "$(Build.ArtifactStagingDirectory)/logs/web/smoke.log"
# displayName: Run smoke tests (Browser) (Fail on Error)
# condition: and(succeeded(), and(eq(variables['RUN_TESTS'], 'true'), eq(variables['SMOKE_FAIL_ON_ERROR'], 'true')))
- script: |
set -e
node ./node_modules/playwright/install.js
APP_ROOT=$(Agent.BuildDirectory)/vscode-reh-web-linux-x64
xvfb-run yarn smoketest --build "$(Agent.BuildDirectory)/vscode-reh-web-linux-x64" --web --headless --screenshots "$(Build.ArtifactStagingDirectory)/smokeshots" --log "$(Build.ArtifactStagingDirectory)/logs/web/smoke.log"
displayName: Run smoke tests (Browser)
continueOnError: true
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
# - script: |
# set -e

View File

@@ -0,0 +1,10 @@
<?xml version="1.0" encoding="utf-8"?>
<configuration>
<packageSources>
<clear />
<add key="ESRP" value="https://microsoft.pkgs.visualstudio.com/_packaging/ESRP/nuget/v3/index.json" />
</packageSources>
<disabledPackageSources>
<clear />
</disabledPackageSources>
</configuration>

View File

@@ -0,0 +1,4 @@
<?xml version="1.0" encoding="utf-8"?>
<packages>
<package id="Microsoft.ESRPClient" version="1.2.47" />
</packages>

View File

@@ -1,7 +1,7 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "14.x"
versionSpec: "12.18.3"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 # {{SQL CARBON EDIT}} update version
inputs:

View File

@@ -2,6 +2,9 @@
$ErrorActionPreference = "Stop"
$Arch = "$env:VSCODE_ARCH"
exec { yarn gulp "vscode-win32-$Arch-archive" "vscode-win32-$Arch-system-setup" "vscode-win32-$Arch-user-setup" --sign }
$Repo = "$(pwd)"
$Root = "$Repo\.."
$SystemExe = "$Repo\.build\win32-$Arch\system-setup\VSCodeSetup.exe"

View File

@@ -17,7 +17,6 @@ steps:
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
SecretsFilter: "github-distro-mixin-password,vscode-storage-key,builds-docdb-key-readwrite,ESRP-PKI,esrp-aad-username,esrp-aad-password"
- task: DownloadPipelineArtifact@2
inputs:
@@ -54,7 +53,7 @@ steps:
- task: Cache@2
inputs:
key: "nodeModules | $(Agent.OS) | .build/arch, .build/terrapin, .build/yarnlockhash"
key: 'nodeModules | $(Agent.OS) | .build/arch, .build/terrapin, .build/yarnlockhash'
path: .build/node_modules_cache
cacheHitVar: NODE_MODULES_RESTORED
displayName: Restore node_modules cache
@@ -85,7 +84,6 @@ steps:
env:
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
GITHUB_TOKEN: "$(github-distro-mixin-password)"
displayName: Install dependencies
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
@@ -156,6 +154,13 @@ steps:
timeoutInMinutes: 7
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn --cwd test/integration/browser compile }
displayName: Compile integration tests
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
- powershell: |
# Figure out the full absolute path of the product we just built
# including the remote server and configure the integration tests
@@ -189,41 +194,6 @@ steps:
timeoutInMinutes: 7
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn --cwd test/smoke compile }
displayName: Compile smoke tests
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$AppRoot = "$(agent.builddirectory)\VSCode-win32-$(VSCODE_ARCH)"
exec { yarn smoketest-no-compile --build "$AppRoot" --screenshots .build\logs\smoke-tests }
displayName: Run smoke tests (Electron)
timeoutInMinutes: 5
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
# - powershell: |
# . build/azure-pipelines/win32/exec.ps1
# $ErrorActionPreference = "Stop"
# $AppRoot = "$(agent.builddirectory)\VSCode-win32-$(VSCODE_ARCH)"
# $env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\vscode-reh-win32-$(VSCODE_ARCH)"
# exec { yarn smoketest-no-compile --build "$AppRoot" --remote }
# displayName: Run smoke tests (Remote)
# timeoutInMinutes: 5
# condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\vscode-reh-web-win32-$(VSCODE_ARCH)"
exec { yarn smoketest-no-compile --web --browser firefox --headless }
displayName: Run smoke tests (Browser)
timeoutInMinutes: 5
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
- task: PublishPipelineArtifact@0
inputs:
artifactName: crash-dump-windows-$(VSCODE_ARCH)
@@ -232,14 +202,6 @@ steps:
continueOnError: true
condition: failed()
- task: PublishPipelineArtifact@0
inputs:
artifactName: logs-windows-$(VSCODE_ARCH)-$(System.JobAttempt)
targetPath: .build\logs
displayName: "Publish Log Files"
continueOnError: true
condition: and(succeededOrFailed(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
- task: PublishTestResults@2
displayName: Publish Tests Results
inputs:
@@ -247,58 +209,84 @@ steps:
searchFolder: "$(Build.ArtifactStagingDirectory)/test-results"
condition: and(succeededOrFailed(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
- task: UseDotNet@2
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
inputs:
version: 2.x
ConnectedServiceName: "ESRP CodeSign"
FolderPath: "$(CodeSigningFolderPath)"
Pattern: "*.dll,*.exe,*.node"
signConfigType: inlineSignParams
inlineOperation: |
[
{
"keyCode": "CP-230012",
"operationSetCode": "SigntoolSign",
"parameters": [
{
"parameterName": "OpusName",
"parameterValue": "VS Code"
},
{
"parameterName": "OpusInfo",
"parameterValue": "https://code.visualstudio.com/"
},
{
"parameterName": "Append",
"parameterValue": "/as"
},
{
"parameterName": "FileDigest",
"parameterValue": "/fd \"SHA256\""
},
{
"parameterName": "PageHash",
"parameterValue": "/NPH"
},
{
"parameterName": "TimeStamp",
"parameterValue": "/tr \"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\" /td sha256"
}
],
"toolName": "sign",
"toolVersion": "1.0"
},
{
"keyCode": "CP-230012",
"operationSetCode": "SigntoolVerify",
"parameters": [
{
"parameterName": "VerifyAll",
"parameterValue": "/all"
}
],
"toolName": "sign",
"toolVersion": "1.0"
}
]
SessionTimeout: 120
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- task: EsrpClientTool@1
displayName: Download ESRPClient
- task: NuGetCommand@2
displayName: Install ESRPClient.exe
inputs:
restoreSolution: 'build\azure-pipelines\win32\ESRPClient\packages.config'
feedsToUse: config
nugetConfigPath: 'build\azure-pipelines\win32\ESRPClient\NuGet.config'
externalFeedCredentials: "ESRP Nuget"
restoreDirectory: packages
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn --cwd build }
exec { yarn --cwd build compile }
displayName: Compile build tools
- task: ESRPImportCertTask@1
displayName: Import ESRP Request Signing Certificate
inputs:
ESRP: "ESRP CodeSign"
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$EsrpClientTool = (gci -directory -filter EsrpClientTool_* $(Agent.RootDirectory)\_tasks | Select-Object -last 1).FullName
$EsrpCliZip = (gci -recurse -filter esrpcli.*.zip $EsrpClientTool | Select-Object -last 1).FullName
mkdir -p $(Agent.TempDirectory)\esrpcli
Expand-Archive -Path $EsrpCliZip -DestinationPath $(Agent.TempDirectory)\esrpcli
$EsrpCliDllPath = (gci -recurse -filter esrpcli.dll $(Agent.TempDirectory)\esrpcli | Select-Object -last 1).FullName
echo "##vso[task.setvariable variable=EsrpCliDllPath]$EsrpCliDllPath"
displayName: Find ESRP CLI
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { node build\azure-pipelines\common\sign $env:EsrpCliDllPath windows $(ESRP-PKI) $(esrp-aad-username) $(esrp-aad-password) $(CodeSigningFolderPath) '*.dll,*.exe,*.node' }
displayName: Codesign
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn gulp "vscode-win32-$(VSCODE_ARCH)-archive" }
displayName: Package archive
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$env:ESRPPKI = "$(ESRP-PKI)"
$env:ESRPAADUsername = "$(esrp-aad-username)"
$env:ESRPAADPassword = "$(esrp-aad-password)"
exec { yarn gulp "vscode-win32-$(VSCODE_ARCH)-system-setup" --sign }
exec { yarn gulp "vscode-win32-$(VSCODE_ARCH)-user-setup" --sign }
displayName: Package setups
- task: PowerShell@2
inputs:
targetType: filePath
filePath: .\build\azure-pipelines\win32\import-esrp-auth-cert.ps1
arguments: "$(ESRP-SSL-AADAuth)"
displayName: Import ESRP Auth Certificate
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- powershell: |

View File

@@ -0,0 +1,71 @@
function Create-TmpJson($Obj) {
$FileName = [System.IO.Path]::GetTempFileName()
ConvertTo-Json -Depth 100 $Obj | Out-File -Encoding UTF8 $FileName
return $FileName
}
$Auth = Create-TmpJson @{
Version = "1.0.0"
AuthenticationType = "AAD_CERT"
ClientId = $env:ESRPClientId
AuthCert = @{
SubjectName = $env:ESRPAuthCertificateSubjectName
StoreLocation = "LocalMachine"
StoreName = "My"
SendX5c = "true"
}
RequestSigningCert = @{
SubjectName = $env:ESRPCertificateSubjectName
StoreLocation = "LocalMachine"
StoreName = "My"
}
}
$Policy = Create-TmpJson @{
Version = "1.0.0"
}
$Input = Create-TmpJson @{
Version = "1.0.0"
SignBatches = @(
@{
SourceLocationType = "UNC"
SignRequestFiles = @(
@{
SourceLocation = $args[0]
}
)
SigningInfo = @{
Operations = @(
@{
KeyCode = "CP-230012"
OperationCode = "SigntoolSign"
Parameters = @{
OpusName = "VS Code"
OpusInfo = "https://code.visualstudio.com/"
Append = "/as"
FileDigest = "/fd `"SHA256`""
PageHash = "/NPH"
TimeStamp = "/tr `"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer`" /td sha256"
}
ToolName = "sign"
ToolVersion = "1.0"
},
@{
KeyCode = "CP-230012"
OperationCode = "SigntoolVerify"
Parameters = @{
VerifyAll = "/all"
}
ToolName = "sign"
ToolVersion = "1.0"
}
)
}
}
)
}
$Output = [System.IO.Path]::GetTempFileName()
$ScriptPath = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent
& "$ScriptPath\ESRPClient\packages\Microsoft.ESRPClient.*\tools\ESRPClient.exe" Sign -a $Auth -p $Policy -i $Input -o $Output

View File

@@ -1,7 +1,7 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "14.x"
versionSpec: "12.13.0"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
inputs:
@@ -287,12 +287,6 @@ steps:
archiveType: tar
archiveFile: '$(Build.BinariesDirectory)/source.tar.gz'
- task: AzureArtifacts.manifest-generator-task.manifest-generator-task.ManifestGeneratorTask@0
displayName: 'SBOM Generation Task'
inputs:
BuildDropPath: '$(Build.ArtifactStagingDirectory)'
PackageName: 'Azure Data Studio'
- task: PublishBuildArtifacts@1
displayName: 'Publish Artifact: build scripts source'
inputs:

View File

@@ -1,7 +1,7 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "14.x"
versionSpec: "12.13.0"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
inputs:
@@ -51,7 +51,7 @@ steps:
$AppNameShort = $AppProductJson.nameShort
exec { $env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe"; $env:INTEGRATION_TEST_CLI_PATH = "$AppRoot\bin\$AppNameShort"; .\scripts\sql-test-integration.bat }
continueOnError: false
condition: and(succeeded(), and(eq(variables['RUN_TESTS'], 'true'), ne(variables['RUN_INTEGRATION_TESTS'], 'false')))
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
displayName: Run stable tests
env:
BDC_BACKEND_USERNAME: $(ads-integration-test-bdc-server-username)
@@ -75,7 +75,7 @@ steps:
$AppNameShort = $AppProductJson.nameShort
exec { $env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe"; .\scripts\sql-test-integration-unstable.bat }
continueOnError: true
condition: and(succeeded(), and(eq(variables['RUN_UNSTABLE_TESTS'], 'true'), ne(variables['RUN_INTEGRATION_TESTS'], 'false')))
condition: and(succeeded(), eq(variables['RUN_UNSTABLE_TESTS'], 'true'))
displayName: Run unstable integration tests
env:
BDC_BACKEND_USERNAME: $(ads-integration-test-bdc-server-username)
@@ -99,4 +99,4 @@ steps:
mergeTestResults: true
failTaskOnFailedTests: true
continueOnError: true
condition: and(succeededOrFailed(), and(eq(variables['RUN_TESTS'], 'true'), ne(variables['RUN_INTEGRATION_TESTS'], 'false')))
condition: and(succeededOrFailed(), eq(variables['RUN_TESTS'], 'true'))

View File

@@ -29,6 +29,7 @@ app.once('ready', () => {
webPreferences: {
nodeIntegration: true,
contextIsolation: false,
webviewTag: true,
enableWebSQL: false,
nativeWindowOpen: true
}

View File

@@ -4,8 +4,7 @@
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const vscode_universal_bundler_1 = require("vscode-universal-bundler");
const cross_spawn_promise_1 = require("@malept/cross-spawn-promise");
const vscode_universal_1 = require("vscode-universal");
const fs = require("fs-extra");
const path = require("path");
const plist = require("plist");
@@ -24,7 +23,7 @@ async function main() {
const outAppPath = path.join(buildDir, `VSCode-darwin-${arch}`, appName);
const productJsonPath = path.resolve(outAppPath, 'Contents', 'Resources', 'app', 'product.json');
const infoPlistPath = path.resolve(outAppPath, 'Contents', 'Info.plist');
await (0, vscode_universal_bundler_1.makeUniversalApp)({
await vscode_universal_1.makeUniversalApp({
x64AppPath,
arm64AppPath,
x64AsarPath,
@@ -51,12 +50,6 @@ async function main() {
LSRequiresNativeExecution: true
});
await fs.writeFile(infoPlistPath, plist.build(infoPlistJson), 'utf8');
// Verify if native module architecture is correct
const findOutput = await (0, cross_spawn_promise_1.spawn)('find', [outAppPath, '-name', 'keytar.node']);
const lipoOutput = await (0, cross_spawn_promise_1.spawn)('lipo', ['-archs', findOutput.replace(/\n$/, "")]);
if (lipoOutput.replace(/\n$/, "") !== 'x86_64 arm64') {
throw new Error(`Invalid arch, got : ${lipoOutput}`);
}
}
if (require.main === module) {
main().catch(err => {

View File

@@ -5,8 +5,7 @@
'use strict';
import { makeUniversalApp } from 'vscode-universal-bundler';
import { spawn } from '@malept/cross-spawn-promise';
import { makeUniversalApp } from 'vscode-universal';
import * as fs from 'fs-extra';
import * as path from 'path';
import * as plist from 'plist';
@@ -58,13 +57,6 @@ async function main() {
LSRequiresNativeExecution: true
});
await fs.writeFile(infoPlistPath, plist.build(infoPlistJson), 'utf8');
// Verify if native module architecture is correct
const findOutput = await spawn('find', [outAppPath, '-name', 'keytar.node'])
const lipoOutput = await spawn('lipo', ['-archs', findOutput.replace(/\n$/, "")]);
if (lipoOutput.replace(/\n$/, "") !== 'x86_64 arm64') {
throw new Error(`Invalid arch, got : ${lipoOutput}`)
}
}
if (require.main === module) {

View File

@@ -22,10 +22,6 @@ module.exports.all = [
'!out*/**',
'!test/**/out/**',
'!**/node_modules/**',
// {{SQL CARBON EDIT}}
'!build/actions/**/*.js',
'!build/**/*'
];
module.exports.indentationFilter = [
@@ -83,7 +79,7 @@ module.exports.indentationFilter = [
'!src/typings/**/*.d.ts',
'!extensions/**/*.d.ts',
'!**/*.{svg,exe,png,bmp,jpg,scpt,bat,cmd,cur,ttf,woff,eot,md,ps1,template,yaml,yml,d.ts.recipe,ico,icns,plist}',
'!build/{lib,download,linux,darwin}/**/*.js',
'!build/{lib,download,darwin}/**/*.js',
'!build/**/*.sh',
'!build/azure-pipelines/**/*.js',
'!build/azure-pipelines/**/*.config',
@@ -95,35 +91,6 @@ module.exports.indentationFilter = [
'!extensions/markdown-language-features/notebook-out/*.js',
'!extensions/markdown-math/notebook-out/*.js',
'!extensions/simple-browser/media/*.js',
// {{SQL CARBON EDIT}} Except for our stuff
'!**/*.gif',
'!build/actions/**/*.js',
'!**/*.{xlf,lcl,docx,sql,vsix,bacpac,ipynb,jpg}',
'!extensions/azuremonitor/sqltoolsservice/**',
'!extensions/kusto/sqltoolsservice/**',
'!extensions/mssql/sqltoolsservice/**',
'!extensions/import/flatfileimportservice/**',
'!extensions/admin-tool-ext-win/ssmsmin/**',
'!extensions/resource-deployment/notebooks/**',
'!extensions/mssql/notebooks/**',
'!extensions/azurehybridtoolkit/notebooks/**',
'!extensions/integration-tests/testData/**',
'!extensions/arc/src/controller/generated/**',
'!extensions/sql-database-projects/resources/templates/*.xml',
'!extensions/sql-database-projects/src/test/baselines/*.xml',
'!extensions/sql-database-projects/src/test/baselines/*.json',
'!extensions/sql-database-projects/src/test/baselines/*.sqlproj',
'!extensions/sql-database-projects/BuildDirectory/SystemDacpacs/**',
'!extensions/big-data-cluster/src/bigDataCluster/controller/apiGenerated.ts',
'!extensions/big-data-cluster/src/bigDataCluster/controller/clusterApiGenerated2.ts',
'!resources/linux/snap/electron-launch',
'!extensions/markdown-language-features/media/*.js',
'!extensions/simple-browser/media/*.js',
'!resources/xlf/LocProject.json',
'!build/**/*',
'!test/coverage/**',
'!extensions/**/coverage/**'
];
module.exports.copyrightFilter = [
@@ -146,7 +113,6 @@ module.exports.copyrightFilter = [
'!**/*.code-workspace',
'!**/*.js.map',
'!build/**/*.init',
'!build/linux/libcxx-fetcher.*',
'!resources/linux/snap/snapcraft.yaml',
'!resources/win32/bin/code.js',
'!resources/web/code-web.js',
@@ -157,48 +123,6 @@ module.exports.copyrightFilter = [
'!extensions/html-language-features/server/src/modes/typescript/*',
'!extensions/*/server/bin/*',
'!src/vs/editor/test/node/classification/typescript-test.ts',
// {{SQL CARBON EDIT}} Except for stuff in our code that doesn't use our copyright
'!extensions/azurehybridtoolkit/notebooks/**',
'!extensions/azuremonitor/src/prompts/**',
'!extensions/import/flatfileimportservice/**',
'!extensions/kusto/src/prompts/**',
'!extensions/mssql/sqltoolsservice/**',
'!extensions/mssql/src/hdfs/webhdfs.ts',
'!extensions/mssql/src/prompts/**',
'!extensions/notebook/resources/jupyter_config/**',
'!extensions/notebook/src/intellisense/text.ts',
'!extensions/notebook/src/prompts/**',
'!extensions/query-history/images/**',
'!extensions/sql/build/update-grammar.js',
'!src/sql/workbench/contrib/notebook/browser/outputs/tableRenderers.ts',
'!src/sql/workbench/contrib/notebook/common/models/url.ts',
'!src/sql/workbench/services/notebook/browser/outputs/renderMimeInterfaces.ts',
'!src/sql/workbench/contrib/notebook/browser/models/outputProcessor.ts',
'!src/sql/workbench/services/notebook/browser/outputs/mimemodel.ts',
'!src/sql/workbench/contrib/notebook/browser/cellViews/media/*.css',
'!src/sql/base/browser/ui/table/plugins/rowSelectionModel.plugin.ts',
'!src/sql/base/browser/ui/table/plugins/rowDetailView.ts',
'!src/sql/base/browser/ui/table/plugins/headerFilter.plugin.ts',
'!src/sql/base/browser/ui/table/plugins/checkboxSelectColumn.plugin.ts',
'!src/sql/base/browser/ui/table/plugins/cellSelectionModel.plugin.ts',
'!src/sql/base/browser/ui/table/plugins/autoSizeColumns.plugin.ts',
'!src/sql/base/browser/ui/table/plugins/rowMoveManager.plugin.ts',
'!src/sql/workbench/services/notebook/browser/outputs/sanitizer.ts',
'!src/sql/workbench/contrib/notebook/browser/outputs/renderers.ts',
'!src/sql/workbench/services/notebook/browser/outputs/tableRenderers.ts',
'!src/sql/workbench/services/notebook/browser/outputs/registry.ts',
'!src/sql/workbench/services/notebook/browser/outputs/factories.ts',
'!src/sql/workbench/services/notebook/common/nbformat.ts',
'!extensions/markdown-language-features/media/tomorrow.css',
'!src/sql/workbench/browser/modelComponents/media/highlight.css',
'!src/sql/workbench/contrib/notebook/electron-browser/cellViews/media/highlight.css',
'!src/sql/workbench/contrib/notebook/browser/turndownPluginGfm.ts',
'!**/*.gif',
'!**/*.xlf',
'!**/*.dacpac',
'!**/*.bacpac',
'!**/*.py'
];
module.exports.jsHygieneFilter = [
@@ -213,7 +137,6 @@ module.exports.jsHygieneFilter = [
'!src/**/marked.js',
'!src/**/semver.js',
'!**/test/**',
'!build/**/*' // {{SQL CARBON EDIT}}
];
module.exports.tsHygieneFilter = [
@@ -231,11 +154,4 @@ module.exports.tsHygieneFilter = [
'!extensions/vscode-api-tests/testWorkspace2/**',
'!extensions/**/*.test.ts',
'!extensions/html-language-features/server/lib/jquery.d.ts',
// {{SQL CARBON EDIT}}
'!extensions/big-data-cluster/src/bigDataCluster/controller/apiGenerated.ts',
'!extensions/big-data-cluster/src/bigDataCluster/controller/tokenApiGenerated.ts',
'!src/vs/workbench/services/themes/common/textMateScopeMatcher.ts', // skip this because we have no plans on touching this and it's not ours
'!src/vs/workbench/contrib/extensions/browser/extensionRecommendationsService.ts', // skip this because of a known issue
'!build/**/*'
];

View File

@@ -14,7 +14,6 @@ const compilation = require('./lib/compilation');
const compileBuildTask = task.define('compile-build',
task.series(
util.rimraf('out-build'),
util.buildWebNodePaths('out-build'),
compilation.compileTask('src', 'out-build', true)
)
);

View File

@@ -35,42 +35,42 @@ const compilations = glob.sync('**/tsconfig.json', {
ignore: ['**/out/**', '**/node_modules/**']
});
// const compilations = [
// 'configuration-editing/build/tsconfig.json',
// 'configuration-editing/tsconfig.json',
// 'css-language-features/client/tsconfig.json',
// 'css-language-features/server/tsconfig.json',
// 'debug-auto-launch/tsconfig.json',
// 'debug-server-ready/tsconfig.json',
// 'emmet/tsconfig.json',
// 'extension-editing/tsconfig.json',
// 'git/tsconfig.json',
// 'github-authentication/tsconfig.json',
// 'github/tsconfig.json',
// 'grunt/tsconfig.json',
// 'gulp/tsconfig.json',
// 'html-language-features/client/tsconfig.json',
// 'html-language-features/server/tsconfig.json',
// 'image-preview/tsconfig.json',
// 'ipynb/tsconfig.json',
// 'jake/tsconfig.json',
// 'json-language-features/client/tsconfig.json',
// 'json-language-features/server/tsconfig.json',
// 'markdown-language-features/preview-src/tsconfig.json',
// 'markdown-language-features/tsconfig.json',
// 'markdown-math/tsconfig.json',
// 'merge-conflict/tsconfig.json',
// 'microsoft-authentication/tsconfig.json',
// 'npm/tsconfig.json',
// 'php-language-features/tsconfig.json',
// 'search-result/tsconfig.json',
// 'simple-browser/tsconfig.json',
// 'typescript-language-features/test-workspace/tsconfig.json',
// 'typescript-language-features/tsconfig.json',
// 'vscode-api-tests/tsconfig.json',
// 'vscode-colorize-tests/tsconfig.json',
// 'vscode-custom-editor-tests/tsconfig.json',
// 'vscode-notebook-tests/tsconfig.json',
// 'vscode-test-resolver/tsconfig.json'
// 'configuration-editing/build/tsconfig.json',
// 'configuration-editing/tsconfig.json',
// 'css-language-features/client/tsconfig.json',
// 'css-language-features/server/tsconfig.json',
// 'debug-auto-launch/tsconfig.json',
// 'debug-server-ready/tsconfig.json',
// 'emmet/tsconfig.json',
// 'extension-editing/tsconfig.json',
// 'git/tsconfig.json',
// 'github-authentication/tsconfig.json',
// 'github/tsconfig.json',
// 'grunt/tsconfig.json',
// 'gulp/tsconfig.json',
// 'html-language-features/client/tsconfig.json',
// 'html-language-features/server/tsconfig.json',
// 'image-preview/tsconfig.json',
// 'jake/tsconfig.json',
// 'json-language-features/client/tsconfig.json',
// 'json-language-features/server/tsconfig.json',
// 'markdown-language-features/preview-src/tsconfig.json',
// 'markdown-language-features/tsconfig.json',
// 'markdown-math/tsconfig.json',
// 'merge-conflict/tsconfig.json',
// 'microsoft-authentication/tsconfig.json',
// 'npm/tsconfig.json',
// 'php-language-features/tsconfig.json',
// 'search-result/tsconfig.json',
// 'simple-browser/tsconfig.json',
// 'testing-editor-contributions/tsconfig.json',
// 'typescript-language-features/test-workspace/tsconfig.json',
// 'typescript-language-features/tsconfig.json',
// 'vscode-api-tests/tsconfig.json',
// 'vscode-colorize-tests/tsconfig.json',
// 'vscode-custom-editor-tests/tsconfig.json',
// 'vscode-notebook-tests/tsconfig.json',
// 'vscode-test-resolver/tsconfig.json'
// ];
const getBaseUrl = out => `https://sqlopsbuilds.blob.core.windows.net/sourcemaps/${commit}/${out}`;
@@ -240,7 +240,7 @@ exports.compileExtensionsBuildTask = compileExtensionsBuildTask;
//Get every extension in 'extensions' to create XLF files.
const exportCompilations = glob.sync('**/package.json', {
cwd: extensionsPath,
ignore: ['**/out/**', '**/node_modules/**', '**/sqltoolsservice/**', 'package.json']
ignore: ['**/out/**', '**/node_modules/**', 'package.json']
});
//Run the localization packaging task on all extensions in ADS.

View File

@@ -16,10 +16,10 @@ const { monacoTypecheckTask/* , monacoTypecheckWatchTask */ } = require('./gulpf
const { compileExtensionsTask, watchExtensionsTask, compileExtensionMediaTask } = require('./gulpfile.extensions');
// Fast compile for development time
const compileClientTask = task.define('compile-client', task.series(util.rimraf('out'), util.buildWebNodePaths('out'), compilation.compileTask('src', 'out', false)));
const compileClientTask = task.define('compile-client', task.series(util.rimraf('out'), compilation.compileTask('src', 'out', false)));
gulp.task(compileClientTask);
const watchClientTask = task.define('watch-client', task.series(util.rimraf('out'), util.buildWebNodePaths('out'), compilation.watchTask('out', false)));
const watchClientTask = task.define('watch-client', task.series(util.rimraf('out'), compilation.watchTask('out', false)));
gulp.task(watchClientTask);
// All

View File

@@ -1,104 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
const gulp = require('gulp');
const path = require('path');
const task = require('./lib/task');
const util = require('./lib/util');
const _ = require('underscore');
const electron = require('gulp-atom-electron');
const { config } = require('./lib/electron');
const filter = require('gulp-filter');
const deps = require('./lib/dependencies');
const root = path.dirname(__dirname);
const BUILD_TARGETS = [
{ platform: 'win32', arch: 'ia32' },
{ platform: 'win32', arch: 'x64' },
{ platform: 'win32', arch: 'arm64' },
{ platform: 'darwin', arch: null, opts: { stats: true } },
{ platform: 'linux', arch: 'ia32' },
{ platform: 'linux', arch: 'x64' },
{ platform: 'linux', arch: 'armhf' },
{ platform: 'linux', arch: 'arm64' },
];
BUILD_TARGETS.forEach(buildTarget => {
const dashed = (str) => (str ? `-${str}` : ``);
const platform = buildTarget.platform;
const arch = buildTarget.arch;
const destinationExe = path.join(path.dirname(root), 'scanbin', `VSCode${dashed(platform)}${dashed(arch)}`, 'bin');
const destinationPdb = path.join(path.dirname(root), 'scanbin', `VSCode${dashed(platform)}${dashed(arch)}`, 'pdb');
const tasks = [];
// removal tasks
tasks.push(util.rimraf(destinationExe), util.rimraf(destinationPdb));
// electron
tasks.push(() => electron.dest(destinationExe, _.extend({}, config, { platform, arch: arch === 'armhf' ? 'arm' : arch })));
// pdbs for windows
if (platform === 'win32') {
tasks.push(
() => electron.dest(destinationPdb, _.extend({}, config, { platform, arch: arch === 'armhf' ? 'arm' : arch, pdbs: true })),
util.rimraf(path.join(destinationExe, 'swiftshader')),
util.rimraf(path.join(destinationExe, 'd3dcompiler_47.dll')));
}
if (platform === 'linux') {
tasks.push(
() => electron.dest(destinationPdb, _.extend({}, config, { platform, arch: arch === 'armhf' ? 'arm' : arch, symbols: true }))
);
}
// node modules
tasks.push(
nodeModules(destinationExe, destinationPdb, platform)
);
const setupSymbolsTask = task.define(`vscode-symbols${dashed(platform)}${dashed(arch)}`,
task.series(...tasks)
);
gulp.task(setupSymbolsTask);
});
function nodeModules(destinationExe, destinationPdb, platform) {
const productionDependencies = deps.getProductionDependencies(root);
const dependenciesSrc = _.flatten(productionDependencies.map(d => path.relative(root, d.path)).map(d => [`${d}/**`, `!${d}/**/{test,tests}/**`]));
const exe = () => {
return gulp.src(dependenciesSrc, { base: '.', dot: true })
.pipe(filter(['**/*.node']))
.pipe(gulp.dest(destinationExe));
};
if (platform === 'win32') {
const pdb = () => {
return gulp.src(dependenciesSrc, { base: '.', dot: true })
.pipe(filter(['**/*.pdb']))
.pipe(gulp.dest(destinationPdb));
};
return gulp.parallel(exe, pdb);
}
if (platform === 'linux') {
const pdb = () => {
return gulp.src(dependenciesSrc, { base: '.', dot: true })
.pipe(filter(['**/*.sym']))
.pipe(gulp.dest(destinationPdb));
};
return gulp.parallel(exe, pdb);
}
return exe;
}

View File

@@ -129,19 +129,16 @@ gulp.task('package-external-extensions', task.series(
const packageManifestPath = path.join(packageDir, 'package.json');
const json = require('gulp-json-editor');
const packageJsonStream = gulp.src(packageManifestPath) // Create stream for the original package.json
.pipe(json(data => {
// And now use gulp-json-editor to modify the contents
.pipe(json(data => { // And now use gulp-json-editor to modify the contents
const updateData = JSON.parse(fs.readFileSync(vscodeManifestFullPath)); // Read in the set of values to replace from package.vscode.json
Object.keys(updateData).forEach(key => {
data[key] = updateData[key];
});
if(data.contributes?.menus){
// Remove ADS-only menus. This is a subset of the menus listed in https://github.com/microsoft/azuredatastudio/blob/main/src/vs/workbench/api/common/menusExtensionPoint.ts
// More can be added to the list as needed.
['objectExplorer/item/context', 'dataExplorer/context', 'dashboard/toolbar'].forEach(menu => {
delete data.contributes.menus[menu];
});
}
// Remove ADS-only menus. This is a subset of the menus listed in https://github.com/microsoft/azuredatastudio/blob/main/src/vs/workbench/api/common/menusExtensionPoint.ts
// More can be added to the list as needed.
['objectExplorer/item/context', 'dataExplorer/context', 'dashboard/toolbar'].forEach(menu => {
delete data.contributes.menus[menu];
});
return data;
}, { beautify: false }))
.pipe(gulp.dest(packageDir));
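To make the overlay step above concrete, here is a minimal sketch of what that stream does to a single manifest. The field values below are invented for illustration; only the key-replacement and menu-removal logic mirrors the gulp pipeline shown above.

// Hypothetical inputs, not taken from a real extension.
const data = {                      // original package.json (ADS flavour)
	name: 'sample-ext',
	engines: { azdata: '*' },
	contributes: { menus: { 'dashboard/toolbar': [], 'commandPalette': [] } }
};
const updateData = {                // package.vscode.json: keys that replace the originals
	name: 'sample-ext-vscode',
	engines: { vscode: '^1.62.0' }
};

// Every top-level key from package.vscode.json wins over the original value.
Object.keys(updateData).forEach(key => {
	data[key] = updateData[key];
});

// ADS-only menu contribution points are dropped entirely.
if (data.contributes?.menus) {
	['objectExplorer/item/context', 'dataExplorer/context', 'dashboard/toolbar'].forEach(menu => {
		delete data.contributes.menus[menu];
	});
}

// data now targets VS Code and keeps only the shared contribution points:
// { name: 'sample-ext-vscode', engines: { vscode: '^1.62.0' },
//   contributes: { menus: { 'commandPalette': [] } } }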

View File

@@ -38,7 +38,6 @@ const vscodeEntryPoints = _.flatten([
buildfile.base,
buildfile.workerExtensionHost,
buildfile.workerNotebook,
buildfile.workerLanguageDetection,
buildfile.workbenchDesktop,
buildfile.code
]);
@@ -65,6 +64,8 @@ const vscodeResources = [
'out-build/vs/workbench/contrib/debug/**/*.json',
'out-build/vs/workbench/contrib/externalTerminal/**/*.scpt',
'out-build/vs/workbench/contrib/webview/browser/pre/*.js',
'out-build/vs/workbench/contrib/webview/electron-browser/pre/*.js',
'out-build/vs/workbench/services/extensions/worker/extensionHostWorkerMain.js',
'out-build/vs/**/markdown.css',
'out-build/vs/workbench/contrib/tasks/**/*.json',
'out-build/vs/platform/files/**/*.exe',
@@ -115,13 +116,11 @@ const extensionsFilter = filter([
'**/agent.xlf',
'**/arc.xlf',
'**/asde-deployment.xlf',
'**/azcli.xlf',
'**/azurecore.xlf',
'**/azurehybridtoolkit.xlf',
'**/big-data-cluster.xlf',
'**/cms.xlf',
'**/dacpac.xlf',
'**/git.xlf',
'**/data-workspace.xlf',
'**/import.xlf',
'**/kusto.xlf',
@@ -135,7 +134,6 @@ const extensionsFilter = filter([
'**/schema-compare.xlf',
'**/server-report.xlf',
'**/sql-assessment.xlf',
'**/sql-bindings.xlf',
'**/sql-database-projects.xlf',
'**/sql-migration.xlf',
'**/xml-language-features.xlf'
@@ -290,7 +288,7 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op
'**/node-pty/build/Release/*',
'**/node-pty/lib/worker/conoutSocketWorker.js',
'**/node-pty/lib/shared/conout.js',
'**/*.wasm',
'**/*.wasm'
], 'node_modules.asar'));
let all = es.merge(

View File

@@ -27,7 +27,7 @@ const zipPath = arch => path.join(zipDir(arch), `azuredatastudio-win32-${arch}.z
const setupDir = (arch, target) => path.join(repoPath, '.build', `win32-${arch}`, `${target}-setup`);
const issPath = path.join(__dirname, 'win32', 'code.iss');
const innoSetupPath = path.join(path.dirname(path.dirname(require.resolve('innosetup'))), 'bin', 'ISCC.exe');
const signWin32Path = path.join(repoPath, 'build', 'azure-pipelines', 'common', 'sign-win32');
const signPS1 = path.join(repoPath, 'build', 'azure-pipelines', 'win32', 'sign.ps1');
function packageInnoSetup(iss, options, cb) {
options = options || {};
@@ -50,7 +50,7 @@ function packageInnoSetup(iss, options, cb) {
const args = [
iss,
...defs,
`/sesrp=node ${signWin32Path} $f`
`/sesrp=powershell.exe -ExecutionPolicy bypass ${signPS1} $f`
];
cp.spawn(innoSetupPath, args, { stdio: ['ignore', 'inherit', 'inherit'] })

View File

@@ -12,7 +12,221 @@ const vfs = require('vinyl-fs');
const path = require('path');
const fs = require('fs');
const pall = require('p-all');
const { all, copyrightFilter, indentationFilter, jsHygieneFilter, tsHygieneFilter } = require('./filters');
/**
* Hygiene works by creating cascading subsets of all our files and
* passing them through a sequence of checks. Here are the current subsets,
* named according to the checks performed on them. Each subset contains
* the following one, as described in mathematical notation:
*
* all ⊃ eol ⊇ indentation ⊃ copyright ⊃ typescript
*/
const all = [
'*',
'extensions/**/*',
'scripts/**/*',
'src/**/*',
'test/**/*',
'!test/**/out/**',
'!**/node_modules/**',
'!build/actions/**/*.js', // {{SQL CARBON EDIT}}
'!build/**/*' // {{SQL CARBON EDIT}}
];
module.exports.all = all;
const indentationFilter = [
'**',
// except specific files
'!**/ThirdPartyNotices.txt',
'!**/LICENSE.{txt,rtf}',
'!LICENSES.chromium.html',
'!**/LICENSE',
'!src/vs/nls.js',
'!src/vs/nls.build.js',
'!src/vs/css.js',
'!src/vs/css.build.js',
'!src/vs/loader.js',
'!src/vs/base/common/insane/insane.js',
'!src/vs/base/common/marked/marked.js',
'!src/vs/base/common/semver/semver.js',
'!src/vs/base/node/terminateProcess.sh',
'!src/vs/base/node/cpuUsage.sh',
'!test/unit/assert.js',
'!resources/linux/snap/electron-launch',
// except specific folders
'!test/automation/out/**',
'!test/smoke/out/**',
'!extensions/typescript-language-features/test-workspace/**',
'!extensions/vscode-api-tests/testWorkspace/**',
'!extensions/vscode-api-tests/testWorkspace2/**',
'!build/monaco/**',
'!build/win32/**',
// except multiple specific files
'!**/package.json',
'!**/yarn.lock',
'!**/yarn-error.log',
// except multiple specific folders
'!**/codicon/**',
'!**/fixtures/**',
'!**/lib/**',
'!extensions/**/out/**',
'!extensions/**/snippets/**',
'!extensions/**/syntaxes/**',
'!extensions/**/themes/**',
'!extensions/**/colorize-fixtures/**',
// except specific file types
'!src/vs/*/**/*.d.ts',
'!src/typings/**/*.d.ts',
'!extensions/**/*.d.ts',
'!**/*.{svg,exe,png,bmp,jpg,scpt,bat,cmd,cur,ttf,woff,eot,md,ps1,template,yaml,yml,d.ts.recipe,ico,icns,plist}',
'!build/{lib,download,darwin}/**/*.js',
'!build/**/*.sh',
'!build/azure-pipelines/**/*.js',
'!build/azure-pipelines/**/*.config',
'!**/Dockerfile',
'!**/Dockerfile.*',
'!**/*.Dockerfile',
'!**/*.dockerfile',
'!extensions/markdown-language-features/media/*.js',
// {{SQL CARBON EDIT}}
'!**/*.gif',
'!build/actions/**/*.js',
'!**/*.{xlf,lcl,docx,sql,vsix,bacpac,ipynb,jpg}',
'!extensions/mssql/sqltoolsservice/**',
'!extensions/import/flatfileimportservice/**',
'!extensions/admin-tool-ext-win/ssmsmin/**',
'!extensions/resource-deployment/notebooks/**',
'!extensions/mssql/notebooks/**',
'!extensions/azurehybridtoolkit/notebooks/**',
'!extensions/integration-tests/testData/**',
'!extensions/arc/src/controller/generated/**',
'!extensions/sql-database-projects/resources/templates/*.xml',
'!extensions/sql-database-projects/src/test/baselines/*.xml',
'!extensions/sql-database-projects/src/test/baselines/*.json',
'!extensions/sql-database-projects/src/test/baselines/*.sqlproj',
'!extensions/sql-database-projects/BuildDirectory/SystemDacpacs/**',
'!extensions/big-data-cluster/src/bigDataCluster/controller/apiGenerated.ts',
'!extensions/big-data-cluster/src/bigDataCluster/controller/clusterApiGenerated2.ts',
'!resources/linux/snap/electron-launch',
'!extensions/markdown-language-features/media/*.js',
'!extensions/simple-browser/media/*.js',
'!resources/xlf/LocProject.json', // {{SQL CARBON EDIT}}
'!build/**/*' // {{SQL CARBON EDIT}}
];
const copyrightFilter = [
'**',
'!**/*.desktop',
'!**/*.json',
'!**/*.html',
'!**/*.template',
'!**/*.md',
'!**/*.bat',
'!**/*.cmd',
'!**/*.ico',
'!**/*.icns',
'!**/*.xml',
'!**/*.sh',
'!**/*.txt',
'!**/*.xpm',
'!**/*.opts',
'!**/*.disabled',
'!**/*.code-workspace',
'!**/*.js.map',
'!build/**/*.init',
'!resources/linux/snap/snapcraft.yaml',
'!resources/win32/bin/code.js',
'!resources/web/code-web.js',
'!resources/completions/**',
'!extensions/configuration-editing/build/inline-allOf.ts',
'!extensions/markdown-language-features/media/highlight.css',
'!extensions/html-language-features/server/src/modes/typescript/*',
'!extensions/*/server/bin/*',
'!src/vs/editor/test/node/classification/typescript-test.ts',
'!scripts/code-web.js',
'!resources/serverless/code-web.js',
// {{SQL CARBON EDIT}}
'!extensions/notebook/src/intellisense/text.ts',
'!extensions/mssql/src/hdfs/webhdfs.ts',
'!src/sql/workbench/contrib/notebook/browser/outputs/tableRenderers.ts',
'!src/sql/workbench/contrib/notebook/common/models/url.ts',
'!src/sql/workbench/services/notebook/browser/outputs/renderMimeInterfaces.ts',
'!src/sql/workbench/contrib/notebook/browser/models/outputProcessor.ts',
'!src/sql/workbench/services/notebook/browser/outputs/mimemodel.ts',
'!src/sql/workbench/contrib/notebook/browser/cellViews/media/*.css',
'!src/sql/base/browser/ui/table/plugins/rowSelectionModel.plugin.ts',
'!src/sql/base/browser/ui/table/plugins/rowDetailView.ts',
'!src/sql/base/browser/ui/table/plugins/headerFilter.plugin.ts',
'!src/sql/base/browser/ui/table/plugins/checkboxSelectColumn.plugin.ts',
'!src/sql/base/browser/ui/table/plugins/cellSelectionModel.plugin.ts',
'!src/sql/base/browser/ui/table/plugins/autoSizeColumns.plugin.ts',
'!src/sql/workbench/services/notebook/browser/outputs/sanitizer.ts',
'!src/sql/workbench/contrib/notebook/browser/outputs/renderers.ts',
'!src/sql/workbench/services/notebook/browser/outputs/registry.ts',
'!src/sql/workbench/services/notebook/browser/outputs/factories.ts',
'!src/sql/workbench/services/notebook/common/nbformat.ts',
'!extensions/markdown-language-features/media/tomorrow.css',
'!src/sql/workbench/browser/modelComponents/media/highlight.css',
'!src/sql/workbench/contrib/notebook/electron-browser/cellViews/media/highlight.css',
'!src/sql/workbench/contrib/notebook/browser/turndownPluginGfm.ts',
'!extensions/mssql/sqltoolsservice/**',
'!extensions/import/flatfileimportservice/**',
'!extensions/notebook/src/prompts/**',
'!extensions/mssql/src/prompts/**',
'!extensions/kusto/src/prompts/**',
'!extensions/notebook/resources/jupyter_config/**',
'!extensions/azurehybridtoolkit/notebooks/**',
'!extensions/query-history/images/**',
'!extensions/sql/build/update-grammar.js',
'!**/*.gif',
'!**/*.xlf',
'!**/*.dacpac',
'!**/*.bacpac',
'!**/*.py'
];
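// JavaScript sources checked by the hygiene lint (eslint) step.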
const jsHygieneFilter = [
'src/**/*.js',
'build/gulpfile.*.js',
'!src/vs/loader.js',
'!src/vs/css.js',
'!src/vs/nls.js',
'!src/vs/css.build.js',
'!src/vs/nls.build.js',
'!src/**/insane.js',
'!src/**/marked.js',
'!src/**/semver.js',
'!**/test/**',
'!build/**/*' // {{SQL CARBON EDIT}}
];
module.exports.jsHygieneFilter = jsHygieneFilter;
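// TypeScript sources checked by the hygiene lint step; generated, third-party, and test-fixture files are excluded.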
const tsHygieneFilter = [
'src/**/*.ts',
'test/**/*.ts',
'extensions/**/*.ts',
'!**/fixtures/**',
'!**/typings/**',
'!**/node_modules/**',
'!extensions/typescript-basics/test/colorize-fixtures/**',
'!extensions/vscode-api-tests/testWorkspace/**',
'!extensions/vscode-api-tests/testWorkspace2/**',
'!extensions/**/*.test.ts',
'!extensions/html-language-features/server/lib/jquery.d.ts',
'!extensions/big-data-cluster/src/bigDataCluster/controller/apiGenerated.ts', // {{SQL CARBON EDIT}}
'!extensions/big-data-cluster/src/bigDataCluster/controller/tokenApiGenerated.ts', // {{SQL CARBON EDIT}}
'!src/vs/workbench/services/themes/common/textMateScopeMatcher.ts', // {{SQL CARBON EDIT}} skip this because we have no plans on touching this and it's not ours
'!src/vs/workbench/contrib/extensions/browser/extensionRecommendationsService.ts', // {{SQL CARBON EDIT}} skip this because of a known issue
'!build/**/*' // {{SQL CARBON EDIT}}
];
module.exports.tsHygieneFilter = tsHygieneFilter;
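// Expected copyright header; the hygiene copyright check requires files selected by copyrightFilter to start with these lines.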
const copyrightHeaderLines = [
'/*---------------------------------------------------------------------------------------------',

Some files were not shown because too many files have changed in this diff.