Compare commits


19 Commits

Author SHA1 Message Date
Anthony Dresser
e57eb999fc Revert "Revert "fix ctrl+tab keybinding"" (#9121)
This reverts commit 99af31fa1f.
2020-02-11 16:01:53 -08:00
Anthony Dresser
99af31fa1f Revert "fix ctrl+tab keybinding"
This reverts commit e2990db33e.
2020-02-11 14:55:16 -08:00
Anthony Dresser
e2990db33e fix ctrl+tab keybinding 2020-02-11 14:53:59 -08:00
Alan Ren
b2238dc37b fix charting issue (#9094)
* fix charting issue

* fix charting issue 2

* revert version bump

(cherry picked from commit 8c61538a27)
2020-02-08 17:45:32 -08:00
Amir Omidi
6770bf877e ok button targeted fix (#9095) 2020-02-07 17:17:58 -08:00
Amir Omidi
ba66305bb5 Add device code authentication (#9097)
* Add device code authentication

* Change string
2020-02-07 17:15:08 -08:00
Charles Gagnon
8e731c75c3 Remove quality checks for events (#8934)
* Remove quality checks for events

* Update iKey

* Add missed quality check

(cherry picked from commit ec0a7a29d3)
2020-02-07 14:48:23 -08:00
Alex Ma
db0409569a made changes (#9092) 2020-02-07 14:31:53 -08:00
Amir Omidi
fb4ab6b501 Show error to user (#9014)
* Show error to user

* Update the errors

* change message

(cherry picked from commit df3f1768f7)
2020-02-07 14:15:02 -08:00
Amir Omidi
bd1d96a57a Amir/target/notarization (#9082)
* runtime hardening

* Notarization

* Fix the parameters

* Update sql-product-build-darwin.yml

* Format

* Change task name

* Remove the array

* Support new format

* Use new format for notarizing too

* Change timeouts to accommodate how long apple takes

* Change timeout in minutes to 2 hours

* Update sql-product-build-darwin.yml

* Increase timeouts

* Change the pipeline

* Fix formatting

* Fix formatting

* Fix formatting

* Fix formatting

* Change path

* Fix path for cleanup

* Don't do hardening

* Enable hardening again

* Self sign

* add display name

* Change key identity

* Deep codesign

* Fix path

* add extras

* add hardening back

* use ditto instead of zip per apple docs

* Changing signing syntax

* Don't deep sign

* Use --deep and add new entitlement

* Temporarily disable notarization

* entitlements fix

* Don't do runtime hardening for self sign

* Will this fix codesign I wonder

* codesigning

* sign everything first

* Change signing to delete bad files

* remove xattr

* Change the find command

* Delete mssql and big-data-cluster

* Stabilize the signing

* Bring in notarization changes
2020-02-07 13:08:21 -08:00
Amir Omidi
6fe49e8cef Targeted fix for saving issue on macOS (#9073)
* Create a targeted fix

* Add sql carbon edit
2020-02-06 16:22:21 -08:00
Alex Ma
6da98ed439 disable updateNameFromFirstLine from untitledTextEditorModel (#9064)
* disable updateNameFromFirstLine

* change to either

* WIP commit

* final fix

* formatting

* fix for tab space

* replace tab with space

* Fix formatting

Co-authored-by: Charles Gagnon <chgagnon@microsoft.com>
2020-02-06 14:09:45 -08:00
Chris LaFreniere
5ef69f5133 BDC: Add another command for bdc extension activation events (#9050)
* Add another command for bdc extension activation

* spaces

* Add other activation events

* spacing

(cherry picked from commit 8e67b104c3)
2020-02-05 16:05:49 -08:00
Alex Ma
ebf47df35e removed unnecessary formatting (#9045) 2020-02-04 14:01:42 -08:00
Amir Omidi
2a93d7c5e9 Move files locations (#9015) (#9044)
* Move files locations

* Extension path

(cherry picked from commit c6689700f6)
2020-02-04 13:42:06 -08:00
Chris LaFreniere
573c5b4470 Books: Ensure associatedResource is Present for Untitled Notebooks that have a File Associated with them (#9037)
* Conditionally add associatedResource to untitled nb

* Fix Jupyter Book notebook titles (#9039)

* Fix notebook titles

* Fix navigation links for books

* Added comments

Co-authored-by: Chris LaFreniere <40371649+chlafreniere@users.noreply.github.com>

* PR comment nit

Co-authored-by: Charles Gagnon <chgagnon@microsoft.com>
(cherry picked from commit 2208666eef)
2020-02-04 11:49:01 -08:00
Karl Burtram
41838b7720 Import "fromPromise" so Observables work correctly (#9034) (#9042)
* Import "fromPromise" so Observables work correctly

* Bump distro guid

* Fix unit test missing file error

* Bump distro to pick up latest changes
2020-02-04 11:40:26 -08:00
Charles Gagnon
ee1359fd7c Even more robust handling of extension tree loading (#9007)
* Even more robust handling of extension tree loading

* Fix initial loading and add new test for CMS

* Fix compile errors

* Fix logic

* Remove debug log statements

(cherry picked from commit 62df5359e2)
2020-02-04 08:05:08 -08:00
Alex Ma
189bcf703e Fix for table name not showing up on the header. (#9016)
* fix for edit data table header

* Fix for name, and SQL load

(cherry picked from commit 5b2c14f11a)
2020-01-31 14:32:27 -08:00
3852 changed files with 132879 additions and 218499 deletions


@@ -1,17 +0,0 @@
{
"tool": "Credential Scanner",
"suppressions": [
{
"file": "src\\vs\\base\\test\\common\\uri.test.ts",
"_justification": "External code"
},
{
"file": "build\\actions\\AutoLabel\\dist\\index.js",
"_justification": "False positive from webpacked code"
},
{
"file": "build\\actions\\AutoMerge\\dist\\index.js",
"_justification": "False positive from webpacked code"
}
]
}


@@ -3,6 +3,7 @@
**/vs/css.build.js
**/vs/css.js
**/vs/loader.js
**/promise-polyfill/**
**/insane/**
**/marked/**
**/test/**/*.js

File diff suppressed because it is too large.

.eslintrc.sql.json (new file, 7 changed lines)

@@ -0,0 +1,7 @@
{
"extends": ".eslintrc.json",
"rules": {
"no-sync": "warn",
"strict": ["warn", "never"]
}
}
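
For orientation, a minimal hypothetical snippet (not part of this change) showing what the "no-sync" rule above reports:

// Hypothetical file; under the config above, the synchronous call is a warning.
import * as fs from 'fs';

const raw = fs.readFileSync('product.json', 'utf8'); // no-sync: warn
console.log(raw.length);

fs.readFile('product.json', 'utf8', (err, data) => { // async variant: no warning
	if (err) { throw err; }
	console.log(data.length);
});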


@@ -1,20 +0,0 @@
{
"parser": "@typescript-eslint/parser",
"parserOptions": {
"ecmaVersion": 6,
"sourceType": "module",
"project": "./tsconfig.sql.json"
},
"plugins": [
"@typescript-eslint",
"jsdoc"
],
"rules": {
"@typescript-eslint/no-floating-promises": [
"error",
{
"ignoreVoid": true
}
]
}
}
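
To illustrate the rule being removed here — @typescript-eslint/no-floating-promises with ignoreVoid: true — a minimal hypothetical sketch (names are made up, not part of this diff):

// Hypothetical example: how the rule above treats un-awaited promises.
async function save(): Promise<void> { /* ... */ }

async function main(): Promise<void> {
	save();       // flagged: floating promise
	void save();  // allowed: explicitly discarded, since ignoreVoid is true
	await save(); // allowed: awaited
}
void main();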


@@ -6,7 +6,6 @@ labels: ''
assignees: ''
---
<!-- ⚠️⚠️ Do Not Delete This! bug_report_template ⚠️⚠️ -->
<!-- Please read our Rules of Conduct: https://opensource.microsoft.com/codeofconduct/ -->
<!-- Please search existing issues to avoid creating duplicates. -->
<!-- Also please test using the latest insiders build to make sure your issue has not already been fixed. -->


@@ -8,13 +8,11 @@
Area - Acquisition: [],
Area - Azure: [],
Area - Backup\Restore: [],
Area - Big Data Cluster: [ charles-gagnon ],
Area - Charting\Insights: [],
Area - Connection: [ ],
Area - Connection: [ charles-gagnon ],
Area - DacFX: [],
Area - Dashboard: [],
Area - Data Explorer: [],
Area - Data Virtualization: [ charles-gagnon ],
Area - Edit Data: [],
Area - Extensibility: [],
Area - External Table: [],
@@ -24,7 +22,6 @@
Area - Notebooks: [ chlafreniere ],
Area - Performance: [],
Area - Query Editor: [ anthonydresser ],
Area - Query History: [ charles-gagnon ],
Area - Query Plan: [],
Area - Reliability: [],
Area - Resource Deployment: [],


@@ -1,11 +1,12 @@
{
perform: true,
perform: false,
commands: [
{
type: 'label',
name: 'Needs Logs',
action: 'comment',
comment: "We need more info to debug your particular issue. If you could attach your logs to the issue (ensure no private data is in them), it would help us fix the issue much faster.\n\nTo find your logs:\n\n- Open command palette (Click **View** -> **Command Palette**)\n- Run the command: **`Developer: Open Logs Folder`**\n\nThis will open the log file locally. Please include renderer.log"
name: 'duplicate',
allowTriggerByBot: true,
action: 'close',
comment: "Thanks for creating this issue! We figured it's covering the same as another one we already have. Thus, we closed this one as a duplicate. You can search for existing issues [here](https://aka.ms/vscodeissuesearch). See also our [issue reporting](https://aka.ms/vscodeissuereporting) guidelines.\n\nHappy Coding!"
}
]
}

.github/copycat.yml (vendored, 2 changed lines)

@@ -1,5 +1,5 @@
{
perform: false,
perform: true,
target_owner: 'anthonydresser',
target_repo: 'testissues'
}

.github/feature-requests.yml (vendored, new file, 34 changed lines)

@@ -0,0 +1,34 @@
{
typeLabel: {
name: 'feature-request'
},
candidateMilestone: {
number: 107,
name: 'Backlog Candidates'
},
approvedMilestone: {
number: 8,
name: 'Backlog'
},
onLabeled: {
delay: 60,
perform: true
},
onCandidateMilestoned: {
candidatesComment: "This feature request is now a candidate for our backlog. The community has 60 days to upvote the issue. If it receives 20 upvotes we will move it to our backlog. If not, we will close it. To learn more about how we handle feature requests, please see our [documentation](https://aka.ms/vscode-issue-lifecycle).\n\nHappy Coding!",
perform: true
},
onMonitorUpvotes: {
upvoteThreshold: 20,
acceptanceComment: ":slightly_smiling_face: This feature request received a sufficient number of community upvotes and we moved it to our backlog. To learn more about how we handle feature requests, please see our [documentation](https://aka.ms/vscode-issue-lifecycle).\n\nHappy Coding!",
perform: true
},
onMonitorDaysOnCandidateMilestone: {
daysOnMilestone: 60,
warningPeriod: 10,
numberOfCommentsToPreventAutomaticRejection: 20,
rejectionComment: ":slightly_frowning_face: In the last 60 days, this feature request has received less than 20 community upvotes and we closed it. Still a big Thank You to you for taking the time to create this issue! To learn more about how we handle feature requests, please see our [documentation](https://aka.ms/vscode-issue-lifecycle).\n\nHappy Coding!",
warningComment: "This feature request has not yet received the 20 community upvotes it takes to make to our backlog. 10 days to go. To learn more about how we handle feature requests, please see our [documentation](https://aka.ms/vscode-issue-lifecycle).\n\nHappy Coding",
perform: true
}
}
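
Read together, these fields encode a simple lifecycle: accept at 20 upvotes, warn 10 days before the 60-day deadline, and reject at day 60 unless 20 comments hold the issue open. A hedged, hypothetical helper capturing that logic (not the bot's actual code):

// Hypothetical sketch of the lifecycle the config above encodes.
type CandidateAction = 'accept' | 'warn' | 'reject' | 'wait';

function candidateAction(daysOnMilestone: number, upvotes: number, comments: number): CandidateAction {
	const limit = 60;           // onMonitorDaysOnCandidateMilestone.daysOnMilestone
	const warningPeriod = 10;   // warning fires 10 days before the limit
	const upvoteThreshold = 20; // onMonitorUpvotes.upvoteThreshold
	const commentGuard = 20;    // numberOfCommentsToPreventAutomaticRejection
	if (upvotes >= upvoteThreshold) { return 'accept'; }
	if (daysOnMilestone >= limit) { return comments >= commentGuard ? 'wait' : 'reject'; }
	if (daysOnMilestone >= limit - warningPeriod) { return 'warn'; }
	return 'wait';
}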


@@ -1,7 +0,0 @@
[
"kenvanhyning",
"kburtram",
"udeeshagautam",
"qifahs",
"chlafreniere"
]

.github/new_release.yml (vendored, new file, 6 changed lines)

@@ -0,0 +1,6 @@
{
newReleaseLabel: 'new-release',
newReleaseColor: '006b75',
daysAfterRelease: 5,
perform: true
}


@@ -11,60 +11,46 @@ on:
- release/*
jobs:
linux:
runs-on: ubuntu-latest
env:
CHILD_CONCURRENCY: "1"
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@v1
# TODO: rename azure-pipelines/linux/xvfb.init to github-actions
- run: |
sudo apt-get update
sudo apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0 libkrb5-dev # {{SQL CARBON EDIT}} add kerberos dep
sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
sudo chmod +x /etc/init.d/xvfb
sudo update-rc.d xvfb defaults
sudo service xvfb start
name: Setup Build Environment
- uses: actions/setup-node@v1
with:
node-version: 10
# TODO: cache node modules
- run: yarn --frozen-lockfile
name: Install Dependencies
- run: yarn electron x64
name: Download Electron
- run: yarn gulp hygiene
name: Run Hygiene Checks
- run: yarn strict-vscode # {{SQL CARBON EDIT}} add step
name: Run Strict Compile Options
# - run: yarn monaco-compile-check {{SQL CARBON EDIT}} remove step
# name: Run Monaco Editor Checks
- run: yarn valid-layers-check
name: Run Valid Layers Checks
- run: yarn compile
name: Compile Sources
# - run: yarn download-builtin-extensions {{SQL CARBON EDIT}} remove step
# name: Download Built-in Extensions
- run: DISPLAY=:10 ./scripts/test.sh --tfs "Unit Tests" --coverage --runGlob "**/sql/**/*.test.js"
name: Run Unit Tests (Electron)
- run: DISPLAY=:10 ./scripts/test-extensions-unit.sh
name: Run Extension Unit Tests (Electron)
# {{SQL CARBON EDIT}} Add coveralls. We merge first to get around issue where parallel builds weren't being combined correctly
- run: node test/combineCoverage
name: Combine code coverage files
- name: Upload Code Coverage
uses: coverallsapp/github-action@v1.1.1
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
path-to-lcov: 'test/coverage/lcov.info'
# Fails with cryptic error (e.g. https://github.com/microsoft/vscode/pull/90292/checks?check_run_id=433681926#step:13:9)
# - run: DISPLAY=:10 yarn test-browser --browser chromium
# name: Run Unit Tests (Browser)
# - run: DISPLAY=:10 ./scripts/test-integration.sh --tfs "Integration Tests" {{SQL CARBON EDIT}} remove step
# name: Run Integration Tests (Electron)
# linux:
# runs-on: ubuntu-latest
# env:
# CHILD_CONCURRENCY: "1"
# GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# steps:
# - uses: actions/checkout@v1
# # TODO: rename azure-pipelines/linux/xvfb.init to github-actions
# - run: |
# sudo apt-get update
# sudo apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0 libkrb5-dev # {{SQL CARBON EDIT}} add kerberos dep
# sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
# sudo chmod +x /etc/init.d/xvfb
# sudo update-rc.d xvfb defaults
# sudo service xvfb start
# name: Setup Build Environment
# - uses: actions/setup-node@v1
# with:
# node-version: 10
# # TODO: cache node modules
# - run: yarn --frozen-lockfile
# name: Install Dependencies
# - run: yarn electron x64
# name: Download Electron
# - run: yarn gulp hygiene
# name: Run Hygiene Checks
# - run: yarn strict-vscode # {{SQL CARBON EDIT}} add step
# name: Run Strict Compile Options
# # - run: yarn monaco-compile-check {{SQL CARBON EDIT}} remove step
# # name: Run Monaco Editor Checks
# - run: yarn valid-layers-check
# name: Run Valid Layers Checks
# - run: yarn compile
# name: Compile Sources
# # - run: yarn download-builtin-extensions {{SQL CARBON EDIT}} remove step
# # name: Download Built-in Extensions
# - run: DISPLAY=:10 ./scripts/test.sh --tfs "Unit Tests"
# name: Run Unit Tests
# # - run: DISPLAY=:10 ./scripts/test-integration.sh --tfs "Integration Tests" {{SQL CARBON EDIT}} remove step
# # name: Run Integration Tests
windows:
runs-on: windows-2016
@@ -96,11 +82,9 @@ jobs:
# - run: yarn download-builtin-extensions {{SQL CARBON EDIT}} remove step
# name: Download Built-in Extensions
- run: .\scripts\test.bat --tfs "Unit Tests"
name: Run Unit Tests (Electron)
# - run: yarn test-browser --browser chromium {{SQL CARBON EDIT}} disable for now @TODO @anthonydresser
# name: Run Unit Tests (Browser)
name: Run Unit Tests
# - run: .\scripts\test-integration.bat --tfs "Integration Tests" {{SQL CARBON EDIT}} remove step
# name: Run Integration Tests (Electron)
# name: Run Integration Tests
darwin:
runs-on: macos-latest
@@ -129,34 +113,6 @@ jobs:
# - run: yarn download-builtin-extensions {{SQL CARBON EDIT}} remove step
# name: Download Built-in Extensions
- run: ./scripts/test.sh --tfs "Unit Tests"
name: Run Unit Tests (Electron)
# - run: yarn test-browser --browser chromium --browser webkit
# name: Run Unit Tests (Browser)
# - run: ./scripts/test-integration.sh --tfs "Integration Tests"
# name: Run Integration Tests (Electron)
# monaco:
# runs-on: ubuntu-latest
# env:
# CHILD_CONCURRENCY: "1"
# GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# steps:
# - uses: actions/checkout@v1
# # TODO: rename azure-pipelines/linux/xvfb.init to github-actions
# - run: |
# sudo apt-get update
# sudo apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0 libgbm1
# sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
# sudo chmod +x /etc/init.d/xvfb
# sudo update-rc.d xvfb defaults
# sudo service xvfb start
# name: Setup Build Environment
# - uses: actions/setup-node@v1
# with:
# node-version: 10
# - run: yarn --frozen-lockfile
# name: Install Dependencies
# - run: yarn monaco-compile-check
# name: Run Monaco Editor Checks
# - run: yarn gulp editor-esm-bundle
# name: Editor Distro & ESM Bundle
name: Run Unit Tests
# - run: ./scripts/test-integration.sh --tfs "Integration Tests" {{SQL CARBON EDIT}} remove step
# name: Run Integration Tests


@@ -1,24 +0,0 @@
name: On Issue Open
on:
issues:
types: [opened]
jobs:
main:
runs-on: ubuntu-latest
steps:
- name: Checkout Actions
uses: actions/checkout@v2
with:
repository: 'microsoft/azuredatastudio'
ref: master
path: ./actions
- name: Install Actions
run: npm install --production --prefix ./actions/build/actions
- name: Run CopyCat
uses: ./actions/build/actions/copycat
with:
token: ${{secrets.TRIAGE_PAT}}
owner: anthonydresser
repo: testissues


@@ -1,23 +0,0 @@
name: On PR Open
on:
pull_request:
branches:
- release/**
jobs:
main:
runs-on: ubuntu-latest
steps:
- name: Checkout Actions
uses: actions/checkout@v2
with:
repository: 'microsoft/azuredatastudio'
ref: master
path: ./actions
- name: Install Actions
run: npm install --production --prefix ./actions/build/actions
- name: Run Port Labeler
uses: ./actions/build/actions/auto-labeler
with:
label: "Port Request"

.gitignore (vendored, 1 changed line)

@@ -11,7 +11,6 @@ out-editor/
out-editor-src/
out-editor-build/
out-editor-esm/
out-editor-esm-bundle/
out-editor-min/
out-monaco-editor-core/
out-vscode/

.prettierrc.json (new file, 6 changed lines)

@@ -0,0 +1,6 @@
{
"useTabs": true,
"printWidth": 120,
"semi": true,
"singleQuote": true
}
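
A small hypothetical before/after showing what these four options do (tabs for indentation, 120-column wrap, semicolons kept, single quotes):

// Before formatting (hypothetical input):
// const greeting = "hello"
// function greet(name: string) { return greeting + ", " + name }

// After Prettier with the config above:
const greeting = 'hello';
function greet(name: string) {
	return greeting + ', ' + name;
}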

.vscode/launch.json (vendored, 73 changed lines)

@@ -14,26 +14,20 @@
{
"type": "node",
"request": "attach",
"restart": true,
"name": "Attach to Extension Host",
"timeout": 30000,
"port": 5870,
"timeout": 30000,
"restart": true,
"outFiles": [
"${workspaceFolder}/out/**/*.js"
],
"presentation": {
"hidden": true
}
]
},
{
"type": "pwa-chrome",
"type": "chrome",
"request": "attach",
"name": "Attach to Shared Process",
"port": 9222,
"urlFilter": "*sharedProcess.html*",
"presentation": {
"hidden": true
}
"urlFilter": "*"
},
{
"type": "node",
@@ -69,14 +63,13 @@
}
},
{
"type": "pwa-chrome",
"type": "chrome",
"request": "attach",
"name": "Attach to azuredatastudio",
"timeout": 50000,
"port": 9222
},
{
"type": "pwa-chrome",
"type": "chrome",
"request": "launch",
"name": "Launch azuredatastudio",
"windows": {
@@ -101,12 +94,27 @@
],
"webRoot": "${workspaceFolder}",
// Settings for js-debug:
"userDataDir": false,
"pauseForSourceMap": false,
"outFiles": ["${workspaceFolder}/out/**/*.js"],
},
{
"type": "node",
"request": "launch",
"name": "Launch ADS (Main Process)",
"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh",
"windows": {
"runtimeExecutable": "${workspaceFolder}/scripts/sql.bat",
},
"runtimeArgs": [
"--no-cached-data"
],
"outFiles": [
"${workspaceFolder}/out/**/*.js"
],
"browserLaunchLocation": "workspace"
"presentation": {
"group": "2_launch",
"order": 1
}
},
{
"type": "chrome",
@@ -141,29 +149,10 @@
"web"
],
"presentation": {
"group": "0_vscode",
"group": "2_launch",
"order": 2
}
},
{
"type": "node",
"request": "launch",
"name": "Main Process",
"runtimeExecutable": "${workspaceFolder}/scripts/code.sh",
"windows": {
"runtimeExecutable": "${workspaceFolder}/scripts/code.bat",
},
"runtimeArgs": [
"--no-cached-data"
],
"outFiles": [
"${workspaceFolder}/out/**/*.js"
],
"presentation": {
"group": "1_vscode",
"order": 1
}
},
{
"type": "chrome",
"request": "launch",
@@ -171,7 +160,7 @@
"url": "http://localhost:8080",
"preLaunchTask": "Run web",
"presentation": {
"group": "0_vscode",
"group": "2_launch",
"order": 3
}
},
@@ -208,7 +197,7 @@
"type": "node",
"request": "launch",
"name": "Run Unit Tests",
"program": "${workspaceFolder}/test/unit/electron/index.js",
"program": "${workspaceFolder}/test/electron/index.js",
"runtimeExecutable": "${workspaceFolder}/.build/electron/Azure Data Studio.app/Contents/MacOS/Electron",
"windows": {
"runtimeExecutable": "${workspaceFolder}/.build/electron/azuredatastudio.exe"
@@ -280,15 +269,13 @@
]
},
{
"name": "Azure Data Studio",
"name": "Debug azuredatastudio Main and Renderer",
"configurations": [
"Launch azuredatastudio",
"Attach to Main Process",
"Attach to Extension Host",
"Attach to Shared Process",
"Attach to Main Process"
],
"presentation": {
"group": "0_vscode",
"group": "1_vscode",
"order": 1
}
},


@@ -1,86 +0,0 @@
# Query: @deprecated ES6
# Flags: CaseSensitive WordMatch
# ContextLines: 2
16 results - 5 files
src/vs/base/browser/dom.ts:
81 };
82
83: /** @deprecated ES6 - use classList*/
84 export const hasClass: (node: HTMLElement | SVGElement, className: string) => boolean = _classList.hasClass.bind(_classList);
85: /** @deprecated ES6 - use classList*/
86 export const addClass: (node: HTMLElement | SVGElement, className: string) => void = _classList.addClass.bind(_classList);
87: /** @deprecated ES6 - use classList*/
88 export const addClasses: (node: HTMLElement | SVGElement, ...classNames: string[]) => void = _classList.addClasses.bind(_classList);
89: /** @deprecated ES6 - use classList*/
90 export const removeClass: (node: HTMLElement | SVGElement, className: string) => void = _classList.removeClass.bind(_classList);
91: /** @deprecated ES6 - use classList*/
92 export const removeClasses: (node: HTMLElement | SVGElement, ...classNames: string[]) => void = _classList.removeClasses.bind(_classList);
93: /** @deprecated ES6 - use classList*/
94 export const toggleClass: (node: HTMLElement | SVGElement, className: string, shouldHaveIt?: boolean) => void = _classList.toggleClass.bind(_classList);
95
src/vs/base/common/arrays.ts:
401
402 /**
403: * @deprecated ES6: use `Array.findIndex`
404 */
405 export function firstIndex<T>(array: ReadonlyArray<T>, fn: (item: T) => boolean): number {
417
418 /**
419: * @deprecated ES6: use `Array.find`
420 */
421 export function first<T>(array: ReadonlyArray<T>, fn: (item: T) => boolean, notFoundValue: T): T;
560
561 /**
562: * @deprecated ES6: use `Array.find`
563 */
564 export function find<T>(arr: ArrayLike<T>, predicate: (value: T, index: number, arr: ArrayLike<T>) => any): T | undefined {
src/vs/base/common/map.ts:
11
12 /**
13: * @deprecated ES6: use `[...SetOrMap.values()]`
14 */
15 export function values<V = any>(set: Set<V>): V[];
22
23 /**
24: * @deprecated ES6: use `[...map.keys()]`
25 */
26 export function keys<K, V>(map: Map<K, V>): K[] {
src/vs/base/common/objects.ts:
115
116 /**
117: * @deprecated ES6
118 */
119 export function assign<T>(destination: T): T;
src/vs/base/common/strings.ts:
15
16 /**
17: * @deprecated ES6: use `String.padStart`
18 */
19 export function pad(n: number, l: number, char: string = '0'): string {
146
147 /**
148: * @deprecated ES6: use `String.startsWith`
149 */
150 export function startsWith(haystack: string, needle: string): boolean {
167
168 /**
169: * @deprecated ES6: use `String.endsWith`
170 */
171 export function endsWith(haystack: string, needle: string): boolean {
853
854 /**
855: * @deprecated ES6
856 */
857 export function repeat(s: string, count: number): string {
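
These annotations all point to native ES6+ replacements; a hypothetical migration sketch for a few of them:

// Hypothetical call sites migrating off the deprecated helpers listed above.
const haystack = 'classList';

haystack.startsWith('class');      // replaces strings.startsWith(haystack, 'class')
haystack.endsWith('List');         // replaces strings.endsWith(haystack, 'List')
String(7).padStart(3, '0');        // replaces strings.pad(7, 3) -> '007'
'ab'.repeat(2);                    // replaces strings.repeat('ab', 2) -> 'abab'
[3, 1, 4].findIndex((x) => x > 2); // replaces arrays.firstIndex(...) -> index 0
[...new Set([1, 2]).values()];     // replaces map.values(set) -> [1, 2]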


@@ -1,167 +0,0 @@
# Query: strict-null
76 results - 44 files
src\vs\base\browser\ui\tree\compressedObjectTreeModel.ts:
455: return null; // {{SQL CARBON EDIT}} strict-null-check
465: return null; // {{SQL CARBON EDIT}} strict-null-check
src\vs\platform\actions\common\menuService.ts:
97: const toggledExpression: ContextKeyExpression = (item.command.toggled as { condition: ContextKeyExpression }).condition || item.command.toggled as ContextKeyExpression; // {{SQL CARBON EDIT}} strict-null-checks
src\vs\platform\clipboard\browser\clipboardService.ts:
57: return undefined; // {{SQL CARBON EDIT}} strict-null-checks
src\vs\platform\dialogs\electron-main\dialogs.ts:
123: return undefined; // {{SQL CARBON EDIT}} strict-null-check
src\vs\platform\driver\electron-main\driver.ts:
214: const driver = instantiationService.createInstance(Driver as any, windowServer, { verbose }) as Driver; // {{SQL CARBON EDIT}} strict-null-check...i guess?
src\vs\platform\extensionManagement\node\extensionManagementService.ts:
558: return undefined; // {{SQL CARBON EDIT}} strict-null-checks
src\vs\platform\quickinput\browser\pickerQuickAccess.ts:
216: active: activePick as T || additionalActivePick as T // {{SQL CARBON EDIT}} strict-null-checks
src\vs\workbench\api\browser\mainThreadLanguageFeatures.ts:
90: return undefined; // {{SQL CARBON EDIT}} strict-null-checks
600: return undefined; // {{SQL CARBON EDIT}} strict-null-check
610: return undefined; // {{SQL CARBON EDIT}} strict-null-check
src\vs\workbench\api\common\extHost.api.impl.ts:
538: alignment = alignmentOrOptions as number; // {{SQL CARBON EDIT}} strict-null-check
src\vs\workbench\api\common\extHostComments.ts:
410: return undefined; // {{SQL CARBON EDIT}} @anthonydresser strict-null-check
src\vs\workbench\api\common\extHostTask.ts:
583: return undefined; // {{SQL CARBON EDIT}} strict-null-check
src\vs\workbench\api\common\extHostTerminalService.ts:
279: this._onProcessExit.fire(e === void 0 ? undefined : e as number); // {{SQL CARBON EDIT}} strict-null-checks
283: this._pty.onDidOverrideDimensions(e => this._onProcessOverrideDimensions.fire(e ? { cols: e.columns, rows: e.rows } : undefined)); // {{SQL CARBON EDIT}} strict-null-checks
src\vs\workbench\browser\actions\workspaceCommands.ts:
87: return undefined; // {{SQL CARBON EDIT}} @anthonydresser strict-null-check
120: return undefined; // {{SQL CARBON EDIT}} @anthonydresser strict-null-check
src\vs\workbench\browser\parts\editor\editorGroupView.ts:
827: return undefined; // {{SQL CARBON EDIT}} strict-null-checks
src\vs\workbench\browser\parts\panel\panelPart.ts:
151: (id: string, focus?: boolean) => <unknown>this.openPanel(id, focus) as Promise<IPaneComposite | undefined>, // {{SQL CARBON EDIT}} strict-null-checks
src\vs\workbench\browser\parts\sidebar\sidebarPart.ts:
59: return undefined; // {{SQL CARBON EDIT}} strict-null-check
64: return undefined; // {{SQL CARBON EDIT}} strict-null-check
src\vs\workbench\common\editor\editorGroup.ts:
388: return undefined; // {{SQL CARBON EDIT}} strict-null-check
406: return undefined; // not found {{SQL CARBON EDIT}} strict-null-check
433: return undefined; // not found {{SQL CARBON EDIT}} strict-null-check
456: return undefined; // not found {{SQL CARBON EDIT}} strict-null-check
src\vs\workbench\contrib\callHierarchy\browser\callHierarchyPeek.ts:
377: const root = <ITreeNode<callHTree.Call, FuzzyScore>>this._tree.getNode(model).children[0]; // {{SQL CARBON EDIT}} strict-null-checks
src\vs\workbench\contrib\customEditor\browser\customEditorInput.ts:
230: return undefined; // {{SQL CARBON EDIT}} strict-null-checks
src\vs\workbench\contrib\customEditor\browser\customEditors.ts:
169: return undefined; // {{SQL CARBON EDIT}} strict-nulls
468: return undefined; // {{SQL CARBON EDIT}} Strict-null-checks
493: return undefined; // {{SQL CARBON EDIT}} Strict-null-checks
505: return undefined; // {{SQL CARBON EDIT}} strict-null-check
src\vs\workbench\contrib\extensions\browser\extensionsActions.ts:
2203: return (<IExtensionsConfigContent>json.parse(content.value.toString()) || {}) as IExtensionsConfigContent; // {{SQL CARBON EDIT}} strict-null-check
src\vs\workbench\contrib\extensions\test\electron-browser\extensionRecommendationsService.test.ts:
508: instantiationService.stub(IStorageService, <any>{ // {{SQL CARBON EDIT}} strict-null-checks?
src\vs\workbench\contrib\files\common\explorerService.ts:
393: const configSortOrder = configuration?.explorer?.sortOrder || SortOrder.Default; // {{SQL CARBON EDIT}} strict-null-checks?
src\vs\workbench\contrib\notebook\browser\notebookEditor.ts:
475: return undefined; // {{SQL CARBON EDIT}} strict-null-check
src\vs\workbench\contrib\notebook\browser\notebookService.ts:
204: return undefined; // {{SQL CARBON EDIT}} strict-null-check
src\vs\workbench\contrib\notebook\browser\contrib\notebookActions.ts:
412: return undefined; // {{SQL CARBON EDIT}} strict-null-check
417: return undefined; // {{SQL CARBON EDIT}} strict-null-check
479: return undefined; // {{SQL CARBON EDIT}} strict-null-check
484: return undefined; // {{SQL CARBON EDIT}} strict-null-check
src\vs\workbench\contrib\notebook\test\testNotebookEditor.ts:
186: return undefined; // {{SQL CARBON EDIT}} strict-null-check
src\vs\workbench\contrib\remote\browser\remote.ts:
544: return undefined; // {{SQL CARBON EDIT}} strict-null-check
563: return undefined; // {{SQL CARBON EDIT}} strict-null-check;
src\vs\workbench\contrib\remote\browser\tunnelView.ts:
589: const node: ITunnelItem | null = treeEvent.element as ITunnelItem | null; // {{SQL CARBON EDIT}} strict-null-check
src\vs\workbench\contrib\search\browser\anythingQuickAccess.ts:
631: return undefined; // {{SQL CARBON EDIT}} strict-null
636: return undefined; // {{SQL CARBON EDIT}} strict-null
641: return undefined; // {{SQL CARBON EDIT}} strict-null
651: return undefined; // {{SQL CARBON EDIT}} strict-null
663: return undefined; // {{SQL CARBON EDIT}} strict-null
668: return undefined; // {{SQL CARBON EDIT}} strict-null
698: return undefined; // {{SQL CARBON EDIT}} strict-null
src\vs\workbench\contrib\searchEditor\browser\searchEditor.ts:
335: return undefined; // {{SQL CARBON EDIT}} strict-null-checks
src\vs\workbench\contrib\searchEditor\browser\searchEditorInput.ts:
121: if ((await this.headerModel).isDisposed() || (await this.contentsModel).isDisposed()) { return undefined; } // {{SQL CARBON EDIT}} strict-null-check
src\vs\workbench\contrib\tasks\browser\abstractTaskService.ts:
565: return undefined; // {{SQL CARBON EDIT}} strict-null-checks
586: return undefined; // {{SQL CARBON EDIT}} strict-null-checks
src\vs\workbench\contrib\tasks\browser\taskQuickPick.ts:
204: return undefined; // {{SQL CARBON EDIT}} strict-null-checks
207: return undefined; // {{SQL CARBON EDIT}} strict-null-checks
src\vs\workbench\contrib\webview\browser\webviewWorkbenchService.ts:
148: return undefined; // {{SQL CARBON EDIT}} strict-null-checks
src\vs\workbench\electron-browser\desktop.main.ts:
283: return undefined; // {{SQL CARBON EDIT}} @anthonydresser strict-null-check
src\vs\workbench\services\dialogs\browser\simpleFileDialog.ts:
496: return undefined; // {{SQL CARBON EDIT}} @anthonydresser strict-null-check
502: return undefined; // {{SQL CARBON EDIT}} @anthonydresser strict-null-check
src\vs\workbench\services\dialogs\electron-browser\fileDialogService.ts:
127: return undefined; // {{SQL CARBON EDIT}} strict-null-check
151: return undefined; // {{SQL CARBON EDIT}} strict-null-check
src\vs\workbench\services\extensions\common\abstractExtensionService.ts:
235: result.push(new ExtensionPointContribution<T>(desc, desc.contributes[extPoint.name])); // {{SQL CARBON EDIT}} strict-null-checks
376: value: desc.contributes[extensionPoint.name], // {{SQL CARBON EDIT}} strict-null-checks
src\vs\workbench\services\textfile\browser\textFileService.ts:
221: return undefined; // user canceled // {{SQL CARBON EDIT}} strict-null-check
src\vs\workbench\services\textfile\common\textFileEditorModel.ts:
611: if ((this.saveSequentializer as TaskSequentializer).hasPending()) { // {{SQL CARBON EDIT}} strict-null-check
619: (this.saveSequentializer as TaskSequentializer).cancelPending(); // {{SQL CARBON EDIT}} strict-null-check
622: return (this.saveSequentializer as TaskSequentializer).setNext(() => this.doSave(options)); // {{SQL CARBON EDIT}} strict-null-check
633: return (this.saveSequentializer as TaskSequentializer).setPending(versionId, (async () => { // {{SQL CARBON EDIT}} strict-null-checks
667: return undefined; // {{SQL CARBON EDIT}} @anthonydresser strict-null-check
672: return undefined; // {{SQL CARBON EDIT}} @anthonydresser strict-null-check
src\vs\workbench\services\textfile\common\textfiles.ts:
421: isDirty(): boolean; // {{SQL CARBON EDIT}} strict-null-check
src\vs\workbench\services\themes\browser\workbenchThemeService.ts:
248: Theme(), initializeFileIconTheme(), initializeProductIconTheme()]) as Promise<[IWorkbenchColorTheme | null, IWorkbenchFileIconTheme | null, IWorkbenchProductIconTheme | null]>; // {{SQL CARBON EDIT}} strict-null-checks maybe?
src\vs\workbench\services\workspaces\browser\abstractWorkspaceEditingService.ts:
56: return undefined; // canceled {{SQL CARBON EDIT}} strict-null-checks
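
Most of these edits share one shape: a code path that previously fell through is given an explicit return undefined so the declared T | undefined contract holds up under --strictNullChecks. A hypothetical reduction of the pattern:

// Hypothetical reduction of the recurring {{SQL CARBON EDIT}} strict-null-check fix.
interface EditorInput { name: string }

function findInput(inputs: EditorInput[], name: string): EditorInput | undefined {
	for (const input of inputs) {
		if (input.name === name) {
			return input;
		}
	}
	return undefined; // explicit, rather than an implicit fall-through
}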


@@ -1,19 +1,10 @@
# Query: \\w+\\?\\.\\w+![(.[]
# Query: \\w+\\?\\..+![(.[]
# Flags: RegExp
# ContextLines: 2
2 results - 2 files
src/vs/base/browser/ui/tree/asyncDataTree.ts:
243 } : () => 'treeitem',
244 isChecked: options.accessibilityProvider!.isChecked ? (e) => {
245: return !!(options.accessibilityProvider?.isChecked!(e.element as T));
246 } : undefined,
247 getAriaLabel(e) {
src/vs/workbench/contrib/debug/browser/debugConfigurationManager.ts:
254
255 return debugDynamicExtensions.map(e => {
256: const type = e.contributes?.debuggers![0].type!;
257 return {
258 label: this.getDebuggerLabel(type)!,
270 } : undefined,
271 isChecked: options.ariaProvider!.isChecked ? (e) => {
272: return options.ariaProvider?.isChecked!(e.element as T);
273 } : undefined
274 },
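
The regex hunts for a risky idiom: a non-null assertion (!) applied inside an optional chain (?.). A hypothetical illustration of why it gets flagged:

// Hypothetical illustration of the idiom the search query above targets.
interface AriaProvider { isChecked?: (el: unknown) => boolean }
const options: { ariaProvider?: AriaProvider } = { ariaProvider: {} };

// options.ariaProvider?.isChecked!(e)
// `?.` only guards `ariaProvider`; if it exists but `isChecked` is undefined,
// the `!` assertion lets the call proceed and throw a TypeError at runtime.
const guarded = options.ariaProvider?.isChecked?.(42) ?? false; // fully guarded form
console.log(guarded);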

File diff suppressed because it is too large.

.vscode/settings.json (vendored, 14 changed lines)

@@ -7,8 +7,7 @@
"**/.DS_Store": true,
"build/**/*.js": {
"when": "$(basename).ts"
},
"src/vs/server": false
}
},
"files.associations": {
"cglicenses.json": "jsonc"
@@ -23,10 +22,7 @@
"i18n/**": true,
"extensions/**/out/**": true,
"test/smoke/out/**": true,
"test/automation/out/**": true,
"test/integration/browser/out/**": true,
"src/vs/base/test/node/uri.test.data.txt": true,
"src/vs/server": false
"src/vs/base/test/node/uri.test.data.txt": true
},
"lcov.path": [
"./.build/coverage/lcov.info",
@@ -71,9 +67,5 @@
"msjsdiag.debugger-for-chrome": "workspace"
},
"gulp.autoDetect": "off",
"files.insertFinalNewline": true,
"[typescript]": {
"editor.defaultFormatter": "vscode.typescript-language-features"
},
"typescript.tsc.autoDetect": "off"
"files.insertFinalNewline": true
}

.vscode/tasks.json (vendored, 26 changed lines)

@@ -3,7 +3,7 @@
"tasks": [
{
"type": "npm",
"script": "watchd",
"script": "watch",
"label": "Build VS Code",
"group": {
"kind": "build",
@@ -31,6 +31,20 @@
}
}
},
{
"type": "npm",
"script": "strict-function-types-watch",
"label": "TS - Strict Function Types",
"isBackground": true,
"presentation": {
"reveal": "never"
},
"problemMatcher": {
"base": "$tsc-watch",
"owner": "typescript-function-types",
"applyTo": "allDocuments"
}
},
{
"type": "npm",
"script": "strict-vscode-watch",
@@ -45,16 +59,6 @@
"applyTo": "allDocuments"
}
},
{
"type": "npm",
"script": "kill-watchd",
"label": "Kill Build VS Code",
"group": "build",
"presentation": {
"reveal": "never"
},
"problemMatcher": "$tsc"
},
{
"label": "Run tests",
"type": "shell",


@@ -1,3 +1,3 @@
disturl "https://atom.io/download/electron"
target "7.2.4"
target "6.1.6"
runtime "electron"


@@ -1,70 +1,5 @@
# Change Log
## Version 1.18.1
* Release date: May 27, 2020
* Release status: General Availability
* Hotfix for https://github.com/microsoft/azuredatastudio/issues/10538
* Hotfix for https://github.com/microsoft/azuredatastudio/issues/10537
## Version 1.18.0
* Release date: May 20, 2020
* Release status: General Availability
* Announcing Redgate SQL Prompt extension - This extension lets you manage formatting styles directly within Azure Data Studio, so you can create and edit your styles without leaving the IDE.
* Announcing the new machine learning extension. This extension enables you to:
* Manage Python and R packages with SQL Server machine learning services with Azure Data Studio.
* Use ONNX model to make predictions in Azure SQL Edge.
* View ONNX models in an Azure SQL Edge database.
* Import ONNX models from a file or Azure Machine Learning into Azure SQL Edge database.
* Create a notebook to run experiments.
* New notebook features:
* Added new Python dependencies wizard
* Improvements to the notebook markdown toolbar
* Added support for parameterization for Always Encrypted - Allows you to run queries that insert, update or filter by encrypted database columns.
* Bug fixes
## Version 1.17.1
* Release date: April 29, 2020
* Release status: General Availability
* Hotfix for https://github.com/microsoft/azuredatastudio/milestone/54?closed=1
## Version 1.17.0
* Release date: April 27, 2020
* Release status: General Availability
* New Welcome Page
* New Notebook features
* New Markdown editor toolbar
* Books viewlet now works with notebooks
* Improved dashboard
* Always encrypted support
* Accessibility bugs
* VS Code merge
## Version 1.16.0
* Release date: March 18, 2020
* Release status: General Availability
* Notebooks:
* Charting
* Creating Jupyter books
* Postgres extension update - Users can now authenticate to Azure Postgres servers with their linked Azure account
* Accessibility bugs
* VS Code merge
* In the next release, GitHub releases will no longer contain the binary files to the latest version. Rather, they will contain links to the latest release. This will have no impact on users using the in-app update functionality.
## Version 1.15.1
* Release date: February 19, 2020
* Release status: General Availability
* Resolved [#9145 Edit Data render the result grid incorrectly when using custom query](https://github.com/microsoft/azuredatastudio/issues/9145).
* Resolved [#9149 Show Active Connections](https://github.com/microsoft/azuredatastudio/issues/9149).
## Version 1.15.0
* Release date: February 13, 2020
* Release status: General Availability
* New Azure Sign-in improvement - Added improved Azure Sign-in experience, including removal of copy/paste of device code to make a more seamless connected experience.
* Find in Notebook support - Users can now use Ctrl+F inside of a notebook. Find in Notebook support searches line by line through both code and text cells.
* VS Code merge from 1.38 to 1.42 - This release includes updates to VS Code from the 3 previous VS Code releases. Read their [release notes](https://code.visualstudio.com/updates/v1_42) to learn more.
* Fix for the ["white/blank screen"](https://github.com/microsoft/azuredatastudio/issues/8775) issue reported by many users.
* Resolved [bugs and issues](https://github.com/microsoft/azuredatastudio/issues?q=is%3Aissue+label%3ABug+milestone%3A%22February+2020%22+is%3Aclosed).
## Version 1.14.1
* Release date: December 26, 2019
* Release status: General Availability


@@ -8,16 +8,15 @@ Azure Data Studio is a data management tool that enables you to work with SQL Se
## **Download the latest Azure Data Studio release**
| Platform |
| --------------------------------------- |
| [Windows User Installer][win-user] |
| [Windows System Installer][win-system] |
| [Windows ZIP][win-zip] |
| [macOS ZIP][osx-zip] |
| [Linux TAR.GZ][linux-zip] |
| [Linux RPM][linux-rpm] |
| [Linux DEB][linux-deb] |
Platform | Link
-- | --
Windows User Installer | https://go.microsoft.com/fwlink/?linkid=2113530
Windows System Installer | https://go.microsoft.com/fwlink/?linkid=2113628
Windows ZIP | https://go.microsoft.com/fwlink/?linkid=2113529
macOS ZIP | https://go.microsoft.com/fwlink/?linkid=2113528
Linux TAR.GZ | https://go.microsoft.com/fwlink/?linkid=2113627
Linux RPM | https://go.microsoft.com/fwlink/?linkid=2113718
Linux DEB | https://go.microsoft.com/fwlink/?linkid=2113344
Go to our [download page](https://aka.ms/azuredatastudio) for more specific instructions.
@@ -60,7 +59,9 @@ please see the document [How to Contribute](https://github.com/Microsoft/azureda
This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments.
## Localization
Azure Data Studio is localized into 10 languages: French, Italian, German, Spanish, Simplified Chinese, Traditional Chinese, Japanese, Korean, Russian, and Portuguese (Brazil). The language packs are available in the Extension Manager marketplace. Simply, search for the specific language using the extension marketplace and install. Once you install the selected language, Azure Data Studio will prompt you to restart with the new language.
Azure Data Studio localization is now open for community contributions. You can contribute to localization for both software and docs. https://aka.ms/SQLOpsStudioLoc
Localization is now opened for 10 languages: French, Italian, German, Spanish, Simplified Chinese, Traditional Chinese, Japanese, Korean, Russian, and Portuguese (Brazil). Help us make Azure Data Studio available in your language!
## Privacy Statement
The [Microsoft Enterprise and Developer Privacy Statement](https://privacy.microsoft.com/en-us/privacystatement) describes the privacy statement of this software.
@@ -120,6 +121,17 @@ We would like to thank all our users who raised issues, and in particular the fo
* SebastianPfliegel `Remove sqlExtensionHelp (#312)`
* olljanat for `Implemented npm version check (#314)`
* Adam Machanic for helping with the `whoisactive` extension
* All community localization contributors:
* French: Adrien Clerbois, ANAS BELABBES, Antoine Griffard, Arian Papillon, Eric Macarez, Eric Van Thorre, Jérémy LANDON, Matthias GROSPERRIN, Maxime COQUEREL, Olivier Guinart, thierry DEMAN-BARCELÒ, Thomas Potier
* Italian: Aldo Donetti, Alessandro Alpi, Andrea Dottor, Bruni Luca, Gianluca Hotz, Luca Nardi, Luigi Bruno, Marco Dal Pino, Mirco Vanini, Pasquale Ceglie, Riccardo Cappello, Sergio Govoni, Stefano Demiliani
* German: Anna Henke-Gunvaldson, Ben Weissman, David Ullmer, J.M. ., Kai Modo, Konstantin Staschill, Kostja Klein, Lennart Trunk, Markus Ehrenmüller-Jensen, Mascha Kroenlein, Matthias Knoll, Mourad Louha, Thomas Hütter, Wolfgang Straßer
* Spanish: Alberto Poblacion, Andy Gonzalez, Carlos Mendible, Christian Araujo, Daniel D, Eickhel Mendoza, Ernesto Cardenas, Ivan Toledo Ivanovic, Fran Diaz, JESUS GIL, Jorge Serrano Pérez, José Saturnino Pimentel Juárez, Mauricio Hidalgo, Pablo Iglesias, Rikhardo Estrada Rdez, Thierry DEMAN, YOLANDA CUESTA ALTIERI
* Japanese: Fujio Kojima, Kazushi KAMEGAWA, Masayoshi Yamada, Masayuki Ozawa, Seiji Momoto, Takashi Kanai, Takayoshi Tanaka, Yoshihisa Ozaki, 庄垣内治
* Chinese (simplified): DAN YE, Joel Yang, Lynne Dong, RyanYu Zhang, Sheng Jiang, Wei Zhang, Zhiliang Xu
* Chinese (Traditional): Bruce Chen, Chiayi Yen, Kevin Yang, Winnie Lin, 保哥 Will, 謝政廷
* Korean: Do-Kyun Kim, Evelyn Kim, Helen Jung, Hong Jmee, jeongwoo choi, Jun Hyoung Lee, Jungsun Kim정선, Justin Yoo, Kavrith mucha, Kiwoong Youm, MinGyu Ju, MVP_JUNO BEA, Sejun Kim, SOONMAN KWON, sung man ko, Yeongrak Choi, younggun kim, Youngjae Kim, 소영 이
* Russian: Andrey Veselov, Anton Fontanov, Anton Savin, Elena Ostrovskaia, Igor Babichev, Maxim Zelensky, Rodion Fedechkin, Tasha T, Vladimir Zyryanov
* Portuguese Brazil: Daniel de Sousa, Diogo Duarte, Douglas Correa, Douglas Eccker, José Emanuel Mendes, Marcelo Fernandes, Marcondes Alexandre, Roberto Fonseca, Rodrigo Crespi
And of course, we'd like to thank the authors of all upstream dependencies. Please see a full list in the [ThirdPartyNotices.txt](https://raw.githubusercontent.com/Microsoft/azuredatastudio/master/ThirdPartyNotices.txt)
@@ -128,11 +140,3 @@ And of course, we'd like to thank the authors of all upstream dependencies. Ple
Copyright (c) Microsoft Corporation. All rights reserved.
Licensed under the [Source EULA](LICENSE.txt).
[win-user]: https://go.microsoft.com/fwlink/?linkid=2127522
[win-system]: https://go.microsoft.com/fwlink/?linkid=2127432
[win-zip]: https://go.microsoft.com/fwlink/?linkid=2127716
[osx-zip]: https://go.microsoft.com/fwlink/?linkid=2127431
[linux-zip]: https://go.microsoft.com/fwlink/?linkid=2127523
[linux-rpm]: https://go.microsoft.com/fwlink/?linkid=2127433
[linux-deb]: https://go.microsoft.com/fwlink/?linkid=2127524


@@ -12,7 +12,6 @@ expressly granted herein, whether by implication, estoppel or otherwise.
angular2-grid: https://github.com/BTMorton/angular2-grid
angular2-slickgrid: https://github.com/Microsoft/angular2-slickgrid
applicationinsights: https://github.com/Microsoft/ApplicationInsights-node.js
axios: https://github.com/axios/axios
bootstrap: https://github.com/twbs/bootstrap
chart.js: https://github.com/Timer/chartjs
chokidar: https://github.com/paulmillr/chokidar
@@ -39,7 +38,6 @@ expressly granted herein, whether by implication, estoppel or otherwise.
jschardet: https://github.com/aadsm/jschardet
jupyter-powershell: https://github.com/vors/jupyter-powershell
JupyterLab: https://github.com/jupyterlab/jupyterlab
keytar: https://github.com/atom/node-keytar
make-error: https://github.com/JsCommunity/make-error
minimist: https://github.com/substack/minimist
moment: https://github.com/moment/moment
@@ -49,11 +47,9 @@ expressly granted herein, whether by implication, estoppel or otherwise.
node-fetch: https://github.com/bitinn/node-fetch
node-pty: https://github.com/Tyriar/node-pty
nsfw: https://github.com/Axosoft/nsfw
optimist: https://github.com/substack/node-optimist
primeng: https://github.com/primefaces/primeng
process-nextick-args: https://github.com/calvinmetcalf/process-nextick-args
pty.js: https://github.com/chjj/pty.js
qs: https://github.com/ljharb/qs
reflect-metadata: https://github.com/rbuckton/reflect-metadata
request: https://github.com/request/request
rxjs: https://github.com/ReactiveX/RxJS
@@ -73,7 +69,6 @@ expressly granted herein, whether by implication, estoppel or otherwise.
vscode-textmate: https://github.com/Microsoft/vscode-textmate
winreg: https://github.com/fresc81/node-winreg
xterm: https://github.com/sourcelair/xterm.js
yargs: https://github.com/yargs/yargs
yauzl: https://github.com/thejoshwolfe/yauzl
zone.js: https://www.npmjs.com/package/zone
@@ -1453,32 +1448,6 @@ SOFTWARE.
=========================================
END OF nsfw NOTICES AND INFORMATION
%% optimist NOTICES AND INFORMATION BEGIN HERE
=========================================
Copyright 2010 James Halliday (mail@substack.net)
This project is free software released under the MIT/X11 license:
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
=========================================
END OF optimist NOTICES AND INFORMATION
%% primeng NOTICES AND INFORMATION BEGIN HERE
=========================================
The MIT License (MIT)
@@ -2250,32 +2219,6 @@ THE SOFTWARE.
=========================================
END OF xterm NOTICES AND INFORMATION
%% yargs NOTICES AND INFORMATION BEGIN HERE
=========================================
MIT License
Copyright 2010 James Halliday (mail@substack.net); Modified work Copyright 2014 Contributors (ben@npmjs.com)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
=========================================
END OF yargs NOTICES AND INFORMATION
%% yauzl NOTICES AND INFORMATION BEGIN HERE
=========================================
The MIT License (MIT)


@@ -1 +1 @@
2020-04-29T05:20:58.491Z
2019-12-01T02:20:58.491Z


@@ -19,6 +19,13 @@ vscode-sqlite3/build/**
vscode-sqlite3/src/**
!vscode-sqlite3/build/Release/*.node
oniguruma/binding.gyp
oniguruma/build/**
oniguruma/src/**
oniguruma/deps/**
!oniguruma/build/Release/*.node
!oniguruma/src/*.js
windows-mutex/binding.gyp
windows-mutex/build/**
windows-mutex/src/**
@@ -76,6 +83,11 @@ node-pty/deps/**
!node-pty/build/Release/*.dll
!node-pty/build/Release/*.node
emmet/node_modules/**
pty.js/build/**
!pty.js/build/Release/**
# START SQL Modules
@angular/**/src/**
@@ -96,17 +108,15 @@ jquery-ui/demos/**
slickgrid/node_modules/**
slickgrid/examples/**
kerberos/build/**
# END SQL Modules
vscode-nsfw/binding.gyp
vscode-nsfw/build/**
vscode-nsfw/src/**
vscode-nsfw/openpa/**
vscode-nsfw/includes/**
!vscode-nsfw/build/Release/*.node
!vscode-nsfw/**/*.a
nsfw/binding.gyp
nsfw/build/**
nsfw/src/**
nsfw/openpa/**
nsfw/includes/**
!nsfw/build/Release/*.node
!nsfw/**/*.a
vsda/build/**
vsda/ci/**


@@ -1,17 +0,0 @@
{
"env": {
"commonjs": true,
"es6": true,
"node": true
},
"extends": "eslint:recommended",
"globals": {
"Atomics": "readonly",
"SharedArrayBuffer": "readonly"
},
"parserOptions": {
"ecmaVersion": 2018
},
"rules": {
}
}


@@ -1,2 +0,0 @@
node_modules
*.js.map


@@ -1,96 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
export interface GitHub {
query(query: Query): AsyncIterableIterator<GitHubIssue[]>
hasWriteAccess(user: User): Promise<boolean>
repoHasLabel(label: string): Promise<boolean>
createLabel(label: string, color: string, description: string): Promise<void>
deleteLabel(label: string): Promise<void>
readConfig(path: string): Promise<any>
createIssue(owner: string, repo: string, title: string, body: string): Promise<void>
releaseContainsCommit(release: string, commit: string): Promise<boolean>
}
export interface GitHubIssue extends GitHub {
getIssue(): Promise<Issue>
postComment(body: string): Promise<void>
deleteComment(id: number): Promise<void>
getComments(last?: boolean): AsyncIterableIterator<Comment[]>
closeIssue(): Promise<void>
lockIssue(): Promise<void>
setMilestone(milestoneId: number): Promise<void>
addLabel(label: string): Promise<void>
removeLabel(label: string): Promise<void>
addAssignee(assignee: string): Promise<void>
getClosingInfo(): Promise<{ hash: string | undefined; timestamp: number } | undefined>
}
type SortVar =
| 'comments'
| 'reactions'
| 'reactions-+1'
| 'reactions--1'
| 'reactions-smile'
| 'reactions-thinking_face'
| 'reactions-heart'
| 'reactions-tada'
| 'interactions'
| 'created'
| 'updated'
type SortOrder = 'asc' | 'desc'
export type Reactions = {
'+1': number
'-1': number
laugh: number
hooray: number
confused: number
heart: number
rocket: number
eyes: number
}
export interface User {
name: string
isGitHubApp?: boolean
}
export interface Comment {
author: User
body: string
id: number
timestamp: number
}
export interface Issue {
author: User
body: string
title: string
labels: string[]
open: boolean
locked: boolean
number: number
numComments: number
reactions: Reactions
milestoneId: number | null
assignee?: string
createdAt: number
updatedAt: number
closedAt?: number
}
export interface Query {
q: string
sort?: SortVar
order?: SortOrder
}


@@ -1,293 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const core_1 = require("@actions/core");
const github_1 = require("@actions/github");
const child_process_1 = require("child_process");
const utils_1 = require("../utils/utils");
class OctoKit {
constructor(token, params, options = { readonly: false }) {
this.token = token;
this.params = params;
this.options = options;
// when in readonly mode, record labels just created so as to not throw unnecessary errors
this.mockLabels = new Set();
this.writeAccessCache = {};
this.octokit = new github_1.GitHub(token);
}
async *query(query) {
const q = query.q + ` repo:${this.params.owner}/${this.params.repo}`;
console.log(`Querying for ${q}:`);
const options = this.octokit.search.issuesAndPullRequests.endpoint.merge({
...query,
q,
per_page: 100,
headers: { Accept: 'application/vnd.github.squirrel-girl-preview+json' },
});
let pageNum = 0;
const timeout = async () => {
if (pageNum < 2) {
/* pass */
}
else if (pageNum < 4) {
await new Promise((resolve) => setTimeout(resolve, 3000));
}
else {
await new Promise((resolve) => setTimeout(resolve, 30000));
}
};
for await (const pageResponse of this.octokit.paginate.iterator(options)) {
await timeout();
await utils_1.logRateLimit(this.token);
const page = pageResponse.data;
console.log(`Page ${++pageNum}: ${page.map(({ number }) => number).join(' ')}`);
yield page.map((issue) => new OctoKitIssue(this.token, this.params, this.octokitIssueToIssue(issue)));
}
}
async createIssue(owner, repo, title, body) {
core_1.debug(`Creating issue \`${title}\` on ${owner}/${repo}`);
if (!this.options.readonly)
await this.octokit.issues.create({ owner, repo, title, body });
}
octokitIssueToIssue(issue) {
var _a, _b, _c, _d, _e, _f;
return {
author: { name: issue.user.login, isGitHubApp: issue.user.type === 'Bot' },
body: issue.body,
number: issue.number,
title: issue.title,
labels: issue.labels.map((label) => label.name),
open: issue.state === 'open',
locked: issue.locked,
numComments: issue.comments,
reactions: issue.reactions,
assignee: (_b = (_a = issue.assignee) === null || _a === void 0 ? void 0 : _a.login) !== null && _b !== void 0 ? _b : (_d = (_c = issue.assignees) === null || _c === void 0 ? void 0 : _c[0]) === null || _d === void 0 ? void 0 : _d.login,
milestoneId: (_f = (_e = issue.milestone) === null || _e === void 0 ? void 0 : _e.number) !== null && _f !== void 0 ? _f : null,
createdAt: +new Date(issue.created_at),
updatedAt: +new Date(issue.updated_at),
closedAt: issue.closed_at ? +new Date(issue.closed_at) : undefined,
};
}
async hasWriteAccess(user) {
if (user.name in this.writeAccessCache) {
core_1.debug('Got permissions from cache for ' + user);
return this.writeAccessCache[user.name];
}
core_1.debug('Fetching permissions for ' + user);
const permissions = (await this.octokit.repos.getCollaboratorPermissionLevel({
...this.params,
username: user.name,
})).data.permission;
return (this.writeAccessCache[user.name] = permissions === 'admin' || permissions === 'write');
}
async repoHasLabel(name) {
try {
await this.octokit.issues.getLabel({ ...this.params, name });
return true;
}
catch (err) {
if (err.status === 404) {
return this.options.readonly && this.mockLabels.has(name);
}
throw err;
}
}
async createLabel(name, color, description) {
core_1.debug('Creating label ' + name);
if (!this.options.readonly)
await this.octokit.issues.createLabel({ ...this.params, color, description, name });
else
this.mockLabels.add(name);
}
async deleteLabel(name) {
core_1.debug('Deleting label ' + name);
try {
if (!this.options.readonly)
await this.octokit.issues.deleteLabel({ ...this.params, name });
}
catch (err) {
if (err.status === 404) {
return;
}
throw err;
}
}
async readConfig(path) {
core_1.debug('Reading config at ' + path);
const repoPath = `.github/${path}.json`;
const data = (await this.octokit.repos.getContents({ ...this.params, path: repoPath })).data;
if ('type' in data && data.type === 'file') {
if (data.encoding === 'base64' && data.content) {
return JSON.parse(Buffer.from(data.content, 'base64').toString('utf-8'));
}
throw Error(`Could not read contents "${data.content}" in encoding "${data.encoding}"`);
}
throw Error('Found directory at config path when expecting file' + JSON.stringify(data));
}
async releaseContainsCommit(release, commit) {
if (utils_1.getInput('commitReleasedDebuggingOverride')) {
return true;
}
return new Promise((resolve, reject) => child_process_1.exec(`git -C ./repo merge-base --is-ancestor ${commit} ${release}`, (err) => !err || err.code === 1 ? resolve(!err) : reject(err)));
}
}
exports.OctoKit = OctoKit;
class OctoKitIssue extends OctoKit {
constructor(token, params, issueData, options = { readonly: false }) {
super(token, params, options);
this.params = params;
this.issueData = issueData;
}
async addAssignee(assignee) {
core_1.debug('Adding assignee ' + assignee + ' to ' + this.issueData.number);
if (!this.options.readonly) {
await this.octokit.issues.addAssignees({
...this.params,
issue_number: this.issueData.number,
assignees: [assignee],
});
}
}
async closeIssue() {
core_1.debug('Closing issue ' + this.issueData.number);
if (!this.options.readonly)
await this.octokit.issues.update({
...this.params,
issue_number: this.issueData.number,
state: 'closed',
});
}
async lockIssue() {
core_1.debug('Locking issue ' + this.issueData.number);
if (!this.options.readonly)
await this.octokit.issues.lock({ ...this.params, issue_number: this.issueData.number });
}
async getIssue() {
if (isIssue(this.issueData)) {
core_1.debug('Got issue data from query result ' + this.issueData.number);
return this.issueData;
}
console.log('Fetching issue ' + this.issueData.number);
const issue = (await this.octokit.issues.get({
...this.params,
issue_number: this.issueData.number,
mediaType: { previews: ['squirrel-girl'] },
})).data;
return (this.issueData = this.octokitIssueToIssue(issue));
}
async postComment(body) {
core_1.debug(`Posting comment ${body} on ${this.issueData.number}`);
if (!this.options.readonly)
await this.octokit.issues.createComment({
...this.params,
issue_number: this.issueData.number,
body,
});
}
async deleteComment(id) {
core_1.debug(`Deleting comment ${id} on ${this.issueData.number}`);
if (!this.options.readonly)
await this.octokit.issues.deleteComment({
owner: this.params.owner,
repo: this.params.repo,
comment_id: id,
});
}
async setMilestone(milestoneId) {
core_1.debug(`Setting milestone for ${this.issueData.number} to ${milestoneId}`);
if (!this.options.readonly)
await this.octokit.issues.update({
...this.params,
issue_number: this.issueData.number,
milestone: milestoneId,
});
}
async *getComments(last) {
core_1.debug('Fetching comments for ' + this.issueData.number);
const response = this.octokit.paginate.iterator(this.octokit.issues.listComments.endpoint.merge({
...this.params,
issue_number: this.issueData.number,
per_page: 100,
...(last ? { per_page: 1, page: (await this.getIssue()).numComments } : {}),
}));
for await (const page of response) {
yield page.data.map((comment) => ({
author: { name: comment.user.login, isGitHubApp: comment.user.type === 'Bot' },
body: comment.body,
id: comment.id,
timestamp: +new Date(comment.created_at),
}));
}
}
async addLabel(name) {
core_1.debug(`Adding label ${name} to ${this.issueData.number}`);
if (!(await this.repoHasLabel(name))) {
throw Error(`Action could not execute because label ${name} is not defined.`);
}
if (!this.options.readonly)
await this.octokit.issues.addLabels({
...this.params,
issue_number: this.issueData.number,
labels: [name],
});
}
async removeLabel(name) {
core_1.debug(`Removing label ${name} from ${this.issueData.number}`);
try {
if (!this.options.readonly)
await this.octokit.issues.removeLabel({
...this.params,
issue_number: this.issueData.number,
name,
});
}
catch (err) {
if (err.status === 404) {
console.log(`Label ${name} not found on issue`);
return;
}
throw err;
}
}
async getClosingInfo() {
var _a;
if ((await this.getIssue()).open) {
return;
}
const options = this.octokit.issues.listEventsForTimeline.endpoint.merge({
...this.params,
issue_number: this.issueData.number,
});
let closingCommit;
for await (const event of this.octokit.paginate.iterator(options)) {
const timelineEvents = event.data;
for (const timelineEvent of timelineEvents) {
if (timelineEvent.event === 'closed') {
closingCommit = {
hash: (_a = timelineEvent.commit_id) !== null && _a !== void 0 ? _a : undefined,
timestamp: +new Date(timelineEvent.created_at),
};
}
}
}
console.log(`Got ${JSON.stringify(closingCommit)} as closing commit of ${this.issueData.number}`);
return closingCommit;
}
}
exports.OctoKitIssue = OctoKitIssue;
function isIssue(object) {
const isIssue = 'author' in object &&
'body' in object &&
'title' in object &&
'labels' in object &&
'open' in object &&
'locked' in object &&
'number' in object &&
'numComments' in object &&
'reactions' in object &&
'milestoneId' in object;
return isIssue;
}
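A note on the _a/_b temporaries that appear throughout this compiled file: with target es2019 (see the tsconfig.json further down), tsc down-levels optional chaining (?.) and nullish coalescing (??) into exactly these chains. A minimal sketch of the correspondence, using a hypothetical issue value:

// Sketch only: both forms compute the same assignee login.
declare const issue: { assignee?: { login: string }; assignees?: { login: string }[] }

// Source form, as written in octokit.ts below:
const fromSource = issue.assignee?.login ?? issue.assignees?.[0]?.login

// Emitted form, as it appears in this compiled file:
let _a: any, _b: any, _c: any, _d: any
const fromEmit = (_b = (_a = issue.assignee) === null || _a === void 0 ? void 0 : _a.login) !== null && _b !== void 0 ? _b : (_d = (_c = issue.assignees) === null || _c === void 0 ? void 0 : _c[0]) === null || _d === void 0 ? void 0 : _d.login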

View File

@@ -1,336 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { debug } from '@actions/core'
import { GitHub as GitHubAPI } from '@actions/github'
import { Octokit } from '@octokit/rest'
import { exec } from 'child_process'
import { getInput, logRateLimit } from '../utils/utils'
import { Comment, GitHub, GitHubIssue, Issue, Query, User } from './api'
export class OctoKit implements GitHub {
protected octokit: GitHubAPI
// when in readonly mode, record labels just created so as to not throw unnecessary errors
protected mockLabels: Set<string> = new Set()
constructor(
private token: string,
protected params: { repo: string; owner: string },
protected options: { readonly: boolean } = { readonly: false },
) {
this.octokit = new GitHubAPI(token)
}
async *query(query: Query): AsyncIterableIterator<GitHubIssue[]> {
const q = query.q + ` repo:${this.params.owner}/${this.params.repo}`
console.log(`Querying for ${q}:`)
const options = this.octokit.search.issuesAndPullRequests.endpoint.merge({
...query,
q,
per_page: 100,
headers: { Accept: 'application/vnd.github.squirrel-girl-preview+json' },
})
let pageNum = 0
const timeout = async () => {
if (pageNum < 2) {
/* pass */
} else if (pageNum < 4) {
await new Promise((resolve) => setTimeout(resolve, 3000))
} else {
await new Promise((resolve) => setTimeout(resolve, 30000))
}
}
for await (const pageResponse of this.octokit.paginate.iterator(options)) {
await timeout()
await logRateLimit(this.token)
const page: Array<Octokit.SearchIssuesAndPullRequestsResponseItemsItem> = pageResponse.data
console.log(`Page ${++pageNum}: ${page.map(({ number }) => number).join(' ')}`)
yield page.map(
(issue) => new OctoKitIssue(this.token, this.params, this.octokitIssueToIssue(issue)),
)
}
}
async createIssue(owner: string, repo: string, title: string, body: string): Promise<void> {
debug(`Creating issue \`${title}\` on ${owner}/${repo}`)
if (!this.options.readonly) await this.octokit.issues.create({ owner, repo, title, body })
}
protected octokitIssueToIssue(
issue: Octokit.IssuesGetResponse | Octokit.SearchIssuesAndPullRequestsResponseItemsItem,
): Issue {
return {
author: { name: issue.user.login, isGitHubApp: issue.user.type === 'Bot' },
body: issue.body,
number: issue.number,
title: issue.title,
labels: (issue.labels as Octokit.IssuesGetLabelResponse[]).map((label) => label.name),
open: issue.state === 'open',
locked: (issue as any).locked,
numComments: issue.comments,
reactions: (issue as any).reactions,
assignee: issue.assignee?.login ?? (issue as any).assignees?.[0]?.login,
milestoneId: issue.milestone?.number ?? null,
createdAt: +new Date(issue.created_at),
updatedAt: +new Date(issue.updated_at),
closedAt: issue.closed_at ? +new Date((issue.closed_at as unknown) as string) : undefined,
}
}
private writeAccessCache: Record<string, boolean> = {}
async hasWriteAccess(user: User): Promise<boolean> {
if (user.name in this.writeAccessCache) {
debug('Got permissions from cache for ' + user.name)
return this.writeAccessCache[user.name]
}
debug('Fetching permissions for ' + user.name)
const permissions = (
await this.octokit.repos.getCollaboratorPermissionLevel({
...this.params,
username: user.name,
})
).data.permission
return (this.writeAccessCache[user.name] = permissions === 'admin' || permissions === 'write')
}
async repoHasLabel(name: string): Promise<boolean> {
try {
await this.octokit.issues.getLabel({ ...this.params, name })
return true
} catch (err) {
if (err.status === 404) {
return this.options.readonly && this.mockLabels.has(name)
}
throw err
}
}
async createLabel(name: string, color: string, description: string): Promise<void> {
debug('Creating label ' + name)
if (!this.options.readonly)
await this.octokit.issues.createLabel({ ...this.params, color, description, name })
else this.mockLabels.add(name)
}
async deleteLabel(name: string): Promise<void> {
debug('Deleting label ' + name)
try {
if (!this.options.readonly) await this.octokit.issues.deleteLabel({ ...this.params, name })
} catch (err) {
if (err.status === 404) {
return
}
throw err
}
}
async readConfig(path: string): Promise<any> {
debug('Reading config at ' + path)
const repoPath = `.github/${path}.json`
const data = (await this.octokit.repos.getContents({ ...this.params, path: repoPath })).data
if ('type' in data && data.type === 'file') {
if (data.encoding === 'base64' && data.content) {
return JSON.parse(Buffer.from(data.content, 'base64').toString('utf-8'))
}
throw Error(`Could not read contents "${data.content}" in encoding "${data.encoding}"`)
}
throw Error('Found directory at config path when expecting file: ' + JSON.stringify(data))
}
async releaseContainsCommit(release: string, commit: string): Promise<boolean> {
if (getInput('commitReleasedDebuggingOverride')) {
return true
}
return new Promise((resolve, reject) =>
exec(`git -C ./repo merge-base --is-ancestor ${commit} ${release}`, (err) =>
!err || err.code === 1 ? resolve(!err) : reject(err),
),
)
}
}
export class OctoKitIssue extends OctoKit implements GitHubIssue {
constructor(
token: string,
protected params: { repo: string; owner: string },
private issueData: { number: number } | Issue,
options: { readonly: boolean } = { readonly: false },
) {
super(token, params, options)
}
async addAssignee(assignee: string): Promise<void> {
debug('Adding assignee ' + assignee + ' to ' + this.issueData.number)
if (!this.options.readonly) {
await this.octokit.issues.addAssignees({
...this.params,
issue_number: this.issueData.number,
assignees: [assignee],
})
}
}
async closeIssue(): Promise<void> {
debug('Closing issue ' + this.issueData.number)
if (!this.options.readonly)
await this.octokit.issues.update({
...this.params,
issue_number: this.issueData.number,
state: 'closed',
})
}
async lockIssue(): Promise<void> {
debug('Locking issue ' + this.issueData.number)
if (!this.options.readonly)
await this.octokit.issues.lock({ ...this.params, issue_number: this.issueData.number })
}
async getIssue(): Promise<Issue> {
if (isIssue(this.issueData)) {
debug('Got issue data from query result ' + this.issueData.number)
return this.issueData
}
console.log('Fetching issue ' + this.issueData.number)
const issue = (
await this.octokit.issues.get({
...this.params,
issue_number: this.issueData.number,
mediaType: { previews: ['squirrel-girl'] },
})
).data
return (this.issueData = this.octokitIssueToIssue(issue))
}
async postComment(body: string): Promise<void> {
debug(`Posting comment ${body} on ${this.issueData.number}`)
if (!this.options.readonly)
await this.octokit.issues.createComment({
...this.params,
issue_number: this.issueData.number,
body,
})
}
async deleteComment(id: number): Promise<void> {
debug(`Deleting comment ${id} on ${this.issueData.number}`)
if (!this.options.readonly)
await this.octokit.issues.deleteComment({
owner: this.params.owner,
repo: this.params.repo,
comment_id: id,
})
}
async setMilestone(milestoneId: number) {
debug(`Setting milestone for ${this.issueData.number} to ${milestoneId}`)
if (!this.options.readonly)
await this.octokit.issues.update({
...this.params,
issue_number: this.issueData.number,
milestone: milestoneId,
})
}
async *getComments(last?: boolean): AsyncIterableIterator<Comment[]> {
debug('Fetching comments for ' + this.issueData.number)
const response = this.octokit.paginate.iterator(
this.octokit.issues.listComments.endpoint.merge({
...this.params,
issue_number: this.issueData.number,
per_page: 100,
...(last ? { per_page: 1, page: (await this.getIssue()).numComments } : {}),
}),
)
for await (const page of response) {
yield (page.data as Octokit.IssuesListCommentsResponseItem[]).map((comment) => ({
author: { name: comment.user.login, isGitHubApp: comment.user.type === 'Bot' },
body: comment.body,
id: comment.id,
timestamp: +new Date(comment.created_at),
}))
}
}
async addLabel(name: string): Promise<void> {
debug(`Adding label ${name} to ${this.issueData.number}`)
if (!(await this.repoHasLabel(name))) {
throw Error(`Action could not execute because label ${name} is not defined.`)
}
if (!this.options.readonly)
await this.octokit.issues.addLabels({
...this.params,
issue_number: this.issueData.number,
labels: [name],
})
}
async removeLabel(name: string): Promise<void> {
debug(`Removing label ${name} from ${this.issueData.number}`)
try {
if (!this.options.readonly)
await this.octokit.issues.removeLabel({
...this.params,
issue_number: this.issueData.number,
name,
})
} catch (err) {
if (err.status === 404) {
console.log(`Label ${name} not found on issue`)
return
}
throw err
}
}
async getClosingInfo(): Promise<{ hash: string | undefined; timestamp: number } | undefined> {
if ((await this.getIssue()).open) {
return
}
const options = this.octokit.issues.listEventsForTimeline.endpoint.merge({
...this.params,
issue_number: this.issueData.number,
})
let closingCommit: { hash: string | undefined; timestamp: number } | undefined
for await (const event of this.octokit.paginate.iterator(options)) {
const timelineEvents = event.data as Octokit.IssuesListEventsForTimelineResponseItem[]
for (const timelineEvent of timelineEvents) {
if (timelineEvent.event === 'closed') {
closingCommit = {
hash: timelineEvent.commit_id ?? undefined,
timestamp: +new Date(timelineEvent.created_at),
}
}
}
}
console.log(`Got ${JSON.stringify(closingCommit)} as closing commit of ${this.issueData.number}`)
return closingCommit
}
}
function isIssue(object: any): object is Issue {
const isIssue =
'author' in object &&
'body' in object &&
'title' in object &&
'labels' in object &&
'open' in object &&
'locked' in object &&
'number' in object &&
'numComments' in object &&
'reactions' in object &&
'milestoneId' in object
return isIssue
}
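For orientation, a rough consumer of the two classes above might look like the following sketch. The token, repo, and label names are hypothetical, and the Query argument is assumed to be just { q }; query() yields search results a page at a time, and readonly mode suppresses all writes:

import { OctoKit, OctoKitIssue } from './api/octokit'

const token = process.env.GITHUB_TOKEN! // hypothetical token source
const params = { owner: 'microsoft', repo: 'azuredatastudio' }

async function demo() {
	// Page through a search; each yielded page is an array of GitHubIssue wrappers.
	const github = new OctoKit(token, params, { readonly: true })
	for await (const page of github.query({ q: 'is:open label:bug' })) {
		for (const wrapped of page) {
			console.log((await wrapped.getIssue()).title)
		}
	}
	// Operate on one issue by number; addLabel throws unless the label exists on the repo.
	const issue = new OctoKitIssue(token, params, { number: 1 }, { readonly: true })
	await issue.addLabel('triage-needed')
}

demo().catch(console.error)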

View File

@@ -1,123 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
class Testbed {
constructor(config) {
var _a, _b, _c, _d, _e;
this.config = {
globalLabels: (_a = config === null || config === void 0 ? void 0 : config.globalLabels) !== null && _a !== void 0 ? _a : [],
configs: (_b = config === null || config === void 0 ? void 0 : config.configs) !== null && _b !== void 0 ? _b : {},
writers: (_c = config === null || config === void 0 ? void 0 : config.writers) !== null && _c !== void 0 ? _c : [],
releasedCommits: (_d = config === null || config === void 0 ? void 0 : config.releasedCommits) !== null && _d !== void 0 ? _d : [],
queryRunner: (_e = config === null || config === void 0 ? void 0 : config.queryRunner) !== null && _e !== void 0 ? _e : async function* () {
yield [];
},
};
}
async *query(query) {
for await (const page of this.config.queryRunner(query)) {
yield page.map((issue) => issue instanceof TestbedIssue ? issue : new TestbedIssue(this.config, issue));
}
}
async createIssue(_owner, _repo, _title, _body) {
// pass...
}
async readConfig(path) {
return JSON.parse(JSON.stringify(this.config.configs[path]));
}
async hasWriteAccess(user) {
return this.config.writers.includes(user.name);
}
async repoHasLabel(label) {
return this.config.globalLabels.includes(label);
}
async createLabel(label, _color, _description) {
this.config.globalLabels.push(label);
}
async deleteLabel(labelToDelete) {
this.config.globalLabels = this.config.globalLabels.filter((label) => label !== labelToDelete);
}
async releaseContainsCommit(_release, commit) {
return this.config.releasedCommits.includes(commit);
}
}
exports.Testbed = Testbed;
class TestbedIssue extends Testbed {
constructor(globalConfig, issueConfig) {
var _a, _b, _c;
super(globalConfig);
issueConfig = issueConfig !== null && issueConfig !== void 0 ? issueConfig : {};
issueConfig.comments = (_a = issueConfig === null || issueConfig === void 0 ? void 0 : issueConfig.comments) !== null && _a !== void 0 ? _a : [];
issueConfig.labels = (_b = issueConfig === null || issueConfig === void 0 ? void 0 : issueConfig.labels) !== null && _b !== void 0 ? _b : [];
issueConfig.issue = {
author: { name: 'JacksonKearl' },
body: 'issue body',
locked: false,
numComments: ((_c = issueConfig === null || issueConfig === void 0 ? void 0 : issueConfig.comments) === null || _c === void 0 ? void 0 : _c.length) || 0,
number: 1,
open: true,
title: 'issue title',
assignee: undefined,
reactions: {
'+1': 0,
'-1': 0,
confused: 0,
eyes: 0,
heart: 0,
hooray: 0,
laugh: 0,
rocket: 0,
},
closedAt: undefined,
createdAt: +new Date(),
updatedAt: +new Date(),
...issueConfig.issue,
};
this.issueConfig = issueConfig;
}
async addAssignee(assignee) {
this.issueConfig.issue.assignee = assignee;
}
async setMilestone(milestoneId) {
this.issueConfig.issue.milestoneId = milestoneId;
}
async getIssue() {
const labels = [...this.issueConfig.labels];
return { ...this.issueConfig.issue, labels };
}
async postComment(body, author) {
this.issueConfig.comments.push({
author: { name: author !== null && author !== void 0 ? author : 'bot' },
body,
id: Math.random(),
timestamp: +new Date(),
});
}
async deleteComment(id) {
this.issueConfig.comments = this.issueConfig.comments.filter((comment) => comment.id !== id);
}
async *getComments(last) {
yield last
? [this.issueConfig.comments[this.issueConfig.comments.length - 1]]
: this.issueConfig.comments;
}
async addLabel(label) {
this.issueConfig.labels.push(label);
}
async removeLabel(labelToDelete) {
this.issueConfig.labels = this.issueConfig.labels.filter((label) => label !== labelToDelete);
}
async closeIssue() {
this.issueConfig.issue.open = false;
}
async lockIssue() {
this.issueConfig.issue.locked = true;
}
async getClosingInfo() {
return this.issueConfig.closingCommit;
}
}
exports.TestbedIssue = TestbedIssue;

View File

@@ -1,170 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { Comment, GitHub, GitHubIssue, Issue, Query, User } from './api'
type TestbedConfig = {
globalLabels: string[]
configs: Record<string, any>
writers: string[]
releasedCommits: string[]
queryRunner: (query: Query) => AsyncIterableIterator<(TestbedIssueConstructorArgs | TestbedIssue)[]>
}
export type TestbedConstructorArgs = Partial<TestbedConfig>
export class Testbed implements GitHub {
public config: TestbedConfig
constructor(config?: TestbedConstructorArgs) {
this.config = {
globalLabels: config?.globalLabels ?? [],
configs: config?.configs ?? {},
writers: config?.writers ?? [],
releasedCommits: config?.releasedCommits ?? [],
queryRunner:
config?.queryRunner ??
async function* () {
yield []
},
}
}
async *query(query: Query): AsyncIterableIterator<GitHubIssue[]> {
for await (const page of this.config.queryRunner(query)) {
yield page.map((issue) =>
issue instanceof TestbedIssue ? issue : new TestbedIssue(this.config, issue),
)
}
}
async createIssue(_owner: string, _repo: string, _title: string, _body: string): Promise<void> {
// pass...
}
async readConfig(path: string): Promise<any> {
return JSON.parse(JSON.stringify(this.config.configs[path]))
}
async hasWriteAccess(user: User): Promise<boolean> {
return this.config.writers.includes(user.name)
}
async repoHasLabel(label: string): Promise<boolean> {
return this.config.globalLabels.includes(label)
}
async createLabel(label: string, _color: string, _description: string): Promise<void> {
this.config.globalLabels.push(label)
}
async deleteLabel(labelToDelete: string): Promise<void> {
this.config.globalLabels = this.config.globalLabels.filter((label) => label !== labelToDelete)
}
async releaseContainsCommit(_release: string, commit: string): Promise<boolean> {
return this.config.releasedCommits.includes(commit)
}
}
type TestbedIssueConfig = {
issue: Omit<Issue, 'labels'>
comments: Comment[]
labels: string[]
closingCommit: { hash: string | undefined; timestamp: number } | undefined
}
export type TestbedIssueConstructorArgs = Partial<Omit<TestbedIssueConfig, 'issue'>> & {
issue?: Partial<Omit<Issue, 'labels'>>
}
export class TestbedIssue extends Testbed implements GitHubIssue {
public issueConfig: TestbedIssueConfig
constructor(globalConfig?: TestbedConstructorArgs, issueConfig?: TestbedIssueConstructorArgs) {
super(globalConfig)
issueConfig = issueConfig ?? {}
issueConfig.comments = issueConfig?.comments ?? []
issueConfig.labels = issueConfig?.labels ?? []
issueConfig.issue = {
author: { name: 'JacksonKearl' },
body: 'issue body',
locked: false,
numComments: issueConfig?.comments?.length || 0,
number: 1,
open: true,
title: 'issue title',
assignee: undefined,
reactions: {
'+1': 0,
'-1': 0,
confused: 0,
eyes: 0,
heart: 0,
hooray: 0,
laugh: 0,
rocket: 0,
},
closedAt: undefined,
createdAt: +new Date(),
updatedAt: +new Date(),
...issueConfig.issue,
}
this.issueConfig = issueConfig as TestbedIssueConfig
}
async addAssignee(assignee: string): Promise<void> {
this.issueConfig.issue.assignee = assignee
}
async setMilestone(milestoneId: number): Promise<void> {
this.issueConfig.issue.milestoneId = milestoneId
}
async getIssue(): Promise<Issue> {
const labels = [...this.issueConfig.labels]
return { ...this.issueConfig.issue, labels }
}
async postComment(body: string, author?: string): Promise<void> {
this.issueConfig.comments.push({
author: { name: author ?? 'bot' },
body,
id: Math.random(),
timestamp: +new Date(),
})
}
async deleteComment(id: number): Promise<void> {
this.issueConfig.comments = this.issueConfig.comments.filter((comment) => comment.id !== id)
}
async *getComments(last?: boolean): AsyncIterableIterator<Comment[]> {
yield last
? [this.issueConfig.comments[this.issueConfig.comments.length - 1]]
: this.issueConfig.comments
}
async addLabel(label: string): Promise<void> {
this.issueConfig.labels.push(label)
}
async removeLabel(labelToDelete: string): Promise<void> {
this.issueConfig.labels = this.issueConfig.labels.filter((label) => label !== labelToDelete)
}
async closeIssue(): Promise<void> {
this.issueConfig.issue.open = false
}
async lockIssue(): Promise<void> {
this.issueConfig.issue.locked = true
}
async getClosingInfo(): Promise<{ hash: string | undefined; timestamp: number } | undefined> {
return this.issueConfig.closingCommit
}
}
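A minimal mocha-style test against this harness (hypothetical import path and assertions; the test script in the package.json further down invokes mocha via ts-node) might read:

import { strict as assert } from 'assert'
import { TestbedIssue } from '../api/testbed' // assumed location of this file

describe('TestbedIssue', () => {
	it('tracks labels in memory', async () => {
		const issue = new TestbedIssue({ globalLabels: ['bug'] }, { labels: ['bug'] })
		await issue.addLabel('verified')
		assert.deepEqual((await issue.getIssue()).labels, ['bug', 'verified'])
		await issue.removeLabel('bug')
		assert.deepEqual((await issue.getIssue()).labels, ['verified'])
	})
})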

View File

@@ -1,12 +0,0 @@
name: 'PR Labeler'
description: 'Automatically add a Label to a PR'
inputs:
token:
description: GitHub token with issue, comment, and label read/write permissions
default: ${{ github.token }}
label:
description: GitHub label to add to the PR
required: true
runs:
using: 'node12'
main: 'index.js'

View File

@@ -1,22 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const core = require("@actions/core");
const github_1 = require("@actions/github");
const octokit_1 = require("../api/octokit");
const utils_1 = require("../utils/utils");
const token = utils_1.getRequiredInput('token');
const label = utils_1.getRequiredInput('label');
async function main() {
const pr = new octokit_1.OctoKitIssue(token, github_1.context.repo, { number: github_1.context.issue.number });
await pr.addLabel(label);
}
main()
.then(() => utils_1.logRateLimit(token))
.catch(async (error) => {
core.setFailed(error.message);
await utils_1.logErrorToIssue(error.message, true, token);
});

View File

@@ -1,26 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as core from '@actions/core'
import { context } from '@actions/github'
import { OctoKitIssue } from '../api/octokit'
import { getRequiredInput, logErrorToIssue, logRateLimit } from '../utils/utils'
const token = getRequiredInput('token');
const label = getRequiredInput('label');
async function main() {
const pr = new OctoKitIssue(token, context.repo, { number: context.issue.number });
await pr.addLabel(label);
}
main()
.then(() => logRateLimit(token))
.catch(async (error) => {
core.setFailed(error.message)
await logErrorToIssue(error.message, true, token)
})

View File

@@ -1,15 +0,0 @@
name: Copycat
description: Copy all new issues to a different repo
inputs:
token:
description: GitHub token with issue, comment, and label read/write permissions to both repos
default: ${{ github.token }}
owner:
description: account/organization that owns the destination repo (the microsoft part of microsoft/vscode)
required: true
repo:
description: name of the destination repo (the vscode part of microsoft/vscode)
required: true
runs:
using: 'node12'
main: 'index.js'

View File

@@ -1,19 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
class CopyCat {
constructor(github, owner, repo) {
this.github = github;
this.owner = owner;
this.repo = repo;
}
async run() {
const issue = await this.github.getIssue();
console.log(`Mirroring issue \`${issue.title}\` to ${this.owner}/${this.repo}`);
await this.github.createIssue(this.owner, this.repo, issue.title, issue.body.replace(/@|#|issues/g, '-'));
}
}
exports.CopyCat = CopyCat;

View File

@@ -1,21 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { GitHubIssue } from '../api/api'
export class CopyCat {
constructor(private github: GitHubIssue, private owner: string, private repo: string) {}
async run() {
const issue = await this.github.getIssue()
console.log(`Mirroring issue \`${issue.title}\` to ${this.owner}/${this.repo}`)
await this.github.createIssue(
this.owner,
this.repo,
issue.title,
issue.body.replace(/@|#|issues/g, '-'),
)
}
}
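The replace(/@|#|issues/g, '-') call above defangs the mirrored body so the copy cannot ping users or auto-link issues in the destination repo. A quick sketch of its effect on a hypothetical body:

const body = 'Same as #123, see https://github.com/microsoft/vscode/issues/456, cc @someone'
console.log(body.replace(/@|#|issues/g, '-'))
// => Same as -123, see https://github.com/microsoft/vscode/-/456, cc -someone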

View File

@@ -1,21 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const core = require("@actions/core");
const github_1 = require("@actions/github");
const octokit_1 = require("../api/octokit");
const utils_1 = require("../utils/utils");
const copyCat_1 = require("./copyCat");
const token = utils_1.getRequiredInput('token');
const main = async () => {
await new copyCat_1.CopyCat(new octokit_1.OctoKitIssue(token, github_1.context.repo, { number: github_1.context.issue.number }), utils_1.getRequiredInput('owner'), utils_1.getRequiredInput('repo')).run();
};
main()
.then(() => utils_1.logRateLimit(token))
.catch(async (error) => {
core.setFailed(error.message);
await utils_1.logErrorToIssue(error.message, true, token);
});

View File

@@ -1,27 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as core from '@actions/core'
import { context } from '@actions/github'
import { OctoKitIssue } from '../api/octokit'
import { getRequiredInput, logErrorToIssue, logRateLimit } from '../utils/utils'
import { CopyCat } from './copyCat'
const token = getRequiredInput('token')
const main = async () => {
await new CopyCat(
new OctoKitIssue(token, context.repo, { number: context.issue.number }),
getRequiredInput('owner'),
getRequiredInput('repo'),
).run()
}
main()
.then(() => logRateLimit(token))
.catch(async (error) => {
core.setFailed(error.message)
await logErrorToIssue(error.message, true, token)
})

View File

@@ -1,24 +0,0 @@
{
"name": "github-actions",
"version": "1.0.0",
"description": "GitHub Actions",
"scripts": {
"test": "mocha -r ts-node/register **/*.test.ts",
"build": "tsc -p ./tsconfig.json",
"lint": "eslint -c .eslintrc --fix --ext .ts .",
"watch-typecheck": "tsc --watch"
},
"repository": {
"type": "git",
"url": "git+https://github.com/microsoft/azuredatastudio.git"
},
"keywords": [],
"author": "",
"dependencies": {
"@actions/core": "^1.2.3",
"@actions/github": "^2.1.1",
"axios": "^0.19.2",
"ts-node": "^8.6.2",
"typescript": "^3.8.3"
}
}

View File

@@ -1,19 +0,0 @@
{
"compilerOptions": {
"target": "es2019",
"strict": true,
"module": "commonjs",
"moduleResolution": "node",
"removeComments": false,
"resolveJsonModule": true,
"lib": [
"es2020"
],
},
"include": [
"./**/*.ts"
],
"exclude": [
"node_modules"
]
}

View File

@@ -1,72 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const core = require("@actions/core");
const github_1 = require("@actions/github");
const axios_1 = require("axios");
const octokit_1 = require("../api/octokit");
exports.getInput = (name) => core.getInput(name) || undefined;
exports.getRequiredInput = (name) => core.getInput(name, { required: true });
exports.normalizeIssue = (issue) => {
const { body, title } = issue;
const isBug = body.includes('bug_report_template') || /Issue Type:.*Bug.*/.test(body);
const isFeatureRequest = body.includes('feature_request_template') || /Issue Type:.*Feature Request.*/.test(body);
const cleanse = (str) => str
.toLowerCase()
.replace(/<!--.*?-->/gu, '')
.replace(/.* version: .*/gu, '')
.replace(/issue type: .*/gu, '')
.replace(/<details>(.|\s)*?<\/details>/gu, '')
.replace(/vs ?code/gu, '')
.replace(/we have written.*please paste./gu, '')
.replace(/steps to reproduce:/gu, '')
.replace(/does this issue occur when all extensions are disabled.*/gu, '')
.replace(/```(.|\s)*?```/gu, '')
.replace(/!?\[.*?\]\(.*?\)/gu, '')
.replace(/\s+/gu, ' ');
return {
body: cleanse(body),
title: cleanse(title),
issueType: isBug ? 'bug' : isFeatureRequest ? 'feature_request' : 'unknown',
};
};
exports.loadLatestRelease = async (quality) => (await axios_1.default.get(`https://vscode-update.azurewebsites.net/api/update/darwin/${quality}/latest`)).data;
exports.daysAgoToTimestamp = (days) => +new Date(Date.now() - days * 24 * 60 * 60 * 1000);
exports.daysAgoToHumanReadbleDate = (days) => new Date(Date.now() - days * 24 * 60 * 60 * 1000).toISOString().replace(/\.\d{3}\w$/, '');
exports.logRateLimit = async (token) => {
const usageData = (await new github_1.GitHub(token).rateLimit.get()).data.resources;
for (const category of ['core', 'graphql', 'search']) {
const usage = 1 - usageData[category].remaining / usageData[category].limit;
const message = `Usage at ${usage} for ${category}`;
if (usage > 0) {
console.log(message);
}
if (usage > 0.5) {
await exports.logErrorToIssue(message, false, token);
}
}
};
exports.logErrorToIssue = async (message, ping, token) => {
// Attempt to wait out abuse detection timeout if present
await new Promise((resolve) => setTimeout(resolve, 10000));
const dest = github_1.context.repo.repo === 'vscode-internalbacklog'
? { repo: 'vscode-internalbacklog', issue: 974 }
: { repo: 'vscode', issue: 93814 };
return new octokit_1.OctoKitIssue(token, { owner: 'Microsoft', repo: dest.repo }, { number: dest.issue })
.postComment(`
Workflow: ${github_1.context.workflow}
Error: ${message}
Issue: ${ping ? `${github_1.context.repo.owner}/${github_1.context.repo.repo}#` : ''}${github_1.context.issue.number}
Repo: ${github_1.context.repo.owner}/${github_1.context.repo.repo}
<!-- Context:
${JSON.stringify(github_1.context, null, 2).replace(/<!--/gu, '<@--').replace(/-->/gu, '--@>')}
-->
`);
};

View File

@@ -1,95 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as core from '@actions/core'
import { context, GitHub } from '@actions/github'
import axios from 'axios'
import { OctoKitIssue } from '../api/octokit'
import { Issue } from '../api/api'
export const getInput = (name: string) => core.getInput(name) || undefined
export const getRequiredInput = (name: string) => core.getInput(name, { required: true })
export const normalizeIssue = (
issue: Issue,
): { body: string; title: string; issueType: 'bug' | 'feature_request' | 'unknown' } => {
const { body, title } = issue
const isBug = body.includes('bug_report_template') || /Issue Type:.*Bug.*/.test(body)
const isFeatureRequest =
body.includes('feature_request_template') || /Issue Type:.*Feature Request.*/.test(body)
const cleanse = (str: string) =>
str
.toLowerCase()
.replace(/<!--.*?-->/gu, '')
.replace(/.* version: .*/gu, '')
.replace(/issue type: .*/gu, '')
.replace(/<details>(.|\s)*?<\/details>/gu, '')
.replace(/vs ?code/gu, '')
.replace(/we have written.*please paste./gu, '')
.replace(/steps to reproduce:/gu, '')
.replace(/does this issue occur when all extensions are disabled.*/gu, '')
.replace(/```(.|\s)*?```/gu, '')
.replace(/!?\[.*?\]\(.*?\)/gu, '')
.replace(/\s+/gu, ' ')
return {
body: cleanse(body),
title: cleanse(title),
issueType: isBug ? 'bug' : isFeatureRequest ? 'feature_request' : 'unknown',
}
}
export interface Release {
productVersion: string
timestamp: number
version: string
}
export const loadLatestRelease = async (quality: 'stable' | 'insider'): Promise<Release | undefined> =>
(await axios.get(`https://vscode-update.azurewebsites.net/api/update/darwin/${quality}/latest`)).data
export const daysAgoToTimestamp = (days: number): number => +new Date(Date.now() - days * 24 * 60 * 60 * 1000)
export const daysAgoToHumanReadbleDate = (days: number) =>
new Date(Date.now() - days * 24 * 60 * 60 * 1000).toISOString().replace(/\.\d{3}\w$/, '')
export const logRateLimit = async (token: string) => {
const usageData = (await new GitHub(token).rateLimit.get()).data.resources
for (const category of ['core', 'graphql', 'search'] as const) {
const usage = 1 - usageData[category].remaining / usageData[category].limit
const message = `Usage at ${usage} for ${category}`
if (usage > 0) {
console.log(message)
}
if (usage > 0.5) {
await logErrorToIssue(message, false, token)
}
}
}
export const logErrorToIssue = async (message: string, ping: boolean, token: string): Promise<void> => {
// Attempt to wait out abuse detection timeout if present
await new Promise((resolve) => setTimeout(resolve, 10000))
const dest =
context.repo.repo === 'vscode-internalbacklog'
? { repo: 'vscode-internalbacklog', issue: 974 }
: { repo: 'vscode', issue: 93814 }
return new OctoKitIssue(token, { owner: 'Microsoft', repo: dest.repo }, { number: dest.issue })
.postComment(`
Workflow: ${context.workflow}
Error: ${message}
Issue: ${ping ? `${context.repo.owner}/${context.repo.repo}#` : ''}${context.issue.number}
Repo: ${context.repo.owner}/${context.repo.repo}
<!-- Context:
${JSON.stringify(context, null, 2).replace(/<!--/gu, '<@--').replace(/-->/gu, '--@>')}
-->
`)
}
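As a worked sketch of normalizeIssue (hypothetical input; the cast stands in for the remaining Issue fields), the cleanse pass lowercases the text and strips comments, code fences, links, and VS Code boilerplate, while the issue-type check runs on the raw body:

import { normalizeIssue } from './utils/utils' // assumed location of this file
import { Issue } from './api/api'

const sample = {
	title: 'VSCode crashes on save',
	body: 'Issue Type: Bug\nVS Code version: 1.42.0\nSteps to Reproduce:\n1. save\n<!-- internal note -->',
} as Issue // hypothetical: remaining Issue fields elided for the sketch

const { title, issueType } = normalizeIssue(sample)
console.log(issueType) // 'bug', matched against the raw body by /Issue Type:.*Bug.*/
console.log(title) // ' crashes on save', lowercased with /vs ?code/ removed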

View File

@@ -1,435 +0,0 @@
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
# yarn lockfile v1
"@actions/core@^1.2.3":
version "1.2.3"
resolved "https://registry.yarnpkg.com/@actions/core/-/core-1.2.3.tgz#e844b4fa0820e206075445079130868f95bfca95"
integrity sha512-Wp4xnyokakM45Uuj4WLUxdsa8fJjKVl1fDTsPbTEcTcuu0Nb26IPQbOtjmnfaCPGcaoPOOqId8H9NapZ8gii4w==
"@actions/github@^2.1.1":
version "2.1.1"
resolved "https://registry.yarnpkg.com/@actions/github/-/github-2.1.1.tgz#bcabedff598196d953f58ba750d5e75549a75142"
integrity sha512-kAgTGUx7yf5KQCndVeHSwCNZuDBvPyxm5xKTswW2lofugeuC1AZX73nUUVDNaysnM9aKFMHv9YCdVJbg7syEyA==
dependencies:
"@actions/http-client" "^1.0.3"
"@octokit/graphql" "^4.3.1"
"@octokit/rest" "^16.43.1"
"@actions/http-client@^1.0.3":
version "1.0.8"
resolved "https://registry.yarnpkg.com/@actions/http-client/-/http-client-1.0.8.tgz#8bd76e8eca89dc8bcf619aa128eba85f7a39af45"
integrity sha512-G4JjJ6f9Hb3Zvejj+ewLLKLf99ZC+9v+yCxoYf9vSyH+WkzPLB2LuUtRMGNkooMqdugGBFStIKXOuvH1W+EctA==
dependencies:
tunnel "0.0.6"
"@octokit/auth-token@^2.4.0":
version "2.4.0"
resolved "https://registry.yarnpkg.com/@octokit/auth-token/-/auth-token-2.4.0.tgz#b64178975218b99e4dfe948253f0673cbbb59d9f"
integrity sha512-eoOVMjILna7FVQf96iWc3+ZtE/ZT6y8ob8ZzcqKY1ibSQCnu4O/B7pJvzMx5cyZ/RjAff6DAdEb0O0Cjcxidkg==
dependencies:
"@octokit/types" "^2.0.0"
"@octokit/endpoint@^6.0.1":
version "6.0.1"
resolved "https://registry.yarnpkg.com/@octokit/endpoint/-/endpoint-6.0.1.tgz#16d5c0e7a83e3a644d1ddbe8cded6c3d038d31d7"
integrity sha512-pOPHaSz57SFT/m3R5P8MUu4wLPszokn5pXcB/pzavLTQf2jbU+6iayTvzaY6/BiotuRS0qyEUkx3QglT4U958A==
dependencies:
"@octokit/types" "^2.11.1"
is-plain-object "^3.0.0"
universal-user-agent "^5.0.0"
"@octokit/graphql@^4.3.1":
version "4.3.1"
resolved "https://registry.yarnpkg.com/@octokit/graphql/-/graphql-4.3.1.tgz#9ee840e04ed2906c7d6763807632de84cdecf418"
integrity sha512-hCdTjfvrK+ilU2keAdqNBWOk+gm1kai1ZcdjRfB30oA3/T6n53UVJb7w0L5cR3/rhU91xT3HSqCd+qbvH06yxA==
dependencies:
"@octokit/request" "^5.3.0"
"@octokit/types" "^2.0.0"
universal-user-agent "^4.0.0"
"@octokit/plugin-paginate-rest@^1.1.1":
version "1.1.2"
resolved "https://registry.yarnpkg.com/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-1.1.2.tgz#004170acf8c2be535aba26727867d692f7b488fc"
integrity sha512-jbsSoi5Q1pj63sC16XIUboklNw+8tL9VOnJsWycWYR78TKss5PVpIPb1TUUcMQ+bBh7cY579cVAWmf5qG+dw+Q==
dependencies:
"@octokit/types" "^2.0.1"
"@octokit/plugin-request-log@^1.0.0":
version "1.0.0"
resolved "https://registry.yarnpkg.com/@octokit/plugin-request-log/-/plugin-request-log-1.0.0.tgz#eef87a431300f6148c39a7f75f8cfeb218b2547e"
integrity sha512-ywoxP68aOT3zHCLgWZgwUJatiENeHE7xJzYjfz8WI0goynp96wETBF+d95b8g/uL4QmS6owPVlaxiz3wyMAzcw==
"@octokit/plugin-rest-endpoint-methods@2.4.0":
version "2.4.0"
resolved "https://registry.yarnpkg.com/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-2.4.0.tgz#3288ecf5481f68c494dd0602fc15407a59faf61e"
integrity sha512-EZi/AWhtkdfAYi01obpX0DF7U6b1VRr30QNQ5xSFPITMdLSfhcBqjamE3F+sKcxPbD7eZuMHu3Qkk2V+JGxBDQ==
dependencies:
"@octokit/types" "^2.0.1"
deprecation "^2.3.1"
"@octokit/request-error@^1.0.2":
version "1.2.1"
resolved "https://registry.yarnpkg.com/@octokit/request-error/-/request-error-1.2.1.tgz#ede0714c773f32347576c25649dc013ae6b31801"
integrity sha512-+6yDyk1EES6WK+l3viRDElw96MvwfJxCt45GvmjDUKWjYIb3PJZQkq3i46TwGwoPD4h8NmTrENmtyA1FwbmhRA==
dependencies:
"@octokit/types" "^2.0.0"
deprecation "^2.0.0"
once "^1.4.0"
"@octokit/request-error@^2.0.0":
version "2.0.0"
resolved "https://registry.yarnpkg.com/@octokit/request-error/-/request-error-2.0.0.tgz#94ca7293373654400fbb2995f377f9473e00834b"
integrity sha512-rtYicB4Absc60rUv74Rjpzek84UbVHGHJRu4fNVlZ1mCcyUPPuzFfG9Rn6sjHrd95DEsmjSt1Axlc699ZlbDkw==
dependencies:
"@octokit/types" "^2.0.0"
deprecation "^2.0.0"
once "^1.4.0"
"@octokit/request@^5.2.0", "@octokit/request@^5.3.0":
version "5.4.2"
resolved "https://registry.yarnpkg.com/@octokit/request/-/request-5.4.2.tgz#74f8e5bbd39dc738a1b127629791f8ad1b3193ee"
integrity sha512-zKdnGuQ2TQ2vFk9VU8awFT4+EYf92Z/v3OlzRaSh4RIP0H6cvW1BFPXq4XYvNez+TPQjqN+0uSkCYnMFFhcFrw==
dependencies:
"@octokit/endpoint" "^6.0.1"
"@octokit/request-error" "^2.0.0"
"@octokit/types" "^2.11.1"
deprecation "^2.0.0"
is-plain-object "^3.0.0"
node-fetch "^2.3.0"
once "^1.4.0"
universal-user-agent "^5.0.0"
"@octokit/rest@^16.43.1":
version "16.43.1"
resolved "https://registry.yarnpkg.com/@octokit/rest/-/rest-16.43.1.tgz#3b11e7d1b1ac2bbeeb23b08a17df0b20947eda6b"
integrity sha512-gfFKwRT/wFxq5qlNjnW2dh+qh74XgTQ2B179UX5K1HYCluioWj8Ndbgqw2PVqa1NnVJkGHp2ovMpVn/DImlmkw==
dependencies:
"@octokit/auth-token" "^2.4.0"
"@octokit/plugin-paginate-rest" "^1.1.1"
"@octokit/plugin-request-log" "^1.0.0"
"@octokit/plugin-rest-endpoint-methods" "2.4.0"
"@octokit/request" "^5.2.0"
"@octokit/request-error" "^1.0.2"
atob-lite "^2.0.0"
before-after-hook "^2.0.0"
btoa-lite "^1.0.0"
deprecation "^2.0.0"
lodash.get "^4.4.2"
lodash.set "^4.3.2"
lodash.uniq "^4.5.0"
octokit-pagination-methods "^1.1.0"
once "^1.4.0"
universal-user-agent "^4.0.0"
"@octokit/types@^2.0.0", "@octokit/types@^2.0.1", "@octokit/types@^2.11.1":
version "2.12.1"
resolved "https://registry.yarnpkg.com/@octokit/types/-/types-2.12.1.tgz#4a26b4a85ec121043d3b0745b5798f9d8fd968ca"
integrity sha512-LRLR1tjbcCfAmUElvTmMvLEzstpx6Xt/aQVTg2xvd+kHA2Ekp1eWl5t+gU7bcwjXHYEAzh4hH4WH+kS3vh+wRw==
dependencies:
"@types/node" ">= 8"
"@types/node@>= 8":
version "13.13.2"
resolved "https://registry.yarnpkg.com/@types/node/-/node-13.13.2.tgz#160d82623610db590a64e8ca81784e11117e5a54"
integrity sha512-LB2R1Oyhpg8gu4SON/mfforE525+Hi/M1ineICEDftqNVTyFg1aRIeGuTvXAoWHc4nbrFncWtJgMmoyRvuGh7A==
arg@^4.1.0:
version "4.1.3"
resolved "https://registry.yarnpkg.com/arg/-/arg-4.1.3.tgz#269fc7ad5b8e42cb63c896d5666017261c144089"
integrity sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==
atob-lite@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/atob-lite/-/atob-lite-2.0.0.tgz#0fef5ad46f1bd7a8502c65727f0367d5ee43d696"
integrity sha1-D+9a1G8b16hQLGVyfwNn1e5D1pY=
axios@^0.19.2:
version "0.19.2"
resolved "https://registry.yarnpkg.com/axios/-/axios-0.19.2.tgz#3ea36c5d8818d0d5f8a8a97a6d36b86cdc00cb27"
integrity sha512-fjgm5MvRHLhx+osE2xoekY70AhARk3a6hkN+3Io1jc00jtquGvxYlKlsFUhmUET0V5te6CcZI7lcv2Ym61mjHA==
dependencies:
follow-redirects "1.5.10"
before-after-hook@^2.0.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/before-after-hook/-/before-after-hook-2.1.0.tgz#b6c03487f44e24200dd30ca5e6a1979c5d2fb635"
integrity sha512-IWIbu7pMqyw3EAJHzzHbWa85b6oud/yfKYg5rqB5hNE8CeMi3nX+2C2sj0HswfblST86hpVEOAb9x34NZd6P7A==
btoa-lite@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/btoa-lite/-/btoa-lite-1.0.0.tgz#337766da15801210fdd956c22e9c6891ab9d0337"
integrity sha1-M3dm2hWAEhD92VbCLpxokaudAzc=
buffer-from@^1.0.0:
version "1.1.1"
resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.1.tgz#32713bc028f75c02fdb710d7c7bcec1f2c6070ef"
integrity sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A==
cross-spawn@^6.0.0:
version "6.0.5"
resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-6.0.5.tgz#4a5ec7c64dfae22c3a14124dbacdee846d80cbc4"
integrity sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==
dependencies:
nice-try "^1.0.4"
path-key "^2.0.1"
semver "^5.5.0"
shebang-command "^1.2.0"
which "^1.2.9"
debug@=3.1.0:
version "3.1.0"
resolved "https://registry.yarnpkg.com/debug/-/debug-3.1.0.tgz#5bb5a0672628b64149566ba16819e61518c67261"
integrity sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g==
dependencies:
ms "2.0.0"
deprecation@^2.0.0, deprecation@^2.3.1:
version "2.3.1"
resolved "https://registry.yarnpkg.com/deprecation/-/deprecation-2.3.1.tgz#6368cbdb40abf3373b525ac87e4a260c3a700919"
integrity sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==
diff@^4.0.1:
version "4.0.2"
resolved "https://registry.yarnpkg.com/diff/-/diff-4.0.2.tgz#60f3aecb89d5fae520c11aa19efc2bb982aade7d"
integrity sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==
end-of-stream@^1.1.0:
version "1.4.4"
resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0"
integrity sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==
dependencies:
once "^1.4.0"
execa@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/execa/-/execa-1.0.0.tgz#c6236a5bb4df6d6f15e88e7f017798216749ddd8"
integrity sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA==
dependencies:
cross-spawn "^6.0.0"
get-stream "^4.0.0"
is-stream "^1.1.0"
npm-run-path "^2.0.0"
p-finally "^1.0.0"
signal-exit "^3.0.0"
strip-eof "^1.0.0"
follow-redirects@1.5.10:
version "1.5.10"
resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.5.10.tgz#7b7a9f9aea2fdff36786a94ff643ed07f4ff5e2a"
integrity sha512-0V5l4Cizzvqt5D44aTXbFZz+FtyXV1vrDN6qrelxtfYQKW0KO0W2T/hkE8xvGa/540LkZlkaUjO4ailYTFtHVQ==
dependencies:
debug "=3.1.0"
get-stream@^4.0.0:
version "4.1.0"
resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-4.1.0.tgz#c1b255575f3dc21d59bfc79cd3d2b46b1c3a54b5"
integrity sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w==
dependencies:
pump "^3.0.0"
is-plain-object@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-3.0.0.tgz#47bfc5da1b5d50d64110806c199359482e75a928"
integrity sha512-tZIpofR+P05k8Aocp7UI/2UTa9lTJSebCXpFFoR9aibpokDj/uXBsJ8luUu0tTVYKkMU6URDUuOfJZ7koewXvg==
dependencies:
isobject "^4.0.0"
is-stream@^1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44"
integrity sha1-EtSj3U5o4Lec6428hBc66A2RykQ=
isexe@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10"
integrity sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=
isobject@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/isobject/-/isobject-4.0.0.tgz#3f1c9155e73b192022a80819bacd0343711697b0"
integrity sha512-S/2fF5wH8SJA/kmwr6HYhK/RI/OkhD84k8ntalo0iJjZikgq1XFvR5M8NPT1x5F7fBwCG3qHfnzeP/Vh/ZxCUA==
lodash.get@^4.4.2:
version "4.4.2"
resolved "https://registry.yarnpkg.com/lodash.get/-/lodash.get-4.4.2.tgz#2d177f652fa31e939b4438d5341499dfa3825e99"
integrity sha1-LRd/ZS+jHpObRDjVNBSZ36OCXpk=
lodash.set@^4.3.2:
version "4.3.2"
resolved "https://registry.yarnpkg.com/lodash.set/-/lodash.set-4.3.2.tgz#d8757b1da807dde24816b0d6a84bea1a76230b23"
integrity sha1-2HV7HagH3eJIFrDWqEvqGnYjCyM=
lodash.uniq@^4.5.0:
version "4.5.0"
resolved "https://registry.yarnpkg.com/lodash.uniq/-/lodash.uniq-4.5.0.tgz#d0225373aeb652adc1bc82e4945339a842754773"
integrity sha1-0CJTc662Uq3BvILklFM5qEJ1R3M=
macos-release@^2.2.0:
version "2.3.0"
resolved "https://registry.yarnpkg.com/macos-release/-/macos-release-2.3.0.tgz#eb1930b036c0800adebccd5f17bc4c12de8bb71f"
integrity sha512-OHhSbtcviqMPt7yfw5ef5aghS2jzFVKEFyCJndQt2YpSQ9qRVSEv2axSJI1paVThEu+FFGs584h/1YhxjVqajA==
make-error@^1.1.1:
version "1.3.6"
resolved "https://registry.yarnpkg.com/make-error/-/make-error-1.3.6.tgz#2eb2e37ea9b67c4891f684a1394799af484cf7a2"
integrity sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==
ms@2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8"
integrity sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=
nice-try@^1.0.4:
version "1.0.5"
resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366"
integrity sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==
node-fetch@^2.3.0:
version "2.6.0"
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.0.tgz#e633456386d4aa55863f676a7ab0daa8fdecb0fd"
integrity sha512-8dG4H5ujfvFiqDmVu9fQ5bOHUC15JMjMY/Zumv26oOvvVJjM67KF8koCWIabKQ1GJIa9r2mMZscBq/TbdOcmNA==
npm-run-path@^2.0.0:
version "2.0.2"
resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-2.0.2.tgz#35a9232dfa35d7067b4cb2ddf2357b1871536c5f"
integrity sha1-NakjLfo11wZ7TLLd8jV7GHFTbF8=
dependencies:
path-key "^2.0.0"
octokit-pagination-methods@^1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/octokit-pagination-methods/-/octokit-pagination-methods-1.1.0.tgz#cf472edc9d551055f9ef73f6e42b4dbb4c80bea4"
integrity sha512-fZ4qZdQ2nxJvtcasX7Ghl+WlWS/d9IgnBIwFZXVNNZUmzpno91SX5bc5vuxiuKoCtK78XxGGNuSCrDC7xYB3OQ==
once@^1.3.1, once@^1.4.0:
version "1.4.0"
resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1"
integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E=
dependencies:
wrappy "1"
os-name@^3.1.0:
version "3.1.0"
resolved "https://registry.yarnpkg.com/os-name/-/os-name-3.1.0.tgz#dec19d966296e1cd62d701a5a66ee1ddeae70801"
integrity sha512-h8L+8aNjNcMpo/mAIBPn5PXCM16iyPGjHNWo6U1YO8sJTMHtEtyczI6QJnLoplswm6goopQkqc7OAnjhWcugVg==
dependencies:
macos-release "^2.2.0"
windows-release "^3.1.0"
p-finally@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae"
integrity sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4=
path-key@^2.0.0, path-key@^2.0.1:
version "2.0.1"
resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40"
integrity sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A=
pump@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64"
integrity sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==
dependencies:
end-of-stream "^1.1.0"
once "^1.3.1"
semver@^5.5.0:
version "5.7.1"
resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7"
integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==
shebang-command@^1.2.0:
version "1.2.0"
resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-1.2.0.tgz#44aac65b695b03398968c39f363fee5deafdf1ea"
integrity sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=
dependencies:
shebang-regex "^1.0.0"
shebang-regex@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-1.0.0.tgz#da42f49740c0b42db2ca9728571cb190c98efea3"
integrity sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=
signal-exit@^3.0.0:
version "3.0.3"
resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.3.tgz#a1410c2edd8f077b08b4e253c8eacfcaf057461c"
integrity sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA==
source-map-support@^0.5.17:
version "0.5.19"
resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.19.tgz#a98b62f86dcaf4f67399648c085291ab9e8fed61"
integrity sha512-Wonm7zOCIJzBGQdB+thsPar0kYuCIzYvxZwlBa87yi/Mdjv7Tip2cyVbLj5o0cFPN4EVkuTwb3GDDyUx2DGnGw==
dependencies:
buffer-from "^1.0.0"
source-map "^0.6.0"
source-map@^0.6.0:
version "0.6.1"
resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263"
integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==
strip-eof@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/strip-eof/-/strip-eof-1.0.0.tgz#bb43ff5598a6eb05d89b59fcd129c983313606bf"
integrity sha1-u0P/VZim6wXYm1n80SnJgzE2Br8=
ts-node@^8.6.2:
version "8.9.0"
resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-8.9.0.tgz#d7bf7272dcbecd3a2aa18bd0b96c7d2f270c15d4"
integrity sha512-rwkXfOs9zmoHrV8xE++dmNd6ZIS+nmHHCxcV53ekGJrxFLMbp+pizpPS07ARvhwneCIECPppOwbZHvw9sQtU4w==
dependencies:
arg "^4.1.0"
diff "^4.0.1"
make-error "^1.1.1"
source-map-support "^0.5.17"
yn "3.1.1"
tunnel@0.0.6:
version "0.0.6"
resolved "https://registry.yarnpkg.com/tunnel/-/tunnel-0.0.6.tgz#72f1314b34a5b192db012324df2cc587ca47f92c"
integrity sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==
typescript@^3.8.3:
version "3.8.3"
resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.8.3.tgz#409eb8544ea0335711205869ec458ab109ee1061"
integrity sha512-MYlEfn5VrLNsgudQTVJeNaQFUAI7DkhnOjdpAp4T+ku1TfQClewlbSuTVHiA+8skNBgaf02TL/kLOvig4y3G8w==
universal-user-agent@^4.0.0:
version "4.0.1"
resolved "https://registry.yarnpkg.com/universal-user-agent/-/universal-user-agent-4.0.1.tgz#fd8d6cb773a679a709e967ef8288a31fcc03e557"
integrity sha512-LnST3ebHwVL2aNe4mejI9IQh2HfZ1RLo8Io2HugSif8ekzD1TlWpHpColOB/eh8JHMLkGH3Akqf040I+4ylNxg==
dependencies:
os-name "^3.1.0"
universal-user-agent@^5.0.0:
version "5.0.0"
resolved "https://registry.yarnpkg.com/universal-user-agent/-/universal-user-agent-5.0.0.tgz#a3182aa758069bf0e79952570ca757de3579c1d9"
integrity sha512-B5TPtzZleXyPrUMKCpEHFmVhMN6EhmJYjG5PQna9s7mXeSqGTLap4OpqLl5FCEFUI3UBmllkETwKf/db66Y54Q==
dependencies:
os-name "^3.1.0"
which@^1.2.9:
version "1.3.1"
resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a"
integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==
dependencies:
isexe "^2.0.0"
windows-release@^3.1.0:
version "3.3.0"
resolved "https://registry.yarnpkg.com/windows-release/-/windows-release-3.3.0.tgz#dce167e9f8be733f21c849ebd4d03fe66b29b9f0"
integrity sha512-2HetyTg1Y+R+rUgrKeUEhAG/ZuOmTrI1NBb3ZyAGQMYmOJjBBPe4MTodghRkmLJZHwkuPi02anbeGP+Zf401LQ==
dependencies:
execa "^1.0.0"
wrappy@1:
version "1.0.2"
resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f"
integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=
yn@3.1.1:
version "3.1.1"
resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.1.tgz#1e87401a09d767c1d5eab26a6e4c185182d2eb50"
integrity sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==

View File

@@ -6,33 +6,24 @@
 'use strict';
 import * as vfs from 'vinyl-fs';
-import * as path from 'path';
-import * as es from 'event-stream';
-import * as fs from 'fs';
 const files = [
 	'.build/extensions/**/*.vsix', // external extensions
 	'.build/win32-x64/**/*.{exe,zip}', // windows binaries
 	'.build/linux/sha256hashes.txt', // linux hashes
-	'.build/linux/deb/amd64/deb/*.deb', // linux debs
-	'.build/linux/rpm/x86_64/*.rpm', // linux rpms
+	'.build/linux/deb/amd64/deb/*', // linux debs
+	'.build/linux/rpm/x86_64/*', // linux rpms
 	'.build/linux/server/*', // linux server
 	'.build/linux/archive/*', // linux archive
-	'.build/docker/*', // docker images
-	'.build/darwin/*', // darwin binaries
+	'.build/docker/**', // docker images
+	'.build/darwin/**', // darwin binaries
 	'.build/version.json' // version information
 ];
 async function main() {
 	return new Promise((resolve, reject) => {
 		const stream = vfs.src(files, { base: '.build', allowEmpty: true })
-			.pipe(es.through(file => {
-				const filePath = path.join(process.env.BUILD_ARTIFACTSTAGINGDIRECTORY!,
-					//Preserve intermediate directories after .build folder
-					file.path.substr(path.resolve('.build').length + 1));
-				fs.mkdirSync(path.dirname(filePath), { recursive: true });
-				fs.renameSync(file.path, filePath);
-			}));
+			.pipe(vfs.dest(process.env.BUILD_ARTIFACTSTAGINGDIRECTORY!));
 		stream.on('end', () => resolve());
 		stream.on('error', e => reject(e));
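The rewritten pipeline leans on vinyl-fs itself to recreate the directory tree that the deleted es.through/renameSync block managed by hand. A minimal sketch of the new shape (hypothetical file list):

import * as vfs from 'vinyl-fs'

// vfs.dest() creates intermediate directories relative to `base`,
// so no manual mkdirSync/renameSync bookkeeping is needed.
vfs.src(['.build/**/*'], { base: '.build', allowEmpty: true })
	.pipe(vfs.dest(process.env.BUILD_ARTIFACTSTAGINGDIRECTORY!))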

View File

@@ -44,16 +44,15 @@ async function doesAssetExist(blobService: azure.BlobService, quality: string, b
 	return existsResult.exists;
 }
-async function uploadBlob(blobService: azure.BlobService, quality: string, blobName: string, filePath: string, fileName: string): Promise<void> {
+async function uploadBlob(blobService: azure.BlobService, quality: string, blobName: string, file: string): Promise<void> {
 	const blobOptions: azure.BlobService.CreateBlockBlobRequestOptions = {
 		contentSettings: {
-			contentType: mime.lookup(filePath),
-			contentDisposition: `attachment; filename="${fileName}"`,
+			contentType: mime.lookup(file),
 			cacheControl: 'max-age=31536000, public'
 		}
 	};
-	await new Promise((c, e) => blobService.createBlockBlobFromLocalFile(quality, blobName, filePath, blobOptions, err => err ? e(err) : c()));
+	await new Promise((c, e) => blobService.createBlockBlobFromLocalFile(quality, blobName, file, blobOptions, err => err ? e(err) : c()));
 }
function getEnv(name: string): string {
@@ -67,24 +66,24 @@ function getEnv(name: string): string {
}
async function main(): Promise<void> {
const [, , platform, type, fileName, filePath] = process.argv;
const [, , platform, type, name, file] = process.argv;
const quality = getEnv('VSCODE_QUALITY');
const commit = getEnv('BUILD_SOURCEVERSION');
console.log('Creating asset...');
const stat = await new Promise<fs.Stats>((c, e) => fs.stat(filePath, (err, stat) => err ? e(err) : c(stat)));
const stat = await new Promise<fs.Stats>((c, e) => fs.stat(file, (err, stat) => err ? e(err) : c(stat)));
const size = stat.size;
console.log('Size:', size);
const stream = fs.createReadStream(filePath);
const stream = fs.createReadStream(file);
const [sha1hash, sha256hash] = await Promise.all([hashStream('sha1', stream), hashStream('sha256', stream)]);
console.log('SHA1:', sha1hash);
console.log('SHA256:', sha256hash);
const blobName = commit + '/' + fileName;
const blobName = commit + '/' + name;
const storageAccount = process.env['AZURE_STORAGE_ACCOUNT_2']!;
const blobService = azure.createBlobService(storageAccount, process.env['AZURE_STORAGE_ACCESS_KEY_2']!)
@@ -99,7 +98,7 @@ async function main(): Promise<void> {
console.log('Uploading blobs to Azure storage...');
await uploadBlob(blobService, quality, blobName, filePath, fileName);
await uploadBlob(blobService, quality, blobName, file);
console.log('Blobs successfully uploaded.');
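hashStream is defined earlier in this file and not shown in the hunk; it presumably pipes the read stream through a crypto hash and resolves with the hex digest, along these lines (a sketch, not the verbatim implementation):

import * as crypto from 'crypto';
import { Readable } from 'stream';

// Sketch of the hashStream helper used above: feed each chunk into the
// hash and resolve once the underlying file stream closes.
function hashStream(hashName: string, stream: Readable): Promise<string> {
	return new Promise<string>((c, e) => {
		const shasum = crypto.createHash(hashName);
		stream
			.on('data', chunk => shasum.update(chunk))
			.on('error', e)
			.on('close', () => c(shasum.digest('hex')));
	});
}

Because both calls attach data listeners to the same read stream, the SHA1 and SHA256 digests are computed in a single pass over the file.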


@@ -53,7 +53,7 @@ function getConfig(quality: string): Promise<Config> {
};
return new Promise<Config>((c, e) => {
client.queryDocuments(collection, query, { enableCrossPartitionQuery: true }).toArray((err, results) => {
client.queryDocuments(collection, query).toArray((err, results) => {
if (err && err.code !== 409) { return e(err); }
c(!results || results.length === 0 ? createDefaultConfig(quality) : results[0] as any as Config);
@@ -86,7 +86,7 @@ function createOrUpdate(commit: string, quality: string, platform: string, type:
updateTries++;
return new Promise<void>((c, e) => {
client.queryDocuments(collection, updateQuery, { enableCrossPartitionQuery: true }).toArray((err, results) => {
client.queryDocuments(collection, updateQuery).toArray((err, results) => {
if (err) { return e(err); }
if (results.length !== 1) { return e(new Error('No documents')); }
@@ -217,13 +217,7 @@ async function publish(commit: string, quality: string, platform: string, type:
// {{SQL CARBON EDIT}}
// Insiders: nightly build from master
const isReleased = (
(
(quality === 'insider' && /^master$|^refs\/heads\/master$/.test(sourceBranch)) ||
(quality === 'rc1' && /^release\/|^refs\/heads\/release\//.test(sourceBranch))
) &&
/Project Collection Service Accounts|Microsoft.VisualStudio.Services.TFS/.test(queuedBy)
);
const isReleased = (quality === 'insider' && /^master$|^refs\/heads\/master$/.test(sourceBranch) && /Project Collection Service Accounts|Microsoft.VisualStudio.Services.TFS/.test(queuedBy));
const release = {
id: commit,
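This {{SQL CARBON EDIT}} drops the rc1/release-branch clause, so only insider-quality builds of master queued by the pipeline service accounts are auto-released. The surviving predicate, pulled out as a standalone function for readability (parameter names are illustrative):

// Sketch: the release gate after this change.
function isReleased(quality: string, sourceBranch: string, queuedBy: string): boolean {
	return quality === 'insider'
		&& /^master$|^refs\/heads\/master$/.test(sourceBranch)
		&& /Project Collection Service Accounts|Microsoft.VisualStudio.Services.TFS/.test(queuedBy);
}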


@@ -2,76 +2,51 @@ steps:
- task: NodeTool@0
inputs:
versionSpec: "12.13.0"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 # {{SQL CARBON EDIT}} update version
inputs:
versionSpec: "1.x"
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
- script: |
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
displayName: Install Dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- script: |
yarn electron x64
displayName: Download Electron
- script: |
yarn gulp hygiene
displayName: Run Hygiene Checks
- script: | # {{SQL CARBON EDIT}} add step
yarn strict-vscode
displayName: Run Strict Null Check.
# - script: | {{SQL CARBON EDIT}} remove step
# yarn monaco-compile-check
# displayName: Run Monaco Editor Checks
- script: |
yarn valid-layers-check
displayName: Run Valid Layers Checks
- script: |
yarn compile
displayName: Compile Sources
# - script: | {{SQL CARBON EDIT}} remove step
# yarn download-builtin-extensions
# displayName: Download Built-in Extensions
- script: |
./scripts/test.sh --tfs "Unit Tests"
displayName: Run Unit Tests (Electron)
# - script: | {{SQL CARBON EDIT}} disable
# yarn test-browser --browser chromium --browser webkit --browser firefox
# displayName: Run Unit Tests (Browser)
# - script: | {{SQL CARBON EDIT}} disable
displayName: Run Unit Tests
# - script: | {{SQL CARBON EDIT}} remove step
# ./scripts/test-integration.sh --tfs "Integration Tests"
# displayName: Run Integration Tests (Electron)
# - task: PublishPipelineArtifact@0
# inputs:
# artifactName: crash-dump-macos
# targetPath: .build/crashes
# displayName: 'Publish Crash Reports'
# condition: succeededOrFailed()
# displayName: Run Integration Tests
- task: PublishTestResults@2
displayName: Publish Tests Results
inputs:


@@ -1,6 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
</dict>
</plist>


@@ -1,8 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>com.apple.security.cs.allow-jit</key>
<true/>
</dict>
</plist>


@@ -1,10 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>com.apple.security.cs.allow-unsigned-executable-memory</key>
<true/>
<key>com.apple.security.cs.disable-library-validation</key>
<true/>
</dict>
</plist>


@@ -1,14 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>com.apple.security.cs.allow-jit</key>
<true/>
<key>com.apple.security.cs.allow-unsigned-executable-memory</key>
<true/>
<key>com.apple.security.cs.disable-library-validation</key>
<true/>
<key>com.apple.security.cs.allow-dyld-environment-variables</key>
<true/>
</dict>
</plist>


@@ -96,13 +96,9 @@ steps:
- script: |
set -e
./scripts/test.sh --build --tfs "Unit Tests"
displayName: Run unit tests (Electron)
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
yarn test-browser --build --browser chromium --browser webkit --browser firefox
displayName: Run unit tests (Browser)
# APP_NAME="`ls $(agent.builddirectory)/VSCode-darwin | head -n 1`"
# yarn smoketest -- --build "$(agent.builddirectory)/VSCode-darwin/$APP_NAME"
displayName: Run unit tests
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
@@ -115,71 +111,33 @@ steps:
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME/Contents/MacOS/Electron" \
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-darwin" \
./scripts/test-integration.sh --build --tfs "Integration Tests"
displayName: Run integration tests (Electron)
displayName: Run integration tests
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
# Web Smoke Tests disabled due to https://github.com/microsoft/vscode/issues/80308
# - script: |
# set -e
# cd test/smoke
# yarn compile
# cd -
# yarn smoketest --web --headless
# continueOnError: true
# displayName: Run web smoke tests
# condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
APP_ROOT=$(agent.builddirectory)/VSCode-darwin
APP_NAME="`ls $APP_ROOT | head -n 1`"
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME/Contents/MacOS/Electron" \
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-darwin" \
./resources/server/test/test-remote-integration.sh
displayName: Run remote integration tests (Electron)
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-darwin" \
./resources/server/test/test-web-integration.sh --browser webkit
displayName: Run integration tests (Browser)
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
APP_ROOT=$(agent.builddirectory)/VSCode-darwin
APP_NAME="`ls $APP_ROOT | head -n 1`"
yarn smoketest --build "$APP_ROOT/$APP_NAME"
continueOnError: true
displayName: Run smoke tests (Electron)
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-darwin" \
cd test/smoke
yarn compile
cd -
yarn smoketest --web --headless
continueOnError: true
displayName: Run smoke tests (Browser)
displayName: Run smoke tests
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- task: PublishPipelineArtifact@0
inputs:
artifactName: crash-dump-macos
targetPath: .build/crashes
displayName: 'Publish Crash Reports'
condition: succeededOrFailed()
- script: |
set -e
APP_ROOT=$(agent.builddirectory)/VSCode-darwin
APP_NAME="`ls $APP_ROOT | head -n 1`"
HELPER_APP_NAME="`echo $APP_NAME | sed -e 's/^Visual Studio //;s/\.app$//'`"
APP_FRAMEWORK_PATH="$APP_ROOT/$APP_NAME/Contents/Frameworks"
security create-keychain -p pwd $(agent.tempdirectory)/buildagent.keychain
security default-keychain -s $(agent.tempdirectory)/buildagent.keychain
security unlock-keychain -p pwd $(agent.tempdirectory)/buildagent.keychain
echo "$(macos-developer-certificate)" | base64 -D > $(agent.tempdirectory)/cert.p12
security import $(agent.tempdirectory)/cert.p12 -k $(agent.tempdirectory)/buildagent.keychain -P "$(macos-developer-certificate-key)" -T /usr/bin/codesign
security set-key-partition-list -S apple-tool:,apple:,codesign: -s -k pwd $(agent.tempdirectory)/buildagent.keychain
codesign -s 99FM488X57 --deep --force --options runtime --entitlements build/azure-pipelines/darwin/entitlements.plist "$APP_ROOT"/*.app
codesign -s 99FM488X57 --force --options runtime --entitlements build/azure-pipelines/darwin/helper-gpu-entitlements.plist "$APP_FRAMEWORK_PATH/$HELPER_APP_NAME Helper (GPU).app"
codesign -s 99FM488X57 --force --options runtime --entitlements build/azure-pipelines/darwin/helper-plugin-entitlements.plist "$APP_FRAMEWORK_PATH/$HELPER_APP_NAME Helper (Plugin).app"
codesign -s 99FM488X57 --force --options runtime --entitlements build/azure-pipelines/darwin/helper-renderer-entitlements.plist "$APP_FRAMEWORK_PATH/$HELPER_APP_NAME Helper (Renderer).app"
displayName: Set Hardened Entitlements
- script: |
set -e
pushd $(agent.builddirectory)/VSCode-darwin && zip -r -X -y $(agent.builddirectory)/VSCode-darwin.zip * && popd
pushd ../VSCode-darwin && zip -r -X -y ../VSCode-darwin.zip * && popd
displayName: Archive build
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
@@ -193,61 +151,14 @@ steps:
{
"keyCode": "CP-401337-Apple",
"operationSetCode": "MacAppDeveloperSign",
"parameters": [
{
"parameterName": "Hardening",
"parameterValue": "--options=runtime"
}
],
"parameters": [ ],
"toolName": "sign",
"toolVersion": "1.0"
}
]
SessionTimeout: 60
SessionTimeout: 120
displayName: Codesign
- script: |
zip -d $(agent.builddirectory)/VSCode-darwin.zip "*.pkg"
displayName: Clean Archive
- script: |
set -e
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
node build/azure-pipelines/common/createAsset.js darwin-unnotarized archive "VSCode-darwin-$VSCODE_QUALITY.zip" $(agent.builddirectory)/VSCode-darwin.zip
displayName: Publish Unnotarized Build
- script: |
APP_ROOT=$(agent.builddirectory)/VSCode-darwin
APP_NAME="`ls $APP_ROOT | head -n 1`"
BUNDLE_IDENTIFIER=$(node -p "require(\"$APP_ROOT/$APP_NAME/Contents/Resources/app/product.json\").darwinBundleIdentifier")
echo "##vso[task.setvariable variable=BundleIdentifier]$BUNDLE_IDENTIFIER"
displayName: Export bundle identifier
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
inputs:
ConnectedServiceName: 'ESRP CodeSign'
FolderPath: '$(agent.builddirectory)'
Pattern: 'VSCode-darwin.zip'
signConfigType: inlineSignParams
inlineOperation: |
[
{
"keyCode": "CP-401337-Apple",
"operationSetCode": "MacAppNotarize",
"parameters": [
{
"parameterName": "BundleId",
"parameterValue": "$(BundleIdentifier)"
}
],
"toolName": "sign",
"toolVersion": "1.0"
}
]
SessionTimeout: 60
displayName: Notarization
- script: |
set -e
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \


@@ -1,6 +1,9 @@
#!/usr/bin/env bash
set -e
# remove pkg from archive
zip -d ../VSCode-darwin.zip "*.pkg"
# publish the build
node build/azure-pipelines/common/createAsset.js \
darwin \
@@ -19,9 +22,7 @@ node build/azure-pipelines/common/createAsset.js \
../vscode-server-darwin.zip
# publish hockeyapp symbols
# node build/azure-pipelines/common/symbols.js "$VSCODE_MIXIN_PASSWORD" "$VSCODE_HOCKEYAPP_TOKEN" x64 "$VSCODE_HOCKEYAPP_ID_MACOS"
# Skip hockey app because build failure.
# https://github.com/microsoft/vscode/issues/90491
node build/azure-pipelines/common/symbols.js "$VSCODE_MIXIN_PASSWORD" "$VSCODE_HOCKEYAPP_TOKEN" x64 "$VSCODE_HOCKEYAPP_ID_MACOS"
# upload configuration
yarn gulp upload-vscode-configuration


@@ -3,7 +3,6 @@ steps:
displayName: 'Install developer certificate'
inputs:
certSecureFile: 'osx_signing_key.p12'
condition: eq(variables['signed'], true)
- script: |
mkdir -p .build
@@ -92,6 +91,11 @@ steps:
node build/azure-pipelines/mixin
displayName: Mix in quality
- script: |
set -e
yarn gulp install-sqltoolsservice
displayName: Install sqltoolsservice
- script: |
set -e
yarn gulp package-rebuild-extensions
@@ -104,38 +108,10 @@ steps:
- script: |
set -e
./scripts/test.sh --build --coverage --reporter mocha-junit-reporter --tfs "Unit Tests"
./scripts/test.sh --build --coverage --reporter mocha-junit-reporter
displayName: Run unit tests
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
- script: |
# Figure out the full absolute path of the product we just built
# including the remote server and configure the integration tests
# to run with these builds instead of running out of sources.
set -e
APP_ROOT=$(agent.builddirectory)/azuredatastudio-darwin
APP_NAME="`ls $APP_ROOT | head -n 1`"
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME/Contents/MacOS/Electron" \
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/azuredatastudio-reh-darwin" \
./scripts/test-integration.sh --build --tfs "Integration Tests"
displayName: Run integration tests (Electron)
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
- script: |
set -e
APP_ROOT=$(agent.builddirectory)/azuredatastudio-darwin
APP_NAME="`ls $APP_ROOT | head -n 1`"
yarn smoketest --build "$APP_ROOT/$APP_NAME" --screenshots "$(build.artifactstagingdirectory)/smokeshots"
displayName: Run smoke tests (Electron)
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
# - script: |
# set -e
# VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/azuredatastudio-reh-web-darwin" \
# yarn smoketest --web --headless --screenshots "$(build.artifactstagingdirectory)/smokeshots"
# displayName: Run smoke tests (Browser)
# condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
- script: |
set -e
pushd ../azuredatastudio-darwin
@@ -163,7 +139,6 @@ steps:
codesign --force --timestamp --options runtime --entitlements $(Build.SourcesDirectory)/build/azure-pipelines/darwin/entitlements.xml -s LPV3BJJYXS *.app
popd
displayName: 'Manual codesign'
condition: and(succeeded(), eq(variables['signed'], true))
- script: |
set -e
@@ -173,6 +148,12 @@ steps:
popd
displayName: 'Archive'
- task: PublishPipelineArtifact@0
displayName: 'Publish SelfSigned'
inputs:
artifactName: darwin-selfsigned
targetPath: $(Build.SourcesDirectory)/.build/darwin/archive/azuredatastudio-darwin.zip
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
displayName: 'ESRP CodeSigning'
inputs:
@@ -193,12 +174,19 @@ steps:
}
]
SessionTimeout: 90
condition: and(succeeded(), eq(variables['signed'], true))
condition: and(succeeded(), eq(variables['signtba'], true))
- script: |
zip -d $(Build.SourcesDirectory)/.build/darwin/archive/azuredatastudio-darwin.zip "*.pkg"
displayName: Clean Archive
condition: and(succeeded(), eq(variables['signed'], true))
condition: and(succeeded(), eq(variables['signtba'], true))
- task: PublishPipelineArtifact@0
displayName: 'Publish Signed'
inputs:
artifactName: darwin-signed
targetPath: $(Build.SourcesDirectory)/.build/darwin/archive/azuredatastudio-darwin.zip
condition: and(succeeded(), eq(variables['signtba'], true))
- task: EsrpCodeSigning@1
displayName: 'ESRP Notarization'
@@ -220,7 +208,14 @@ steps:
}
]
SessionTimeout: 120
condition: and(succeeded(), eq(variables['signed'], true))
condition: and(succeeded(), eq(variables['signtba'], true))
- task: PublishPipelineArtifact@0
displayName: 'Publish Notarized'
inputs:
artifactName: darwin-notarized
targetPath: $(Build.SourcesDirectory)/.build/darwin/archive/azuredatastudio-darwin.zip
condition: and(succeeded(), eq(variables['signtba'], true))
- script: |
set -e
@@ -229,7 +224,6 @@ steps:
- task: PublishBuildArtifacts@1
displayName: 'Publish Artifact: drop'
condition: always()
- task: PublishTestResults@2
displayName: 'Publish Test Results test-results.xml'
@@ -239,6 +233,14 @@ steps:
continueOnError: true
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
- task: PublishTestResults@2
displayName: 'Publish Integration and Smoke Test Results'
inputs:
testResultsFiles: 'dawin-integration-tests-results.xml'
searchFolder: '$(Build.ArtifactStagingDirectory)\test-results'
continueOnError: true
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
- task: PublishCodeCoverageResults@1
displayName: 'Publish code coverage from $(Build.SourcesDirectory)/.build/coverage/cobertura-coverage.xml'
inputs:


@@ -15,10 +15,5 @@ $CommitId = $VersionJson.commit
$ZipName = "azuredatastudio-darwin.zip"
$Zip = "$artifactsDir\darwin\archive\$ZipName"
$UploadName = "azuredatastudio-macos-$Version"
If (-NOT ($Quality -eq "stable")) {
$UploadName = "$UploadName-$Quality"
}
node $sourcesDir\build\azure-pipelines\common\publish.js $Quality darwin archive "$UploadName.zip" $Version true $Zip $CommitId
node $sourcesDir\build\azure-pipelines\common\publish.js $Quality darwin archive $ZipName $Version true $Zip $CommitId
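The removed lines built a versioned upload name with a quality suffix for non-stable builds; the new invocation simply reuses the raw zip name. For reference, the naming rule being deleted, sketched as a function (names are illustrative):

// Sketch of the removed convention: non-stable qualities get a suffix,
// e.g. azuredatastudio-macos-<version>-insider.zip.
function uploadName(base: string, version: string, quality: string): string {
	const name = `${base}-${version}`;
	return quality === 'stable' ? name : `${name}-${quality}`;
}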


@@ -1,13 +0,0 @@
set -e
REPO="$(pwd)"
ROOT="$REPO/.."
PLATFORM_LINUX="linux-x64"
SERVER_BUILD_NAME="azuredatastudio-server-$PLATFORM_LINUX"
# create docker
mkdir -p $REPO/.build/docker
docker build -t azuredatastudio-server -f $REPO/build/azure-pipelines/docker/Dockerfile $ROOT/$SERVER_BUILD_NAME
docker save azuredatastudio-server | gzip > $REPO/.build/docker/azuredatastudio-server-docker.tar.gz
node build/azure-pipelines/common/copyArtifacts.js


@@ -1,96 +0,0 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: '10.15.1'
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
inputs:
versionSpec: "1.x"
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
inputs:
azureSubscription: 'ClientToolsInfra_670062 (88d5392f-a34f-4769-b405-f597fc533613)'
KeyVaultName: ado-secrets
SecretsFilter: 'github-distro-mixin-password'
- script: |
set -e
cat << EOF > ~/.netrc
machine github.com
login azuredatastudio
password $(github-distro-mixin-password)
EOF
git config user.email "andresse@microsoft.com"
git config user.name "AzureDataStudio"
displayName: Prepare tooling
- script: |
set -e
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
git fetch distro
git merge $(node -p "require('./package.json').distro")
displayName: Merge distro
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'BuildCache'
- script: |
set -e
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
displayName: Install dependencies
env:
GITHUB_TOKEN: $(github-distro-mixin-password)
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'BuildCache'
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- script: |
set -e
yarn postinstall
displayName: Run postinstall scripts
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
- script: |
set -e
node build/azure-pipelines/mixin
displayName: Mix in quality
- task: DownloadBuildArtifacts@0
inputs:
buildType: 'current'
downloadPath: '$(Build.SourcesDirectory)/.build'
artifactName: drop
itemPattern: |
drop/linux/server/*.tar.gz
- script: |
set -e
for f in $(Build.SourcesDirectory)/.build/drop/linux/server/*.tar.gz
do
tar -C $(agent.builddirectory) -zxvf $f
rm $f
done
displayName: Unzip artifacts
- script: |
set -e
./build/azure-pipelines/docker/createDrop.sh
displayName: Create Drop
- task: PublishBuildArtifacts@1
displayName: 'Publish Artifact: drop'
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: 'Component Detection'
inputs:
failOnAlert: true


@@ -1,12 +1,8 @@
pool:
vmImage: 'Ubuntu-16.04'
trigger:
branches:
include: ['master']
pr:
branches:
include: ['master']
trigger: none
pr: none
steps:
- task: NodeTool@0
@@ -31,10 +27,10 @@ steps:
git config user.email "vscode@microsoft.com"
git config user.name "VSCode"
git checkout origin/electron-8.0.x
git checkout origin/electron-6.0.x
git merge origin/master
# Push master branch into exploration branch
git push origin HEAD:electron-8.0.x
git push origin HEAD:electron-6.0.x
displayName: Sync & Merge Exploration


@@ -0,0 +1,39 @@
trigger:
branches:
include: ['master']
pr: none
jobs:
- job: ExplorationMerge
pool:
vmImage: Ubuntu-16.04
steps:
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
- script: |
set -e
cat << EOF > ~/.netrc
machine mssqltools.visualstudio.com
login azuredatastudio
password $(DEVOPS_PASSWORD)
EOF
git config user.email "andresse@microsoft.com"
git config user.name "AzureDataStudio"
git remote add explore "$ADS_EXPLORE_REPO"
git fetch explore
git checkout -b merge-branch explore/master
git merge origin/master
git push explore HEAD:master
displayName: Sync & Merge Explore
env:
ADS_EXPLORE_REPO: $(ADS_EXPLORE_REPO)
DEVOPS_PASSWORD: $(DEVOPS_PASSWORD)


@@ -7,80 +7,54 @@ steps:
sudo chmod +x /etc/init.d/xvfb
sudo update-rc.d xvfb defaults
sudo service xvfb start
- task: NodeTool@0
inputs:
versionSpec: "12.13.0"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
inputs:
versionSpec: "1.x"
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
- script: |
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
displayName: Install Dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- script: |
yarn electron x64
displayName: Download Electron
- script: |
yarn gulp hygiene
displayName: Run Hygiene Checks
- script: | # {{SQL CARBON EDIT}} add strict null check
yarn strict-vscode
displayName: Run Strict Null Check
# - script: | {{SQL CARBON EDIT}} remove monaco editor checks
# yarn monaco-compile-check
# displayName: Run Monaco Editor Checks
- script: |
yarn valid-layers-check
displayName: Run Valid Layers Checks
- script: |
yarn compile
displayName: Compile Sources
# - script: | {{SQL CARBON EDIT}} remove step
# yarn download-builtin-extensions
# displayName: Download Built-in Extensions
- script: |
DISPLAY=:10 ./scripts/test.sh --tfs "Unit Tests"
displayName: Run Unit Tests (Electron)
# - script: | {{SQL CARBON EDIT}} disable
# DISPLAY=:10 yarn test-browser --browser chromium
# displayName: Run Unit Tests (Browser)
# - script: | {{SQL CARBON EDIT}} disable
displayName: Run Unit Tests
# - script: | {{SQL CARBON EDIT}} remove step
# DISPLAY=:10 ./scripts/test-integration.sh --tfs "Integration Tests"
# displayName: Run Integration Tests (Electron)
# - task: PublishPipelineArtifact@0
# inputs:
# artifactName: crash-dump-linux
# targetPath: .build/crashes
# displayName: 'Publish Crash Reports'
# condition: succeededOrFailed()
# displayName: Run Integration Tests
- task: PublishTestResults@2
displayName: Publish Tests Results
inputs:


@@ -11,6 +11,12 @@ BUILD="$ROOT/$BUILDNAME"
TARBALL_FILENAME="azuredatastudio-$PLATFORM_LINUX.tar.gz"
TARBALL_PATH="$REPO/.build/linux/archive/$TARBALL_FILENAME"
# create version
PACKAGEJSON="$BUILD/resources/app/package.json"
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")
COMMIT_ID=$(git rev-parse HEAD)
echo -e "{ \"version\": \"$VERSION\", \"quality\": \"$VSCODE_QUALITY\", \"commit\": \"$COMMIT_ID\" }" > "$REPO/.build/version.json"
rm -rf $ROOT/code-*.tar.*
(cd $ROOT && tar -czf $TARBALL_PATH $BUILDNAME)
@@ -23,4 +29,9 @@ SERVER_TARBALL_PATH="$REPO/.build/linux/server/$SERVER_TARBALL_FILENAME"
rm -rf $ROOT/azuredatastudio-server-*.tar.*
(cd $ROOT && mv $LEGACY_SERVER_BUILD_NAME $SERVER_BUILD_NAME && tar --owner=0 --group=0 -czf $SERVER_TARBALL_PATH $SERVER_BUILD_NAME)
# create docker
mkdir -p $REPO/.build/docker
docker build -t azuredatastudio-server -f $REPO/build/azure-pipelines/docker/Dockerfile $ROOT/$SERVER_BUILD_NAME
docker save azuredatastudio-server | gzip > $REPO/.build/docker/azuredatastudio-server-docker.tar.gz
node build/azure-pipelines/common/copyArtifacts.js
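The new block stamps a version.json from the built app's package.json plus the current commit, which copyArtifacts.js then picks up as .build/version.json. The same logic sketched in TypeScript (a hypothetical helper; the script itself stays in bash):

import * as cp from 'child_process';
import * as fs from 'fs';

// Sketch of the version.json generation done above with echo.
function writeVersionJson(packageJsonPath: string, quality: string, outFile: string): void {
	const version = JSON.parse(fs.readFileSync(packageJsonPath, 'utf8')).version;
	const commit = cp.execSync('git rev-parse HEAD').toString().trim();
	fs.writeFileSync(outFile, JSON.stringify({ version, quality, commit }));
}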


@@ -101,13 +101,7 @@ steps:
- script: |
set -e
DISPLAY=:10 ./scripts/test.sh --build --tfs "Unit Tests"
displayName: Run unit tests (Electron)
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
DISPLAY=:10 yarn test-browser --build --browser chromium
displayName: Run unit tests (Browser)
displayName: Run unit tests
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
@@ -120,33 +114,10 @@ steps:
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-linux-x64" \
DISPLAY=:10 ./scripts/test-integration.sh --build --tfs "Integration Tests"
displayName: Run integration tests (Electron)
# yarn smoketest -- --build "$(agent.builddirectory)/VSCode-linux-x64"
displayName: Run integration tests
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
APP_ROOT=$(agent.builddirectory)/VSCode-linux-x64
APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-linux-x64" \
DISPLAY=:10 ./resources/server/test/test-remote-integration.sh
displayName: Run remote integration tests (Electron)
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-linux-x64" \
DISPLAY=:10 ./resources/server/test/test-web-integration.sh --browser chromium
displayName: Run integration tests (Browser)
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- task: PublishPipelineArtifact@0
inputs:
artifactName: crash-dump-linux
targetPath: .build/crashes
displayName: 'Publish Crash Reports'
condition: succeededOrFailed()
- script: |
set -e
yarn gulp "vscode-linux-x64-build-deb"


@@ -28,9 +28,7 @@ rm -rf $ROOT/vscode-server-*.tar.*
node build/azure-pipelines/common/createAsset.js "server-$PLATFORM_LINUX" archive-unsigned "$SERVER_TARBALL_FILENAME" "$SERVER_TARBALL_PATH"
# Publish hockeyapp symbols
# node build/azure-pipelines/common/symbols.js "$VSCODE_MIXIN_PASSWORD" "$VSCODE_HOCKEYAPP_TOKEN" "x64" "$VSCODE_HOCKEYAPP_ID_LINUX64"
# Skip hockey app because build failure.
# https://github.com/microsoft/vscode/issues/90491
node build/azure-pipelines/common/symbols.js "$VSCODE_MIXIN_PASSWORD" "$VSCODE_HOCKEYAPP_TOKEN" "x64" "$VSCODE_HOCKEYAPP_ID_LINUX64"
# Publish DEB
PLATFORM_DEB="linux-deb-x64"


@@ -85,6 +85,12 @@ steps:
node build/azure-pipelines/mixin
displayName: Mix in quality
- script: |
set -e
yarn gulp install-sqltoolsservice
yarn gulp install-ssmsmin
displayName: Install extension binaries
- script: |
set -e
yarn gulp vscode-linux-x64-min-ci
@@ -94,6 +100,12 @@ steps:
env:
VSCODE_MIXIN_PASSWORD: $(github-distro-mixin-password)
- script: |
set -e
service xvfb start
displayName: Start xvfb
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
yarn gulp package-rebuild-extensions
@@ -101,31 +113,6 @@ steps:
yarn gulp package-external-extensions
displayName: Package External extensions
- script: |
set -e
service xvfb start
displayName: Start xvfb
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
- script: |
set -e
DISPLAY=:10 ./scripts/test.sh --build --coverage --reporter mocha-junit-reporter --tfs "Unit Tests"
displayName: Run unit tests (Electron)
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
- script: |
# Figure out the full absolute path of the product we just built
# including the remote server and configure the integration tests
# to run with these builds instead of running out of sources.
set -e
APP_ROOT=$(agent.builddirectory)/azuredatastudio-linux-x64
APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/azuredatastudio-reh-linux-x64" \
DISPLAY=:10 ./scripts/test-integration.sh --build --tfs "Integration Tests"
displayName: Run integration tests (Electron)
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
- script: |
set -e
APP_ROOT=$(agent.builddirectory)/azuredatastudio-linux-x64
@@ -160,23 +147,24 @@ steps:
./build/azure-pipelines/linux/createDrop.sh
displayName: Create Drop
- task: CopyFiles@2
displayName: 'Copy Extension Unit Test Coverage Files to: $(Build.ArtifactStagingDirectory)'
inputs:
SourceFolder: '$(Build.SourcesDirectory)/extensions'
Contents: '*/coverage/**'
TargetFolder: '$(Build.ArtifactStagingDirectory)/test-results/coverage'
- task: PublishBuildArtifacts@1
displayName: 'Publish Artifact: drop'
- task: PublishTestResults@2
displayName: 'Publish Test Results test-results.xml'
inputs:
testResultsFiles: '*.xml'
searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
testResultsFiles: 'test-results.xml'
searchFolder: '$(Build.SourcesDirectory)'
continueOnError: true
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
- task: PublishBuildArtifacts@1
displayName: 'Publish Artifact: drop'
- task: PublishCodeCoverageResults@1
displayName: 'Publish code coverage from $(Build.SourcesDirectory)/.build/coverage/cobertura-coverage.xml'
inputs:
codeCoverageTool: Cobertura
summaryFileLocation: '$(Build.SourcesDirectory)/.build/coverage/cobertura-coverage.xml'
reportDirectory: '$(Build.SourcesDirectory)/.build/coverage'
continueOnError: true
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: 'Component Detection'


@@ -18,34 +18,19 @@ $Arch = "x64"
$PlatformLinux = "linux-$Arch"
$TarballFilename = "azuredatastudio-linux-$Arch.tar.gz"
$TarballPath = "$artifactsDir\linux\archive\$TarballFilename"
$TarballUploadName = "azuredatastudio-linux-$Version"
If (-NOT ($Quality -eq "stable")) {
$TarballUploadName = "$TarballUploadName-$Quality"
}
node $sourcesDir\build\azure-pipelines\common\publish.js $Quality $PlatformLinux archive-unsigned "$TarballUploadName.tar.gz" $Version true $TarballPath $CommitId
node $sourcesDir\build\azure-pipelines\common\publish.js $Quality $PlatformLinux archive-unsigned $TarballFilename $Version true $TarballPath $CommitId
# Publish DEB
$PlatformDeb = "linux-deb-$Arch"
$DebFilename = "$(Get-ChildItem -File -Name $artifactsDir\linux\deb\amd64\deb\*.deb)"
$DebPath = "$artifactsDir\linux\deb\amd64\deb\$DebFilename"
$DebUploadName = "azuredatastudio-linux-$Version"
If (-NOT ($Quality -eq "stable")) {
$DebUploadName = "$DebUploadName-$Quality"
}
node $sourcesDir\build\azure-pipelines\common\publish.js $Quality $PlatformDeb package "$DebUploadName.deb" $Version true $DebPath $CommitId
node $sourcesDir\build\azure-pipelines\common\publish.js $Quality $PlatformDeb package $DebFilename $Version true $DebPath $CommitId
# Publish RPM
$PlatformRpm = "linux-rpm-$Arch"
$RpmFilename = "$(Get-ChildItem -File -Name $artifactsDir\linux\rpm\x86_64\*.rpm)"
$RpmPath = "$artifactsDir\linux\rpm\x86_64\$RpmFilename"
$RpmUploadName = "azuredatastudio-linux-$Version"
If (-NOT ($Quality -eq "stable")) {
$RpmUploadName = "$RpmUploadName-$Quality"
}
node $sourcesDir\build\azure-pipelines\common\publish.js $Quality $PlatformRpm package "$RpmUploadName.rpm" $Version true $RpmPath $CommitId
node $sourcesDir\build\azure-pipelines\common\publish.js $Quality $PlatformRpm package $RpmFilename $Version true $RpmPath $CommitId


@@ -19,7 +19,7 @@
[ "${NETWORKING}" = "no" ] && exit 0
PROG="/usr/bin/Xvfb"
PROG_OPTIONS=":10 -ac -screen 0 1024x768x24"
PROG_OPTIONS=":10 -ac"
PROG_OUTPUT="/tmp/Xvfb.out"
case "$1" in
@@ -50,4 +50,4 @@ case "$1" in
exit 1
esac
exit $RETVAL
exit $RETVAL


@@ -72,29 +72,6 @@ steps:
vstsFeed: 'npm-vscode'
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))
- script: |
set -e
yarn generate-github-config
displayName: Generate GitHub config
condition: succeeded()
env:
OSS_GITHUB_ID: "a5d3c261b032765a78de"
OSS_GITHUB_SECRET: $(oss-github-client-secret)
INSIDERS_GITHUB_ID: "31f02627809389d9f111"
INSIDERS_GITHUB_SECRET: $(insiders-github-client-secret)
STABLE_GITHUB_ID: "baa8a44b5e861d918709"
STABLE_GITHUB_SECRET: $(stable-github-client-secret)
EXPLORATION_GITHUB_ID: "94e8376d3a90429aeaea"
EXPLORATION_GITHUB_SECRET: $(exploration-github-client-secret)
VSO_GITHUB_ID: "3d4be8f37a0325b5817d"
VSO_GITHUB_SECRET: $(vso-github-client-secret)
VSO_PPE_GITHUB_ID: "eabf35024dc2e891a492"
VSO_PPE_GITHUB_SECRET: $(vso-ppe-github-client-secret)
VSO_DEV_GITHUB_ID: "84383ebd8a7c5f5efc5c"
VSO_DEV_GITHUB_SECRET: $(vso-dev-github-client-secret)
GITHUB_APP_ID: "Iv1.ae51e546bef24ff1"
GITHUB_APP_SECRET: $(github-app-client-secret)
- script: |
set -e
yarn postinstall


@@ -63,7 +63,7 @@ steps:
MESSAGE="DefinitelyTyped/DefinitelyTyped#vscode-types-$TAG_VERSION created. Endgame master, please open this link, examine changes and create a PR:"
LINK="https://github.com/DefinitelyTyped/DefinitelyTyped/compare/vscode-types-$TAG_VERSION?quick_pull=1&body=Updating%20VS%20Code%20Extension%20API.%20See%20https%3A%2F%2Fgithub.com%2Fmicrosoft%2Fvscode%2Fissues%2F70175%20for%20details."
MESSAGE2="[@eamodio, @jrieken, @kmaetzel, @egamma]. Please review and merge PR to publish @types/vscode."
MESSAGE2="[@octref, @jrieken, @kmaetzel, @egamma]. Please review and merge PR to publish @types/vscode."
curl -X POST -H "Authorization: Bearer $(SLACK_TOKEN)" \
-H 'Content-type: application/json; charset=utf-8' \


@@ -10,14 +10,10 @@ jobs:
vmImage: 'Ubuntu-16.04'
container: linux-x64
steps:
- script: |
set -e
echo "##vso[build.addbuildtag]$(VSCODE_QUALITY)"
displayName: Add Quality Build Tag
- template: sql-product-compile.yml
- job: macOS
condition: and(succeeded(), eq(variables['VSCODE_BUILD_MACOS'], 'true'))
condition: eq(variables['VSCODE_BUILD_MACOS'], 'true')
pool:
vmImage: macOS-latest
dependsOn:
@@ -27,7 +23,7 @@ jobs:
timeoutInMinutes: 180
- job: Linux
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX'], 'true'))
condition: eq(variables['VSCODE_BUILD_LINUX'], 'true')
pool:
vmImage: 'Ubuntu-16.04'
container: linux-x64
@@ -35,27 +31,15 @@ jobs:
- Compile
steps:
- template: linux/sql-product-build-linux.yml
timeoutInMinutes: 70
- job: Docker
condition: and(succeeded(), eq(variables['VSCODE_BUILD_DOCKER'], 'true'))
pool:
vmImage: 'Ubuntu-16.04'
container: linux-x64
dependsOn:
- Linux
steps:
- template: docker/sql-product-build-docker.yml
- job: Windows
condition: and(succeeded(), eq(variables['VSCODE_BUILD_WIN32'], 'true'))
condition: eq(variables['VSCODE_BUILD_WIN32'], 'true')
pool:
vmImage: VS2017-Win2016
dependsOn:
- Compile
steps:
- template: win32/sql-product-build-win32.yml
timeoutInMinutes: 70
- job: Windows_Test
condition: and(succeeded(), eq(variables['VSCODE_BUILD_WIN32'], 'true'))
@@ -74,7 +58,6 @@ jobs:
dependsOn:
- macOS
- Linux
- Docker
- Windows
- Windows_Test
steps:


@@ -16,10 +16,12 @@ steps:
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
inputs:
versionSpec: "1.x"
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
@@ -54,25 +56,26 @@ steps:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'BuildCache'
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- script: |
set -e
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
displayName: Install dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'BuildCache'
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))
- script: |
set -e
yarn postinstall
displayName: Run postinstall scripts
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), eq(variables['CacheRestored'], 'true'))
# Mixin must run before optimize, because the CSS loader will
# inline small SVGs
@@ -84,11 +87,10 @@ steps:
- script: |
set -e
yarn sqllint
yarn gulp hygiene
yarn strict-vscode
yarn valid-layers-check
displayName: Run hygiene, eslint
displayName: Run hygiene, tslint
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
@@ -101,19 +103,6 @@ steps:
displayName: Compile
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- script: |
set -e
VERSION=$(node -p "require(\"./package.json\").version")
echo -e "{ \"version\": \"$VERSION\", \"quality\": \"$VSCODE_QUALITY\", \"commit\": \"$BUILD_SOURCEVERSION\" }" > ".build/version.json"
node build/azure-pipelines/common/copyArtifacts.js
displayName: Write Version Information
- task: PublishBuildArtifacts@1
displayName: 'Publish Artifact: drop'
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: 'build/.cachesalt, .build/commit, .build/quality'


@@ -1,4 +1,4 @@
<?xml version="1.0" encoding="utf-8"?>
<packages>
<package id="Microsoft.ESRPClient" version="1.2.25" />
<package id="EsrpClient" version="1.0.27" />
</packages>


@@ -2,83 +2,56 @@ steps:
- task: NodeTool@0
inputs:
versionSpec: "12.13.0"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 # {{SQL CARBON EDIT}} update version
inputs:
versionSpec: "1.x"
- task: UsePythonVersion@0
inputs:
versionSpec: '2.x'
addToPath: true
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
- powershell: |
yarn --frozen-lockfile
env:
CHILD_CONCURRENCY: "1"
displayName: Install Dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- powershell: |
yarn electron
displayName: Download Electron
- script: |
yarn gulp hygiene
displayName: Run Hygiene Checks
- script: | # {{SQL CARBON EDIT}} add step
yarn strict-vscode
displayName: Run Strict Null Check
# - powershell: | {{SQL CARBON EDIT}} remove step
# yarn monaco-compile-check
# displayName: Run Monaco Editor Checks
- script: |
yarn valid-layers-check
displayName: Run Valid Layers Checks
- powershell: |
yarn compile
displayName: Compile Sources
# - powershell: | {{SQL CARBON EDIT}} remove step
# yarn download-builtin-extensions
# displayName: Download Built-in Extensions
- powershell: |
.\scripts\test.bat --tfs "Unit Tests"
displayName: Run Unit Tests (Electron)
# - powershell: | {{SQL CARBON EDIT}} disable
# yarn test-browser --browser chromium --browser firefox
# displayName: Run Unit Tests (Browser)
# - powershell: | {{SQL CARBON EDIT}} disable
displayName: Run Unit Tests
# - powershell: | {{SQL CARBON EDIT}} remove step
# .\scripts\test-integration.bat --tfs "Integration Tests"
# displayName: Run Integration Tests (Electron)
# - task: PublishPipelineArtifact@0
# displayName: 'Publish Crash Reports'
# inputs:
# artifactName: crash-dump-windows
# targetPath: .build\crashes
# condition: succeededOrFailed()
# displayName: Run Integration Tests
- task: PublishTestResults@2
displayName: Publish Tests Results
inputs:


@@ -109,14 +109,7 @@ steps:
$ErrorActionPreference = "Stop"
exec { yarn electron $(VSCODE_ARCH) }
exec { .\scripts\test.bat --build --tfs "Unit Tests" }
displayName: Run unit tests (Electron)
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn test-browser --build --browser chromium --browser firefox }
displayName: Run unit tests (Browser)
displayName: Run unit tests
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- powershell: |
@@ -129,33 +122,9 @@ steps:
$AppProductJson = Get-Content -Raw -Path "$AppRoot\resources\app\product.json" | ConvertFrom-Json
$AppNameShort = $AppProductJson.nameShort
exec { $env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe"; $env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\vscode-reh-win32-$(VSCODE_ARCH)"; .\scripts\test-integration.bat --build --tfs "Integration Tests" }
displayName: Run integration tests (Electron)
displayName: Run integration tests
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$AppRoot = "$(agent.builddirectory)\VSCode-win32-$(VSCODE_ARCH)"
$AppProductJson = Get-Content -Raw -Path "$AppRoot\resources\app\product.json" | ConvertFrom-Json
$AppNameShort = $AppProductJson.nameShort
exec { $env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe"; $env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\vscode-reh-win32-$(VSCODE_ARCH)"; .\resources\server\test\test-remote-integration.bat }
displayName: Run remote integration tests (Electron)
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { $env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\vscode-reh-web-win32-$(VSCODE_ARCH)"; .\resources\server\test\test-web-integration.bat --browser firefox }
displayName: Run integration tests (Browser)
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- task: PublishPipelineArtifact@0
inputs:
artifactName: crash-dump-windows-$(VSCODE_ARCH)
targetPath: .build\crashes
displayName: 'Publish Crash Reports'
condition: succeededOrFailed()
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
inputs:
ConnectedServiceName: 'ESRP CodeSign'


@@ -11,12 +11,13 @@ $SystemExe = "$Repo\.build\win32-$Arch\system-setup\VSCodeSetup.exe"
$UserExe = "$Repo\.build\win32-$Arch\user-setup\VSCodeSetup.exe"
$Zip = "$Repo\.build\win32-$Arch\archive\VSCode-win32-$Arch.zip"
$LegacyServer = "$Root\vscode-reh-win32-$Arch"
$Server = "$Root\vscode-server-win32-$Arch"
$ServerName = "vscode-server-win32-$Arch"
$Server = "$Root\$ServerName"
$ServerZip = "$Repo\.build\vscode-server-win32-$Arch.zip"
$Build = "$Root\VSCode-win32-$Arch"
# Create server archive
exec { xcopy $LegacyServer $Server /H /E /I }
exec { Rename-Item -Path $LegacyServer -NewName $ServerName }
exec { .\node_modules\7zip\7zip-lite\7z.exe a -tzip $ServerZip $Server -r }
# get version
@@ -30,8 +31,6 @@ exec { node build/azure-pipelines/common/createAsset.js "$AssetPlatform" setup "
exec { node build/azure-pipelines/common/createAsset.js "$AssetPlatform-user" setup "VSCodeUserSetup-$Arch-$Version.exe" $UserExe }
exec { node build/azure-pipelines/common/createAsset.js "server-$AssetPlatform" archive "vscode-server-win32-$Arch.zip" $ServerZip }
# Skip hockey app because build failure.
# https://github.com/microsoft/vscode/issues/90491
# publish hockeyapp symbols
# $hockeyAppId = if ("$Arch" -eq "ia32") { "$env:VSCODE_HOCKEYAPP_ID_WIN32" } else { "$env:VSCODE_HOCKEYAPP_ID_WIN64" }
# exec { node build/azure-pipelines/common/symbols.js "$env:VSCODE_MIXIN_PASSWORD" "$env:VSCODE_HOCKEYAPP_TOKEN" "$Arch" $hockeyAppId }
$hockeyAppId = if ("$Arch" -eq "ia32") { "$env:VSCODE_HOCKEYAPP_ID_WIN32" } else { "$env:VSCODE_HOCKEYAPP_ID_WIN64" }
exec { node build/azure-pipelines/common/symbols.js "$env:VSCODE_MIXIN_PASSWORD" "$env:VSCODE_HOCKEYAPP_TOKEN" "$Arch" $hockeyAppId }


@@ -67,4 +67,4 @@ $Input = Create-TmpJson @{
$Output = [System.IO.Path]::GetTempFileName()
$ScriptPath = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent
& "$ScriptPath\ESRPClient\packages\Microsoft.ESRPClient.1.2.25\tools\ESRPClient.exe" Sign -a $Auth -p $Policy -i $Input -o $Output
& "$ScriptPath\ESRPClient\packages\EsrpClient.1.0.27\tools\ESRPClient.exe" Sign -a $Auth -p $Policy -i $Input -o $Output
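Note the tool path hardcodes the package id and version that packages.config (updated earlier in this compare) pins, so the two files must move together; this commit changes both from Microsoft.ESRPClient 1.2.25 to EsrpClient 1.0.27. A hypothetical helper that derives the path from one source of truth instead:

// Sketch (not in the repo): build the ESRPClient.exe path from the pinned
// package id and version rather than hardcoding them in two places.
function esrpClientPath(scriptPath: string, id: string, version: string): string {
	return `${scriptPath}\\ESRPClient\\packages\\${id}.${version}\\tools\\ESRPClient.exe`;
}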


@@ -90,6 +90,12 @@ steps:
exec { node build/azure-pipelines/mixin }
displayName: Mix in quality
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn gulp "install-sqltoolsservice" }
displayName: Install sqltoolsservice
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
@@ -97,37 +103,14 @@ steps:
exec { yarn gulp "vscode-win32-x64-min-ci" }
exec { yarn gulp "vscode-reh-win32-x64-min-ci" }
exec { yarn gulp "vscode-reh-web-win32-x64-min-ci" }
exec { yarn gulp "vscode-win32-x64-code-helper" }
exec { yarn gulp "vscode-win32-x64-inno-updater" }
displayName: Build
env:
VSCODE_MIXIN_PASSWORD: $(github-distro-mixin-password)
# - powershell: | @anthonydresser unit tests timeout never existing the node process
# . build/azure-pipelines/win32/exec.ps1
# $ErrorActionPreference = "Stop"
# exec { yarn electron x64 }
# exec { .\scripts\test.bat --build --coverage --reporter mocha-junit-reporter --tfs "Unit Tests" }
# displayName: Run unit tests (Electron)
# condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
- powershell: |
# Figure out the full absolute path of the product we just built
# including the remote server and configure the integration tests
# to run with these builds instead of running out of sources.
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$AppRoot = "$(agent.builddirectory)\azuredatastudio-win32-x64"
$AppProductJson = Get-Content -Raw -Path "$AppRoot\resources\app\product.json" | ConvertFrom-Json
$AppNameShort = $AppProductJson.nameShort
exec { $env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe"; $env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\azuredatastudio-reh-win32-x64"; .\scripts\test-integration.bat --build --tfs "Integration Tests" }
displayName: Run integration tests (Electron)
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { .\scripts\test-unstable.bat --build --tfs }
exec { .\scripts\test-unstable.bat --build --coverage --reporter mocha-junit-reporter }
continueOnError: true
condition: and(succeeded(), eq(variables['RUN_UNSTABLE_TESTS'], 'true'))
displayName: Run unstable tests


@@ -19,9 +19,6 @@ steps:
buildType: 'current'
targetPath: '$(Build.SourcesDirectory)\.build'
artifactName: drop
itemPattern: |
drop/extensions/**
drop/win32-x64/**
- powershell: |
. build/azure-pipelines/win32/exec.ps1
@@ -50,7 +47,7 @@ steps:
$AppProductJson = Get-Content -Raw -Path "$AppRoot\resources\app\product.json" | ConvertFrom-Json
$AppNameShort = $AppProductJson.nameShort
exec { $env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe"; $env:INTEGRATION_TEST_CLI_PATH = "$AppRoot\bin\$AppNameShort"; .\scripts\sql-test-integration.bat }
continueOnError: false
continueOnError: true
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
displayName: Run stable tests
env:


@@ -20,20 +20,10 @@ $Version = $VersionJson.version
$Quality = $VersionJson.quality
$CommitId = $VersionJson.commit
$ZipUploadName = "azuredatastudio-windows-$Version"
$SetupUploadName = "azuredatastudio-windows-setup-$Version"
$UserUploadName = "azuredatastudio-windows-user-setup-$Version"
$assetPlatform = "win32-x64"
If (-NOT ($Quality -eq "stable")) {
$ZipUploadName = "$ZipUploadName-$Quality"
$SetupUploadName = "$SetupUploadName-$Quality"
$UserUploadName = "$UserUploadName-$Quality"
}
node $sourcesDir/build/azure-pipelines/common/publish.js $Quality "$assetPlatform-archive" archive $ZipName $Version true $Zip $CommitId
node $sourcesDir/build/azure-pipelines/common/publish.js $Quality "$assetPlatform-archive" archive "$ZipUploadName.zip" $Version true $Zip $CommitId
node $sourcesDir/build/azure-pipelines/common/publish.js $Quality "$assetPlatform" setup $ExeName $Version true $SystemExe $CommitId
node $sourcesDir/build/azure-pipelines/common/publish.js $Quality "$assetPlatform" setup "$SetupUploadName.exe" $Version true $SystemExe $CommitId
node $sourcesDir/build/azure-pipelines/common/publish.js $Quality "$assetPlatform-user" setup "$UserUploadName.exe" $Version true $UserExe $CommitId
node $sourcesDir/build/azure-pipelines/common/publish.js $Quality "$assetPlatform-user" setup $UserExeName $Version true $UserExe $CommitId


@@ -0,0 +1,7 @@
[
{
"name": "Microsoft.sqlservernotebook",
"version": "0.3.4",
"repo": "https://github.com/Microsoft/azuredatastudio"
}
]
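This new JSON pins an external notebook extension by name, version, and repo. The entry shape matches what the extensions page in the next diff consumes, including the optional forQualities field that diff introduces (interface name and comments are illustrative):

// Illustrative shape of one extensions-list entry.
interface IExtensionEntry {
	name: string;            // e.g. "Microsoft.sqlservernotebook"
	version: string;         // e.g. "0.3.4"
	repo: string;            // source repository URL
	forQualities?: string[]; // optional: restrict entry to given product qualities
}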


@@ -6,10 +6,12 @@
const fs = require('fs');
const path = require('path');
const os = require('os');
// @ts-ignore review
const { remote } = require('electron');
const dialog = remote.dialog;
const builtInExtensionsPath = path.join(__dirname, '..', '..', 'product.json');
const productJsonPath = path.join(__dirname, '..', '..', 'product.json');
const builtInExtensionsPath = path.join(__dirname, '..', 'builtInExtensions.json');
const controlFilePath = path.join(os.homedir(), '.vscode-oss-dev', 'extensions', 'control.json');
function readJson(filePath) {
@@ -50,6 +52,7 @@ function render(el, state) {
}
const ul = document.createElement('ul');
const { quality } = readJson(productJsonPath);
const { builtin, control } = state;
for (const ext of builtin) {
@@ -60,6 +63,10 @@ function render(el, state) {
const name = document.createElement('code');
name.textContent = ext.name;
if (quality && ext.forQualities && !ext.forQualities.includes(quality)) {
name.textContent += ` (only on ${ext.forQualities.join(', ')})`;
}
li.appendChild(name);
const form = document.createElement('form');
@@ -110,7 +117,7 @@ function render(el, state) {
function main() {
const el = document.getElementById('extensions');
const builtin = readJson(builtInExtensionsPath).builtInExtensions;
const builtin = readJson(builtInExtensionsPath);
let control;
try {


@@ -16,8 +16,6 @@ const cp = require('child_process');
const compilation = require('./lib/compilation');
const monacoapi = require('./monaco/api');
const fs = require('fs');
const webpack = require('webpack');
const webpackGulp = require('webpack-stream');
let root = path.dirname(__dirname);
let sha1 = util.getVersion(root);
@@ -43,7 +41,7 @@ let editorEntryPoints = [
];
let editorResources = [
'out-editor-build/vs/base/browser/ui/codicons/**/*.ttf'
'out-editor-build/vs/base/browser/ui/codiconLabel/**/*.ttf'
];
let BUNDLED_FILE_HEADER = [
@@ -72,8 +70,13 @@ const extractEditorSrcTask = task.define('extract-editor-src', () => {
apiusages,
extrausages
],
libs: [
`lib.es5.d.ts`,
`lib.dom.d.ts`,
`lib.webworker.importscripts.d.ts`
],
shakeLevel: 2, // 0-Files, 1-InnerFile, 2-ClassMembers
importIgnorePattern: /(^vs\/css!)/,
importIgnorePattern: /(^vs\/css!)|(promise-polyfill\/polyfill)/,
destRoot: path.join(root, 'out-editor-src'),
redirects: []
});
@@ -126,7 +129,6 @@ const createESMSourcesAndResourcesTask = task.define('extract-editor-esm', () =>
});
const compileEditorESMTask = task.define('compile-editor-esm', () => {
const KEEP_PREV_ANALYSIS = false;
console.log(`Launching the TS compiler at ${path.join(__dirname, '../out-editor-esm')}...`);
let result;
if (process.platform === 'win32') {
@@ -145,45 +147,41 @@ const compileEditorESMTask = task.define('compile-editor-esm', () => {
if (result.status !== 0) {
console.log(`The TS Compilation failed, preparing analysis folder...`);
const destPath = path.join(__dirname, '../../vscode-monaco-editor-esm-analysis');
const keepPrevAnalysis = (KEEP_PREV_ANALYSIS && fs.existsSync(destPath));
const cleanDestPath = (keepPrevAnalysis ? Promise.resolve() : util.rimraf(destPath)());
return cleanDestPath.then(() => {
return util.rimraf(destPath)().then(() => {
fs.mkdirSync(destPath);
// initialize a new repository
cp.spawnSync(`git`, [`init`], {
cwd: destPath
});
// build a list of files to copy
const files = util.rreddir(path.join(__dirname, '../out-editor-esm'));
if (!keepPrevAnalysis) {
fs.mkdirSync(destPath);
// initialize a new repository
cp.spawnSync(`git`, [`init`], {
cwd: destPath
});
// copy files from src
for (const file of files) {
const srcFilePath = path.join(__dirname, '../src', file);
const dstFilePath = path.join(destPath, file);
if (fs.existsSync(srcFilePath)) {
util.ensureDir(path.dirname(dstFilePath));
const contents = fs.readFileSync(srcFilePath).toString().replace(/\r\n|\r|\n/g, '\n');
fs.writeFileSync(dstFilePath, contents);
}
// copy files from src
for (const file of files) {
const srcFilePath = path.join(__dirname, '../src', file);
const dstFilePath = path.join(destPath, file);
if (fs.existsSync(srcFilePath)) {
util.ensureDir(path.dirname(dstFilePath));
const contents = fs.readFileSync(srcFilePath).toString().replace(/\r\n|\r|\n/g, '\n');
fs.writeFileSync(dstFilePath, contents);
}
// create an initial commit to diff against
cp.spawnSync(`git`, [`add`, `.`], {
cwd: destPath
});
// create the commit
cp.spawnSync(`git`, [`commit`, `-m`, `"original sources"`, `--no-gpg-sign`], {
cwd: destPath
});
}
// copy files from tree shaken src
// create an initial commit to diff against
cp.spawnSync(`git`, [`add`, `.`], {
cwd: destPath
});
// create the commit
cp.spawnSync(`git`, [`commit`, `-m`, `"original sources"`, `--no-gpg-sign`], {
cwd: destPath
});
// copy files from esm
for (const file of files) {
const srcFilePath = path.join(__dirname, '../out-editor-src', file);
const srcFilePath = path.join(__dirname, '../out-editor-esm', file);
const dstFilePath = path.join(destPath, file);
if (fs.existsSync(srcFilePath)) {
util.ensureDir(path.dirname(dstFilePath));
@@ -229,13 +227,8 @@ function toExternalDTS(contents) {
if (line.indexOf('declare namespace monaco.') === 0) {
lines[i] = line.replace('declare namespace monaco.', 'export namespace ');
}
if (line.indexOf('declare let MonacoEnvironment') === 0) {
lines[i] = `declare global {\n let MonacoEnvironment: Environment | undefined;\n}`;
// lines[i] = line.replace('declare namespace monaco.', 'export namespace ');
}
}
return lines.join('\n').replace(/\n\n\n+/g, '\n\n');
return lines.join('\n');
}
function filterStream(testFunc) {
@@ -334,13 +327,6 @@ const finalEditorResourcesTask = task.define('final-editor-resources', () => {
);
});
gulp.task('extract-editor-src',
task.series(
util.rimraf('out-editor-src'),
extractEditorSrcTask
)
);
gulp.task('editor-distro',
task.series(
task.parallel(
@@ -367,49 +353,6 @@ gulp.task('editor-distro',
)
);
const bundleEditorESMTask = task.define('editor-esm-bundle-webpack', () => {
const result = es.through();
const webpackConfigPath = path.join(root, 'build/monaco/monaco.webpack.config.js');
const webpackConfig = {
...require(webpackConfigPath),
...{ mode: 'production' }
};
const webpackDone = (err, stats) => {
if (err) {
result.emit('error', err);
return;
}
const { compilation } = stats;
if (compilation.errors.length > 0) {
result.emit('error', compilation.errors.join('\n'));
}
if (compilation.warnings.length > 0) {
result.emit('data', compilation.warnings.join('\n'));
}
};
return webpackGulp(webpackConfig, webpack, webpackDone)
.pipe(gulp.dest('out-editor-esm-bundle'));
});
gulp.task('editor-esm-bundle',
task.series(
task.parallel(
util.rimraf('out-editor-src'),
util.rimraf('out-editor-esm'),
util.rimraf('out-monaco-editor-core'),
util.rimraf('out-editor-esm-bundle'),
),
extractEditorSrcTask,
createESMSourcesAndResourcesTask,
compileEditorESMTask,
bundleEditorESMTask,
)
);
gulp.task('monacodts', task.define('monacodts', () => {
const result = monacoapi.execute();
fs.writeFileSync(result.filePath, result.content);
@@ -455,8 +398,10 @@ function createTscCompileTask(watch) {
// e.g. src/vs/base/common/strings.ts(663,5): error TS2322: Type '1234' is not assignable to type 'string'.
let fullpath = path.join(root, match[1]);
let message = match[3];
// @ts-ignore
reporter(fullpath + message);
} else {
// @ts-ignore
reporter(str);
}
}
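For reference, the effect of the importIgnorePattern change in the extract-editor-src options can be checked in isolation (the sample import paths are illustrative):

// The broadened pattern also excludes the promise polyfill from import analysis.
const importIgnorePattern = /(^vs\/css!)|(promise-polyfill\/polyfill)/;
console.log(importIgnorePattern.test('vs/css!./media/editor'));     // true: css-loader import, ignored
console.log(importIgnorePattern.test('promise-polyfill/polyfill')); // true: newly ignored
console.log(importIgnorePattern.test('vs/editor/editor.api'));      // false: still analyzed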

@@ -33,9 +33,7 @@ const all = [
'scripts/**/*',
'src/**/*',
'test/**/*',
'!test/**/out/**',
'!**/node_modules/**',
'!build/actions/**/*.js' // {{ SQL CARBON EDIT }}
'!**/node_modules/**'
];
const indentationFilter = [
@@ -55,7 +53,8 @@ const indentationFilter = [
'!src/vs/base/common/marked/marked.js',
'!src/vs/base/node/terminateProcess.sh',
'!src/vs/base/node/cpuUsage.sh',
'!test/unit/assert.js',
'!test/assert.js',
'!build/testSetup.js',
// except specific folders
'!test/automation/out/**',
@@ -67,6 +66,7 @@ const indentationFilter = [
// except multiple specific files
'!**/package.json',
'!**/package-lock.json', // {{SQL CARBON EDIT}}
'!**/yarn.lock',
'!**/yarn-error.log',
@@ -84,7 +84,7 @@ const indentationFilter = [
'!src/vs/*/**/*.d.ts',
'!src/typings/**/*.d.ts',
'!extensions/**/*.d.ts',
'!**/*.{svg,exe,png,bmp,scpt,bat,cmd,cur,ttf,woff,eot,md,ps1,template,yaml,yml,d.ts.recipe,ico,icns,plist}',
'!**/*.{svg,exe,png,bmp,scpt,bat,cmd,cur,ttf,woff,eot,md,ps1,template,yaml,yml,d.ts.recipe,ico,icns}',
'!build/{lib,download}/**/*.js',
'!build/**/*.sh',
'!build/azure-pipelines/**/*.js',
@@ -95,22 +95,15 @@ const indentationFilter = [
'!**/*.dockerfile',
'!extensions/markdown-language-features/media/*.js',
// {{SQL CARBON EDIT}}
'!build/actions/**/*.js',
'!**/*.{xlf,docx,sql,vsix,bacpac,ipynb,jpg}',
'!**/*.{xlf,docx,sql,vsix,bacpac,ipynb}',
'!extensions/mssql/sqltoolsservice/**',
'!extensions/import/flatfileimportservice/**',
'!extensions/admin-tool-ext-win/ssmsmin/**',
'!extensions/resource-deployment/notebooks/**',
'!extensions/mssql/notebooks/**',
'!extensions/integration-tests/testData/**',
'!extensions/arc/src/controller/generated/**',
'!extensions/sql-database-projects/resources/templates/*.xml',
'!extensions/sql-database-projects/src/test/baselines/*.xml',
'!extensions/sql-database-projects/src/test/baselines/*.json',
'!extensions/sql-database-projects/src/test/baselines/*.sqlproj',
'!extensions/big-data-cluster/src/bigDataCluster/controller/apiGenerated.ts',
'!extensions/big-data-cluster/src/bigDataCluster/controller/clusterApiGenerated2.ts',
'!resources/linux/snap/electron-launch'
'!extensions/big-data-cluster/src/bigDataCluster/controller/clusterApiGenerated2.ts'
];
const copyrightFilter = [
@@ -132,6 +125,7 @@ const copyrightFilter = [
'!**/*.disabled',
'!**/*.code-workspace',
'!**/*.js.map',
'!**/promise-polyfill/polyfill.js',
'!build/**/*.init',
'!resources/linux/snap/snapcraft.yaml',
'!resources/linux/snap/electron-launch',
@@ -147,9 +141,9 @@ const copyrightFilter = [
'!extensions/mssql/src/hdfs/webhdfs.ts',
'!src/sql/workbench/contrib/notebook/browser/outputs/tableRenderers.ts',
'!src/sql/workbench/contrib/notebook/common/models/url.ts',
'!src/sql/workbench/services/notebook/browser/outputs/renderMimeInterfaces.ts',
'!src/sql/workbench/contrib/notebook/browser/models/renderMimeInterfaces.ts',
'!src/sql/workbench/contrib/notebook/browser/models/outputProcessor.ts',
'!src/sql/workbench/services/notebook/browser/outputs/mimemodel.ts',
'!src/sql/workbench/contrib/notebook/browser/models/mimemodel.ts',
'!src/sql/workbench/contrib/notebook/browser/cellViews/media/*.css',
'!src/sql/base/browser/ui/table/plugins/rowSelectionModel.plugin.ts',
'!src/sql/base/browser/ui/table/plugins/rowDetailView.ts',
@@ -157,11 +151,11 @@ const copyrightFilter = [
'!src/sql/base/browser/ui/table/plugins/checkboxSelectColumn.plugin.ts',
'!src/sql/base/browser/ui/table/plugins/cellSelectionModel.plugin.ts',
'!src/sql/base/browser/ui/table/plugins/autoSizeColumns.plugin.ts',
'!src/sql/workbench/services/notebook/browser/outputs/sanitizer.ts',
'!src/sql/workbench/contrib/notebook/browser/outputs/sanitizer.ts',
'!src/sql/workbench/contrib/notebook/browser/outputs/renderers.ts',
'!src/sql/workbench/services/notebook/browser/outputs/registry.ts',
'!src/sql/workbench/services/notebook/browser/outputs/factories.ts',
'!src/sql/workbench/services/notebook/common/nbformat.ts',
'!src/sql/workbench/contrib/notebook/browser/outputs/registry.ts',
'!src/sql/workbench/contrib/notebook/browser/outputs/factories.ts',
'!src/sql/workbench/contrib/notebook/common/models/nbformat.ts',
'!extensions/markdown-language-features/media/tomorrow.css',
'!src/sql/workbench/browser/modelComponents/media/highlight.css',
'!src/sql/workbench/contrib/notebook/electron-browser/cellViews/media/highlight.css',
@@ -205,7 +199,14 @@ const tsHygieneFilter = [
'!extensions/big-data-cluster/src/bigDataCluster/controller/apiGenerated.ts', // {{SQL CARBON EDIT}},
'!extensions/big-data-cluster/src/bigDataCluster/controller/tokenApiGenerated.ts', // {{SQL CARBON EDIT}},
'!src/vs/workbench/services/themes/common/textMateScopeMatcher.ts', // {{SQL CARBON EDIT}} skip this because we have no plans on touching this and its not ours
'!src/vs/workbench/contrib/extensions/browser/extensionRecommendationsService.ts' // {{SQL CARBON EDIT}} skip this because known issue
'!src/vs/workbench/contrib/extensions/browser/extensionTipsService.ts' // {{SQL CARBON EDIT}} skip this because known issue
];
const sqlHygieneFilter = [ // for rules we want to only apply to our code
'src/sql/**/*.ts',
'!**/node_modules/**',
'extensions/**/*.ts',
'!extensions/{git,search-result,vscode-test-resolver,extension-editing,json-language-features,vscode-colorize-tests}/**/*.ts',
];
const copyrightHeaderLines = [
@@ -375,8 +376,20 @@ function hygiene(some) {
errorCount += results.errorCount;
}));
const sqlJavascript = result
.pipe(filter(sqlHygieneFilter))
.pipe(gulpeslint({
configFile: '.eslintrc.sql.json',
rulePaths: ['./build/lib/eslint']
}))
.pipe(gulpeslint.formatEach('compact'))
.pipe(gulpeslint.results(results => {
errorCount += results.warningCount;
errorCount += results.errorCount;
}));
let count = 0;
return es.merge(typescript, javascript)
return es.merge(typescript, javascript, sqlJavascript)
.pipe(es.through(function (data) {
count++;
if (process.env['TRAVIS'] && count % 10 === 0) {
@@ -407,7 +420,7 @@ function createGitIndexVinyls(paths) {
return e(err);
}
cp.exec(`git show :${relativePath}`, { maxBuffer: 2000 * 1024, encoding: 'buffer' }, (err, out) => {
cp.exec(`git show ":${relativePath}"`, { maxBuffer: 2000 * 1024, encoding: 'buffer' }, (err, out) => {
if (err) {
return e(err);
}
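The quoting change in the last hunk guards against staged paths that contain spaces; a tiny illustration (the sample path is hypothetical):

const relativePath = 'src/my file.ts';
// Unquoted, the shell splits the pathspec into two arguments:
console.log(`git show :${relativePath}`);   // git show :src/my file.ts
// Quoted, it stays a single pathspec:
console.log(`git show ":${relativePath}"`); // git show ":src/my file.ts"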

@@ -5,9 +5,13 @@
'use strict';
const gulp = require('gulp');
const util = require('./lib/util');
const tsfmt = require('typescript-formatter');
const es = require('event-stream');
const filter = require('gulp-filter');
const del = require('del');
const serviceDownloader = require('service-downloader').ServiceDownloadProvider;
const platform = require('service-downloader/out/platform').PlatformInformation;
const path = require('path');
const ext = require('./lib/extensions');
const task = require('./lib/task');
@@ -15,6 +19,9 @@ const glob = require('glob');
const vsce = require('vsce');
const mkdirp = require('mkdirp');
gulp.task('clean-mssql-extension', util.rimraf('extensions/mssql/node_modules'));
gulp.task('clean-credentials-extension', util.rimraf('extensions/credentials/node_modules'));
gulp.task('fmt', () => formatStagedFiles());
const formatFiles = (some) => {
const formatting = es.map(function (file, cb) {
@@ -89,6 +96,45 @@ const formatStagedFiles = () => {
});
};
function installService() {
let config = require('../extensions/mssql/config.json');
return platform.getCurrent().then(p => {
let runtime = p.runtimeId;
// fix path since it won't be correct
config.installDirectory = path.join(__dirname, '../extensions/mssql/src', config.installDirectory);
let installer = new serviceDownloader(config);
let serviceInstallFolder = installer.getInstallDirectory(runtime);
console.log('Cleaning up the install folder: ' + serviceInstallFolder);
return del(serviceInstallFolder + '/*').then(() => {
console.log('Installing the service. Install folder: ' + serviceInstallFolder);
return installer.installService(runtime);
}, delError => {
console.log('failed to delete the install folder error: ' + delError);
});
});
}
gulp.task('install-sqltoolsservice', () => {
return installService();
});
gulp.task('install-ssmsmin', () => {
const config = require('../extensions/admin-tool-ext-win/config.json');
const runtime = 'Windows_64'; // admin-tool-ext is a Windows-only extension and we only ship a 64-bit version, so the binaries are locked to Windows_64
// fix path since it won't be correct
config.installDirectory = path.join(__dirname, '..', 'extensions', 'admin-tool-ext-win', config.installDirectory);
let installer = new serviceDownloader(config);
const serviceInstallFolder = installer.getInstallDirectory(runtime);
const serviceCleanupFolder = path.join(serviceInstallFolder, '..');
console.log('Cleaning up the install folder: ' + serviceCleanupFolder);
return del(serviceCleanupFolder + '/*').then(() => {
console.log('Installing the service. Install folder: ' + serviceInstallFolder);
return installer.installService(runtime);
}, delError => {
console.log('failed to delete the install folder error: ' + delError);
});
});
const root = path.dirname(__dirname);
gulp.task('package-external-extensions', task.series(

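Both install tasks above share the same clean-then-install sequence around service-downloader. A condensed sketch with the shared part factored into a helper (the helper name is ours, not the gulpfile's):

const del = require('del');
// `installer` is a service-downloader ServiceDownloadProvider, as in the tasks above.
function cleanAndInstall(installer, runtime, cleanupFolder) {
    console.log('Cleaning up the install folder: ' + cleanupFolder);
    return del(cleanupFolder + '/*').then(() => {
        console.log('Installing the service. Install folder: ' + installer.getInstallDirectory(runtime));
        return installer.installService(runtime);
    }, delError => {
        console.log('failed to delete the install folder error: ' + delError);
    });
}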
@@ -36,8 +36,10 @@ const { compileBuildTask } = require('./gulpfile.compile');
const { compileExtensionsBuildTask } = require('./gulpfile.extensions');
const productionDependencies = deps.getProductionDependencies(path.dirname(__dirname));
const baseModules = Object.keys(process.binding('natives')).filter(n => !/^_|\//.test(n));
const nodeModules = [ // {{SQL CARBON EDIT}}
// {{SQL CARBON EDIT}}
const nodeModules = [
'electron',
'original-fs',
'rxjs/Observable',
@@ -76,7 +78,7 @@ const vscodeResources = [
'out-build/vs/base/common/performance.js',
'out-build/vs/base/node/languagePacks.js',
'out-build/vs/base/node/{stdForkStart.js,terminateProcess.sh,cpuUsage.sh,ps.sh}',
'out-build/vs/base/browser/ui/codicons/codicon/**',
'out-build/vs/base/browser/ui/codiconLabel/codicon/**',
'out-build/vs/workbench/browser/media/*-theme.css',
'out-build/vs/workbench/contrib/debug/**/*.json',
'out-build/vs/workbench/contrib/externalTerminal/**/*.scpt',
@@ -121,6 +123,7 @@ const optimizeVSCodeTask = task.define('optimize-vscode', task.series(
resources: vscodeResources,
loaderConfig: common.loaderConfig(nodeModules),
out: 'out-vscode',
inlineAmdImages: true,
bundleInfo: undefined
})
));
@@ -186,7 +189,6 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op
const checksums = computeChecksums(out, [
'vs/workbench/workbench.desktop.main.js',
'vs/workbench/workbench.desktop.main.css',
'vs/workbench/services/extensions/node/extensionHostProcess.js',
'vs/code/electron-browser/workbench/workbench.html',
'vs/code/electron-browser/workbench/workbench.js'
]);
@@ -244,7 +246,7 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op
const deps = gulp.src(dependenciesSrc, { base: '.', dot: true })
.pipe(filter(['**', '!**/package-lock.json']))
.pipe(util.cleanNodeModules(path.join(__dirname, '.nativeignore')))
.pipe(createAsar(path.join(process.cwd(), 'node_modules'), ['**/*.node', '**/vscode-ripgrep/bin/*', '**/node-pty/build/Release/*', '**/*.wasm'], 'app/node_modules.asar'));
.pipe(createAsar(path.join(process.cwd(), 'node_modules'), ['**/*.node', '**/vscode-ripgrep/bin/*', '**/node-pty/build/Release/*'], 'app/node_modules.asar'));
let all = es.merge(
packageJsonStream,
@@ -273,9 +275,6 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op
}
let result = all
.pipe(fileLengthFilter)
.pipe(filelength)
.pipe(fileLengthFilter.restore)
.pipe(util.skipDirectories())
.pipe(util.fixWin32DirectoryPermissions())
.pipe(electron(_.extend({}, config, { platform, arch, ffmpegChromium: true })))
@@ -330,35 +329,11 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op
};
}
const fileLengthFilter = filter([
'**',
'!extensions/import/*.docx',
'!extensions/admin-tool-ext-win/license/**'
], {restore: true});
const filelength = es.through(function (file) {
const fileName = path.basename(file.relative);
const fileDir = path.dirname(file.relative);
//check the filename is < 50 characters (basename gets the filename with extension).
if (fileName.length > 50) {
console.error(`File name '${fileName}' under ${fileDir} is too long. Rename file to have less than 50 characters.`);
throw new Error('File name exceeds acceptable length of 50 characters: ' + fileName);
}
if (file.relative.length > 150) {
console.error(`File path ${file.relative} exceeds acceptable file-length. Rename the path to have less than 150 characters.`);
throw new Error('File path exceeds acceptable path-length of 150 characters: ' + file.relative);
}
this.emit('data', file);
});
const buildRoot = path.dirname(root);
const BUILD_TARGETS = [
{ platform: 'win32', arch: 'ia32' },
{ platform: 'win32', arch: 'x64' },
{ platform: 'win32', arch: 'arm64' },
{ platform: 'darwin', arch: null, opts: { stats: true } },
{ platform: 'linux', arch: 'ia32' },
{ platform: 'linux', arch: 'x64' },
@@ -490,30 +465,20 @@ const generateVSCodeConfigurationTask = task.define('generate-vscode-configurati
const extensionsDir = path.join(os.tmpdir(), 'tmpextdir');
const appName = process.env.VSCODE_QUALITY === 'insider' ? 'Visual\\ Studio\\ Code\\ -\\ Insiders.app' : 'Visual\\ Studio\\ Code.app';
const appPath = path.join(buildDir, `VSCode-darwin/${appName}/Contents/Resources/app/bin/code`);
const codeProc = cp.exec(
`${appPath} --export-default-configuration='${allConfigDetailsPath}' --wait --user-data-dir='${userDataDir}' --extensions-dir='${extensionsDir}'`,
(err, stdout, stderr) => {
clearTimeout(timer);
if (err) {
console.log(`err: ${err} ${err.message} ${err.toString()}`);
reject(err);
}
const codeProc = cp.exec(`${appPath} --export-default-configuration='${allConfigDetailsPath}' --wait --user-data-dir='${userDataDir}' --extensions-dir='${extensionsDir}'`);
if (stdout) {
console.log(`stdout: ${stdout}`);
}
if (stderr) {
console.log(`stderr: ${stderr}`);
}
resolve();
}
);
const timer = setTimeout(() => {
codeProc.kill();
reject(new Error('export-default-configuration process timed out'));
}, 12 * 1000);
}, 10 * 1000);
codeProc.stdout.on('data', d => console.log(d.toString()));
codeProc.stderr.on('data', d => console.log(d.toString()));
codeProc.on('exit', () => {
clearTimeout(timer);
resolve();
});
codeProc.on('error', err => {
clearTimeout(timer);

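Both variants shown in the last hunk supervise the exported-configuration process with a kill timer; a minimal standalone sketch of that pattern (the callback-based shape, with the 12-second deadline from the hunk):

const cp = require('child_process');
function runWithTimeout(command, ms) {
    return new Promise((resolve, reject) => {
        const proc = cp.exec(command, (err, stdout, stderr) => {
            clearTimeout(timer); // the process finished before the deadline
            if (err) { return reject(err); }
            if (stdout) { console.log(`stdout: ${stdout}`); }
            if (stderr) { console.log(`stderr: ${stderr}`); }
            resolve();
        });
        const timer = setTimeout(() => {
            proc.kill();
            reject(new Error('export-default-configuration process timed out'));
        }, ms);
    });
}
// e.g. runWithTimeout(`${appPath} --export-default-configuration=...`, 12 * 1000);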
@@ -44,7 +44,7 @@ function prepareDebPackage(arch) {
.pipe(replace('@@NAME_SHORT@@', product.nameShort))
.pipe(replace('@@NAME@@', product.applicationName))
.pipe(replace('@@EXEC@@', `/usr/share/${product.applicationName}/${product.applicationName}`))
.pipe(replace('@@ICON@@', product.linuxIconName))
.pipe(replace('@@ICON@@', `/usr/share/pixmaps/${product.linuxIconName}.png`))
.pipe(replace('@@URLPROTOCOL@@', product.urlProtocol));
const appdata = gulp.src('resources/linux/code.appdata.xml', { base: '.' })
@@ -92,7 +92,9 @@ function prepareDebPackage(arch) {
const postinst = gulp.src('resources/linux/debian/postinst.template', { base: '.' })
.pipe(replace('@@NAME@@', product.applicationName))
.pipe(replace('@@ARCHITECTURE@@', debArch))
// @ts-ignore JSON checking: quality is optional
.pipe(replace('@@QUALITY@@', product.quality || '@@QUALITY@@'))
// @ts-ignore JSON checking: updateUrl is optional
.pipe(replace('@@UPDATEURL@@', product.updateUrl || '@@UPDATEURL@@'))
.pipe(rename('DEBIAN/postinst'));
@@ -167,7 +169,9 @@ function prepareRpmPackage(arch) {
.pipe(replace('@@RELEASE@@', linuxPackageRevision))
.pipe(replace('@@ARCHITECTURE@@', rpmArch))
.pipe(replace('@@LICENSE@@', product.licenseName))
// @ts-ignore JSON checking: quality is optional
.pipe(replace('@@QUALITY@@', product.quality || '@@QUALITY@@'))
// @ts-ignore JSON checking: updateUrl is optional
.pipe(replace('@@UPDATEURL@@', product.updateUrl || '@@UPDATEURL@@'))
.pipe(replace('@@DEPENDENCIES@@', rpmDependencies[rpmArch].join(', ')))
.pipe(rename('SPECS/' + product.applicationName + '.spec'));
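The `|| '@@QUALITY@@'` and `|| '@@UPDATEURL@@'` fallbacks above keep the template token intact when the optional product.json field is absent, so a later stage can still substitute it. A one-line check (the sample product object is hypothetical):

const product = {}; // no quality field, as the @ts-ignore comments anticipate
const line = 'QUALITY=@@QUALITY@@'.replace('@@QUALITY@@', product.quality || '@@QUALITY@@');
console.log(line); // QUALITY=@@QUALITY@@ (placeholder survives for a later pass)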

@@ -128,7 +128,6 @@ function archiveWin32Setup(arch) {
gulp.task(task.define('vscode-win32-ia32-archive', task.series(util.rimraf(zipDir('ia32')), archiveWin32Setup('ia32'))));
gulp.task(task.define('vscode-win32-x64-archive', task.series(util.rimraf(zipDir('x64')), archiveWin32Setup('x64'))));
gulp.task(task.define('vscode-win32-arm64-archive', task.series(util.rimraf(zipDir('arm64')), archiveWin32Setup('arm64'))));
function copyInnoUpdater(arch) {
return () => {
@@ -151,4 +150,3 @@ gulp.task(task.define('vscode-win32-x64-inno-updater', task.series(copyInnoUpdat
gulp.task(task.define('vscode-win32-ia32-code-helper', task.series(updateIcon(path.join(buildPath('ia32'), 'resources', 'app', 'out', 'vs', 'platform', 'files', 'node', 'watcher', 'win32', 'CodeHelper.exe')))));
gulp.task(task.define('vscode-win32-x64-code-helper', task.series(updateIcon(path.join(buildPath('x64'), 'resources', 'app', 'out', 'vs', 'platform', 'files', 'node', 'watcher', 'win32', 'CodeHelper.exe')))));
gulp.task(task.define('vscode-win32-arm64-code-helper', task.series(updateIcon(path.join(buildPath('arm64'), 'resources', 'app', 'out', 'vs', 'platform', 'files', 'node', 'watcher', 'win32', 'CodeHelper.exe')))));

@@ -4,7 +4,6 @@
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
exports.createAsar = void 0;
const path = require("path");
const es = require("event-stream");
const pickle = require('chromium-pickle-js');

@@ -18,7 +18,7 @@ const fancyLog = require('fancy-log');
const ansiColors = require('ansi-colors');
const root = path.dirname(path.dirname(__dirname));
const builtInExtensions = JSON.parse(fs.readFileSync(path.join(__dirname, '../../product.json'), 'utf8')).builtInExtensions;
const builtInExtensions = require('../builtInExtensions.json');
const controlFilePath = path.join(os.homedir(), '.vscode-oss-dev', 'extensions', 'control.json');
const ENABLE_LOGGING = !process.env['VSCODE_BUILD_BUILTIN_EXTENSIONS_SILENCE_PLEASE'];

@@ -4,7 +4,6 @@
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.bundle = void 0;
const fs = require("fs");
const path = require("path");
const vm = require("vm");

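The `exports.<name> = void 0;` lines touched in these compiled files are characteristic of newer TypeScript CommonJS emit, which pre-declares each export binding before assigning it. A hand-written illustration of that shape (not copied from the actual build output):

// Roughly what `export function bundle(...) { ... }` compiles to:
Object.defineProperty(exports, "__esModule", { value: true });
exports.bundle = void 0;  // pre-declare the export binding
function bundle(entryPoints) {
    return entryPoints;
}
exports.bundle = bundle;  // assign once the function exists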
@@ -4,7 +4,6 @@
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
exports.watchTask = exports.compileTask = void 0;
const es = require("event-stream");
const fs = require("fs");
const gulp = require("gulp");

@@ -4,7 +4,6 @@
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
exports.config = exports.getElectronVersion = void 0;
const fs = require("fs");
const path = require("path");
const vfs = require("vinyl-fs");

@@ -4,7 +4,6 @@
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.createImportRuleListener = void 0;
function createImportRuleListener(validateImport) {
function _checkImport(node) {
if (node && node.type === 'Literal' && typeof node.value === 'string') {

@@ -1,35 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
const experimental_utils_1 = require("@typescript-eslint/experimental-utils");
module.exports = new class ApiLiteralOrTypes {
constructor() {
this.meta = {
docs: { url: 'https://github.com/microsoft/vscode/wiki/Extension-API-guidelines#creating-objects' },
messages: { sync: '`createXYZ`-functions are constructor-replacements and therefore must return sync', }
};
}
create(context) {
return {
['TSDeclareFunction Identifier[name=/create.*/]']: (node) => {
var _a;
const decl = node.parent;
if (((_a = decl.returnType) === null || _a === void 0 ? void 0 : _a.typeAnnotation.type) !== experimental_utils_1.AST_NODE_TYPES.TSTypeReference) {
return;
}
if (decl.returnType.typeAnnotation.typeName.type !== experimental_utils_1.AST_NODE_TYPES.Identifier) {
return;
}
const ident = decl.returnType.typeAnnotation.typeName.name;
if (ident === 'Promise' || ident === 'Thenable') {
context.report({
node,
messageId: 'sync'
});
}
}
};
}
};
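The deleted rule reported any create*-named declaration whose annotated return type is Promise or Thenable. Its core check, distilled into a runnable snippet (the helper is ours; the identifiers it tests come from the rule above):

// A create* factory must construct synchronously; returning a promise is flagged.
function violatesApiLiteralOrTypes(returnTypeName) {
    return returnTypeName === 'Promise' || returnTypeName === 'Thenable';
}
console.log(violatesApiLiteralOrTypes('Promise'));  // true  -> reported with messageId 'sync'
console.log(violatesApiLiteralOrTypes('Terminal')); // false -> allowed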

Some files were not shown because too many files have changed in this diff.