Mirror of https://github.com/ckaczor/azuredatastudio.git (synced 2026-01-13 17:22:15 -05:00)
Merge from vscode bead496a613e475819f89f08e9e882b841bc1fe8 (#14883)
* Merge from vscode bead496a613e475819f89f08e9e882b841bc1fe8
* Bump distro
* Upgrade GCC to 4.9 due to yarn install errors
* Update build image
* Fix bootstrap base url
* Bump distro
* Fix build errors
* Update source map file
* Disable checkbox for blocking migration issues (#15131)
* disable checkbox for blocking issues
* wip
* disable checkbox fixes
* fix strings
* Remove duplicate tsec command
* Default to off for tab color if settings not present
* re-skip failing tests
* Fix mocha error
* Bump sqlite version & fix notebooks search view
* Turn off esbuild warnings
* Update esbuild log level
* Fix overflowactionbar tests
* Fix ts-ignore in dropdown tests
* cleanup/fixes
* Fix hygiene
* Bundle in entire zone.js module
* Remove extra constructor param
* bump distro for web compile break
* bump distro for web compile break v2
* Undo log level change
* New distro
* Fix integration test scripts
* remove the "no yarn.lock changes" workflow
* fix scripts v2
* Update unit test scripts
* Ensure ads-kerberos2 updates in .vscodeignore
* Try fix unit tests
* Upload crash reports
* remove nogpu
* always upload crashes
* Use bash script
* Consolidate data/ext dir names
* Create in tmp directory
Co-authored-by: chlafreniere <hichise@gmail.com>
Co-authored-by: Christopher Suh <chsuh@microsoft.com>
Co-authored-by: chgagnon <chgagnon@microsoft.com>
@@ -14,3 +14,4 @@
**/extensions/**/build/**
**/extensions/markdown-language-features/media/**
**/extensions/typescript-basics/test/colorize-fixtures/**
**/extensions/**/dist/**
@@ -939,6 +939,13 @@
"*" // node modules
]
},
{
"target": "**/test/monaco/**",
"restrictions": [
"**/test/monaco/**",
"*" // node modules
]
},
{
"target": "**/api/**.test.ts",
"restrictions": [
@@ -946,9 +953,9 @@
"assert",
"sinon",
"crypto",
"vscode",
"typemoq",
"azdata"
"vscode",
"typemoq",
"azdata"
]
},
{
35 .github/workflows/build-chat.yml vendored Normal file
@@ -0,0 +1,35 @@
name: "Build Chat"

on:
  workflow_run:
    workflows:
      - CI
    types:
      - completed
    branches:
      - master
      - release/*

jobs:
  main:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout Actions
        uses: actions/checkout@v2
        with:
          repository: "microsoft/vscode-github-triage-actions"
          path: ./actions
      - name: Install Actions
        run: npm install --production --prefix ./actions
      - name: Install Additional Dependencies
        # Pulls in a bunch of other packages that arent needed for the rest of the actions
        run: npm install @azure/storage-blob@12.1.1
      - name: Build Chat
        uses: ./actions/build-chat
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          slack_token: ${{ secrets.SLACK_TOKEN }}
          storage_connection_string: ${{ secrets.BUILD_CHAT_STORAGE_CONNECTION_STRING }}
          workflow_run_url: ${{ github.event.workflow_run.url }}
          notification_channel: build
          log_channel: bot-log
2 .github/workflows/deep-classifier-runner.yml vendored
@@ -13,7 +13,7 @@ jobs:
        uses: actions/checkout@v2
        with:
          repository: "microsoft/vscode-github-triage-actions"
          ref: v40
          ref: v42
          path: ./actions
      - name: Install Actions
        run: npm install --production --prefix ./actions
@@ -11,7 +11,7 @@ jobs:
        uses: actions/checkout@v2
        with:
          repository: "microsoft/vscode-github-triage-actions"
          ref: v40
          ref: v42
          path: ./actions
      - name: Install Actions
        run: npm install --production --prefix ./actions
2 .github/workflows/latest-release-monitor.yml vendored
@@ -14,7 +14,7 @@ jobs:
        with:
          repository: "microsoft/vscode-github-triage-actions"
          path: ./actions
          ref: v40
          ref: v42
      - name: Install Actions
        run: npm install --production --prefix ./actions
      - name: Install Storage Module
22 .gitignore vendored
@@ -5,26 +5,8 @@ Thumbs.db
node_modules/
.build/
extensions/**/dist/
out/
out-build/
out-editor/
out-editor-src/
out-editor-build/
out-editor-esm/
out-editor-esm-bundle/
out-editor-min/
out-monaco-editor-core/
out-vscode/
out-vscode-min/
out-vscode-reh/
out-vscode-reh-min/
out-vscode-reh-pkg/
out-vscode-reh-web/
out-vscode-reh-web-min/
out-vscode-reh-web-pkg/
out-vscode-web/
out-vscode-web-min/
out-vscode-web-pkg/
/out*/
/extensions/**/out/
src/vs/server
resources/server
build/node_modules
3 .vscode/extensions.json vendored
@@ -3,7 +3,6 @@
// for the documentation about the extensions.json format
"recommendations": [
"dbaeumer.vscode-eslint",
"EditorConfig.EditorConfig",
"msjsdiag.debugger-for-chrome"
"EditorConfig.EditorConfig"
]
}
3 .vscode/launch.json vendored
@@ -105,8 +105,7 @@
"outFiles": [
"${workspaceFolder}/out/**/*.js"
],
"browserLaunchLocation": "workspace",
"preLaunchTask": "Ensure Prelaunch Dependencies",
"browserLaunchLocation": "workspace"
},
{
"type": "node",
2 .vscode/notebooks/api.github-issues vendored
@@ -8,7 +8,7 @@
{
"kind": 2,
"language": "github-issues",
"value": "$repo=repo:microsoft/vscode\n$milestone=milestone:\"November 2020\"",
"value": "$repo=repo:microsoft/vscode\n$milestone=milestone:\"February 2021\"",
"editable": true
},
{
28 .vscode/notebooks/endgame.github-issues vendored
@@ -8,8 +8,8 @@
{
"kind": 2,
"language": "github-issues",
"value": "$REPOS=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-js-debug repo:microsoft/vscode-remote-release repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-settings-sync-server\n\n$MILESTONE=milestone:\"November 2020\"",
"editable": false
"value": "$REPOS=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-js-debug repo:microsoft/vscode-remote-release repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-settings-sync-server repo:microsoft/vscode-emmet-helper\n\n$MILESTONE=milestone:\"January 2021\"",
"editable": true
},
{
"kind": 1,
@@ -80,7 +80,7 @@
{
"kind": 2,
"language": "github-issues",
"value": "$REPOS $MILESTONE is:issue is:closed label:feature-request label:verification-needed",
"value": "$REPOS $MILESTONE is:issue is:closed label:feature-request label:verification-needed -label:verified",
"editable": true
},
{
@@ -89,10 +89,28 @@
"value": "# Verification",
"editable": true
},
{
"kind": 1,
"language": "markdown",
"value": "## Verifiable Fixes",
"editable": true
},
{
"kind": 2,
"language": "github-issues",
"value": "$REPOS $MILESTONE is:issue is:closed sort:updated-asc label:bug -label:verified -label:on-testplan -label:*duplicate -label:invalid -label:*as-designed -label:error-telemetry -label:verification-steps-needed -label:verification-found",
"value": "$REPOS $MILESTONE is:issue is:closed sort:updated-asc label:bug -label:verified -label:on-testplan -label:*duplicate -label:duplicate -label:invalid -label:*as-designed -label:error-telemetry -label:verification-steps-needed -label:verification-found -label:z-author-verified -label:unreleased",
"editable": true
},
{
"kind": 1,
"language": "markdown",
"value": "## Unreleased Fixes",
"editable": true
},
{
"kind": 2,
"language": "github-issues",
"value": "$REPOS $MILESTONE is:issue is:closed sort:updated-asc label:bug -label:verified -label:on-testplan -label:*duplicate -label:duplicate -label:invalid -label:*as-designed -label:error-telemetry -label:verification-steps-needed -label:verification-found -label:z-author-verified label:unreleased",
"editable": true
},
{
@@ -104,7 +122,7 @@
{
"kind": 2,
"language": "github-issues",
"value": "$REPOS $MILESTONE label:candidate",
"value": "$REPOS $MILESTONE is:open label:candidate",
"editable": true
}
]
@@ -176,17 +176,20 @@
{
"kind": 1,
"language": "markdown",
"value": "# vscode-pull-request-github"
"value": "# vscode-pull-request-github",
"editable": true
},
{
"kind": 2,
"language": "github-issues",
"value": "repo:microsoft/vscode-pull-request-github is:issue closed:>$since"
"value": "repo:microsoft/vscode-pull-request-github is:issue closed:>$since",
"editable": true
},
{
"kind": 2,
"language": "github-issues",
"value": "repo:microsoft/vscode-test is:issue created:>$since"
"value": "repo:microsoft/vscode-test is:issue created:>$since",
"editable": true
},
{
"kind": 1,
26 .vscode/notebooks/my-endgame.github-issues vendored
@@ -8,8 +8,8 @@
{
"kind": 2,
"language": "github-issues",
"value": "$REPOS=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-js-debug repo:microsoft/vscode-remote-release repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-settings-sync-server\n\n$MILESTONE=milestone:\"November 2020\"\n\n$MINE=assignee:@me",
"editable": false
"value": "$REPOS=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-js-debug repo:microsoft/vscode-remote-release repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-settings-sync-server\n\n$MILESTONE=milestone:\"January 2021\"\n\n$MINE=assignee:@me",
"editable": true
},
{
"kind": 1,
@@ -122,7 +122,7 @@
{
"kind": 2,
"language": "github-issues",
"value": "$REPOS $MILESTONE $MINE is:issue is:open",
"value": "$REPOS $MILESTONE $MINE is:issue is:open -label:endgame-plan",
"editable": true
},
{
@@ -152,7 +152,7 @@
{
"kind": 2,
"language": "github-issues",
"value": "$REPOS $MILESTONE $MINE is:issue is:open label:bug label:verification-steps-needed",
"value": "$REPOS $MILESTONE $MINE is:issue label:bug label:verification-steps-needed",
"editable": true
},
{
@@ -164,7 +164,7 @@
{
"kind": 2,
"language": "github-issues",
"value": "$REPOS $MILESTONE $MINE is:issue is:open label:bug label:verification-found",
"value": "$REPOS $MILESTONE $MINE is:issue label:bug label:verification-found",
"editable": true
},
{
@@ -176,7 +176,19 @@
{
"kind": 2,
"language": "github-issues",
"value": "$REPOS $MILESTONE -$MINE is:issue is:closed author:@me sort:updated-asc label:bug -label:verified -label:on-testplan -label:*duplicate -label:invalid -label:*as-designed -label:error-telemetry -label:verification-steps-needed -label:verification-found",
"value": "$REPOS $MILESTONE -$MINE is:issue is:closed author:@me sort:updated-asc label:bug -label:verified -label:on-testplan -label:*duplicate -label:duplicate -label:invalid -label:*as-designed -label:error-telemetry -label:verification-steps-needed -label:verification-found",
"editable": true
},
{
"kind": 1,
"language": "markdown",
"value": "## Issues filed from outside team",
"editable": true
},
{
"kind": 2,
"language": "github-issues",
"value": "$REPOS $MILESTONE -$MINE is:issue is:closed sort:updated-asc label:bug -label:verified -label:on-testplan -label:*duplicate -label:duplicate -label:invalid -label:*as-designed -label:error-telemetry -label:verification-steps-needed -label:verification-found -author:aeschli -author:alexdima -author:alexr00 -author:AmandaSilver -author:bamurtaugh -author:bpasero -author:btholt -author:chrisdias -author:chrmarti -author:Chuxel -author:connor4312 -author:dbaeumer -author:deepak1556 -author:devinvalenciano -author:digitarald -author:eamodio -author:egamma -author:fiveisprime -author:gregvanl -author:isidorn -author:ItalyPaleAle -author:JacksonKearl -author:joaomoreno -author:jrieken -author:kieferrm -author:lszomoru -author:meganrogge -author:misolori -author:mjbvz -author:ornellaalt -author:orta -author:rebornix -author:RMacfarlane -author:roblourens -author:rzhao271 -author:sana-ajani -author:sandy081 -author:sbatten -author:stevencl -author:Tyriar -author:weinand -author:TylerLeonhardt -author:lramos15",
"editable": true
},
{
@@ -188,7 +200,7 @@
{
"kind": 2,
"language": "github-issues",
"value": "$REPOS $MILESTONE -$MINE is:issue is:closed -author:@me sort:updated-asc label:bug -label:verified -label:on-testplan -label:*duplicate -label:invalid -label:*as-designed -label:error-telemetry -label:verification-steps-needed -label:verification-found",
"value": "$REPOS $MILESTONE -$MINE is:issue is:closed -author:@me sort:updated-asc label:bug -label:verified -label:on-testplan -label:*duplicate -label:duplicate -label:invalid -label:*as-designed -label:error-telemetry -label:verification-steps-needed -label:verification-found",
"editable": true
},
{
20 .vscode/notebooks/my-work.github-issues vendored
@@ -8,7 +8,7 @@
{
"kind": 2,
"language": "github-issues",
"value": "// list of repos we work in\n$repos=repo:microsoft/vscode repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks repo:microsoft/vscode-internalbacklog\n\n// current milestone name\n$milestone=milestone:\"November 2020\"",
"value": "// list of repos we work in\n$repos=repo:microsoft/vscode repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks repo:microsoft/vscode-internalbacklog\n\n// current milestone name\n$milestone=milestone:\"February 2021\"",
"editable": true
},
{
@@ -83,6 +83,24 @@
"value": "$repos assignee:@me is:open milestone:\"Backlog Candidates\"",
"editable": false
},
{
"kind": 1,
"language": "markdown",
"value": "### Personal Inbox\n",
"editable": true
},
{
"kind": 1,
"language": "markdown",
"value": "\n#### Missing Type label",
"editable": true
},
{
"kind": 2,
"language": "github-issues",
"value": "$repos assignee:@me is:open type:issue -label:bug -label:\"needs more info\" -label:feature-request -label:under-discussion -label:debt -label:plan-item -label:upstream",
"editable": true
},
{
"kind": 1,
"language": "markdown",
44 .vscode/notebooks/papercuts.github-issues vendored Normal file
@@ -0,0 +1,44 @@
[
{
"kind": 1,
"language": "markdown",
"value": "## Papercuts\n\nThis notebook serves as an ongoing collection of papercut issues that we encounter while dogfooding. With that in mind only promote issues that really turn you off, e.g. issues that make you want to stop using VS Code or its extensions. To mark an issue (bug, feature-request, etc.) as papercut add the labels: `papercut :drop_of_blood:`",
"editable": true
},
{
"kind": 1,
"language": "markdown",
"value": "## All Papercuts\n\nThese are all papercut issues that we encounter while dogfooding vscode or extensions that we author.",
"editable": true
},
{
"kind": 2,
"language": "github-issues",
"value": "repo:microsoft/vscode is:open -label:notebook label:\"papercut :drop_of_blood:\"",
"editable": true
},
{
"kind": 1,
"language": "markdown",
"value": "## Native Notebook",
"editable": true
},
{
"kind": 2,
"language": "github-issues",
"value": "repo:microsoft/vscode is:open label:notebook label:\"papercut :drop_of_blood:\"",
"editable": true
},
{
"kind": 1,
"language": "markdown",
"value": "### My Papercuts",
"editable": true
},
{
"kind": 2,
"language": "github-issues",
"value": "repo:microsoft/vscode is:open assignee:@me label:\"papercut :drop_of_blood:\"",
"editable": true
}
]
2 .vscode/notebooks/verification.github-issues vendored
@@ -14,7 +14,7 @@
{
"kind": 2,
"language": "github-issues",
"value": "$repos=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks \n$milestone=milestone:\"October 2020\"",
"value": "$repos=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks \n$milestone=milestone:\"January 2021\"",
"editable": true
},
{
8 .vscode/tasks.json vendored
@@ -62,7 +62,8 @@
"group": {
"kind": "build",
"isDefault": true
}
},
"problemMatcher": []
},
{
"type": "npm",
@@ -90,7 +91,8 @@
"Kill Build VS Code Core",
"Kill Build VS Code Extensions"
],
"group": "build"
"group": "build",
"problemMatcher": []
},
{
"type": "npm",
@@ -217,7 +219,7 @@
"base": "$tsc",
"applyTo": "allDocuments",
"owner": "tsec"
},
}
],
"group": "build",
"label": "npm: tsec-compile-check",
2 .yarnrc
@@ -1,3 +1,3 @@
disturl "https://electronjs.org/headers"
target "9.4.3"
target "11.2.2"
runtime "electron"
@@ -20,3 +20,8 @@ jobs:
    vmImage: macOS-latest
  steps:
    - template: build/azure-pipelines/darwin/continuous-build-darwin.yml

trigger:
  branches:
    exclude:
      - electron-11.x.y
@@ -1,4 +1,6 @@
# cleanup rules for native node modules, .gitignore style
# cleanup rules for node modules, .gitignore style

# native node modules

nan/**
*/node_modules/nan/**
@@ -46,6 +48,7 @@ spdlog/build/**
spdlog/deps/**
spdlog/src/**
spdlog/test/**
spdlog/*.yml
!spdlog/build/Release/*.node

jschardet/dist/**
@@ -72,6 +75,7 @@ node-pty/build/**
node-pty/src/**
node-pty/tools/**
node-pty/deps/**
node-pty/scripts/**
!node-pty/build/Release/*.exe
!node-pty/build/Release/*.dll
!node-pty/build/Release/*.node
@@ -117,8 +121,55 @@ vsda/README.md
vsda/targets
!vsda/build/Release/vsda.node

vscode-encrypt/build/**
vscode-encrypt/src/**
vscode-encrypt/vendor/**
vscode-encrypt/.gitignore
vscode-encrypt/binding.gyp
vscode-encrypt/README.md
!vscode-encrypt/build/Release/vscode-encrypt-native.node

vscode-windows-ca-certs/**/*
!vscode-windows-ca-certs/package.json
!vscode-windows-ca-certs/**/*.node

node-addon-api/**/*

# other node modules

**/docs/**
**/example/**
**/examples/**
**/test/**
**/tests/**

**/History.md
**/CHANGELOG.md
**/README.md
**/readme.md
**/readme.markdown

**/*.ts
!typescript/**/*.d.ts

jschardet/dist/**

es6-promise/lib/**

vscode-textmate/webpack.config.js

# {{SQL CARBON EDIT }} We need more than just zone-node.js
# zone.js/dist/**
# !zone.js/dist/zone-node.js

# https://github.com/xtermjs/xterm.js/issues/3137
xterm/src/**
xterm/tsconfig.all.json

# https://github.com/xtermjs/xterm.js/issues/3138
xterm-addon-*/src/**
xterm-addon-*/fixtures/**
xterm-addon-*/out/**
xterm-addon-*/out-test/**
2 build/azure-pipelines/common/.gitignore vendored
@@ -1,2 +0,0 @@
node_modules/
*.js
25 build/azure-pipelines/common/computeNodeModulesCacheKey.js Normal file
@@ -0,0 +1,25 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const fs = require("fs");
const path = require("path");
const crypto = require("crypto");
const { dirs } = require('../../npm/dirs');
const ROOT = path.join(__dirname, '../../../');
const shasum = crypto.createHash('sha1');
shasum.update(fs.readFileSync(path.join(ROOT, 'build/.cachesalt')));
shasum.update(fs.readFileSync(path.join(ROOT, '.yarnrc')));
shasum.update(fs.readFileSync(path.join(ROOT, 'remote/.yarnrc')));
// Add `yarn.lock` files
for (let dir of dirs) {
	const yarnLockPath = path.join(ROOT, dir, 'yarn.lock');
	shasum.update(fs.readFileSync(yarnLockPath));
}
// Add any other command line arguments
for (let i = 2; i < process.argv.length; i++) {
	shasum.update(process.argv[i]);
}
process.stdout.write(shasum.digest('hex'));
32 build/azure-pipelines/common/computeNodeModulesCacheKey.ts Normal file
@@ -0,0 +1,32 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

'use strict';

import * as fs from 'fs';
import * as path from 'path';
import * as crypto from 'crypto';
const { dirs } = require('../../npm/dirs');

const ROOT = path.join(__dirname, '../../../');

const shasum = crypto.createHash('sha1');

shasum.update(fs.readFileSync(path.join(ROOT, 'build/.cachesalt')));
shasum.update(fs.readFileSync(path.join(ROOT, '.yarnrc')));
shasum.update(fs.readFileSync(path.join(ROOT, 'remote/.yarnrc')));

// Add `yarn.lock` files
for (let dir of dirs) {
	const yarnLockPath = path.join(ROOT, dir, 'yarn.lock');
	shasum.update(fs.readFileSync(yarnLockPath));
}

// Add any other command line arguments
for (let i = 2; i < process.argv.length; i++) {
	shasum.update(process.argv[i]);
}

process.stdout.write(shasum.digest('hex'));
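For orientation, here is a small hypothetical sketch (not part of this commit) of how a pipeline step could consume the cache-key script added above; the script path comes from the diff, while the extra platform argument and the logging are illustrative assumptions.

// Illustrative only: run computeNodeModulesCacheKey.js and use its stdout as a cache key.
const { execFileSync } = require('child_process');

const key = execFileSync(
	process.execPath,
	['build/azure-pipelines/common/computeNodeModulesCacheKey.js', process.platform], // extra args are folded into the hash
	{ encoding: 'utf8' }
);
console.log(`node_modules cache key: ${key}`);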
40 build/azure-pipelines/common/copyArtifacts.js Normal file
@@ -0,0 +1,40 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const vfs = require("vinyl-fs");
const path = require("path");
const es = require("event-stream");
const fs = require("fs");
const files = [
	'.build/extensions/**/*.vsix',
	'.build/win32-x64/**/*.{exe,zip}',
	'.build/linux/sha256hashes.txt',
	'.build/linux/deb/amd64/deb/*.deb',
	'.build/linux/rpm/x86_64/*.rpm',
	'.build/linux/server/*',
	'.build/linux/archive/*',
	'.build/docker/*',
	'.build/darwin/*',
	'.build/version.json' // version information
];
async function main() {
	return new Promise((resolve, reject) => {
		const stream = vfs.src(files, { base: '.build', allowEmpty: true })
			.pipe(es.through(file => {
				const filePath = path.join(process.env.BUILD_ARTIFACTSTAGINGDIRECTORY,
					//Preserve intermediate directories after .build folder
					file.path.substr(path.resolve('.build').length + 1));
				fs.mkdirSync(path.dirname(filePath), { recursive: true });
				fs.renameSync(file.path, filePath);
			}));
		stream.on('end', () => resolve());
		stream.on('error', e => reject(e));
	});
}
main().catch(err => {
	console.error(err);
	process.exit(1);
});
94 build/azure-pipelines/common/createAsset.js Normal file
@@ -0,0 +1,94 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const fs = require("fs");
const crypto = require("crypto");
const azure = require("azure-storage");
const mime = require("mime");
const cosmos_1 = require("@azure/cosmos");
const retry_1 = require("./retry");
if (process.argv.length !== 6) {
	console.error('Usage: node createAsset.js PLATFORM TYPE NAME FILE');
	process.exit(-1);
}
function hashStream(hashName, stream) {
	return new Promise((c, e) => {
		const shasum = crypto.createHash(hashName);
		stream
			.on('data', shasum.update.bind(shasum))
			.on('error', e)
			.on('close', () => c(shasum.digest('hex')));
	});
}
async function doesAssetExist(blobService, quality, blobName) {
	const existsResult = await new Promise((c, e) => blobService.doesBlobExist(quality, blobName, (err, r) => err ? e(err) : c(r)));
	return existsResult.exists;
}
async function uploadBlob(blobService, quality, blobName, filePath, fileName) {
	const blobOptions = {
		contentSettings: {
			contentType: mime.lookup(filePath),
			contentDisposition: `attachment; filename="${fileName}"`,
			cacheControl: 'max-age=31536000, public'
		}
	};
	await new Promise((c, e) => blobService.createBlockBlobFromLocalFile(quality, blobName, filePath, blobOptions, err => err ? e(err) : c()));
}
function getEnv(name) {
	const result = process.env[name];
	if (typeof result === 'undefined') {
		throw new Error('Missing env: ' + name);
	}
	return result;
}
async function main() {
	const [, , platform, type, fileName, filePath] = process.argv;
	const quality = getEnv('VSCODE_QUALITY');
	const commit = getEnv('BUILD_SOURCEVERSION');
	console.log('Creating asset...');
	const stat = await new Promise((c, e) => fs.stat(filePath, (err, stat) => err ? e(err) : c(stat)));
	const size = stat.size;
	console.log('Size:', size);
	const stream = fs.createReadStream(filePath);
	const [sha1hash, sha256hash] = await Promise.all([hashStream('sha1', stream), hashStream('sha256', stream)]);
	console.log('SHA1:', sha1hash);
	console.log('SHA256:', sha256hash);
	const blobName = commit + '/' + fileName;
	const storageAccount = process.env['AZURE_STORAGE_ACCOUNT_2'];
	const blobService = azure.createBlobService(storageAccount, process.env['AZURE_STORAGE_ACCESS_KEY_2'])
		.withFilter(new azure.ExponentialRetryPolicyFilter(20));
	const blobExists = await doesAssetExist(blobService, quality, blobName);
	if (blobExists) {
		console.log(`Blob ${quality}, ${blobName} already exists, not publishing again.`);
		return;
	}
	console.log('Uploading blobs to Azure storage...');
	await uploadBlob(blobService, quality, blobName, filePath, fileName);
	console.log('Blobs successfully uploaded.');
	const asset = {
		platform,
		type,
		url: `${process.env['AZURE_CDN_URL']}/${quality}/${blobName}`,
		hash: sha1hash,
		sha256hash,
		size
	};
	// Remove this if we ever need to rollback fast updates for windows
	if (/win32/.test(platform)) {
		asset.supportsFastUpdate = true;
	}
	console.log('Asset:', JSON.stringify(asset, null, ' '));
	const client = new cosmos_1.CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT'], key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
	const scripts = client.database('builds').container(quality).scripts;
	await retry_1.retry(() => scripts.storedProcedure('createAsset').execute('', [commit, asset, true]));
}
main().then(() => {
	console.log('Asset successfully created');
	process.exit(0);
}, err => {
	console.error(err);
	process.exit(1);
});
@@ -11,6 +11,7 @@ import * as crypto from 'crypto';
import * as azure from 'azure-storage';
import * as mime from 'mime';
import { CosmosClient } from '@azure/cosmos';
import { retry } from './retry';

interface Asset {
	platform: string;
@@ -121,7 +122,7 @@ async function main(): Promise<void> {

	const client = new CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT']!, key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
	const scripts = client.database('builds').container(quality).scripts;
	await scripts.storedProcedure('createAsset').execute('', [commit, asset, true]);
	await retry(() => scripts.storedProcedure('createAsset').execute('', [commit, asset, true]));
}

main().then(() => {
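To make the intent of the change above concrete, here is a minimal sketch (not the repository's code) of the before/after pattern: the bare Cosmos DB stored-procedure call is wrapped in the new retry helper so transient ECONNRESET failures are retried.

// Hypothetical illustration of the pattern introduced in this commit.
const { retry } = require('./retry');

async function executeCreateAsset(scripts, commit, asset) {
	// Before: a single attempt that fails the build on a transient network error.
	// await scripts.storedProcedure('createAsset').execute('', [commit, asset, true]);

	// After: the same call, retried by the shared helper (up to 10 attempts on ECONNRESET).
	return retry(() => scripts.storedProcedure('createAsset').execute('', [commit, asset, true]));
}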
51 build/azure-pipelines/common/createBuild.js Normal file
@@ -0,0 +1,51 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const cosmos_1 = require("@azure/cosmos");
const retry_1 = require("./retry");
if (process.argv.length !== 3) {
	console.error('Usage: node createBuild.js VERSION');
	process.exit(-1);
}
function getEnv(name) {
	const result = process.env[name];
	if (typeof result === 'undefined') {
		throw new Error('Missing env: ' + name);
	}
	return result;
}
async function main() {
	const [, , _version] = process.argv;
	const quality = getEnv('VSCODE_QUALITY');
	const commit = getEnv('BUILD_SOURCEVERSION');
	const queuedBy = getEnv('BUILD_QUEUEDBY');
	const sourceBranch = getEnv('BUILD_SOURCEBRANCH');
	const version = _version + (quality === 'stable' ? '' : `-${quality}`);
	console.log('Creating build...');
	console.log('Quality:', quality);
	console.log('Version:', version);
	console.log('Commit:', commit);
	const build = {
		id: commit,
		timestamp: (new Date()).getTime(),
		version,
		isReleased: false,
		sourceBranch,
		queuedBy,
		assets: [],
		updates: {}
	};
	const client = new cosmos_1.CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT'], key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
	const scripts = client.database('builds').container(quality).scripts;
	await retry_1.retry(() => scripts.storedProcedure('createBuild').execute('', [Object.assign(Object.assign({}, build), { _partitionKey: '' })]));
}
main().then(() => {
	console.log('Build successfully created');
	process.exit(0);
}, err => {
	console.error(err);
	process.exit(1);
});
@@ -6,6 +6,7 @@
'use strict';

import { CosmosClient } from '@azure/cosmos';
import { retry } from './retry';

if (process.argv.length !== 3) {
	console.error('Usage: node createBuild.js VERSION');
@@ -48,7 +49,7 @@ async function main(): Promise<void> {

	const client = new CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT']!, key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
	const scripts = client.database('builds').container(quality).scripts;
	await scripts.storedProcedure('createBuild').execute('', [{ ...build, _partitionKey: '' }]);
	await retry(() => scripts.storedProcedure('createBuild').execute('', [{ ...build, _partitionKey: '' }]));
}

main().then(() => {
@@ -10,8 +10,8 @@ git clone --depth 1 https://github.com/Microsoft/vscode-node-debug2.git
git clone --depth 1 https://github.com/Microsoft/vscode-node-debug.git
git clone --depth 1 https://github.com/Microsoft/vscode-html-languageservice.git
git clone --depth 1 https://github.com/Microsoft/vscode-json-languageservice.git
node $BUILD_SOURCESDIRECTORY/build/node_modules/.bin/vscode-telemetry-extractor --sourceDir $BUILD_SOURCESDIRECTORY --excludedDir $BUILD_SOURCESDIRECTORY/extensions --outputDir . --applyEndpoints
node $BUILD_SOURCESDIRECTORY/build/node_modules/.bin/vscode-telemetry-extractor --config $BUILD_SOURCESDIRECTORY/build/azure-pipelines/common/telemetry-config.json -o .
node $BUILD_SOURCESDIRECTORY/node_modules/.bin/vscode-telemetry-extractor --sourceDir $BUILD_SOURCESDIRECTORY --excludedDir $BUILD_SOURCESDIRECTORY/extensions --outputDir . --applyEndpoints
node $BUILD_SOURCESDIRECTORY/node_modules/.bin/vscode-telemetry-extractor --config $BUILD_SOURCESDIRECTORY/build/azure-pipelines/common/telemetry-config.json -o .
mkdir -p $BUILD_SOURCESDIRECTORY/.build/telemetry
mv declarations-resolved.json $BUILD_SOURCESDIRECTORY/.build/telemetry/telemetry-core.json
mv config-resolved.json $BUILD_SOURCESDIRECTORY/.build/telemetry/telemetry-extensions.json
14 build/azure-pipelines/common/installPlaywright.js Normal file
@@ -0,0 +1,14 @@
"use strict";
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const path = require("path");
const retry_1 = require("./retry");
const { installBrowsersWithProgressBar } = require('playwright/lib/install/installer');
const playwrightPath = path.dirname(require.resolve('playwright'));
async function install() {
	await retry_1.retry(() => installBrowsersWithProgressBar(playwrightPath));
}
install();
40 build/azure-pipelines/common/listNodeModules.js Normal file
@@ -0,0 +1,40 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const fs = require("fs");
const path = require("path");
if (process.argv.length !== 3) {
	console.error('Usage: node listNodeModules.js OUTPUT_FILE');
	process.exit(-1);
}
const ROOT = path.join(__dirname, '../../../');
function findNodeModulesFiles(location, inNodeModules, result) {
	const entries = fs.readdirSync(path.join(ROOT, location));
	for (const entry of entries) {
		const entryPath = `${location}/${entry}`;
		if (/(^\/out)|(^\/src$)|(^\/.git$)|(^\/.build$)/.test(entryPath)) {
			continue;
		}
		let stat;
		try {
			stat = fs.statSync(path.join(ROOT, entryPath));
		}
		catch (err) {
			continue;
		}
		if (stat.isDirectory()) {
			findNodeModulesFiles(entryPath, inNodeModules || (entry === 'node_modules'), result);
		}
		else {
			if (inNodeModules) {
				result.push(entryPath.substr(1));
			}
		}
	}
}
const result = [];
findNodeModulesFiles('', false, result);
fs.writeFileSync(process.argv[2], result.join('\n') + '\n');
71 build/azure-pipelines/common/publish-webview.js Normal file
@@ -0,0 +1,71 @@
"use strict";
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const azure = require("azure-storage");
const mime = require("mime");
const minimist = require("minimist");
const path_1 = require("path");
const fileNames = [
	'fake.html',
	'host.js',
	'index.html',
	'main.js',
	'service-worker.js'
];
async function assertContainer(blobService, container) {
	await new Promise((c, e) => blobService.createContainerIfNotExists(container, { publicAccessLevel: 'blob' }, err => err ? e(err) : c()));
}
async function doesBlobExist(blobService, container, blobName) {
	const existsResult = await new Promise((c, e) => blobService.doesBlobExist(container, blobName, (err, r) => err ? e(err) : c(r)));
	return existsResult.exists;
}
async function uploadBlob(blobService, container, blobName, file) {
	const blobOptions = {
		contentSettings: {
			contentType: mime.lookup(file),
			cacheControl: 'max-age=31536000, public'
		}
	};
	await new Promise((c, e) => blobService.createBlockBlobFromLocalFile(container, blobName, file, blobOptions, err => err ? e(err) : c()));
}
async function publish(commit, files) {
	console.log('Publishing...');
	console.log('Commit:', commit);
	const storageAccount = process.env['AZURE_WEBVIEW_STORAGE_ACCOUNT'];
	const blobService = azure.createBlobService(storageAccount, process.env['AZURE_WEBVIEW_STORAGE_ACCESS_KEY'])
		.withFilter(new azure.ExponentialRetryPolicyFilter(20));
	await assertContainer(blobService, commit);
	for (const file of files) {
		const blobName = path_1.basename(file);
		const blobExists = await doesBlobExist(blobService, commit, blobName);
		if (blobExists) {
			console.log(`Blob ${commit}, ${blobName} already exists, not publishing again.`);
			continue;
		}
		console.log('Uploading blob to Azure storage...');
		await uploadBlob(blobService, commit, blobName, file);
	}
	console.log('Blobs successfully uploaded.');
}
function main() {
	const commit = process.env['BUILD_SOURCEVERSION'];
	if (!commit) {
		console.warn('Skipping publish due to missing BUILD_SOURCEVERSION');
		return;
	}
	const opts = minimist(process.argv.slice(2));
	const [directory] = opts._;
	const files = fileNames.map(fileName => path_1.join(directory, fileName));
	publish(commit, files).catch(err => {
		console.error(err);
		process.exit(1);
	});
}
if (process.argv.length < 3) {
	console.error('Usage: node publish.js <directory>');
	process.exit(-1);
}
main();
224 build/azure-pipelines/common/publish.js Normal file
@@ -0,0 +1,224 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const fs = require("fs");
const crypto = require("crypto");
const azure = require("azure-storage");
const mime = require("mime");
const minimist = require("minimist");
const documentdb_1 = require("documentdb");
// {{SQL CARBON EDIT}}
if (process.argv.length < 9) {
	console.error('Usage: node publish.js <product_quality> <platform> <file_type> <file_name> <version> <is_update> <file> [commit_id]');
	process.exit(-1);
}
function hashStream(hashName, stream) {
	return new Promise((c, e) => {
		const shasum = crypto.createHash(hashName);
		stream
			.on('data', shasum.update.bind(shasum))
			.on('error', e)
			.on('close', () => c(shasum.digest('hex')));
	});
}
function createDefaultConfig(quality) {
	return {
		id: quality,
		frozen: false
	};
}
function getConfig(quality) {
	console.log(`Getting config for quality ${quality}`);
	const client = new documentdb_1.DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT'], { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
	const collection = 'dbs/builds/colls/config';
	const query = {
		query: `SELECT TOP 1 * FROM c WHERE c.id = @quality`,
		parameters: [
			{ name: '@quality', value: quality }
		]
	};
	return retry(() => new Promise((c, e) => {
		client.queryDocuments(collection, query, { enableCrossPartitionQuery: true }).toArray((err, results) => {
			if (err && err.code !== 409) {
				return e(err);
			}
			c(!results || results.length === 0 ? createDefaultConfig(quality) : results[0]);
		});
	}));
}
function createOrUpdate(commit, quality, platform, type, release, asset, isUpdate) {
	const client = new documentdb_1.DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT'], { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
	const collection = 'dbs/builds/colls/' + quality;
	const updateQuery = {
		query: 'SELECT TOP 1 * FROM c WHERE c.id = @id',
		parameters: [{ name: '@id', value: commit }]
	};
	let updateTries = 0;
	function update() {
		updateTries++;
		return new Promise((c, e) => {
			console.log(`Querying existing documents to update...`);
			client.queryDocuments(collection, updateQuery, { enableCrossPartitionQuery: true }).toArray((err, results) => {
				if (err) {
					return e(err);
				}
				if (results.length !== 1) {
					return e(new Error('No documents'));
				}
				const release = results[0];
				release.assets = [
					...release.assets.filter((a) => !(a.platform === platform && a.type === type)),
					asset
				];
				if (isUpdate) {
					release.updates[platform] = type;
				}
				console.log(`Replacing existing document with updated version`);
				client.replaceDocument(release._self, release, err => {
					if (err && err.code === 409 && updateTries < 5) {
						return c(update());
					}
					if (err) {
						return e(err);
					}
					console.log('Build successfully updated.');
					c();
				});
			});
		});
	}
	return retry(() => new Promise((c, e) => {
		console.log(`Attempting to create document`);
		client.createDocument(collection, release, err => {
			if (err && err.code === 409) {
				return c(update());
			}
			if (err) {
				return e(err);
			}
			console.log('Build successfully published.');
			c();
		});
	}));
}
async function assertContainer(blobService, quality) {
	await new Promise((c, e) => blobService.createContainerIfNotExists(quality, { publicAccessLevel: 'blob' }, err => err ? e(err) : c()));
}
async function doesAssetExist(blobService, quality, blobName) {
	const existsResult = await new Promise((c, e) => blobService.doesBlobExist(quality, blobName, (err, r) => err ? e(err) : c(r)));
	return existsResult.exists;
}
async function uploadBlob(blobService, quality, blobName, file) {
	const blobOptions = {
		contentSettings: {
			contentType: mime.lookup(file),
			cacheControl: 'max-age=31536000, public'
		}
	};
	await new Promise((c, e) => blobService.createBlockBlobFromLocalFile(quality, blobName, file, blobOptions, err => err ? e(err) : c()));
}
async function publish(commit, quality, platform, type, name, version, _isUpdate, file, opts) {
	const isUpdate = _isUpdate === 'true';
	const queuedBy = process.env['BUILD_QUEUEDBY'];
	const sourceBranch = process.env['BUILD_SOURCEBRANCH'];
	console.log('Publishing...');
	console.log('Quality:', quality);
	console.log('Platform:', platform);
	console.log('Type:', type);
	console.log('Name:', name);
	console.log('Version:', version);
	console.log('Commit:', commit);
	console.log('Is Update:', isUpdate);
	console.log('File:', file);
	const stat = await new Promise((c, e) => fs.stat(file, (err, stat) => err ? e(err) : c(stat)));
	const size = stat.size;
	console.log('Size:', size);
	const stream = fs.createReadStream(file);
	const [sha1hash, sha256hash] = await Promise.all([hashStream('sha1', stream), hashStream('sha256', stream)]);
	console.log('SHA1:', sha1hash);
	console.log('SHA256:', sha256hash);
	const blobName = commit + '/' + name;
	const storageAccount = process.env['AZURE_STORAGE_ACCOUNT_2'];
	const blobService = azure.createBlobService(storageAccount, process.env['AZURE_STORAGE_ACCESS_KEY_2'])
		.withFilter(new azure.ExponentialRetryPolicyFilter(20));
	await assertContainer(blobService, quality);
	const blobExists = await doesAssetExist(blobService, quality, blobName);
	if (blobExists) {
		console.log(`Blob ${quality}, ${blobName} already exists, not publishing again.`);
		return;
	}
	console.log('Uploading blobs to Azure storage...');
	await uploadBlob(blobService, quality, blobName, file);
	console.log('Blobs successfully uploaded.');
	const config = await getConfig(quality);
	console.log('Quality config:', config);
	const asset = {
		platform: platform,
		type: type,
		url: `${process.env['AZURE_CDN_URL']}/${quality}/${blobName}`,
		hash: sha1hash,
		sha256hash,
		size
	};
	// Remove this if we ever need to rollback fast updates for windows
	if (/win32/.test(platform)) {
		asset.supportsFastUpdate = true;
	}
	console.log('Asset:', JSON.stringify(asset, null, ' '));
	// {{SQL CARBON EDIT}}
	// Insiders: nightly build from main
	const isReleased = (((quality === 'insider' && /^main$|^refs\/heads\/main$/.test(sourceBranch)) ||
		(quality === 'rc1' && /^release\/|^refs\/heads\/release\//.test(sourceBranch))) &&
		/Project Collection Service Accounts|Microsoft.VisualStudio.Services.TFS/.test(queuedBy));
	const release = {
		id: commit,
		timestamp: (new Date()).getTime(),
		version,
		isReleased: isReleased,
		sourceBranch,
		queuedBy,
		assets: [],
		updates: {}
	};
	if (!opts['upload-only']) {
		release.assets.push(asset);
		if (isUpdate) {
			release.updates[platform] = type;
		}
	}
	await createOrUpdate(commit, quality, platform, type, release, asset, isUpdate);
}
const RETRY_TIMES = 10;
async function retry(fn) {
	for (let run = 1; run <= RETRY_TIMES; run++) {
		try {
			return await fn();
		}
		catch (err) {
			if (!/ECONNRESET/.test(err.message)) {
				throw err;
			}
			console.log(`Caught error ${err} - ${run}/${RETRY_TIMES}`);
		}
	}
	throw new Error('Retried too many times');
}
function main() {
	const commit = process.env['BUILD_SOURCEVERSION'];
	if (!commit) {
		console.warn('Skipping publish due to missing BUILD_SOURCEVERSION');
		return;
	}
	const opts = minimist(process.argv.slice(2), {
		boolean: ['upload-only']
	});
	const [quality, platform, type, name, version, _isUpdate, file] = opts._;
	publish(commit, quality, platform, type, name, version, _isUpdate, file, opts).catch(err => {
		console.error(err);
		process.exit(1);
	});
}
main();
91 build/azure-pipelines/common/release.js Normal file
@@ -0,0 +1,91 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const documentdb_1 = require("documentdb");
function createDefaultConfig(quality) {
	return {
		id: quality,
		frozen: false
	};
}
function getConfig(quality) {
	const client = new documentdb_1.DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT'], { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
	const collection = 'dbs/builds/colls/config';
	const query = {
		query: `SELECT TOP 1 * FROM c WHERE c.id = @quality`,
		parameters: [
			{ name: '@quality', value: quality }
		]
	};
	return new Promise((c, e) => {
		client.queryDocuments(collection, query).toArray((err, results) => {
			if (err && err.code !== 409) {
				return e(err);
			}
			c(!results || results.length === 0 ? createDefaultConfig(quality) : results[0]);
		});
	});
}
function doRelease(commit, quality) {
	const client = new documentdb_1.DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT'], { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
	const collection = 'dbs/builds/colls/' + quality;
	const query = {
		query: 'SELECT TOP 1 * FROM c WHERE c.id = @id',
		parameters: [{ name: '@id', value: commit }]
	};
	let updateTries = 0;
	function update() {
		updateTries++;
		return new Promise((c, e) => {
			client.queryDocuments(collection, query).toArray((err, results) => {
				if (err) {
					return e(err);
				}
				if (results.length !== 1) {
					return e(new Error('No documents'));
				}
				const release = results[0];
				release.isReleased = true;
				client.replaceDocument(release._self, release, err => {
					if (err && err.code === 409 && updateTries < 5) {
						return c(update());
					}
					if (err) {
						return e(err);
					}
					console.log('Build successfully updated.');
					c();
				});
			});
		});
	}
	return update();
}
async function release(commit, quality) {
	const config = await getConfig(quality);
	console.log('Quality config:', config);
	if (config.frozen) {
		console.log(`Skipping release because quality ${quality} is frozen.`);
		return;
	}
	await doRelease(commit, quality);
}
function env(name) {
	const result = process.env[name];
	if (!result) {
		throw new Error(`Skipping release due to missing env: ${name}`);
	}
	return result;
}
async function main() {
	const commit = env('BUILD_SOURCEVERSION');
	const quality = env('VSCODE_QUALITY');
	await release(commit, quality);
}
main().catch(err => {
	console.error(err);
	process.exit(1);
});
50 build/azure-pipelines/common/releaseBuild.js Normal file
@@ -0,0 +1,50 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const cosmos_1 = require("@azure/cosmos");
const retry_1 = require("./retry");
function getEnv(name) {
	const result = process.env[name];
	if (typeof result === 'undefined') {
		throw new Error('Missing env: ' + name);
	}
	return result;
}
function createDefaultConfig(quality) {
	return {
		id: quality,
		frozen: false
	};
}
async function getConfig(client, quality) {
	const query = `SELECT TOP 1 * FROM c WHERE c.id = "${quality}"`;
	const res = await client.database('builds').container('config').items.query(query).fetchAll();
	if (res.resources.length === 0) {
		return createDefaultConfig(quality);
	}
	return res.resources[0];
}
async function main() {
	const commit = getEnv('BUILD_SOURCEVERSION');
	const quality = getEnv('VSCODE_QUALITY');
	const client = new cosmos_1.CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT'], key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
	const config = await getConfig(client, quality);
	console.log('Quality config:', config);
	if (config.frozen) {
		console.log(`Skipping release because quality ${quality} is frozen.`);
		return;
	}
	console.log(`Releasing build ${commit}...`);
	const scripts = client.database('builds').container(quality).scripts;
	await retry_1.retry(() => scripts.storedProcedure('releaseBuild').execute('', [commit]));
}
main().then(() => {
	console.log('Build successfully released');
	process.exit(0);
}, err => {
	console.error(err);
	process.exit(1);
});
@@ -6,6 +6,7 @@
 'use strict';

 import { CosmosClient } from '@azure/cosmos';
+import { retry } from './retry';

 function getEnv(name: string): string {
 	const result = process.env[name];
@@ -58,7 +59,7 @@ async function main(): Promise<void> {
 	console.log(`Releasing build ${commit}...`);

 	const scripts = client.database('builds').container(quality).scripts;
-	await scripts.storedProcedure('releaseBuild').execute('', [commit]);
+	await retry(() => scripts.storedProcedure('releaseBuild').execute('', [commit]));
 }

 main().then(() => {
build/azure-pipelines/common/retry.js (Normal file, 25 lines)
@@ -0,0 +1,25 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
exports.retry = void 0;
async function retry(fn) {
    for (let run = 1; run <= 10; run++) {
        try {
            return await fn();
        }
        catch (err) {
            if (!/ECONNRESET/.test(err.message)) {
                throw err;
            }
            const millis = (Math.random() * 200) + (50 * Math.pow(1.5, run));
            console.log(`Failed with ECONNRESET, retrying in ${millis}ms...`);
            // maximum delay is 10th retry: ~3 seconds
            await new Promise(c => setTimeout(c, millis));
        }
    }
    throw new Error('Retried too many times');
}
exports.retry = retry;
build/azure-pipelines/common/retry.ts (Normal file, 26 lines)
@@ -0,0 +1,26 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

'use strict';

export async function retry<T>(fn: () => Promise<T>): Promise<T> {
	for (let run = 1; run <= 10; run++) {
		try {
			return await fn();
		} catch (err) {
			if (!/ECONNRESET/.test(err.message)) {
				throw err;
			}

			const millis = (Math.random() * 200) + (50 * Math.pow(1.5, run));
			console.log(`Failed with ECONNRESET, retrying in ${millis}ms...`);

			// maximum delay is 10th retry: ~3 seconds
			await new Promise(c => setTimeout(c, millis));
		}
	}

	throw new Error('Retried too many times');
}
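A quick usage sketch, assuming the same @azure/cosmos client the release script above already builds (nothing here goes beyond what the scripts in this commit do): the helper simply wraps any promise-returning call and retries it when the failure message contains ECONNRESET. On the arithmetic behind the "~3 seconds" comment, the 10th attempt waits 50 * 1.5^10 ≈ 2883 ms plus up to 200 ms of jitter, so a little over 3 seconds at worst.

import { CosmosClient } from '@azure/cosmos';
import { retry } from './retry';

// Illustrative only: retries a flaky Cosmos DB stored-procedure call on transient ECONNRESET failures.
async function releaseWithRetry(client: CosmosClient, quality: string, commit: string): Promise<void> {
	const scripts = client.database('builds').container(quality).scripts;
	await retry(() => scripts.storedProcedure('releaseBuild').execute('', [commit]));
}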
build/azure-pipelines/common/sync-mooncake.js (Normal file, 87 lines)
@@ -0,0 +1,87 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const url = require("url");
const azure = require("azure-storage");
const mime = require("mime");
const cosmos_1 = require("@azure/cosmos");
const retry_1 = require("./retry");
function log(...args) {
    console.log(...[`[${new Date().toISOString()}]`, ...args]);
}
function error(...args) {
    console.error(...[`[${new Date().toISOString()}]`, ...args]);
}
if (process.argv.length < 3) {
    error('Usage: node sync-mooncake.js <quality>');
    process.exit(-1);
}
async function sync(commit, quality) {
    log(`Synchronizing Mooncake assets for ${quality}, ${commit}...`);
    const client = new cosmos_1.CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT'], key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
    const container = client.database('builds').container(quality);
    const query = `SELECT TOP 1 * FROM c WHERE c.id = "${commit}"`;
    const res = await container.items.query(query, {}).fetchAll();
    if (res.resources.length !== 1) {
        throw new Error(`No builds found for ${commit}`);
    }
    const build = res.resources[0];
    log(`Found build for ${commit}, with ${build.assets.length} assets`);
    const storageAccount = process.env['AZURE_STORAGE_ACCOUNT_2'];
    const blobService = azure.createBlobService(storageAccount, process.env['AZURE_STORAGE_ACCESS_KEY_2'])
        .withFilter(new azure.ExponentialRetryPolicyFilter(20));
    const mooncakeBlobService = azure.createBlobService(storageAccount, process.env['MOONCAKE_STORAGE_ACCESS_KEY'], `${storageAccount}.blob.core.chinacloudapi.cn`)
        .withFilter(new azure.ExponentialRetryPolicyFilter(20));
    // mooncake is fussy and far away, this is needed!
    blobService.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;
    mooncakeBlobService.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;
    for (const asset of build.assets) {
        try {
            const blobPath = url.parse(asset.url).path;
            if (!blobPath) {
                throw new Error(`Failed to parse URL: ${asset.url}`);
            }
            const blobName = blobPath.replace(/^\/\w+\//, '');
            log(`Found ${blobName}`);
            if (asset.mooncakeUrl) {
                log(`  Already in Mooncake ✔️`);
                continue;
            }
            const readStream = blobService.createReadStream(quality, blobName, undefined);
            const blobOptions = {
                contentSettings: {
                    contentType: mime.lookup(blobPath),
                    cacheControl: 'max-age=31536000, public'
                }
            };
            const writeStream = mooncakeBlobService.createWriteStreamToBlockBlob(quality, blobName, blobOptions, undefined);
            log(`  Uploading to Mooncake...`);
            await new Promise((c, e) => readStream.pipe(writeStream).on('finish', c).on('error', e));
            log(`  Updating build in DB...`);
            const mooncakeUrl = `${process.env['MOONCAKE_CDN_URL']}${blobPath}`;
            await retry_1.retry(() => container.scripts.storedProcedure('setAssetMooncakeUrl')
                .execute('', [commit, asset.platform, asset.type, mooncakeUrl]));
            log(`  Done ✔️`);
        }
        catch (err) {
            error(err);
        }
    }
    log(`All done ✔️`);
}
function main() {
    const commit = process.env['BUILD_SOURCEVERSION'];
    if (!commit) {
        error('Skipping publish due to missing BUILD_SOURCEVERSION');
        return;
    }
    const quality = process.argv[2];
    sync(commit, quality).catch(err => {
        error(err);
        process.exit(1);
    });
}
main();
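The script above reads several fields off the build document it loads from Cosmos DB. Here is a minimal TypeScript sketch of the shape it appears to assume, inferred only from the property accesses in this file; the interface names are illustrative and the field list is almost certainly incomplete:

// Inferred from usage in sync-mooncake.js; not an authoritative schema.
interface Asset {
	platform: string;       // passed to the setAssetMooncakeUrl stored procedure
	type: string;           // passed to the setAssetMooncakeUrl stored procedure
	url: string;            // primary storage URL; its path component yields the blob name
	mooncakeUrl?: string;   // set once the asset has already been mirrored to Mooncake
}

interface BuildDocument {
	id: string;             // the commit hash the script queries by
	assets: Asset[];
}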
@@ -9,6 +9,7 @@ import * as url from 'url';
 import * as azure from 'azure-storage';
 import * as mime from 'mime';
 import { CosmosClient } from '@azure/cosmos';
+import { retry } from './retry';

 function log(...args: any[]) {
 	console.log(...[`[${new Date().toISOString()}]`, ...args]);
@@ -99,8 +100,8 @@ async function sync(commit: string, quality: string): Promise<void> {

 	log(`  Updating build in DB...`);
 	const mooncakeUrl = `${process.env['MOONCAKE_CDN_URL']}${blobPath}`;
-	await container.scripts.storedProcedure('setAssetMooncakeUrl')
-		.execute('', [commit, asset.platform, asset.type, mooncakeUrl]);
+	await retry(() => container.scripts.storedProcedure('setAssetMooncakeUrl')
+		.execute('', [commit, asset.platform, asset.type, mooncakeUrl]));

 	log(`  Done ✔️`);
 	} catch (err) {
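One small observation on the copy loop in sync-mooncake: the upload is awaited via readStream.pipe(writeStream).on('finish', c).on('error', e), and since pipe returns the destination, only errors emitted by the write stream are surfaced. Below is a hedged sketch of a variant that also listens on the source stream; whether azure-storage already propagates read-side errors to the destination is not something this diff shows, so treat this purely as an assumption, not a required fix.

// Sketch only: generic Node streams, no azure-storage specifics assumed.
function pipeToCompletion(readStream: NodeJS.ReadableStream, writeStream: NodeJS.WritableStream): Promise<void> {
	return new Promise<void>((resolve, reject) => {
		readStream.on('error', reject);            // source-side failures (e.g. dropped download)
		writeStream.on('error', reject);           // destination-side failures (e.g. rejected upload)
		writeStream.on('finish', () => resolve());
		readStream.pipe(writeStream);
	});
}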
@@ -1,7 +1,7 @@
|
||||
steps:
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: "12.14.1"
|
||||
versionSpec: "12.18.3"
|
||||
|
||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 # {{SQL CARBON EDIT}} update version
|
||||
inputs:
|
||||
|
||||
@@ -1,10 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>com.apple.security.cs.allow-unsigned-executable-memory</key>
|
||||
<true/>
|
||||
<key>com.apple.security.cs.disable-library-validation</key>
|
||||
<true/>
|
||||
</dict>
|
||||
</plist>
|
||||
@@ -1,32 +1,7 @@
|
||||
steps:
|
||||
- script: |
|
||||
mkdir -p .build
|
||||
echo -n $BUILD_SOURCEVERSION > .build/commit
|
||||
echo -n $VSCODE_QUALITY > .build/quality
|
||||
echo -n $ENABLE_TERRAPIN > .build/terrapin
|
||||
displayName: Prepare compilation cache flags
|
||||
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||
inputs:
|
||||
keyfile: "build/.cachesalt, .build/commit, .build/quality, .build/terrapin"
|
||||
targetfolder: ".build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min"
|
||||
vstsFeed: "npm-vscode"
|
||||
platformIndependent: true
|
||||
alias: "Compilation"
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
exit 1
|
||||
displayName: Check RestoreCache
|
||||
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
|
||||
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: "12.14.1"
|
||||
|
||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||
inputs:
|
||||
versionSpec: "1.x"
|
||||
versionSpec: "12.18.3"
|
||||
|
||||
- task: AzureKeyVault@1
|
||||
displayName: "Azure Key Vault: Get Secrets"
|
||||
@@ -34,9 +9,21 @@ steps:
|
||||
azureSubscription: "vscode-builds-subscription"
|
||||
KeyVaultName: vscode
|
||||
|
||||
- task: DownloadPipelineArtifact@2
|
||||
inputs:
|
||||
artifact: Compilation
|
||||
path: $(Build.ArtifactStagingDirectory)
|
||||
displayName: Download compilation output
|
||||
condition: and(succeeded(), ne(variables['VSCODE_ARCH'], 'universal'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
tar -xzf $(Build.ArtifactStagingDirectory)/compilation.tar.gz
|
||||
displayName: Extract compilation output
|
||||
condition: and(succeeded(), ne(variables['VSCODE_ARCH'], 'universal'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
cat << EOF > ~/.netrc
|
||||
machine github.com
|
||||
login vscode
|
||||
@@ -55,40 +42,45 @@ steps:
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
|
||||
git fetch distro
|
||||
git merge $(node -p "require('./package.json').distro")
|
||||
git pull --no-rebase https://github.com/$(VSCODE_MIXIN_REPO).git $(node -p "require('./package.json').distro")
|
||||
displayName: Merge distro
|
||||
|
||||
- script: |
|
||||
npx https://aka.ms/enablesecurefeed standAlone
|
||||
displayName: Switch to Terrapin packages
|
||||
timeoutInMinutes: 5
|
||||
condition: and(succeeded(), eq(variables['ENABLE_TERRAPIN'], 'true'))
|
||||
|
||||
- script: |
|
||||
echo -n $(VSCODE_ARCH) > .build/arch
|
||||
mkdir -p .build
|
||||
node build/azure-pipelines/common/computeNodeModulesCacheKey.js $VSCODE_ARCH $ENABLE_TERRAPIN > .build/yarnlockhash
|
||||
displayName: Prepare yarn cache flags
|
||||
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||
- task: Cache@2
|
||||
inputs:
|
||||
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
|
||||
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
|
||||
vstsFeed: "npm-vscode"
|
||||
key: 'nodeModules | $(Agent.OS) | .build/yarnlockhash'
|
||||
path: .build/node_modules_cache
|
||||
cacheHitVar: NODE_MODULES_RESTORED
|
||||
displayName: Restore node_modules cache
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
npm install -g node-gyp@7.1.0
|
||||
tar -xzf .build/node_modules_cache/cache.tgz
|
||||
condition: and(succeeded(), eq(variables.NODE_MODULES_RESTORED, 'true'))
|
||||
displayName: Extract node_modules cache
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
npm install -g node-gyp@latest
|
||||
node-gyp --version
|
||||
displayName: Update node-gyp
|
||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
|
||||
|
||||
- script: |
|
||||
npx https://aka.ms/enablesecurefeed standAlone
|
||||
timeoutInMinutes: 5
|
||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
|
||||
displayName: Switch to Terrapin packages
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
export npm_config_arch=$(VSCODE_ARCH)
|
||||
export npm_config_node_gyp=$(which node-gyp)
|
||||
export SDKROOT=/Applications/Xcode_12.2.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX11.0.sdk
|
||||
export CHILD_CONCURRENCY="1"
|
||||
|
||||
for i in {1..3}; do # try 3 times, for Terrapin
|
||||
yarn --frozen-lockfile && break
|
||||
@@ -98,25 +90,19 @@ steps:
|
||||
fi
|
||||
echo "Yarn failed $i, trying again..."
|
||||
done
|
||||
env:
|
||||
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
|
||||
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
|
||||
displayName: Install dependencies
|
||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
||||
inputs:
|
||||
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
|
||||
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
|
||||
vstsFeed: "npm-vscode"
|
||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
export npm_config_arch=$(VSCODE_ARCH)
|
||||
export npm_config_node_gyp=$(which node-gyp)
|
||||
export SDKROOT=/Applications/Xcode_12.2.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX11.0.sdk
|
||||
ls /Applications/Xcode_12.2.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/
|
||||
yarn postinstall
|
||||
displayName: Run postinstall scripts
|
||||
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
|
||||
node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt
|
||||
mkdir -p .build/node_modules_cache
|
||||
tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt
|
||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
|
||||
displayName: Create node_modules archive
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
@@ -126,10 +112,21 @@ steps:
|
||||
export SDKROOT=/Applications/Xcode_12.2.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX11.0.sdk
|
||||
ls /Applications/Xcode_12.2.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/
|
||||
yarn electron-rebuild
|
||||
# remove once https://github.com/prebuild/prebuild-install/pull/140 is merged and found in keytar
|
||||
cd ./node_modules/keytar
|
||||
node-gyp rebuild
|
||||
displayName: Rebuild native modules for ARM64
|
||||
condition: eq(variables['VSCODE_ARCH'], 'arm64')
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'arm64'))
|
||||
|
||||
- download: current
|
||||
artifact: vscode-darwin-x64
|
||||
displayName: Download x64 artifact
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'universal'))
|
||||
|
||||
- download: current
|
||||
artifact: vscode-darwin-arm64
|
||||
displayName: Download arm64 artifact
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'universal'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
@@ -141,6 +138,7 @@ steps:
|
||||
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
||||
yarn gulp vscode-darwin-$(VSCODE_ARCH)-min-ci
|
||||
displayName: Build
|
||||
condition: and(succeeded(), ne(variables['VSCODE_ARCH'], 'universal'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
@@ -148,14 +146,23 @@ steps:
|
||||
yarn gulp vscode-reh-darwin-min-ci
|
||||
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
||||
yarn gulp vscode-reh-web-darwin-min-ci
|
||||
displayName: Build reh
|
||||
displayName: Build Server
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
yarn electron $(VSCODE_ARCH)
|
||||
displayName: Download Electron
|
||||
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||
unzip $(Pipeline.Workspace)/vscode-darwin-x64/VSCode-darwin-x64.zip -d $(agent.builddirectory)/vscode-x64
|
||||
unzip $(Pipeline.Workspace)/vscode-darwin-arm64/VSCode-darwin-arm64.zip -d $(agent.builddirectory)/vscode-arm64
|
||||
DEBUG=* node build/darwin/create-universal-app.js
|
||||
displayName: Create Universal App
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'universal'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
||||
yarn npm-run-all -lp "electron $(VSCODE_ARCH)" "playwright-install"
|
||||
displayName: Download Electron and Playwright
|
||||
condition: and(succeeded(), ne(variables['VSCODE_ARCH'], 'universal'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
@@ -167,17 +174,26 @@ steps:
|
||||
security set-key-partition-list -S apple-tool:,apple:,codesign: -s -k pwd $(agent.tempdirectory)/buildagent.keychain
|
||||
VSCODE_ARCH="$(VSCODE_ARCH)" DEBUG=electron-osx-sign* node build/darwin/sign.js
|
||||
displayName: Set Hardened Entitlements
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
./scripts/test.sh --build --tfs "Unit Tests"
|
||||
displayName: Run unit tests (Electron)
|
||||
timeoutInMinutes: 5
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
yarn test-browser --build --browser chromium --browser webkit --browser firefox --tfs "Browser Unit Tests"
|
||||
displayName: Run unit tests (Browser)
|
||||
timeoutInMinutes: 5
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
yarn --cwd test/integration/browser compile
|
||||
displayName: Compile integration tests
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||
|
||||
- script: |
|
||||
@@ -191,13 +207,15 @@ steps:
|
||||
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-darwin" \
|
||||
./scripts/test-integration.sh --build --tfs "Integration Tests"
|
||||
displayName: Run integration tests (Electron)
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||
timeoutInMinutes: 10
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-darwin" \
|
||||
./resources/server/test/test-web-integration.sh --browser webkit
|
||||
displayName: Run integration tests (Browser)
|
||||
timeoutInMinutes: 5
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||
|
||||
- script: |
|
||||
@@ -208,22 +226,39 @@ steps:
|
||||
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-darwin" \
|
||||
./resources/server/test/test-remote-integration.sh
|
||||
displayName: Run remote integration tests (Electron)
|
||||
timeoutInMinutes: 5
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
yarn --cwd test/smoke compile
|
||||
displayName: Compile smoke tests
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
APP_ROOT=$(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH)
|
||||
APP_NAME="`ls $APP_ROOT | head -n 1`"
|
||||
yarn smoketest --build "$APP_ROOT/$APP_NAME"
|
||||
continueOnError: true
|
||||
yarn smoketest-no-compile --build "$APP_ROOT/$APP_NAME"
|
||||
timeoutInMinutes: 5
|
||||
displayName: Run smoke tests (Electron)
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
APP_ROOT=$(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH)
|
||||
APP_NAME="`ls $APP_ROOT | head -n 1`"
|
||||
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-darwin" \
|
||||
yarn smoketest-no-compile --build "$APP_ROOT/$APP_NAME" --remote
|
||||
timeoutInMinutes: 5
|
||||
displayName: Run smoke tests (Remote)
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-darwin" \
|
||||
yarn smoketest --web --headless
|
||||
continueOnError: true
|
||||
yarn smoketest-no-compile --web --headless
|
||||
timeoutInMinutes: 5
|
||||
displayName: Run smoke tests (Browser)
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||
|
||||
@@ -240,12 +275,13 @@ steps:
|
||||
inputs:
|
||||
testResultsFiles: "*-results.xml"
|
||||
searchFolder: "$(Build.ArtifactStagingDirectory)/test-results"
|
||||
condition: succeededOrFailed()
|
||||
condition: and(succeededOrFailed(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
pushd $(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH) && zip -r -X -y $(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH).zip * && popd
|
||||
displayName: Archive build
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
|
||||
inputs:
|
||||
@@ -270,10 +306,12 @@ steps:
|
||||
]
|
||||
SessionTimeout: 60
|
||||
displayName: Codesign
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- script: |
|
||||
zip -d $(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH).zip "*.pkg"
|
||||
displayName: Clean Archive
|
||||
displayName: Clean
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- script: |
|
||||
APP_ROOT=$(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH)
|
||||
@@ -281,6 +319,7 @@ steps:
|
||||
BUNDLE_IDENTIFIER=$(node -p "require(\"$APP_ROOT/$APP_NAME/Contents/Resources/app/product.json\").darwinBundleIdentifier")
|
||||
echo "##vso[task.setvariable variable=BundleIdentifier]$BUNDLE_IDENTIFIER"
|
||||
displayName: Export bundle identifier
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
|
||||
inputs:
|
||||
@@ -305,6 +344,7 @@ steps:
|
||||
]
|
||||
SessionTimeout: 60
|
||||
displayName: Notarization
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
@@ -312,7 +352,7 @@ steps:
|
||||
APP_NAME="`ls $APP_ROOT | head -n 1`"
|
||||
"$APP_ROOT/$APP_NAME/Contents/Resources/app/bin/code" --export-default-configuration=.build
|
||||
displayName: Verify start after signing (export configuration)
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'))
|
||||
condition: and(succeeded(), ne(variables['VSCODE_ARCH'], 'arm64'), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
@@ -323,13 +363,29 @@ steps:
|
||||
VSCODE_ARCH="$(VSCODE_ARCH)" \
|
||||
./build/azure-pipelines/darwin/publish.sh
|
||||
displayName: Publish
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- publish: $(Agent.BuildDirectory)/VSCode-darwin-$(VSCODE_ARCH).zip
|
||||
artifact: vscode-darwin-$(VSCODE_ARCH)
|
||||
displayName: Publish archive
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- publish: $(Agent.BuildDirectory)/vscode-server-darwin.zip
|
||||
artifact: vscode-server-darwin-$(VSCODE_ARCH)
|
||||
displayName: Publish server archive
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- publish: $(Agent.BuildDirectory)/vscode-server-darwin-web.zip
|
||||
artifact: vscode-server-darwin-$(VSCODE_ARCH)-web
|
||||
displayName: Publish web server archive
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- script: |
|
||||
AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \
|
||||
VSCODE_ARCH="$(VSCODE_ARCH)" \
|
||||
yarn gulp upload-vscode-configuration
|
||||
displayName: Upload configuration (for Bing settings search)
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'))
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
continueOnError: true
|
||||
|
||||
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
|
||||
|
||||
@@ -5,6 +5,7 @@ set -e
|
||||
case $VSCODE_ARCH in
|
||||
x64) ASSET_ID="darwin" ;;
|
||||
arm64) ASSET_ID="darwin-arm64" ;;
|
||||
universal) ASSET_ID="darwin-universal" ;;
|
||||
esac
|
||||
|
||||
# publish the build
|
||||
|
||||
@@ -11,7 +11,7 @@ pr:
|
||||
steps:
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: "12.14.1"
|
||||
versionSpec: "12.18.3"
|
||||
|
||||
- task: AzureKeyVault@1
|
||||
displayName: 'Azure Key Vault: Get Secrets'
|
||||
|
||||
@@ -11,7 +11,7 @@ pr:
|
||||
steps:
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: "12.14.1"
|
||||
versionSpec: "12.18.3"
|
||||
|
||||
- task: AzureKeyVault@1
|
||||
displayName: "Azure Key Vault: Get Secrets"
|
||||
|
||||
@@ -6,12 +6,12 @@ RUN apt-get update && apt-get upgrade -y
|
||||
|
||||
RUN apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0 \
|
||||
libkrb5-dev git apt-transport-https ca-certificates curl gnupg-agent software-properties-common \
|
||||
libnss3 libasound2 make gcc libx11-dev fakeroot rpm libgconf-2-4 libunwind8 g++-4.8
|
||||
libnss3 libasound2 make gcc libx11-dev fakeroot rpm libgconf-2-4 libunwind8 g++-4.9
|
||||
|
||||
RUN rm /usr/bin/gcc
|
||||
RUN rm /usr/bin/g++
|
||||
RUN ln -s /usr/bin/gcc-4.8 /usr/bin/gcc
|
||||
RUN ln -s /usr/bin/g++-4.8 /usr/bin/g++
|
||||
RUN ln -s /usr/bin/gcc-4.9 /usr/bin/gcc
|
||||
RUN ln -s /usr/bin/g++-4.9 /usr/bin/g++
|
||||
|
||||
#docker
|
||||
RUN curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add -
|
||||
|
||||
@@ -22,7 +22,7 @@ SERVER_BUILD_NAME="vscode-server-$PLATFORM_LINUX-web"
|
||||
SERVER_TARBALL_FILENAME="vscode-server-$PLATFORM_LINUX-web.tar.gz"
|
||||
SERVER_TARBALL_PATH="$ROOT/$SERVER_TARBALL_FILENAME"
|
||||
|
||||
rm -rf $ROOT/vscode-server-*.tar.*
|
||||
rm -rf $ROOT/vscode-server-*-web.tar.*
|
||||
(cd $ROOT && mv $LEGACY_SERVER_BUILD_NAME $SERVER_BUILD_NAME && tar --owner=0 --group=0 -czf $SERVER_TARBALL_PATH $SERVER_BUILD_NAME)
|
||||
|
||||
node build/azure-pipelines/common/createAsset.js "server-$PLATFORM_LINUX-web" archive-unsigned "$SERVER_TARBALL_FILENAME" "$SERVER_TARBALL_PATH"
|
||||
|
||||
@@ -10,16 +10,21 @@ steps:
|
||||
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: "12.14.1"
|
||||
versionSpec: "12.18.3"
|
||||
|
||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
|
||||
inputs:
|
||||
versionSpec: "1.x"
|
||||
|
||||
- script: |
|
||||
mkdir -p .build
|
||||
node build/azure-pipelines/common/computeNodeModulesCacheKey.js > .build/yarnlockhash
|
||||
displayName: Prepare yarn cache flags
|
||||
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||
displayName: Restore Cache - Node Modules # {{SQL CARBON EDIT}}
|
||||
inputs:
|
||||
keyfile: "build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock"
|
||||
keyfile: ".yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
|
||||
targetfolder: "**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules"
|
||||
vstsFeed: "npm-cache" # {{SQL CARBON EDIT}} update build cache
|
||||
|
||||
@@ -31,7 +36,7 @@ steps:
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
||||
displayName: Save Cache - Node Modules # {{SQL CARBON EDIT}}
|
||||
inputs:
|
||||
keyfile: "build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock"
|
||||
keyfile: ".yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
|
||||
targetfolder: "**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules"
|
||||
vstsFeed: "npm-cache" # {{SQL CARBON EDIT}} update build cache
|
||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||
|
||||
@@ -1,28 +1,7 @@
|
||||
steps:
|
||||
- script: |
|
||||
mkdir -p .build
|
||||
echo -n $BUILD_SOURCEVERSION > .build/commit
|
||||
echo -n $VSCODE_QUALITY > .build/quality
|
||||
echo -n $ENABLE_TERRAPIN > .build/terrapin
|
||||
displayName: Prepare compilation cache flags
|
||||
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||
inputs:
|
||||
keyfile: "build/.cachesalt, .build/commit, .build/quality, .build/terrapin"
|
||||
targetfolder: ".build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min"
|
||||
vstsFeed: "npm-vscode"
|
||||
platformIndependent: true
|
||||
alias: "Compilation"
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
exit 1
|
||||
displayName: Check RestoreCache
|
||||
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
|
||||
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: "12.14.1"
|
||||
versionSpec: "12.18.3"
|
||||
|
||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||
inputs:
|
||||
@@ -34,6 +13,17 @@ steps:
|
||||
azureSubscription: "vscode-builds-subscription"
|
||||
KeyVaultName: vscode
|
||||
|
||||
- task: DownloadPipelineArtifact@2
|
||||
inputs:
|
||||
artifact: Compilation
|
||||
path: $(Build.ArtifactStagingDirectory)
|
||||
displayName: Download compilation output
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
tar -xzf $(Build.ArtifactStagingDirectory)/compilation.tar.gz
|
||||
displayName: Extract compilation output
|
||||
|
||||
- task: Docker@1
|
||||
displayName: "Pull image"
|
||||
inputs:
|
||||
@@ -45,7 +35,6 @@ steps:
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
|
||||
cat << EOF > ~/.netrc
|
||||
machine github.com
|
||||
login vscode
|
||||
@@ -58,30 +47,35 @@ steps:
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
|
||||
git fetch distro
|
||||
git merge $(node -p "require('./package.json').distro")
|
||||
git pull --no-rebase https://github.com/$(VSCODE_MIXIN_REPO).git $(node -p "require('./package.json').distro")
|
||||
displayName: Merge distro
|
||||
|
||||
- script: |
|
||||
npx https://aka.ms/enablesecurefeed standAlone
|
||||
displayName: Switch to Terrapin packages
|
||||
timeoutInMinutes: 5
|
||||
condition: and(succeeded(), eq(variables['ENABLE_TERRAPIN'], 'true'))
|
||||
|
||||
- script: |
|
||||
echo -n "alpine" > .build/arch
|
||||
mkdir -p .build
|
||||
node build/azure-pipelines/common/computeNodeModulesCacheKey.js "alpine" $ENABLE_TERRAPIN > .build/yarnlockhash
|
||||
displayName: Prepare yarn cache flags
|
||||
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||
- task: Cache@2
|
||||
inputs:
|
||||
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
|
||||
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
|
||||
vstsFeed: "npm-vscode"
|
||||
key: 'nodeModules | $(Agent.OS) | .build/yarnlockhash'
|
||||
path: .build/node_modules_cache
|
||||
cacheHitVar: NODE_MODULES_RESTORED
|
||||
displayName: Restore node_modules cache
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
tar -xzf .build/node_modules_cache/cache.tgz
|
||||
condition: and(succeeded(), eq(variables.NODE_MODULES_RESTORED, 'true'))
|
||||
displayName: Extract node_modules cache
|
||||
|
||||
- script: |
|
||||
npx https://aka.ms/enablesecurefeed standAlone
|
||||
timeoutInMinutes: 5
|
||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
|
||||
displayName: Switch to Terrapin packages
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
export CHILD_CONCURRENCY="1"
|
||||
for i in {1..3}; do # try 3 times, for Terrapin
|
||||
yarn --frozen-lockfile && break
|
||||
if [ $i -eq 3 ]; then
|
||||
@@ -90,21 +84,19 @@ steps:
|
||||
fi
|
||||
echo "Yarn failed $i, trying again..."
|
||||
done
|
||||
env:
|
||||
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
|
||||
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
|
||||
displayName: Install dependencies
|
||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
||||
inputs:
|
||||
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
|
||||
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
|
||||
vstsFeed: "npm-vscode"
|
||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
yarn postinstall
|
||||
displayName: Run postinstall scripts
|
||||
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
|
||||
node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt
|
||||
mkdir -p .build/node_modules_cache
|
||||
tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt
|
||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
|
||||
displayName: Create node_modules archive
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
@@ -113,7 +105,7 @@ steps:
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
docker run -e VSCODE_QUALITY -e CHILD_CONCURRENCY=1 -v $(pwd):/root/vscode -v ~/.netrc:/root/.netrc vscodehub.azurecr.io/vscode-linux-build-agent:alpine /root/vscode/build/azure-pipelines/linux/alpine/install-dependencies.sh
|
||||
docker run -e VSCODE_QUALITY -v $(pwd):/root/vscode -v ~/.netrc:/root/.netrc vscodehub.azurecr.io/vscode-linux-build-agent:alpine /root/vscode/build/azure-pipelines/linux/alpine/install-dependencies.sh
|
||||
displayName: Prebuild
|
||||
|
||||
- script: |
|
||||
@@ -129,6 +121,17 @@ steps:
|
||||
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
||||
./build/azure-pipelines/linux/alpine/publish.sh
|
||||
displayName: Publish
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- publish: $(Agent.BuildDirectory)/vscode-server-linux-alpine.tar.gz
|
||||
artifact: vscode-server-linux-alpine
|
||||
displayName: Publish server archive
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- publish: $(Agent.BuildDirectory)/vscode-server-linux-alpine-web.tar.gz
|
||||
artifact: vscode-server-linux-alpine-web
|
||||
displayName: Publish web server archive
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
|
||||
displayName: "Component Detection"
|
||||
|
||||
@@ -1,28 +1,7 @@
|
||||
steps:
|
||||
- script: |
|
||||
mkdir -p .build
|
||||
echo -n $BUILD_SOURCEVERSION > .build/commit
|
||||
echo -n $VSCODE_QUALITY > .build/quality
|
||||
echo -n $ENABLE_TERRAPIN > .build/terrapin
|
||||
displayName: Prepare compilation cache flags
|
||||
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||
inputs:
|
||||
keyfile: "build/.cachesalt, .build/commit, .build/quality, .build/terrapin"
|
||||
targetfolder: ".build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min"
|
||||
vstsFeed: "npm-vscode"
|
||||
platformIndependent: true
|
||||
alias: "Compilation"
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
exit 1
|
||||
displayName: Check RestoreCache
|
||||
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
|
||||
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: "12.14.1"
|
||||
versionSpec: "12.18.3"
|
||||
|
||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||
inputs:
|
||||
@@ -34,6 +13,17 @@ steps:
|
||||
azureSubscription: "vscode-builds-subscription"
|
||||
KeyVaultName: vscode
|
||||
|
||||
- task: DownloadPipelineArtifact@2
|
||||
inputs:
|
||||
artifact: Compilation
|
||||
path: $(Build.ArtifactStagingDirectory)
|
||||
displayName: Download compilation output
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
tar -xzf $(Build.ArtifactStagingDirectory)/compilation.tar.gz
|
||||
displayName: Extract compilation output
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
cat << EOF > ~/.netrc
|
||||
@@ -48,31 +38,56 @@ steps:
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
|
||||
git fetch distro
|
||||
git merge $(node -p "require('./package.json').distro")
|
||||
git pull --no-rebase https://github.com/$(VSCODE_MIXIN_REPO).git $(node -p "require('./package.json').distro")
|
||||
displayName: Merge distro
|
||||
|
||||
- script: |
|
||||
npx https://aka.ms/enablesecurefeed standAlone
|
||||
displayName: Switch to Terrapin packages
|
||||
timeoutInMinutes: 5
|
||||
condition: and(succeeded(), eq(variables['ENABLE_TERRAPIN'], 'true'))
|
||||
|
||||
- script: |
|
||||
echo -n $(VSCODE_ARCH) > .build/arch
|
||||
mkdir -p .build
|
||||
node build/azure-pipelines/common/computeNodeModulesCacheKey.js $VSCODE_ARCH $ENABLE_TERRAPIN > .build/yarnlockhash
|
||||
displayName: Prepare yarn cache flags
|
||||
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||
- task: Cache@2
|
||||
inputs:
|
||||
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
|
||||
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
|
||||
vstsFeed: "npm-vscode"
|
||||
key: 'nodeModules | $(Agent.OS) | .build/yarnlockhash'
|
||||
path: .build/node_modules_cache
|
||||
cacheHitVar: NODE_MODULES_RESTORED
|
||||
displayName: Restore node_modules cache
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
tar -xzf .build/node_modules_cache/cache.tgz
|
||||
condition: and(succeeded(), eq(variables.NODE_MODULES_RESTORED, 'true'))
|
||||
displayName: Extract node_modules cache
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
npm install -g node-gyp@latest
|
||||
node-gyp --version
|
||||
displayName: Update node-gyp
|
||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['VSCODE_ARCH'], 'x64'))
|
||||
|
||||
- script: |
|
||||
npx https://aka.ms/enablesecurefeed standAlone
|
||||
timeoutInMinutes: 5
|
||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
|
||||
displayName: Switch to Terrapin packages
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
export npm_config_arch=$(NPM_ARCH)
|
||||
export CHILD_CONCURRENCY="1"
|
||||
export npm_config_build_from_source=true
|
||||
|
||||
if [ -z "$CC" ] || [ -z "$CXX" ]; then
|
||||
export CC=$(which gcc-5)
|
||||
export CXX=$(which g++-5)
|
||||
fi
|
||||
|
||||
if [ "$VSCODE_ARCH" == "x64" ]; then
|
||||
export VSCODE_REMOTE_CC=$(which gcc-4.8)
|
||||
export VSCODE_REMOTE_CXX=$(which g++-4.8)
|
||||
export VSCODE_REMOTE_NODE_GYP=$(which node-gyp)
|
||||
fi
|
||||
|
||||
for i in {1..3}; do # try 3 times, for Terrapin
|
||||
yarn --frozen-lockfile && break
|
||||
if [ $i -eq 3 ]; then
|
||||
@@ -81,21 +96,23 @@ steps:
|
||||
fi
|
||||
echo "Yarn failed $i, trying again..."
|
||||
done
|
||||
displayName: Install dependencies
|
||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
||||
inputs:
|
||||
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
|
||||
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
|
||||
vstsFeed: "npm-vscode"
|
||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||
# remove once https://github.com/prebuild/prebuild-install/pull/140 is merged and found in keytar
|
||||
cd ./node_modules/keytar
|
||||
npx node-gyp rebuild
|
||||
env:
|
||||
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
|
||||
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
|
||||
displayName: Install dependencies
|
||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
yarn postinstall
|
||||
displayName: Run postinstall scripts
|
||||
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
|
||||
node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt
|
||||
mkdir -p .build/node_modules_cache
|
||||
tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt
|
||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
|
||||
displayName: Create node_modules archive
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
@@ -106,28 +123,41 @@ steps:
|
||||
set -e
|
||||
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
||||
yarn gulp vscode-linux-$(VSCODE_ARCH)-min-ci
|
||||
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
||||
yarn gulp vscode-reh-linux-$(VSCODE_ARCH)-min-ci
|
||||
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
||||
yarn gulp vscode-reh-web-linux-$(VSCODE_ARCH)-min-ci
|
||||
displayName: Build
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
service xvfb start
|
||||
displayName: Start xvfb
|
||||
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
||||
yarn gulp vscode-reh-linux-$(VSCODE_ARCH)-min-ci
|
||||
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
||||
yarn gulp vscode-reh-web-linux-$(VSCODE_ARCH)-min-ci
|
||||
displayName: Build Server
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
||||
yarn npm-run-all -lp "electron $(VSCODE_ARCH)" "playwright-install"
|
||||
displayName: Download Electron and Playwright
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
DISPLAY=:10 ./scripts/test.sh --build --tfs "Unit Tests"
|
||||
displayName: Run unit tests (Electron)
|
||||
timeoutInMinutes: 5
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
DISPLAY=:10 yarn test-browser --build --browser chromium --tfs "Browser Unit Tests"
|
||||
displayName: Run unit tests (Browser)
|
||||
timeoutInMinutes: 5
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
yarn --cwd test/integration/browser compile
|
||||
displayName: Compile integration tests
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||
|
||||
- script: |
|
||||
@@ -137,10 +167,12 @@ steps:
|
||||
set -e
|
||||
APP_ROOT=$(agent.builddirectory)/VSCode-linux-$(VSCODE_ARCH)
|
||||
APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
|
||||
INTEGRATION_TEST_APP_NAME="$APP_NAME" \
|
||||
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
|
||||
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-linux-$(VSCODE_ARCH)" \
|
||||
DISPLAY=:10 ./scripts/test-integration.sh --build --tfs "Integration Tests"
|
||||
displayName: Run integration tests (Electron)
|
||||
timeoutInMinutes: 10
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||
|
||||
- script: |
|
||||
@@ -148,16 +180,19 @@ steps:
|
||||
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-linux-$(VSCODE_ARCH)" \
|
||||
DISPLAY=:10 ./resources/server/test/test-web-integration.sh --browser chromium
|
||||
displayName: Run integration tests (Browser)
|
||||
timeoutInMinutes: 5
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
APP_ROOT=$(agent.builddirectory)/VSCode-linux-$(VSCODE_ARCH)
|
||||
APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
|
||||
INTEGRATION_TEST_APP_NAME="$APP_NAME" \
|
||||
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
|
||||
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-linux-$(VSCODE_ARCH)" \
|
||||
DISPLAY=:10 ./resources/server/test/test-remote-integration.sh
|
||||
displayName: Run remote integration tests (Electron)
|
||||
timeoutInMinutes: 5
|
||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||
|
||||
- task: PublishPipelineArtifact@0
|
||||
@@ -180,17 +215,20 @@ steps:
|
||||
yarn gulp "vscode-linux-$(VSCODE_ARCH)-build-deb"
|
||||
yarn gulp "vscode-linux-$(VSCODE_ARCH)-build-rpm"
|
||||
displayName: Build deb, rpm packages
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
yarn gulp "vscode-linux-$(VSCODE_ARCH)-prepare-snap"
|
||||
displayName: Prepare snap package
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
# needed for code signing
|
||||
- task: UseDotNet@2
|
||||
displayName: "Install .NET Core SDK 2.x"
|
||||
inputs:
|
||||
version: 2.x
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
|
||||
inputs:
|
||||
@@ -210,6 +248,7 @@ steps:
|
||||
]
|
||||
SessionTimeout: 120
|
||||
displayName: Codesign rpm
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
@@ -219,12 +258,34 @@ steps:
|
||||
VSCODE_ARCH="$(VSCODE_ARCH)" \
|
||||
./build/azure-pipelines/linux/publish.sh
|
||||
displayName: Publish
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- publish: $(DEB_PATH)
|
||||
artifact: vscode-linux-deb-$(VSCODE_ARCH)
|
||||
displayName: Publish deb package
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- publish: $(RPM_PATH)
|
||||
artifact: vscode-linux-rpm-$(VSCODE_ARCH)
|
||||
displayName: Publish rpm package
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- publish: $(Agent.BuildDirectory)/vscode-server-linux-$(VSCODE_ARCH).tar.gz
|
||||
artifact: vscode-server-linux-$(VSCODE_ARCH)
|
||||
displayName: Publish server archive
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- publish: $(Agent.BuildDirectory)/vscode-server-linux-$(VSCODE_ARCH)-web.tar.gz
|
||||
artifact: vscode-server-linux-$(VSCODE_ARCH)-web
|
||||
displayName: Publish web server archive
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- task: PublishPipelineArtifact@0
|
||||
displayName: "Publish Pipeline Artifact"
|
||||
inputs:
|
||||
artifactName: "snap-$(VSCODE_ARCH)"
|
||||
targetPath: .build/linux/snap-tarball
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
|
||||
displayName: "Component Detection"
|
||||
|
||||
@@ -26,6 +26,17 @@ rm -rf $ROOT/vscode-server-*.tar.*
|
||||
|
||||
node build/azure-pipelines/common/createAsset.js "server-$PLATFORM_LINUX" archive-unsigned "$SERVER_TARBALL_FILENAME" "$SERVER_TARBALL_PATH"
|
||||
|
||||
# Publish Remote Extension Host (Web)
|
||||
LEGACY_SERVER_BUILD_NAME="vscode-reh-web-$PLATFORM_LINUX"
|
||||
SERVER_BUILD_NAME="vscode-server-$PLATFORM_LINUX-web"
|
||||
SERVER_TARBALL_FILENAME="vscode-server-$PLATFORM_LINUX-web.tar.gz"
|
||||
SERVER_TARBALL_PATH="$ROOT/$SERVER_TARBALL_FILENAME"
|
||||
|
||||
rm -rf $ROOT/vscode-server-*-web.tar.*
|
||||
(cd $ROOT && mv $LEGACY_SERVER_BUILD_NAME $SERVER_BUILD_NAME && tar --owner=0 --group=0 -czf $SERVER_TARBALL_PATH $SERVER_BUILD_NAME)
|
||||
|
||||
node build/azure-pipelines/common/createAsset.js "server-$PLATFORM_LINUX-web" archive-unsigned "$SERVER_TARBALL_FILENAME" "$SERVER_TARBALL_PATH"
|
||||
|
||||
# Publish DEB
|
||||
case $VSCODE_ARCH in
|
||||
x64) DEB_ARCH="amd64" ;;
|
||||
@@ -58,3 +69,7 @@ mkdir -p $REPO/.build/linux/snap-tarball
|
||||
SNAP_TARBALL_PATH="$REPO/.build/linux/snap-tarball/snap-$VSCODE_ARCH.tar.gz"
|
||||
rm -rf $SNAP_TARBALL_PATH
|
||||
(cd .build/linux && tar -czf $SNAP_TARBALL_PATH snap)
|
||||
|
||||
# Export DEB_PATH, RPM_PATH
|
||||
echo "##vso[task.setvariable variable=DEB_PATH]$DEB_PATH"
|
||||
echo "##vso[task.setvariable variable=RPM_PATH]$RPM_PATH"
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
steps:
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: "12.14.1"
|
||||
versionSpec: "12.18.3"
|
||||
|
||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||
inputs:
|
||||
@@ -54,3 +54,11 @@ steps:
|
||||
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
|
||||
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
|
||||
node build/azure-pipelines/common/createAsset.js "linux-snap-$(VSCODE_ARCH)" package "$SNAP_FILENAME" "$SNAP_PATH"
|
||||
|
||||
# Export SNAP_PATH
|
||||
echo "##vso[task.setvariable variable=SNAP_PATH]$SNAP_PATH"
|
||||
|
||||
- publish: $(SNAP_PATH)
|
||||
artifact: vscode-linux-snap-$(VSCODE_ARCH)
|
||||
displayName: Publish snap package
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
@@ -131,16 +131,19 @@ steps:
|
||||
displayName: Run integration tests (Electron)
|
||||
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
|
||||
|
||||
- ${{each extension in parameters.extensionsToUnitTest}}:
|
||||
- script: |
|
||||
set -e
|
||||
APP_ROOT=$(agent.builddirectory)/azuredatastudio-linux-x64
|
||||
APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
|
||||
export INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
|
||||
export NO_CLEANUP=1
|
||||
DISPLAY=:10 node ./scripts/test-extensions-unit.js ${{ extension }}
|
||||
displayName: 'Run ${{ extension }} Stable Extension Unit Tests'
|
||||
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
|
||||
- script: |
|
||||
# Figure out the full absolute path of the product we just built
|
||||
# including the remote server and configure the unit tests
|
||||
# to run with these builds instead of running out of sources.
|
||||
set -e
|
||||
APP_ROOT=$(agent.builddirectory)/azuredatastudio-linux-x64
|
||||
APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
|
||||
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
|
||||
NO_CLEANUP=1 \
|
||||
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/azuredatastudio-reh-linux-x64" \
|
||||
DISPLAY=:10 ./scripts/test-extensions-unit.sh --build --tfs "Extension Unit Tests"
|
||||
displayName: 'Run Stable Extension Unit Tests'
|
||||
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
@@ -162,7 +165,6 @@ steps:
|
||||
# Only archive directories we want for debugging purposes
|
||||
tar -czvf $(Build.ArtifactStagingDirectory)/logs/linux-x64/$folder.tar.gz $folder/User $folder/logs
|
||||
done
|
||||
|
||||
displayName: Archive Logs
|
||||
continueOnError: true
|
||||
condition: succeededOrFailed()
|
||||
@@ -237,6 +239,13 @@ steps:
|
||||
continueOnError: true
|
||||
condition: and(succeededOrFailed(), eq(variables['RUN_TESTS'], 'true'))
|
||||
|
||||
- task: PublishBuildArtifacts@1
|
||||
displayName: 'Publish Artifact: crash reports'
|
||||
inputs:
|
||||
PathtoPublish: '$(Build.SourcesDirectory)/.build/crashes'
|
||||
ArtifactName: crashes
|
||||
condition: and(succeededOrFailed(), eq(variables['RUN_TESTS'], 'true'))
|
||||
|
||||
- task: PublishBuildArtifacts@1
|
||||
displayName: 'Publish Artifact: drop'
|
||||
condition: succeededOrFailed()
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
trigger: none
|
||||
pr: none
|
||||
|
||||
schedules:
|
||||
@@ -8,11 +7,108 @@ schedules:
|
||||
include:
|
||||
- master
|
||||
|
||||
parameters:
|
||||
- name: VSCODE_QUALITY
|
||||
displayName: Quality
|
||||
type: string
|
||||
default: insider
|
||||
values:
|
||||
- exploration
|
||||
- insider
|
||||
- stable
|
||||
- name: ENABLE_TERRAPIN
|
||||
displayName: "Enable Terrapin"
|
||||
type: boolean
|
||||
default: true
|
||||
- name: VSCODE_BUILD_WIN32
|
||||
displayName: "🎯 Windows x64"
|
||||
type: boolean
|
||||
default: true
|
||||
- name: VSCODE_BUILD_WIN32_32BIT
|
||||
displayName: "🎯 Windows ia32"
|
||||
type: boolean
|
||||
default: true
|
||||
- name: VSCODE_BUILD_WIN32_ARM64
|
||||
displayName: "🎯 Windows arm64"
|
||||
type: boolean
|
||||
default: true
|
||||
- name: VSCODE_BUILD_LINUX
|
||||
displayName: "🎯 Linux x64"
|
||||
type: boolean
|
||||
default: true
|
||||
- name: VSCODE_BUILD_LINUX_ARM64
|
||||
displayName: "🎯 Linux arm64"
|
||||
type: boolean
|
||||
default: true
|
||||
- name: VSCODE_BUILD_LINUX_ARMHF
|
||||
displayName: "🎯 Linux armhf"
|
||||
type: boolean
|
||||
default: true
|
||||
- name: VSCODE_BUILD_LINUX_ALPINE
|
||||
displayName: "🎯 Alpine Linux"
|
||||
type: boolean
|
||||
default: true
|
||||
- name: VSCODE_BUILD_MACOS
|
||||
displayName: "🎯 macOS x64"
|
||||
type: boolean
|
||||
default: true
|
||||
- name: VSCODE_BUILD_MACOS_ARM64
|
||||
displayName: "🎯 macOS arm64"
|
||||
type: boolean
|
||||
default: true
|
||||
- name: VSCODE_BUILD_MACOS_UNIVERSAL
|
||||
displayName: "🎯 macOS universal"
|
||||
type: boolean
|
||||
default: true
|
||||
- name: VSCODE_BUILD_WEB
|
||||
displayName: "🎯 Web"
|
||||
type: boolean
|
||||
default: true
|
||||
- name: VSCODE_PUBLISH
|
||||
displayName: "Publish to builds.code.visualstudio.com"
|
||||
type: boolean
|
||||
default: true
|
||||
- name: VSCODE_RELEASE
|
||||
displayName: "Release build if successful"
|
||||
type: boolean
|
||||
default: false
|
||||
- name: VSCODE_COMPILE_ONLY
|
||||
displayName: "Run Compile stage exclusively"
|
||||
type: boolean
|
||||
default: false
|
||||
- name: VSCODE_STEP_ON_IT
|
||||
displayName: "Skip tests"
|
||||
type: boolean
|
||||
default: false
|
||||
|
||||
variables:
|
||||
- name: ENABLE_TERRAPIN
|
||||
value: ${{ eq(parameters.ENABLE_TERRAPIN, true) }}
|
||||
- name: VSCODE_QUALITY
|
||||
value: ${{ parameters.VSCODE_QUALITY }}
|
||||
- name: VSCODE_BUILD_STAGE_WINDOWS
|
||||
value: ${{ or(eq(parameters.VSCODE_BUILD_WIN32, true), eq(parameters.VSCODE_BUILD_WIN32_32BIT, true), eq(parameters.VSCODE_BUILD_WIN32_ARM64, true)) }}
|
||||
- name: VSCODE_BUILD_STAGE_LINUX
|
||||
value: ${{ or(eq(parameters.VSCODE_BUILD_LINUX, true), eq(parameters.VSCODE_BUILD_LINUX_ARMHF, true), eq(parameters.VSCODE_BUILD_LINUX_ARM64, true), eq(parameters.VSCODE_BUILD_LINUX_ALPINE, true), eq(parameters.VSCODE_BUILD_WEB, true)) }}
|
||||
- name: VSCODE_BUILD_STAGE_MACOS
|
||||
value: ${{ or(eq(parameters.VSCODE_BUILD_MACOS, true), eq(parameters.VSCODE_BUILD_MACOS_ARM64, true)) }}
|
||||
- name: VSCODE_CIBUILD
|
||||
value: ${{ in(variables['Build.Reason'], 'IndividualCI', 'BatchedCI') }}
|
||||
- name: VSCODE_PUBLISH
|
||||
value: ${{ and(eq(parameters.VSCODE_PUBLISH, true), eq(variables.VSCODE_CIBUILD, false)) }}
|
||||
- name: VSCODE_SCHEDULEDBUILD
|
||||
value: ${{ eq(variables['Build.Reason'], 'Schedule') }}
|
||||
- name: VSCODE_STEP_ON_IT
|
||||
value: ${{ eq(parameters.VSCODE_STEP_ON_IT, true) }}
|
||||
- name: VSCODE_BUILD_MACOS_UNIVERSAL
|
||||
value: ${{ and(eq(variables['VSCODE_PUBLISH'], true), eq(parameters.VSCODE_BUILD_MACOS, true), eq(parameters.VSCODE_BUILD_MACOS_ARM64, true), eq(parameters.VSCODE_BUILD_MACOS_UNIVERSAL, true)) }}
|
||||
|
||||
resources:
|
||||
containers:
|
||||
- container: vscode-x64
|
||||
image: vscodehub.azurecr.io/vscode-linux-build-agent:x64
|
||||
image: vscodehub.azurecr.io/vscode-linux-build-agent:bionic-x64
|
||||
endpoint: VSCodeHub
|
||||
options: --user 0:0
|
||||
- container: vscode-arm64
|
||||
image: vscodehub.azurecr.io/vscode-linux-build-agent:stretch-arm64
|
||||
endpoint: VSCodeHub
|
||||
@@ -26,177 +122,189 @@ stages:
|
||||
- stage: Compile
|
||||
jobs:
|
||||
- job: Compile
|
||||
pool:
|
||||
vmImage: "Ubuntu-16.04"
|
||||
container: vscode-x64
|
||||
pool: compile
|
||||
variables:
|
||||
VSCODE_ARCH: x64
|
||||
steps:
|
||||
- template: product-compile.yml
|
||||
|
||||
- stage: Windows
|
||||
dependsOn:
|
||||
- Compile
|
||||
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'))
|
||||
pool:
|
||||
vmImage: VS2017-Win2016
|
||||
jobs:
|
||||
- job: Windows
|
||||
condition: and(succeeded(), eq(variables['VSCODE_BUILD_WIN32'], 'true'))
|
||||
timeoutInMinutes: 90
|
||||
variables:
|
||||
VSCODE_ARCH: x64
|
||||
steps:
|
||||
- template: win32/product-build-win32.yml
|
||||
- ${{ if and(eq(parameters.VSCODE_COMPILE_ONLY, false), eq(variables['VSCODE_BUILD_STAGE_WINDOWS'], true)) }}:
|
||||
- stage: Windows
|
||||
dependsOn:
|
||||
- Compile
|
||||
pool:
|
||||
vmImage: VS2017-Win2016
|
||||
jobs:
|
||||
|
||||
- job: Windows32
|
||||
condition: and(succeeded(), eq(variables['VSCODE_BUILD_WIN32_32BIT'], 'true'))
|
||||
timeoutInMinutes: 90
|
||||
variables:
|
||||
VSCODE_ARCH: ia32
|
||||
steps:
|
||||
- template: win32/product-build-win32.yml
|
||||
- ${{ if eq(parameters.VSCODE_BUILD_WIN32, true) }}:
|
||||
- job: Windows
|
||||
timeoutInMinutes: 90
|
||||
variables:
|
||||
VSCODE_ARCH: x64
|
||||
steps:
|
||||
- template: win32/product-build-win32.yml
|
||||
|
||||
- job: WindowsARM64
|
||||
condition: and(succeeded(), eq(variables['VSCODE_BUILD_WIN32_ARM64'], 'true'))
|
||||
timeoutInMinutes: 90
|
||||
variables:
|
||||
VSCODE_ARCH: arm64
|
||||
steps:
|
||||
- template: win32/product-build-win32.yml
|
||||
- ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_WIN32_32BIT, true)) }}:
|
||||
- job: Windows32
|
||||
timeoutInMinutes: 90
|
||||
variables:
|
||||
VSCODE_ARCH: ia32
|
||||
steps:
|
||||
- template: win32/product-build-win32.yml
|
||||
|
||||
- stage: Linux
|
||||
dependsOn:
|
||||
- Compile
|
||||
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'))
|
||||
pool:
|
||||
vmImage: "Ubuntu-16.04"
|
||||
jobs:
|
||||
- job: Linux
|
||||
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX'], 'true'))
|
||||
container: vscode-x64
|
||||
variables:
|
||||
VSCODE_ARCH: x64
|
||||
NPM_ARCH: x64
|
||||
steps:
|
||||
- template: linux/product-build-linux.yml
|
||||
- ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_WIN32_ARM64, true)) }}:
|
||||
- job: WindowsARM64
|
||||
timeoutInMinutes: 90
|
||||
variables:
|
||||
VSCODE_ARCH: arm64
|
||||
steps:
|
||||
- template: win32/product-build-win32.yml
|
||||
|
||||
- job: LinuxSnap
|
||||
dependsOn:
|
||||
- ${{ if and(eq(parameters.VSCODE_COMPILE_ONLY, false), eq(variables['VSCODE_BUILD_STAGE_LINUX'], true)) }}:
|
||||
- stage: Linux
|
||||
dependsOn:
|
||||
- Compile
|
||||
pool:
|
||||
vmImage: "Ubuntu-18.04"
|
||||
jobs:
|
||||
|
||||
- ${{ if eq(parameters.VSCODE_BUILD_LINUX, true) }}:
|
||||
- job: Linux
|
||||
container: vscode-x64
|
||||
variables:
|
||||
VSCODE_ARCH: x64
|
||||
NPM_ARCH: x64
|
||||
steps:
|
||||
- template: linux/product-build-linux.yml
|
||||
|
||||
- ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_LINUX, true)) }}:
|
||||
- job: LinuxSnap
|
||||
dependsOn:
|
||||
- Linux
|
||||
container: snapcraft
|
||||
variables:
|
||||
VSCODE_ARCH: x64
|
||||
steps:
|
||||
- template: linux/snap-build-linux.yml
|
||||
|
||||
- ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_LINUX_ARMHF, true)) }}:
|
||||
- job: LinuxArmhf
|
||||
container: vscode-armhf
|
||||
variables:
|
||||
VSCODE_ARCH: armhf
|
||||
NPM_ARCH: armv7l
|
||||
steps:
|
||||
- template: linux/product-build-linux.yml
|
||||
|
||||
# TODO@joaomoreno: We don't ship ARM snaps for now
|
||||
- ${{ if and(false, eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_LINUX_ARMHF, true)) }}:
|
||||
- job: LinuxSnapArmhf
|
||||
dependsOn:
|
||||
- LinuxArmhf
|
||||
container: snapcraft
|
||||
variables:
|
||||
VSCODE_ARCH: armhf
|
||||
steps:
|
||||
- template: linux/snap-build-linux.yml
|
||||
|
||||
- ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_LINUX_ARM64, true)) }}:
|
||||
- job: LinuxArm64
|
||||
container: vscode-arm64
|
||||
variables:
|
||||
VSCODE_ARCH: arm64
|
||||
NPM_ARCH: arm64
|
||||
steps:
|
||||
- template: linux/product-build-linux.yml
|
||||
|
||||
# TODO@joaomoreno: We don't ship ARM snaps for now
|
||||
- ${{ if and(false, eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_LINUX_ARM64, true)) }}:
|
||||
- job: LinuxSnapArm64
|
||||
dependsOn:
|
||||
- LinuxArm64
|
||||
container: snapcraft
|
||||
variables:
|
||||
VSCODE_ARCH: arm64
|
||||
steps:
|
||||
- template: linux/snap-build-linux.yml
|
||||
|
||||
- ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_LINUX_ALPINE, true)) }}:
|
||||
- job: LinuxAlpine
|
||||
steps:
|
||||
- template: linux/product-build-alpine.yml
|
||||
|
||||
- ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_WEB, true)) }}:
|
||||
- job: LinuxWeb
|
||||
variables:
|
||||
VSCODE_ARCH: x64
|
||||
steps:
|
||||
- template: web/product-build-web.yml
|
||||
|
||||
- ${{ if and(eq(parameters.VSCODE_COMPILE_ONLY, false), eq(variables['VSCODE_BUILD_STAGE_MACOS'], true)) }}:
|
||||
- stage: macOS
|
||||
dependsOn:
|
||||
- Compile
|
||||
pool:
|
||||
vmImage: macOS-latest
|
||||
jobs:
|
||||
|
||||
- ${{ if eq(parameters.VSCODE_BUILD_MACOS, true) }}:
|
||||
- job: macOS
|
||||
timeoutInMinutes: 90
|
||||
variables:
|
||||
VSCODE_ARCH: x64
|
||||
steps:
|
||||
- template: darwin/product-build-darwin.yml
|
||||
|
||||
- ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_MACOS_ARM64, true)) }}:
|
||||
- job: macOSARM64
|
||||
timeoutInMinutes: 90
|
||||
variables:
|
||||
VSCODE_ARCH: arm64
|
||||
steps:
|
||||
- template: darwin/product-build-darwin.yml
|
||||
|
||||
- ${{ if eq(variables['VSCODE_BUILD_MACOS_UNIVERSAL'], true) }}:
|
||||
- job: macOSUniversal
|
||||
dependsOn:
|
||||
- macOS
|
||||
- macOSARM64
|
||||
timeoutInMinutes: 90
|
||||
variables:
|
||||
VSCODE_ARCH: universal
|
||||
steps:
|
||||
- template: darwin/product-build-darwin.yml
|
||||
|
||||
- ${{ if and(eq(variables['VSCODE_PUBLISH'], true), eq(parameters.VSCODE_COMPILE_ONLY, false)) }}:
|
||||
- stage: Mooncake
|
||||
dependsOn:
|
||||
- ${{ if eq(variables['VSCODE_BUILD_STAGE_WINDOWS'], true) }}:
|
||||
- Windows
|
||||
- ${{ if eq(variables['VSCODE_BUILD_STAGE_LINUX'], true) }}:
|
||||
- Linux
|
||||
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX'], 'true'))
|
||||
container: snapcraft
|
||||
variables:
|
||||
VSCODE_ARCH: x64
|
||||
steps:
|
||||
- template: linux/snap-build-linux.yml
|
||||
- ${{ if eq(variables['VSCODE_BUILD_STAGE_MACOS'], true) }}:
|
||||
- macOS
|
||||
condition: succeededOrFailed()
|
||||
pool:
|
||||
vmImage: "Ubuntu-18.04"
|
||||
jobs:
|
||||
- job: SyncMooncake
|
||||
displayName: Sync Mooncake
|
||||
steps:
|
||||
- template: sync-mooncake.yml
|
||||
|
||||
- job: LinuxArmhf
|
||||
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX_ARMHF'], 'true'))
|
||||
container: vscode-armhf
|
||||
variables:
|
||||
VSCODE_ARCH: armhf
|
||||
NPM_ARCH: armv7l
|
||||
steps:
|
||||
- template: linux/product-build-linux.yml
|
||||
|
||||
- job: LinuxSnapArmhf
|
||||
dependsOn:
|
||||
- LinuxArmhf
|
||||
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX_ARMHF'], 'true'))
|
||||
container: snapcraft
|
||||
variables:
|
||||
VSCODE_ARCH: armhf
|
||||
steps:
|
||||
- template: linux/snap-build-linux.yml
|
||||
|
||||
- job: LinuxArm64
|
||||
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX_ARM64'], 'true'))
|
||||
container: vscode-arm64
|
||||
variables:
|
||||
VSCODE_ARCH: arm64
|
||||
NPM_ARCH: arm64
|
||||
steps:
|
||||
- template: linux/product-build-linux.yml
|
||||
|
||||
- job: LinuxSnapArm64
|
||||
dependsOn:
|
||||
- LinuxArm64
|
||||
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX_ARM64'], 'true'))
|
||||
container: snapcraft
|
||||
variables:
|
||||
VSCODE_ARCH: arm64
|
||||
steps:
|
||||
- template: linux/snap-build-linux.yml
|
||||
|
||||
- job: LinuxAlpine
|
||||
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX_ALPINE'], 'true'))
|
||||
steps:
|
||||
- template: linux/product-build-alpine.yml
|
||||
|
||||
- job: LinuxWeb
|
||||
condition: and(succeeded(), eq(variables['VSCODE_BUILD_WEB'], 'true'))
|
||||
variables:
|
||||
VSCODE_ARCH: x64
|
||||
steps:
|
||||
- template: web/product-build-web.yml
|
||||
|
||||
- stage: macOS
|
||||
dependsOn:
|
||||
- Compile
|
||||
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'))
|
||||
pool:
|
||||
vmImage: macOS-latest
|
||||
jobs:
|
||||
- job: macOS
|
||||
condition: and(succeeded(), eq(variables['VSCODE_BUILD_MACOS'], 'true'))
|
||||
timeoutInMinutes: 90
|
||||
variables:
|
||||
VSCODE_ARCH: x64
|
||||
steps:
|
||||
- template: darwin/product-build-darwin.yml
|
||||
|
||||
- stage: macOSARM64
|
||||
dependsOn:
|
||||
- Compile
|
||||
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'))
|
||||
pool:
|
||||
vmImage: macOS-latest
|
||||
jobs:
|
||||
- job: macOSARM64
|
||||
condition: and(succeeded(), eq(variables['VSCODE_BUILD_MACOS_ARM64'], 'true'))
|
||||
timeoutInMinutes: 90
|
||||
variables:
|
||||
VSCODE_ARCH: arm64
|
||||
steps:
|
||||
- template: darwin/product-build-darwin.yml
|
||||
|
||||
- stage: Mooncake
|
||||
dependsOn:
|
||||
- Windows
|
||||
- Linux
|
||||
- macOS
|
||||
- macOSARM64
|
||||
condition: and(succeededOrFailed(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'))
|
||||
pool:
|
||||
vmImage: "Ubuntu-16.04"
|
||||
jobs:
|
||||
- job: SyncMooncake
|
||||
displayName: Sync Mooncake
|
||||
steps:
|
||||
- template: sync-mooncake.yml
|
||||
|
||||
- stage: Publish
|
||||
dependsOn:
|
||||
- Windows
|
||||
- Linux
|
||||
- macOS
|
||||
- macOSARM64
|
||||
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), or(eq(variables['VSCODE_RELEASE'], 'true'), and(or(eq(variables['VSCODE_QUALITY'], 'insider'), eq(variables['VSCODE_QUALITY'], 'exploration')), eq(variables['Build.Reason'], 'Schedule'))))
|
||||
pool:
|
||||
vmImage: "Ubuntu-16.04"
|
||||
jobs:
|
||||
- job: BuildService
|
||||
displayName: Build Service
|
||||
steps:
|
||||
- template: release.yml
|
||||
- ${{ if and(eq(parameters.VSCODE_COMPILE_ONLY, false), or(eq(parameters.VSCODE_RELEASE, true), and(in(parameters.VSCODE_QUALITY, 'insider', 'exploration'), eq(variables['VSCODE_SCHEDULEDBUILD'], true)))) }}:
|
||||
- stage: Release
|
||||
dependsOn:
|
||||
- ${{ if eq(variables['VSCODE_BUILD_STAGE_WINDOWS'], true) }}:
|
||||
- Windows
|
||||
- ${{ if eq(variables['VSCODE_BUILD_STAGE_LINUX'], true) }}:
|
||||
- Linux
|
||||
- ${{ if eq(variables['VSCODE_BUILD_STAGE_MACOS'], true) }}:
|
||||
- macOS
|
||||
pool:
|
||||
vmImage: "Ubuntu-18.04"
|
||||
jobs:
|
||||
- job: ReleaseBuild
|
||||
displayName: Release Build
|
||||
steps:
|
||||
- template: release.yml
|
||||
|
||||
@@ -1,36 +1,17 @@
|
||||
steps:
|
||||
- script: |
|
||||
mkdir -p .build
|
||||
echo -n $BUILD_SOURCEVERSION > .build/commit
|
||||
echo -n $VSCODE_QUALITY > .build/quality
|
||||
echo -n $ENABLE_TERRAPIN > .build/terrapin
|
||||
displayName: Prepare compilation cache flag
|
||||
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||
inputs:
|
||||
keyfile: "build/.cachesalt, .build/commit, .build/quality, .build/terrapin"
|
||||
targetfolder: ".build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min"
|
||||
vstsFeed: "npm-vscode"
|
||||
platformIndependent: true
|
||||
alias: "Compilation"
|
||||
dryRun: true
|
||||
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: "12.14.1"
|
||||
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
|
||||
versionSpec: "12.18.3"
|
||||
|
||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||
inputs:
|
||||
versionSpec: "1.x"
|
||||
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
|
||||
|
||||
- task: AzureKeyVault@1
|
||||
displayName: "Azure Key Vault: Get Secrets"
|
||||
inputs:
|
||||
azureSubscription: "vscode-builds-subscription"
|
||||
KeyVaultName: vscode
|
||||
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
@@ -43,24 +24,22 @@ steps:
|
||||
git config user.email "vscode@microsoft.com"
|
||||
git config user.name "VSCode"
|
||||
displayName: Prepare tooling
|
||||
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
|
||||
git fetch distro
|
||||
git merge $(node -p "require('./package.json').distro")
|
||||
git pull --no-rebase https://github.com/$(VSCODE_MIXIN_REPO).git $(node -p "require('./package.json').distro")
|
||||
displayName: Merge distro
|
||||
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
|
||||
|
||||
- script: |
|
||||
npx https://aka.ms/enablesecurefeed standAlone
|
||||
displayName: Switch to Terrapin packages
|
||||
timeoutInMinutes: 5
|
||||
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
|
||||
condition: and(succeeded(), eq(variables['ENABLE_TERRAPIN'], 'true'))
|
||||
|
||||
- script: |
|
||||
mkdir -p .build
|
||||
echo -n $(VSCODE_ARCH) > .build/arch
|
||||
echo -n $ENABLE_TERRAPIN > .build/terrapin
|
||||
displayName: Prepare yarn cache flags
|
||||
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||
@@ -68,11 +47,22 @@ steps:
|
||||
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
|
||||
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
|
||||
vstsFeed: "npm-vscode"
|
||||
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
export CHILD_CONCURRENCY="1"
|
||||
export npm_config_arch=$(NPM_ARCH)
|
||||
|
||||
if [ -z "$CC" ] || [ -z "$CXX" ]; then
|
||||
export CC=$(which gcc-5)
|
||||
export CXX=$(which g++-5)
|
||||
fi
|
||||
|
||||
if [ "$VSCODE_ARCH" == "x64" ]; then
|
||||
export VSCODE_REMOTE_CC=$(which gcc-4.8)
|
||||
export VSCODE_REMOTE_CXX=$(which g++-4.8)
|
||||
export VSCODE_REMOTE_NODE_GYP=$(which node-gyp)
|
||||
fi
|
||||
|
||||
for i in {1..3}; do # try 3 times, for Terrapin
|
||||
yarn --frozen-lockfile && break
|
||||
if [ $i -eq 3 ]; then
|
||||
@@ -81,50 +71,24 @@ steps:
|
||||
fi
|
||||
echo "Yarn failed $i, trying again..."
|
||||
done
|
||||
env:
|
||||
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
|
||||
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
|
||||
displayName: Install dependencies
|
||||
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))
|
||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
||||
inputs:
|
||||
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
|
||||
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
|
||||
vstsFeed: "npm-vscode"
|
||||
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))
|
||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
yarn postinstall
|
||||
displayName: Run postinstall scripts
|
||||
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'), eq(variables['CacheRestored'], 'true'))
|
||||
|
||||
# Mixin must run before optimize, because the CSS loader will
|
||||
# inline small SVGs
|
||||
# Mixin must run before optimize, because the CSS loader will inline small SVGs
|
||||
- script: |
|
||||
set -e
|
||||
node build/azure-pipelines/mixin
|
||||
displayName: Mix in quality
|
||||
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
yarn gulp hygiene
|
||||
yarn monaco-compile-check
|
||||
yarn valid-layers-check
|
||||
displayName: Run hygiene, monaco compile & valid layers checks
|
||||
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||
|
||||
- script: |
set -e
./build/azure-pipelines/common/extract-telemetry.sh
displayName: Extract Telemetry
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))

- script: |
|
||||
set -e
|
||||
AZURE_WEBVIEW_STORAGE_ACCESS_KEY="$(vscode-webview-storage-key)" \
|
||||
./build/azure-pipelines/common/publish-webview.sh
|
||||
displayName: Publish Webview
|
||||
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
@@ -134,14 +98,13 @@ steps:
|
||||
yarn gulp vscode-reh-linux-x64-min
|
||||
yarn gulp vscode-reh-web-linux-x64-min
|
||||
displayName: Compile
|
||||
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \
|
||||
node build/azure-pipelines/upload-sourcemaps
|
||||
displayName: Upload sourcemaps
|
||||
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
@@ -149,13 +112,16 @@ steps:
|
||||
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
|
||||
node build/azure-pipelines/common/createBuild.js $VERSION
|
||||
displayName: Create build
|
||||
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
# we gotta tarball everything in order to preserve file permissions
- script: |
set -e
tar -czf $(Build.ArtifactStagingDirectory)/compilation.tar.gz .build out-*
displayName: Compress compilation artifact

- task: PublishPipelineArtifact@1
inputs:
keyfile: "build/.cachesalt, .build/commit, .build/quality, .build/terrapin"
targetfolder: ".build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min"
vstsFeed: "npm-vscode"
platformIndependent: true
alias: "Compilation"
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
targetPath: $(Build.ArtifactStagingDirectory)/compilation.tar.gz
artifactName: Compilation
displayName: Publish compilation artifact

@@ -1,2 +0,0 @@
|
||||
node_modules/
|
||||
*.js
|
||||
36
build/azure-pipelines/publish-types/check-version.js
Normal file
@@ -0,0 +1,36 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
'use strict';
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const cp = require("child_process");
|
||||
let tag = '';
|
||||
try {
|
||||
tag = cp
|
||||
.execSync('git describe --tags `git rev-list --tags --max-count=1`')
|
||||
.toString()
|
||||
.trim();
|
||||
if (!isValidTag(tag)) {
|
||||
throw Error(`Invalid tag ${tag}`);
|
||||
}
|
||||
}
|
||||
catch (err) {
|
||||
console.error(err);
|
||||
console.error('Failed to update types');
|
||||
process.exit(1);
|
||||
}
|
||||
function isValidTag(t) {
|
||||
if (t.split('.').length !== 3) {
|
||||
return false;
|
||||
}
|
||||
const [major, minor, bug] = t.split('.');
|
||||
// Only release for tags like 1.34.0
|
||||
if (bug !== '0') {
|
||||
return false;
|
||||
}
|
||||
if (isNaN(parseInt(major, 10)) || isNaN(parseInt(minor, 10))) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
@@ -9,7 +9,7 @@ pr: none
|
||||
steps:
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: "12.14.1"
|
||||
versionSpec: "12.18.3"
|
||||
|
||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||
inputs:
|
||||
|
||||
72
build/azure-pipelines/publish-types/update-types.js
Normal file
@@ -0,0 +1,72 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
'use strict';
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const fs = require("fs");
|
||||
const cp = require("child_process");
|
||||
const path = require("path");
|
||||
let tag = '';
|
||||
try {
|
||||
tag = cp
|
||||
.execSync('git describe --tags `git rev-list --tags --max-count=1`')
|
||||
.toString()
|
||||
.trim();
|
||||
const dtsUri = `https://raw.githubusercontent.com/microsoft/vscode/${tag}/src/vs/vscode.d.ts`;
|
||||
const outPath = path.resolve(process.cwd(), 'DefinitelyTyped/types/vscode/index.d.ts');
|
||||
cp.execSync(`curl ${dtsUri} --output ${outPath}`);
|
||||
updateDTSFile(outPath, tag);
|
||||
console.log(`Done updating vscode.d.ts at ${outPath}`);
|
||||
}
|
||||
catch (err) {
|
||||
console.error(err);
|
||||
console.error('Failed to update types');
|
||||
process.exit(1);
|
||||
}
|
||||
function updateDTSFile(outPath, tag) {
|
||||
const oldContent = fs.readFileSync(outPath, 'utf-8');
|
||||
const newContent = getNewFileContent(oldContent, tag);
|
||||
fs.writeFileSync(outPath, newContent);
|
||||
}
|
||||
function repeat(str, times) {
|
||||
const result = new Array(times);
|
||||
for (let i = 0; i < times; i++) {
|
||||
result[i] = str;
|
||||
}
|
||||
return result.join('');
|
||||
}
|
||||
function convertTabsToSpaces(str) {
|
||||
return str.replace(/\t/gm, value => repeat(' ', value.length));
|
||||
}
|
||||
function getNewFileContent(content, tag) {
|
||||
const oldheader = [
|
||||
`/*---------------------------------------------------------------------------------------------`,
|
||||
` * Copyright (c) Microsoft Corporation. All rights reserved.`,
|
||||
` * Licensed under the Source EULA. See License.txt in the project root for license information.`,
|
||||
` *--------------------------------------------------------------------------------------------*/`
|
||||
].join('\n');
|
||||
return convertTabsToSpaces(getNewFileHeader(tag) + content.slice(oldheader.length));
|
||||
}
|
||||
function getNewFileHeader(tag) {
|
||||
const [major, minor] = tag.split('.');
|
||||
const shorttag = `${major}.${minor}`;
|
||||
const header = [
|
||||
`// Type definitions for Visual Studio Code ${shorttag}`,
|
||||
`// Project: https://github.com/microsoft/vscode`,
|
||||
`// Definitions by: Visual Studio Code Team, Microsoft <https://github.com/Microsoft>`,
|
||||
`// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped`,
|
||||
``,
|
||||
`/*---------------------------------------------------------------------------------------------`,
|
||||
` * Copyright (c) Microsoft Corporation. All rights reserved.`,
|
||||
` * Licensed under the Source EULA.`,
|
||||
` * See https://github.com/Microsoft/vscode/blob/master/LICENSE.txt for license information.`,
|
||||
` *--------------------------------------------------------------------------------------------*/`,
|
||||
``,
|
||||
`/**`,
|
||||
` * Type Definition for Visual Studio Code ${shorttag} Extension API`,
|
||||
` * See https://code.visualstudio.com/api for more information`,
|
||||
` */`
|
||||
].join('\n');
|
||||
return header;
|
||||
}
|
||||
@@ -1,7 +1,7 @@
resources:
containers:
- container: linux-x64
image: sqltoolscontainers.azurecr.io/linux-build-agent:2
image: sqltoolscontainers.azurecr.io/linux-build-agent:3
endpoint: ContainerRegistry

jobs:

@@ -1,7 +1,7 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "12.14.1"
versionSpec: "12.18.3"

- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:

@@ -1,28 +1,7 @@
|
||||
steps:
|
||||
- script: |
|
||||
mkdir -p .build
|
||||
echo -n $BUILD_SOURCEVERSION > .build/commit
|
||||
echo -n $VSCODE_QUALITY > .build/quality
|
||||
echo -n $ENABLE_TERRAPIN > .build/terrapin
|
||||
displayName: Prepare compilation cache flag
|
||||
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||
inputs:
|
||||
keyfile: "build/.cachesalt, .build/commit, .build/quality, .build/terrapin"
|
||||
targetfolder: ".build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min"
|
||||
vstsFeed: "npm-vscode"
|
||||
platformIndependent: true
|
||||
alias: "Compilation"
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
exit 1
|
||||
displayName: Check RestoreCache
|
||||
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
|
||||
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: "12.14.1"
|
||||
versionSpec: "12.18.3"
|
||||
|
||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||
inputs:
|
||||
@@ -34,6 +13,17 @@ steps:
|
||||
azureSubscription: "vscode-builds-subscription"
|
||||
KeyVaultName: vscode
|
||||
|
||||
- task: DownloadPipelineArtifact@2
|
||||
inputs:
|
||||
artifact: Compilation
|
||||
path: $(Build.ArtifactStagingDirectory)
|
||||
displayName: Download compilation output
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
tar -xzf $(Build.ArtifactStagingDirectory)/compilation.tar.gz
|
||||
displayName: Extract compilation output
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
cat << EOF > ~/.netrc
|
||||
@@ -48,30 +38,35 @@ steps:
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
|
||||
git fetch distro
|
||||
git merge $(node -p "require('./package.json').distro")
|
||||
git pull --no-rebase https://github.com/$(VSCODE_MIXIN_REPO).git $(node -p "require('./package.json').distro")
|
||||
displayName: Merge distro
|
||||
|
||||
- script: |
|
||||
npx https://aka.ms/enablesecurefeed standAlone
|
||||
displayName: Switch to Terrapin packages
|
||||
timeoutInMinutes: 5
|
||||
condition: and(succeeded(), eq(variables['ENABLE_TERRAPIN'], 'true'))
|
||||
mkdir -p .build
|
||||
node build/azure-pipelines/common/computeNodeModulesCacheKey.js "web" $ENABLE_TERRAPIN > .build/yarnlockhash
|
||||
displayName: Prepare yarn cache flags
|
||||
|
||||
- script: |
|
||||
echo -n "web" > .build/arch
|
||||
displayName: Prepare yarn cache flag
|
||||
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||
- task: Cache@2
|
||||
inputs:
|
||||
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
|
||||
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
|
||||
vstsFeed: "npm-vscode"
|
||||
key: 'nodeModules | $(Agent.OS) | .build/yarnlockhash'
|
||||
path: .build/node_modules_cache
|
||||
cacheHitVar: NODE_MODULES_RESTORED
|
||||
displayName: Restore node_modules cache
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
tar -xzf .build/node_modules_cache/cache.tgz
|
||||
condition: and(succeeded(), eq(variables.NODE_MODULES_RESTORED, 'true'))
|
||||
displayName: Extract node_modules cache
|
||||
|
||||
- script: |
|
||||
npx https://aka.ms/enablesecurefeed standAlone
|
||||
timeoutInMinutes: 5
|
||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
|
||||
displayName: Switch to Terrapin packages
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
export CHILD_CONCURRENCY="1"
|
||||
for i in {1..3}; do # try 3 times, for Terrapin
|
||||
yarn --frozen-lockfile && break
|
||||
if [ $i -eq 3 ]; then
|
||||
@@ -80,21 +75,19 @@ steps:
|
||||
fi
|
||||
echo "Yarn failed $i, trying again..."
|
||||
done
|
||||
env:
|
||||
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
|
||||
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
|
||||
displayName: Install dependencies
|
||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
||||
inputs:
|
||||
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
|
||||
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
|
||||
vstsFeed: "npm-vscode"
|
||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
yarn postinstall
|
||||
displayName: Run postinstall scripts
|
||||
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
|
||||
node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt
|
||||
mkdir -p .build/node_modules_cache
|
||||
tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt
|
||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
|
||||
displayName: Create node_modules archive
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
@@ -130,3 +123,8 @@ steps:
|
||||
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
||||
./build/azure-pipelines/web/publish.sh
|
||||
displayName: Publish
|
||||
|
||||
- publish: $(Agent.BuildDirectory)/vscode-web.tar.gz
|
||||
artifact: vscode-web-standalone
|
||||
displayName: Publish web archive
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
@@ -1,7 +1,10 @@
<?xml version="1.0" encoding="utf-8"?>
<configuration>
<packageSources>
<clear/>
<add key="ESRP" value="https://microsoft.pkgs.visualstudio.com/_packaging/ESRP/nuget/v3/index.json" />
</packageSources>
<packageSources>
<clear />
<add key="ESRP" value="https://microsoft.pkgs.visualstudio.com/_packaging/ESRP/nuget/v3/index.json" />
</packageSources>
<disabledPackageSources>
<clear />
</disabledPackageSources>
</configuration>

@@ -1,7 +1,7 @@
|
||||
steps:
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: "12.14.1"
|
||||
versionSpec: "12.18.3"
|
||||
|
||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 # {{SQL CARBON EDIT}} update version
|
||||
inputs:
|
||||
|
||||
@@ -3,7 +3,7 @@ $ErrorActionPreference = "Stop"
|
||||
|
||||
$CertBytes = [System.Convert]::FromBase64String($CertBase64)
|
||||
$CertCollection = New-Object System.Security.Cryptography.X509Certificates.X509Certificate2Collection
|
||||
$CertCollection.Import($CertBytes, $null, [System.Security.Cryptography.X509Certificates.X509KeyStorageFlags]::Exportable)
|
||||
$CertCollection.Import($CertBytes, $null, [System.Security.Cryptography.X509Certificates.X509KeyStorageFlags]::Exportable -bxor [System.Security.Cryptography.X509Certificates.X509KeyStorageFlags]::PersistKeySet)
|
||||
|
||||
$CertStore = New-Object System.Security.Cryptography.X509Certificates.X509Store("My","LocalMachine")
|
||||
$CertStore.Open("ReadWrite")
|
||||
|
||||
@@ -1,28 +1,7 @@
|
||||
steps:
|
||||
- powershell: |
|
||||
mkdir .build -ea 0
|
||||
"$env:BUILD_SOURCEVERSION" | Out-File -Encoding ascii -NoNewLine .build\commit
|
||||
"$env:VSCODE_QUALITY" | Out-File -Encoding ascii -NoNewLine .build\quality
|
||||
"$env:ENABLE_TERRAPIN" | Out-File -Encoding ascii -NoNewLine .build\terrapin
|
||||
displayName: Prepare compilation cache flags
|
||||
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||
inputs:
|
||||
keyfile: "build/.cachesalt, .build/commit, .build/quality, .build/terrapin"
|
||||
targetfolder: ".build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min"
|
||||
vstsFeed: "npm-vscode"
|
||||
platformIndependent: true
|
||||
alias: "Compilation"
|
||||
|
||||
- powershell: |
|
||||
$ErrorActionPreference = "Stop"
|
||||
exit 1
|
||||
displayName: Check RestoreCache
|
||||
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
|
||||
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: "12.14.1"
|
||||
versionSpec: "12.18.3"
|
||||
|
||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||
inputs:
|
||||
@@ -39,6 +18,18 @@ steps:
|
||||
azureSubscription: "vscode-builds-subscription"
|
||||
KeyVaultName: vscode
|
||||
|
||||
- task: DownloadPipelineArtifact@2
|
||||
inputs:
|
||||
artifact: Compilation
|
||||
path: $(Build.ArtifactStagingDirectory)
|
||||
displayName: Download compilation output
|
||||
|
||||
- powershell: |
|
||||
. build/azure-pipelines/win32/exec.ps1
|
||||
$ErrorActionPreference = "Stop"
|
||||
exec { tar --force-local -xzf $(Build.ArtifactStagingDirectory)/compilation.tar.gz }
|
||||
displayName: Extract compilation output
|
||||
|
||||
- powershell: |
|
||||
. build/azure-pipelines/win32/exec.ps1
|
||||
$ErrorActionPreference = "Stop"
|
||||
@@ -51,26 +42,34 @@ steps:
|
||||
- powershell: |
|
||||
. build/azure-pipelines/win32/exec.ps1
|
||||
$ErrorActionPreference = "Stop"
|
||||
exec { git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git" }
|
||||
exec { git fetch distro }
|
||||
exec { git merge $(node -p "require('./package.json').distro") }
|
||||
exec { git pull --no-rebase https://github.com/$(VSCODE_MIXIN_REPO).git $(node -p "require('./package.json').distro") }
|
||||
displayName: Merge distro
|
||||
|
||||
- script: |
|
||||
npx https://aka.ms/enablesecurefeed standAlone
|
||||
displayName: Switch to Terrapin packages
|
||||
timeoutInMinutes: 5
|
||||
condition: and(succeeded(), eq(variables['ENABLE_TERRAPIN'], 'true'))
|
||||
|
||||
- powershell: |
|
||||
"$(VSCODE_ARCH)" | Out-File -Encoding ascii -NoNewLine .build\arch
|
||||
"$env:ENABLE_TERRAPIN" | Out-File -Encoding ascii -NoNewLine .build\terrapin
|
||||
node build/azure-pipelines/common/computeNodeModulesCacheKey.js > .build/yarnlockhash
|
||||
displayName: Prepare yarn cache flags
|
||||
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||
- task: Cache@2
|
||||
inputs:
|
||||
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
|
||||
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
|
||||
vstsFeed: "npm-vscode"
|
||||
key: 'nodeModules | $(Agent.OS) | .build/arch, .build/terrapin, .build/yarnlockhash'
|
||||
path: .build/node_modules_cache
|
||||
cacheHitVar: NODE_MODULES_RESTORED
|
||||
displayName: Restore node_modules cache
|
||||
|
||||
- powershell: |
|
||||
. build/azure-pipelines/win32/exec.ps1
|
||||
$ErrorActionPreference = "Stop"
|
||||
exec { 7z.exe x .build/node_modules_cache/cache.7z -aos }
|
||||
condition: and(succeeded(), eq(variables.NODE_MODULES_RESTORED, 'true'))
|
||||
displayName: Extract node_modules cache
|
||||
|
||||
- script: |
|
||||
npx https://aka.ms/enablesecurefeed standAlone
|
||||
timeoutInMinutes: 5
|
||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
|
||||
displayName: Switch to Terrapin packages
|
||||
|
||||
- powershell: |
|
||||
. build/azure-pipelines/win32/exec.ps1
|
||||
@@ -79,22 +78,20 @@ steps:
|
||||
$env:npm_config_arch="$(VSCODE_ARCH)"
|
||||
$env:CHILD_CONCURRENCY="1"
|
||||
retry { exec { yarn --frozen-lockfile } }
|
||||
env:
|
||||
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
|
||||
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
|
||||
displayName: Install dependencies
|
||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
||||
inputs:
|
||||
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
|
||||
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
|
||||
vstsFeed: "npm-vscode"
|
||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
|
||||
|
||||
- powershell: |
|
||||
. build/azure-pipelines/win32/exec.ps1
|
||||
$ErrorActionPreference = "Stop"
|
||||
exec { yarn postinstall }
|
||||
displayName: Run postinstall scripts
|
||||
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
|
||||
exec { node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt }
|
||||
exec { mkdir -Force .build/node_modules_cache }
|
||||
exec { 7z.exe a .build/node_modules_cache/cache.7z -mx3 `@.build/node_modules_list.txt }
|
||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
|
||||
displayName: Create node_modules archive
|
||||
|
||||
- powershell: |
|
||||
. build/azure-pipelines/win32/exec.ps1
|
||||
@@ -107,11 +104,18 @@ steps:
|
||||
$ErrorActionPreference = "Stop"
|
||||
$env:VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)"
|
||||
exec { yarn gulp "vscode-win32-$(VSCODE_ARCH)-min-ci" }
|
||||
exec { yarn gulp "vscode-win32-$(VSCODE_ARCH)-code-helper" }
|
||||
exec { yarn gulp "vscode-win32-$(VSCODE_ARCH)-inno-updater" }
|
||||
echo "##vso[task.setvariable variable=CodeSigningFolderPath]$(agent.builddirectory)/VSCode-win32-$(VSCODE_ARCH)"
|
||||
displayName: Build
|
||||
|
||||
- powershell: |
|
||||
. build/azure-pipelines/win32/exec.ps1
|
||||
$ErrorActionPreference = "Stop"
|
||||
$env:VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)"
|
||||
exec { yarn gulp "vscode-win32-$(VSCODE_ARCH)-code-helper" }
|
||||
exec { yarn gulp "vscode-win32-$(VSCODE_ARCH)-inno-updater" }
|
||||
displayName: Prepare Package
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- powershell: |
|
||||
. build/azure-pipelines/win32/exec.ps1
|
||||
$ErrorActionPreference = "Stop"
|
||||
@@ -122,12 +126,21 @@ steps:
|
||||
displayName: Build Server
|
||||
condition: and(succeeded(), ne(variables['VSCODE_ARCH'], 'arm64'))
|
||||
|
||||
- powershell: |
|
||||
. build/azure-pipelines/win32/exec.ps1
|
||||
$ErrorActionPreference = "Stop"
|
||||
$env:VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)"
|
||||
exec { yarn npm-run-all -lp "electron $(VSCODE_ARCH)" "playwright-install" }
|
||||
displayName: Download Electron and Playwright
|
||||
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
|
||||
|
||||
- powershell: |
|
||||
. build/azure-pipelines/win32/exec.ps1
|
||||
$ErrorActionPreference = "Stop"
|
||||
exec { yarn electron $(VSCODE_ARCH) }
|
||||
exec { .\scripts\test.bat --build --tfs "Unit Tests" }
|
||||
displayName: Run unit tests (Electron)
|
||||
timeoutInMinutes: 7
|
||||
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
|
||||
|
||||
- powershell: |
|
||||
@@ -135,6 +148,14 @@ steps:
|
||||
$ErrorActionPreference = "Stop"
|
||||
exec { yarn test-browser --build --browser chromium --browser firefox --tfs "Browser Unit Tests" }
|
||||
displayName: Run unit tests (Browser)
|
||||
timeoutInMinutes: 7
|
||||
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
|
||||
|
||||
- powershell: |
|
||||
. build/azure-pipelines/win32/exec.ps1
|
||||
$ErrorActionPreference = "Stop"
|
||||
exec { yarn --cwd test/integration/browser compile }
|
||||
displayName: Compile integration tests
|
||||
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
|
||||
|
||||
- powershell: |
|
||||
@@ -148,6 +169,7 @@ steps:
|
||||
$AppNameShort = $AppProductJson.nameShort
|
||||
exec { $env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe"; $env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\vscode-reh-win32-$(VSCODE_ARCH)"; .\scripts\test-integration.bat --build --tfs "Integration Tests" }
|
||||
displayName: Run integration tests (Electron)
|
||||
timeoutInMinutes: 10
|
||||
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
|
||||
|
||||
- powershell: |
|
||||
@@ -155,6 +177,7 @@ steps:
|
||||
$ErrorActionPreference = "Stop"
|
||||
exec { $env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\vscode-reh-web-win32-$(VSCODE_ARCH)"; .\resources\server\test\test-web-integration.bat --browser firefox }
|
||||
displayName: Run integration tests (Browser)
|
||||
timeoutInMinutes: 7
|
||||
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
|
||||
|
||||
- powershell: |
|
||||
@@ -165,6 +188,7 @@ steps:
|
||||
$AppNameShort = $AppProductJson.nameShort
|
||||
exec { $env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe"; $env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\vscode-reh-win32-$(VSCODE_ARCH)"; .\resources\server\test\test-remote-integration.bat }
|
||||
displayName: Run remote integration tests (Electron)
|
||||
timeoutInMinutes: 7
|
||||
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
|
||||
|
||||
- task: PublishPipelineArtifact@0
|
||||
@@ -180,7 +204,7 @@ steps:
|
||||
inputs:
|
||||
testResultsFiles: "*-results.xml"
|
||||
searchFolder: "$(Build.ArtifactStagingDirectory)/test-results"
|
||||
condition: and(succeededOrFailed(), ne(variables['VSCODE_ARCH'], 'arm64'))
|
||||
condition: and(succeededOrFailed(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
|
||||
|
||||
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
|
||||
inputs:
|
||||
@@ -236,6 +260,7 @@ steps:
|
||||
}
|
||||
]
|
||||
SessionTimeout: 120
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- task: NuGetCommand@2
|
||||
displayName: Install ESRPClient.exe
|
||||
@@ -245,11 +270,13 @@ steps:
|
||||
nugetConfigPath: 'build\azure-pipelines\win32\ESRPClient\NuGet.config'
|
||||
externalFeedCredentials: "ESRP Nuget"
|
||||
restoreDirectory: packages
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- task: ESRPImportCertTask@1
|
||||
displayName: Import ESRP Request Signing Certificate
|
||||
inputs:
|
||||
ESRP: "ESRP CodeSign"
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- task: PowerShell@2
|
||||
inputs:
|
||||
@@ -257,6 +284,7 @@ steps:
|
||||
filePath: .\build\azure-pipelines\win32\import-esrp-auth-cert.ps1
|
||||
arguments: "$(ESRP-SSL-AADAuth)"
|
||||
displayName: Import ESRP Auth Certificate
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- powershell: |
|
||||
. build/azure-pipelines/win32/exec.ps1
|
||||
@@ -266,6 +294,32 @@ steps:
|
||||
$env:VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)"
|
||||
.\build\azure-pipelines\win32\publish.ps1
|
||||
displayName: Publish
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- publish: $(System.DefaultWorkingDirectory)\.build\win32-$(VSCODE_ARCH)\archive\VSCode-win32-$(VSCODE_ARCH).zip
|
||||
artifact: vscode-win32-$(VSCODE_ARCH)
|
||||
displayName: Publish archive
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- publish: $(System.DefaultWorkingDirectory)\.build\win32-$(VSCODE_ARCH)\system-setup\VSCodeSetup.exe
|
||||
artifact: vscode-win32-$(VSCODE_ARCH)-setup
|
||||
displayName: Publish system setup
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- publish: $(System.DefaultWorkingDirectory)\.build\win32-$(VSCODE_ARCH)\user-setup\VSCodeSetup.exe
|
||||
artifact: vscode-win32-$(VSCODE_ARCH)-user-setup
|
||||
displayName: Publish user setup
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||
|
||||
- publish: $(System.DefaultWorkingDirectory)\.build\vscode-server-win32-$(VSCODE_ARCH).zip
|
||||
artifact: vscode-server-win32-$(VSCODE_ARCH)
|
||||
displayName: Publish server archive
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
|
||||
|
||||
- publish: $(System.DefaultWorkingDirectory)\.build\vscode-server-win32-$(VSCODE_ARCH)-web.zip
|
||||
artifact: vscode-server-win32-$(VSCODE_ARCH)-web
|
||||
displayName: Publish web server archive
|
||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
|
||||
|
||||
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
|
||||
displayName: "Component Detection"
|
||||
|
||||
@@ -6,8 +6,7 @@
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const os = require('os');
|
||||
const { remote } = require('electron');
|
||||
const dialog = remote.dialog;
|
||||
const { ipcRenderer } = require('electron');
|
||||
|
||||
const builtInExtensionsPath = path.join(__dirname, '..', '..', 'product.json');
|
||||
const controlFilePath = path.join(os.homedir(), '.vscode-oss-dev', 'extensions', 'control.json');
|
||||
@@ -84,17 +83,13 @@ function render(el, state) {
|
||||
}
|
||||
|
||||
const localInput = renderOption(form, `local-${ext.name}`, 'Local', 'local', !!local);
|
||||
localInput.onchange = function () {
|
||||
const result = dialog.showOpenDialog(remote.getCurrentWindow(), {
|
||||
title: 'Choose Folder',
|
||||
properties: ['openDirectory']
|
||||
});
|
||||
localInput.onchange = async function () {
|
||||
const result = await ipcRenderer.invoke('pickdir');
|
||||
|
||||
if (result && result.length >= 1) {
|
||||
control[ext.name] = result[0];
|
||||
if (result) {
|
||||
control[ext.name] = result;
|
||||
setState({ builtin, control });
|
||||
}
|
||||
|
||||
setState({ builtin, control });
|
||||
};
|
||||
|
||||
if (local) {
|
||||
|
||||
@@ -3,12 +3,25 @@
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
const { app, BrowserWindow } = require('electron');
|
||||
const { app, BrowserWindow, ipcMain, dialog } = require('electron');
|
||||
const url = require('url');
|
||||
const path = require('path');
|
||||
|
||||
let window = null;
|
||||
|
||||
ipcMain.handle('pickdir', async () => {
|
||||
const result = await dialog.showOpenDialog(window, {
|
||||
title: 'Choose Folder',
|
||||
properties: ['openDirectory']
|
||||
});
|
||||
|
||||
if (result.canceled || result.filePaths.length < 1) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
return result.filePaths[0];
|
||||
});
|
||||
|
||||
app.once('ready', () => {
|
||||
window = new BrowserWindow({ width: 800, height: 600, webPreferences: { nodeIntegration: true, webviewTag: true, enableWebSQL: false, nativeWindowOpen: true } });
|
||||
window.setMenuBarVisibility(false);
|
||||
|
||||
58
build/darwin/create-universal-app.js
Normal file
@@ -0,0 +1,58 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
'use strict';
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const vscode_universal_1 = require("vscode-universal");
|
||||
const fs = require("fs-extra");
|
||||
const path = require("path");
|
||||
const plist = require("plist");
|
||||
const product = require("../../product.json");
|
||||
async function main() {
|
||||
const buildDir = process.env['AGENT_BUILDDIRECTORY'];
|
||||
const arch = process.env['VSCODE_ARCH'];
|
||||
if (!buildDir) {
|
||||
throw new Error('$AGENT_BUILDDIRECTORY not set');
|
||||
}
|
||||
const appName = product.nameLong + '.app';
|
||||
const x64AppPath = path.join(buildDir, 'vscode-x64', appName);
|
||||
const arm64AppPath = path.join(buildDir, 'vscode-arm64', appName);
|
||||
const x64AsarPath = path.join(x64AppPath, 'Contents', 'Resources', 'app', 'node_modules.asar');
|
||||
const arm64AsarPath = path.join(arm64AppPath, 'Contents', 'Resources', 'app', 'node_modules.asar');
|
||||
const outAppPath = path.join(buildDir, `VSCode-darwin-${arch}`, appName);
|
||||
const productJsonPath = path.resolve(outAppPath, 'Contents', 'Resources', 'app', 'product.json');
|
||||
const infoPlistPath = path.resolve(outAppPath, 'Contents', 'Info.plist');
|
||||
await vscode_universal_1.makeUniversalApp({
|
||||
x64AppPath,
|
||||
arm64AppPath,
|
||||
x64AsarPath,
|
||||
arm64AsarPath,
|
||||
filesToSkip: [
|
||||
'product.json',
|
||||
'Credits.rtf',
|
||||
'CodeResources',
|
||||
'fsevents.node',
|
||||
'.npmrc'
|
||||
],
|
||||
outAppPath,
|
||||
force: true
|
||||
});
|
||||
let productJson = await fs.readJson(productJsonPath);
|
||||
Object.assign(productJson, {
|
||||
darwinUniversalAssetId: 'darwin-universal'
|
||||
});
|
||||
await fs.writeJson(productJsonPath, productJson);
|
||||
let infoPlistString = await fs.readFile(infoPlistPath, 'utf8');
|
||||
let infoPlistJson = plist.parse(infoPlistString);
|
||||
Object.assign(infoPlistJson, {
|
||||
LSRequiresNativeExecution: true
|
||||
});
|
||||
await fs.writeFile(infoPlistPath, plist.build(infoPlistJson), 'utf8');
|
||||
}
|
||||
if (require.main === module) {
|
||||
main().catch(err => {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
}
|
||||
66
build/darwin/create-universal-app.ts
Normal file
@@ -0,0 +1,66 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
'use strict';
|
||||
|
||||
import { makeUniversalApp } from 'vscode-universal';
|
||||
import * as fs from 'fs-extra';
|
||||
import * as path from 'path';
|
||||
import * as plist from 'plist';
|
||||
import * as product from '../../product.json';
|
||||
|
||||
async function main() {
|
||||
const buildDir = process.env['AGENT_BUILDDIRECTORY'];
|
||||
const arch = process.env['VSCODE_ARCH'];
|
||||
|
||||
if (!buildDir) {
|
||||
throw new Error('$AGENT_BUILDDIRECTORY not set');
|
||||
}
|
||||
|
||||
const appName = product.nameLong + '.app';
|
||||
const x64AppPath = path.join(buildDir, 'vscode-x64', appName);
|
||||
const arm64AppPath = path.join(buildDir, 'vscode-arm64', appName);
|
||||
const x64AsarPath = path.join(x64AppPath, 'Contents', 'Resources', 'app', 'node_modules.asar');
|
||||
const arm64AsarPath = path.join(arm64AppPath, 'Contents', 'Resources', 'app', 'node_modules.asar');
|
||||
const outAppPath = path.join(buildDir, `VSCode-darwin-${arch}`, appName);
|
||||
const productJsonPath = path.resolve(outAppPath, 'Contents', 'Resources', 'app', 'product.json');
|
||||
const infoPlistPath = path.resolve(outAppPath, 'Contents', 'Info.plist');
|
||||
|
||||
await makeUniversalApp({
|
||||
x64AppPath,
|
||||
arm64AppPath,
|
||||
x64AsarPath,
|
||||
arm64AsarPath,
|
||||
filesToSkip: [
|
||||
'product.json',
|
||||
'Credits.rtf',
|
||||
'CodeResources',
|
||||
'fsevents.node',
|
||||
'.npmrc'
|
||||
],
|
||||
outAppPath,
|
||||
force: true
|
||||
});
|
||||
|
||||
let productJson = await fs.readJson(productJsonPath);
|
||||
Object.assign(productJson, {
|
||||
darwinUniversalAssetId: 'darwin-universal'
|
||||
});
|
||||
await fs.writeJson(productJsonPath, productJson);
|
||||
|
||||
let infoPlistString = await fs.readFile(infoPlistPath, 'utf8');
|
||||
let infoPlistJson = plist.parse(infoPlistString);
|
||||
Object.assign(infoPlistJson, {
|
||||
LSRequiresNativeExecution: true
|
||||
});
|
||||
await fs.writeFile(infoPlistPath, plist.build(infoPlistJson), 'utf8');
|
||||
}
|
||||
|
||||
if (require.main === module) {
|
||||
main().catch(err => {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
}
|
||||
@@ -24,7 +24,6 @@ async function main() {
|
||||
const appFrameworkPath = path.join(appRoot, appName, 'Contents', 'Frameworks');
|
||||
const helperAppBaseName = product.nameShort;
|
||||
const gpuHelperAppName = helperAppBaseName + ' Helper (GPU).app';
|
||||
const pluginHelperAppName = helperAppBaseName + ' Helper (Plugin).app';
|
||||
const rendererHelperAppName = helperAppBaseName + ' Helper (Renderer).app';
|
||||
const defaultOpts = {
|
||||
app: path.join(appRoot, appName),
|
||||
@@ -43,14 +42,11 @@ async function main() {
|
||||
// TODO(deepak1556): Incorrectly declared type in electron-osx-sign
|
||||
ignore: (filePath) => {
|
||||
return filePath.includes(gpuHelperAppName) ||
|
||||
filePath.includes(pluginHelperAppName) ||
|
||||
filePath.includes(rendererHelperAppName);
|
||||
} });
|
||||
const gpuHelperOpts = Object.assign(Object.assign({}, defaultOpts), { app: path.join(appFrameworkPath, gpuHelperAppName), entitlements: path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-gpu-entitlements.plist'), 'entitlements-inherit': path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-gpu-entitlements.plist') });
|
||||
const pluginHelperOpts = Object.assign(Object.assign({}, defaultOpts), { app: path.join(appFrameworkPath, pluginHelperAppName), entitlements: path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-plugin-entitlements.plist'), 'entitlements-inherit': path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-plugin-entitlements.plist') });
|
||||
const rendererHelperOpts = Object.assign(Object.assign({}, defaultOpts), { app: path.join(appFrameworkPath, rendererHelperAppName), entitlements: path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-renderer-entitlements.plist'), 'entitlements-inherit': path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-renderer-entitlements.plist') });
|
||||
await codesign.signAsync(gpuHelperOpts);
|
||||
await codesign.signAsync(pluginHelperOpts);
|
||||
await codesign.signAsync(rendererHelperOpts);
|
||||
await codesign.signAsync(appOpts);
|
||||
}
|
||||
|
||||
@@ -29,7 +29,6 @@ async function main(): Promise<void> {
|
||||
const appFrameworkPath = path.join(appRoot, appName, 'Contents', 'Frameworks');
|
||||
const helperAppBaseName = product.nameShort;
|
||||
const gpuHelperAppName = helperAppBaseName + ' Helper (GPU).app';
|
||||
const pluginHelperAppName = helperAppBaseName + ' Helper (Plugin).app';
|
||||
const rendererHelperAppName = helperAppBaseName + ' Helper (Renderer).app';
|
||||
|
||||
const defaultOpts: codesign.SignOptions = {
|
||||
@@ -51,7 +50,6 @@ async function main(): Promise<void> {
|
||||
// TODO(deepak1556): Incorrectly declared type in electron-osx-sign
|
||||
ignore: (filePath: string) => {
|
||||
return filePath.includes(gpuHelperAppName) ||
|
||||
filePath.includes(pluginHelperAppName) ||
|
||||
filePath.includes(rendererHelperAppName);
|
||||
}
|
||||
};
|
||||
@@ -63,13 +61,6 @@ async function main(): Promise<void> {
|
||||
'entitlements-inherit': path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-gpu-entitlements.plist'),
|
||||
};
|
||||
|
||||
const pluginHelperOpts: codesign.SignOptions = {
|
||||
...defaultOpts,
|
||||
app: path.join(appFrameworkPath, pluginHelperAppName),
|
||||
entitlements: path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-plugin-entitlements.plist'),
|
||||
'entitlements-inherit': path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-plugin-entitlements.plist'),
|
||||
};
|
||||
|
||||
const rendererHelperOpts: codesign.SignOptions = {
|
||||
...defaultOpts,
|
||||
app: path.join(appFrameworkPath, rendererHelperAppName),
|
||||
@@ -78,7 +69,6 @@ async function main(): Promise<void> {
|
||||
};
|
||||
|
||||
await codesign.signAsync(gpuHelperOpts);
|
||||
await codesign.signAsync(pluginHelperOpts);
|
||||
await codesign.signAsync(rendererHelperOpts);
|
||||
await codesign.signAsync(appOpts as any);
|
||||
}
|
||||
|
||||
36
build/eslint.js
Normal file
36
build/eslint.js
Normal file
@@ -0,0 +1,36 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
const es = require('event-stream');
|
||||
const vfs = require('vinyl-fs');
|
||||
const { jsHygieneFilter, tsHygieneFilter } = require('./filters');
|
||||
|
||||
function eslint() {
|
||||
const gulpeslint = require('gulp-eslint');
|
||||
return vfs
|
||||
.src([...jsHygieneFilter, ...tsHygieneFilter], { base: '.', follow: true, allowEmpty: true })
|
||||
.pipe(
|
||||
gulpeslint({
|
||||
configFile: '.eslintrc.json',
|
||||
rulePaths: ['./build/lib/eslint'],
|
||||
})
|
||||
)
|
||||
.pipe(gulpeslint.formatEach('compact'))
|
||||
.pipe(
|
||||
gulpeslint.results((results) => {
|
||||
if (results.warningCount > 0 || results.errorCount > 0) {
|
||||
throw new Error('eslint failed with warnings and/or errors');
|
||||
}
|
||||
})
|
||||
).pipe(es.through(function () { /* noop, important for the stream to end */ }));
|
||||
}
|
||||
|
||||
if (require.main === module) {
|
||||
eslint().on('error', (err) => {
|
||||
console.error();
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
}
|
||||
152
build/filters.js
Normal file
152
build/filters.js
Normal file
@@ -0,0 +1,152 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
/**
|
||||
* Hygiene works by creating cascading subsets of all our files and
|
||||
* passing them through a sequence of checks. Here are the current subsets,
|
||||
* named according to the checks performed on them. Each subset contains
|
||||
* the following one, as described in mathematical notation:
|
||||
*
|
||||
* all ⊃ eol ⊇ indentation ⊃ copyright ⊃ typescript
|
||||
*/
|
||||
|
||||
module.exports.all = [
|
||||
'*',
|
||||
'build/**/*',
|
||||
'extensions/**/*',
|
||||
'scripts/**/*',
|
||||
'src/**/*',
|
||||
'test/**/*',
|
||||
'!out*/**',
|
||||
'!test/**/out/**',
|
||||
'!**/node_modules/**',
|
||||
];
|
||||
|
||||
module.exports.indentationFilter = [
|
||||
'**',
|
||||
|
||||
// except specific files
|
||||
'!**/ThirdPartyNotices.txt',
|
||||
'!**/LICENSE.{txt,rtf}',
|
||||
'!LICENSES.chromium.html',
|
||||
'!**/LICENSE',
|
||||
'!src/vs/nls.js',
|
||||
'!src/vs/nls.build.js',
|
||||
'!src/vs/css.js',
|
||||
'!src/vs/css.build.js',
|
||||
'!src/vs/loader.js',
|
||||
'!src/vs/base/common/insane/insane.js',
|
||||
'!src/vs/base/common/marked/marked.js',
|
||||
'!src/vs/base/common/semver/semver.js',
|
||||
'!src/vs/base/node/terminateProcess.sh',
|
||||
'!src/vs/base/node/cpuUsage.sh',
|
||||
'!test/unit/assert.js',
|
||||
'!resources/linux/snap/electron-launch',
|
||||
'!build/ext.js',
|
||||
|
||||
// except specific folders
|
||||
'!test/automation/out/**',
|
||||
'!test/monaco/out/**',
|
||||
'!test/smoke/out/**',
|
||||
'!extensions/typescript-language-features/test-workspace/**',
|
||||
'!extensions/vscode-api-tests/testWorkspace/**',
|
||||
'!extensions/vscode-api-tests/testWorkspace2/**',
|
||||
'!build/monaco/**',
|
||||
'!build/win32/**',
|
||||
|
||||
// except multiple specific files
|
||||
'!**/package.json',
|
||||
'!**/yarn.lock',
|
||||
'!**/yarn-error.log',
|
||||
|
||||
// except multiple specific folders
|
||||
'!**/codicon/**',
|
||||
'!**/fixtures/**',
|
||||
'!**/lib/**',
|
||||
'!extensions/**/dist/**',
|
||||
'!extensions/**/out/**',
|
||||
'!extensions/**/snippets/**',
|
||||
'!extensions/**/syntaxes/**',
|
||||
'!extensions/**/themes/**',
|
||||
'!extensions/**/colorize-fixtures/**',
|
||||
|
||||
// except specific file types
|
||||
'!src/vs/*/**/*.d.ts',
|
||||
'!src/typings/**/*.d.ts',
|
||||
'!extensions/**/*.d.ts',
|
||||
'!**/*.{svg,exe,png,bmp,jpg,scpt,bat,cmd,cur,ttf,woff,eot,md,ps1,template,yaml,yml,d.ts.recipe,ico,icns,plist}',
|
||||
'!build/{lib,download,darwin}/**/*.js',
|
||||
'!build/**/*.sh',
|
||||
'!build/azure-pipelines/**/*.js',
|
||||
'!build/azure-pipelines/**/*.config',
|
||||
'!**/Dockerfile',
|
||||
'!**/Dockerfile.*',
|
||||
'!**/*.Dockerfile',
|
||||
'!**/*.dockerfile',
|
||||
'!extensions/markdown-language-features/media/*.js',
|
||||
'!extensions/simple-browser/media/*.js',
|
||||
];
|
||||
|
||||
module.exports.copyrightFilter = [
|
||||
'**',
|
||||
'!**/*.desktop',
|
||||
'!**/*.json',
|
||||
'!**/*.html',
|
||||
'!**/*.template',
|
||||
'!**/*.md',
|
||||
'!**/*.bat',
|
||||
'!**/*.cmd',
|
||||
'!**/*.ico',
|
||||
'!**/*.icns',
|
||||
'!**/*.xml',
|
||||
'!**/*.sh',
|
||||
'!**/*.txt',
|
||||
'!**/*.xpm',
|
||||
'!**/*.opts',
|
||||
'!**/*.disabled',
|
||||
'!**/*.code-workspace',
|
||||
'!**/*.js.map',
|
||||
'!build/**/*.init',
|
||||
'!resources/linux/snap/snapcraft.yaml',
|
||||
'!resources/win32/bin/code.js',
|
||||
'!resources/web/code-web.js',
|
||||
'!resources/completions/**',
|
||||
'!extensions/configuration-editing/build/inline-allOf.ts',
|
||||
'!extensions/markdown-language-features/media/highlight.css',
|
||||
'!extensions/html-language-features/server/src/modes/typescript/*',
|
||||
'!extensions/*/server/bin/*',
|
||||
'!src/vs/editor/test/node/classification/typescript-test.ts',
|
||||
];
|
||||
|
||||
module.exports.jsHygieneFilter = [
|
||||
'src/**/*.js',
|
||||
'build/gulpfile.*.js',
|
||||
'!src/vs/loader.js',
|
||||
'!src/vs/css.js',
|
||||
'!src/vs/nls.js',
|
||||
'!src/vs/css.build.js',
|
||||
'!src/vs/nls.build.js',
|
||||
'!src/**/insane.js',
|
||||
'!src/**/marked.js',
|
||||
'!src/**/semver.js',
|
||||
'!**/test/**',
|
||||
];
|
||||
|
||||
module.exports.tsHygieneFilter = [
|
||||
'src/**/*.ts',
|
||||
'test/**/*.ts',
|
||||
'extensions/**/*.ts',
|
||||
'!src/vs/*/**/*.d.ts',
|
||||
'!src/typings/**/*.d.ts',
|
||||
'!extensions/**/*.d.ts',
|
||||
'!**/fixtures/**',
|
||||
'!**/typings/**',
|
||||
'!**/node_modules/**',
|
||||
'!extensions/**/colorize-fixtures/**',
|
||||
'!extensions/vscode-api-tests/testWorkspace/**',
|
||||
'!extensions/vscode-api-tests/testWorkspace2/**',
|
||||
'!extensions/**/*.test.ts',
|
||||
'!extensions/html-language-features/server/lib/jquery.d.ts',
|
||||
];
|
||||
@@ -11,6 +11,11 @@ const task = require('./lib/task');
|
||||
const compilation = require('./lib/compilation');
|
||||
|
||||
// Full compile, including nls and inline sources in sourcemaps, for build
|
||||
const compileBuildTask = task.define('compile-build', task.series(util.rimraf('out-build'), compilation.compileTask('src', 'out-build', true)));
|
||||
const compileBuildTask = task.define('compile-build',
|
||||
task.series(
|
||||
util.rimraf('out-build'),
|
||||
compilation.compileTask('src', 'out-build', true)
|
||||
)
|
||||
);
|
||||
gulp.task(compileBuildTask);
|
||||
exports.compileBuildTask = compileBuildTask;
|
||||
|
||||
@@ -16,8 +16,6 @@ const cp = require('child_process');
|
||||
const compilation = require('./lib/compilation');
|
||||
const monacoapi = require('./monaco/api');
|
||||
const fs = require('fs');
|
||||
const webpack = require('webpack');
|
||||
const webpackGulp = require('webpack-stream');
|
||||
|
||||
let root = path.dirname(__dirname);
|
||||
let sha1 = util.getVersion(root);
|
||||
@@ -369,6 +367,9 @@ gulp.task('editor-distro',
|
||||
);
|
||||
|
||||
const bundleEditorESMTask = task.define('editor-esm-bundle-webpack', () => {
|
||||
const webpack = require('webpack');
|
||||
const webpackGulp = require('webpack-stream');
|
||||
|
||||
const result = es.through();
|
||||
|
||||
const webpackConfigPath = path.join(root, 'build/monaco/monaco.webpack.config.js');
|
||||
|
||||
@@ -9,17 +9,13 @@ require('events').EventEmitter.defaultMaxListeners = 100;
|
||||
const gulp = require('gulp');
|
||||
const path = require('path');
|
||||
const nodeUtil = require('util');
|
||||
const tsb = require('gulp-tsb');
|
||||
const es = require('event-stream');
|
||||
const filter = require('gulp-filter');
|
||||
const webpack = require('webpack');
|
||||
const util = require('./lib/util');
|
||||
const task = require('./lib/task');
|
||||
const watcher = require('./lib/watch');
|
||||
const createReporter = require('./lib/reporter').createReporter;
|
||||
const glob = require('glob');
|
||||
const sourcemaps = require('gulp-sourcemaps');
|
||||
const nlsDev = require('vscode-nls-dev');
|
||||
const root = path.dirname(__dirname);
|
||||
const commit = util.getVersion(root);
|
||||
const plumber = require('gulp-plumber');
|
||||
@@ -70,6 +66,10 @@ const tasks = compilations.map(function (tsconfigFile) {
|
||||
}
|
||||
|
||||
function createPipeline(build, emitError) {
|
||||
const nlsDev = require('vscode-nls-dev');
|
||||
const tsb = require('gulp-tsb');
|
||||
const sourcemaps = require('gulp-sourcemaps');
|
||||
|
||||
const reporter = createReporter('extensions');
|
||||
|
||||
overrideOptions.inlineSources = Boolean(build);
|
||||
@@ -176,6 +176,8 @@ const compileExtensionsBuildTask = task.define('compile-extensions-build', task.
|
||||
));
|
||||
|
||||
gulp.task(compileExtensionsBuildTask);
|
||||
gulp.task(task.define('extensions-ci', task.series(compileExtensionsBuildTask)));
|
||||
|
||||
exports.compileExtensionsBuildTask = compileExtensionsBuildTask;
|
||||
|
||||
const compileWebExtensionsTask = task.define('compile-web', () => buildWebExtensions(false));
|
||||
@@ -187,6 +189,7 @@ gulp.task(watchWebExtensionsTask);
|
||||
exports.watchWebExtensionsTask = watchWebExtensionsTask;
|
||||
|
||||
async function buildWebExtensions(isWatch) {
|
||||
const webpack = require('webpack');
|
||||
|
||||
const webpackConfigLocations = await nodeUtil.promisify(glob)(
|
||||
path.join(extensionsPath, '**', 'extension-browser.webpack.config.js'),
|
||||
|
||||
41
build/gulpfile.js
Normal file
41
build/gulpfile.js
Normal file
@@ -0,0 +1,41 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
'use strict';
|
||||
|
||||
// Increase max listeners for event emitters
|
||||
require('events').EventEmitter.defaultMaxListeners = 100;
|
||||
|
||||
const gulp = require('gulp');
|
||||
const util = require('./lib/util');
|
||||
const task = require('./lib/task');
|
||||
const compilation = require('./lib/compilation');
|
||||
const { monacoTypecheckTask/* , monacoTypecheckWatchTask */ } = require('./gulpfile.editor');
|
||||
const { compileExtensionsTask, watchExtensionsTask } = require('./gulpfile.extensions');
|
||||
|
||||
// Fast compile for development time
|
||||
const compileClientTask = task.define('compile-client', task.series(util.rimraf('out'), compilation.compileTask('src', 'out', false)));
|
||||
gulp.task(compileClientTask);
|
||||
|
||||
const watchClientTask = task.define('watch-client', task.series(util.rimraf('out'), compilation.watchTask('out', false)));
|
||||
gulp.task(watchClientTask);
|
||||
|
||||
// All
|
||||
const compileTask = task.define('compile', task.parallel(monacoTypecheckTask, compileClientTask, compileExtensionsTask));
|
||||
gulp.task(compileTask);
|
||||
|
||||
gulp.task(task.define('watch', task.parallel(/* monacoTypecheckWatchTask, */ watchClientTask, watchExtensionsTask)));
|
||||
|
||||
// Default
|
||||
gulp.task('default', compileTask);
|
||||
|
||||
process.on('unhandledRejection', (reason, p) => {
|
||||
console.log('Unhandled Rejection at: Promise', p, 'reason:', reason);
|
||||
process.exit(1);
|
||||
});
|
||||
|
||||
// Load all the gulpfiles only if running tasks other than the editor tasks
|
||||
require('glob').sync('gulpfile.*.js', { cwd: __dirname })
|
||||
.forEach(f => require(`./${f}`));
|
||||
@@ -14,10 +14,8 @@ const task = require('./lib/task');
|
||||
const vfs = require('vinyl-fs');
|
||||
const flatmap = require('gulp-flatmap');
|
||||
const gunzip = require('gulp-gunzip');
|
||||
const untar = require('gulp-untar');
|
||||
const File = require('vinyl');
|
||||
const fs = require('fs');
|
||||
const remote = require('gulp-remote-retry-src');
|
||||
const rename = require('gulp-rename');
|
||||
const filter = require('gulp-filter');
|
||||
const cp = require('child_process');
|
||||
@@ -78,13 +76,17 @@ BUILD_TARGETS.forEach(({ platform, arch }) => {
|
||||
}));
|
||||
});
|
||||
|
||||
const defaultNodeTask = gulp.task(`node-${process.platform}-${process.arch}`);
|
||||
const arch = process.platform === 'darwin' ? 'x64' : process.arch;
|
||||
const defaultNodeTask = gulp.task(`node-${process.platform}-${arch}`);
|
||||
|
||||
if (defaultNodeTask) {
|
||||
gulp.task(task.define('node', defaultNodeTask));
|
||||
}
|
||||
|
||||
function nodejs(platform, arch) {
|
||||
const remote = require('gulp-remote-retry-src');
|
||||
const untar = require('gulp-untar');
|
||||
|
||||
if (arch === 'ia32') {
|
||||
arch = 'x86';
|
||||
}
|
||||
|
||||
@@ -11,13 +11,10 @@ const os = require('os');
|
||||
const cp = require('child_process');
|
||||
const path = require('path');
|
||||
const es = require('event-stream');
|
||||
const azure = require('gulp-azure-storage');
|
||||
const electron = require('gulp-atom-electron');
|
||||
const vfs = require('vinyl-fs');
|
||||
const rename = require('gulp-rename');
|
||||
const replace = require('gulp-replace');
|
||||
const filter = require('gulp-filter');
|
||||
const json = require('gulp-json-editor');
|
||||
const _ = require('underscore');
|
||||
const util = require('./lib/util');
|
||||
const task = require('./lib/task');
|
||||
@@ -29,14 +26,12 @@ const packageJson = require('../package.json');
|
||||
const product = require('../product.json');
|
||||
const crypto = require('crypto');
|
||||
const i18n = require('./lib/i18n');
|
||||
const deps = require('./dependencies');
|
||||
const { getProductionDependencies } = require('./lib/dependencies');
|
||||
const { config } = require('./lib/electron');
|
||||
const createAsar = require('./lib/asar').createAsar;
|
||||
const { compileBuildTask } = require('./gulpfile.compile');
|
||||
const { compileExtensionsBuildTask } = require('./gulpfile.extensions');
|
||||
|
||||
const productionDependencies = deps.getProductionDependencies(path.dirname(__dirname));
|
||||
|
||||
// Build
|
||||
const vscodeEntryPoints = _.flatten([
|
||||
buildfile.entrypoint('vs/workbench/workbench.desktop.main'),
|
||||
@@ -57,7 +52,7 @@ const vscodeResources = [
|
||||
'out-build/bootstrap-node.js',
|
||||
'out-build/bootstrap-window.js',
|
||||
'out-build/paths.js',
|
||||
'out-build/vs/**/*.{svg,png,html}',
|
||||
'out-build/vs/**/*.{svg,png,html,jpg}',
|
||||
'!out-build/vs/code/browser/**/*.html',
|
||||
'!out-build/vs/editor/standalone/**/*.svg',
|
||||
'out-build/vs/base/common/performance.js',
|
||||
@@ -97,7 +92,6 @@ const vscodeResources = [
|
||||
'out-build/sql/workbench/parts/notebook/media/**/*.svg',
|
||||
'out-build/sql/setup.js', // {{SQL CARBON EDIT}} end
|
||||
'out-build/vs/code/electron-sandbox/processExplorer/processExplorer.js',
|
||||
'out-build/vs/code/electron-sandbox/proxy/auth.js',
|
||||
'!**/test/**'
|
||||
];
|
||||
|
||||
@@ -122,6 +116,16 @@ const minifyVSCodeTask = task.define('minify-vscode', task.series(
|
||||
));
|
||||
gulp.task(minifyVSCodeTask);
|
||||
|
||||
const core = task.define('core-ci', task.series(
|
||||
gulp.task('compile-build'),
|
||||
task.parallel(
|
||||
gulp.task('minify-vscode'),
|
||||
gulp.task('minify-vscode-reh'),
|
||||
gulp.task('minify-vscode-reh-web'),
|
||||
)
|
||||
));
|
||||
gulp.task(core);
|
||||
|
||||
/**
|
||||
* Compute checksums for some files.
|
||||
*
|
||||
@@ -163,6 +167,9 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op
|
||||
platform = platform || process.platform;
|
||||
|
||||
return () => {
|
||||
const electron = require('gulp-atom-electron');
|
||||
const json = require('gulp-json-editor');
|
||||
|
||||
const out = sourceFolderName;
|
||||
|
||||
const checksums = computeChecksums(out, [
|
||||
@@ -221,8 +228,9 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op
|
||||
|
||||
const telemetry = gulp.src('.build/telemetry/**', { base: '.build/telemetry', dot: true });
|
||||
|
||||
const jsFilter = util.filter(data => !data.isDirectory() &&/\.js$/.test(data.path));
|
||||
const jsFilter = util.filter(data => !data.isDirectory() && /\.js$/.test(data.path));
|
||||
const root = path.resolve(path.join(__dirname, '..'));
|
||||
const productionDependencies = getProductionDependencies(root);
|
||||
const dependenciesSrc = _.flatten(productionDependencies.map(d => path.relative(root, d.path)).map(d => [`${d}/**`, `!${d}/**/{test,tests}/**`]));
|
||||
|
||||
// {{SQL CARBON EDIT}} - fix runtime module load break
|
||||
@@ -357,7 +365,7 @@ BUILD_TARGETS.forEach(buildTarget => {
|
||||
const arch = buildTarget.arch;
|
||||
const opts = buildTarget.opts;
|
||||
|
||||
['', 'min'].forEach(minified => {
|
||||
const [vscode, vscodeMin] = ['', 'min'].map(minified => {
|
||||
const sourceFolderName = `out-vscode${dashed(minified)}`;
|
||||
const destinationFolderName = `azuredatastudio${dashed(platform)}${dashed(arch)}`;
|
||||
|
||||
@@ -374,7 +382,14 @@ BUILD_TARGETS.forEach(buildTarget => {
|
||||
vscodeTaskCI
|
||||
));
|
||||
gulp.task(vscodeTask);
|
||||
|
||||
return vscodeTask;
|
||||
});
|
||||
|
||||
if (process.platform === platform && process.arch === arch) {
|
||||
gulp.task(task.define('vscode', task.series(vscode)));
|
||||
gulp.task(task.define('vscode-min', task.series(vscodeMin)));
|
||||
}
|
||||
});
|
||||
|
||||
// Transifex Localizations
|
||||
@@ -516,6 +531,8 @@ gulp.task(task.define(
|
||||
task.series(
|
||||
generateVSCodeConfigurationTask,
|
||||
() => {
|
||||
const azure = require('gulp-azure-storage');
|
||||
|
||||
if (!shouldSetupSettingsSearch()) {
|
||||
const branch = process.env.BUILD_SOURCEBRANCH;
|
||||
console.log(`Only runs on master and release branches, not ${branch}`);
|
||||
|
||||
@@ -96,9 +96,6 @@ function prepareDebPackage(arch) {
|
||||
|
||||
const postinst = gulp.src('resources/linux/debian/postinst.template', { base: '.' })
|
||||
.pipe(replace('@@NAME@@', product.applicationName))
|
||||
.pipe(replace('@@ARCHITECTURE@@', debArch))
|
||||
.pipe(replace('@@QUALITY@@', product.quality || '@@QUALITY@@'))
|
||||
.pipe(replace('@@UPDATEURL@@', product.updateUrl || '@@UPDATEURL@@'))
|
||||
.pipe(rename('DEBIAN/postinst'));
|
||||
|
||||
const all = es.merge(control, postinst, postrm, prerm, desktops, appdata, workspaceMime, icon, bash_completion, zsh_completion, code);
|
||||
@@ -240,7 +237,8 @@ function prepareSnapPackage(arch) {
|
||||
const snapcraft = gulp.src('resources/linux/snap/snapcraft.yaml', { base: '.' })
|
||||
.pipe(replace('@@NAME@@', product.applicationName))
|
||||
.pipe(replace('@@VERSION@@', commit.substr(0, 8)))
|
||||
.pipe(replace('@@ARCHITECTURE@@', arch))
|
||||
// Possible run-on values https://snapcraft.io/docs/architectures
|
||||
.pipe(replace('@@ARCHITECTURE@@', arch === 'x64' ? 'amd64' : arch))
|
||||
.pipe(rename('snap/snapcraft.yaml'));
|
||||
|
||||
const electronLaunch = gulp.src('resources/linux/snap/electron-launch', { base: '.' })
|
||||
|
||||
@@ -1,133 +1,120 @@
|
||||
"use strict";
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const os = require('os');
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.getBuiltInExtensions = void 0;
|
||||
const fs = require("fs");
|
||||
const path = require("path");
|
||||
const os = require("os");
|
||||
const rimraf = require("rimraf");
|
||||
const es = require("event-stream");
|
||||
const rename = require("gulp-rename");
|
||||
const vfs = require("vinyl-fs");
|
||||
const ext = require("./extensions");
|
||||
const fancyLog = require("fancy-log");
|
||||
const ansiColors = require("ansi-colors");
|
||||
const mkdirp = require('mkdirp');
|
||||
const rimraf = require('rimraf');
|
||||
const es = require('event-stream');
|
||||
const rename = require('gulp-rename');
|
||||
const vfs = require('vinyl-fs');
|
||||
const ext = require('./extensions');
|
||||
const fancyLog = require('fancy-log');
|
||||
const ansiColors = require('ansi-colors');
|
||||
|
||||
const root = path.dirname(path.dirname(__dirname));
|
||||
const productjson = JSON.parse(fs.readFileSync(path.join(__dirname, '../../product.json'), 'utf8'));
|
||||
const builtInExtensions = productjson.builtInExtensions;
|
||||
const webBuiltInExtensions = productjson.webBuiltInExtensions;
|
||||
const controlFilePath = path.join(os.homedir(), '.vscode-oss-dev', 'extensions', 'control.json');
|
||||
const ENABLE_LOGGING = !process.env['VSCODE_BUILD_BUILTIN_EXTENSIONS_SILENCE_PLEASE'];
|
||||
|
||||
function log() {
|
||||
if (ENABLE_LOGGING) {
|
||||
fancyLog.apply(this, arguments);
|
||||
}
|
||||
function log(...messages) {
|
||||
if (ENABLE_LOGGING) {
|
||||
fancyLog(...messages);
|
||||
}
|
||||
}
|
||||
|
||||
function getExtensionPath(extension) {
|
||||
return path.join(root, '.build', 'builtInExtensions', extension.name);
|
||||
return path.join(root, '.build', 'builtInExtensions', extension.name);
|
||||
}
|
||||
|
||||
function isUpToDate(extension) {
|
||||
const packagePath = path.join(getExtensionPath(extension), 'package.json');
|
||||
|
||||
if (!fs.existsSync(packagePath)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const packageContents = fs.readFileSync(packagePath, { encoding: 'utf8' });
|
||||
|
||||
try {
|
||||
const diskVersion = JSON.parse(packageContents).version;
|
||||
return (diskVersion === extension.version);
|
||||
} catch (err) {
|
||||
return false;
|
||||
}
|
||||
const packagePath = path.join(getExtensionPath(extension), 'package.json');
|
||||
if (!fs.existsSync(packagePath)) {
|
||||
return false;
|
||||
}
|
||||
const packageContents = fs.readFileSync(packagePath, { encoding: 'utf8' });
|
||||
try {
|
||||
const diskVersion = JSON.parse(packageContents).version;
|
||||
return (diskVersion === extension.version);
|
||||
}
|
||||
catch (err) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
function syncMarketplaceExtension(extension) {
|
||||
if (isUpToDate(extension)) {
|
||||
log(ansiColors.blue('[marketplace]'), `${extension.name}@${extension.version}`, ansiColors.green('✔︎'));
|
||||
return es.readArray([]);
|
||||
}
|
||||
|
||||
rimraf.sync(getExtensionPath(extension));
|
||||
|
||||
return ext.fromMarketplace(extension.name, extension.version, extension.metadata)
|
||||
.pipe(rename(p => p.dirname = `${extension.name}/${p.dirname}`))
|
||||
.pipe(vfs.dest('.build/builtInExtensions'))
|
||||
.on('end', () => log(ansiColors.blue('[marketplace]'), extension.name, ansiColors.green('✔︎')));
|
||||
if (isUpToDate(extension)) {
|
||||
log(ansiColors.blue('[marketplace]'), `${extension.name}@${extension.version}`, ansiColors.green('✔︎'));
|
||||
return es.readArray([]);
|
||||
}
|
||||
rimraf.sync(getExtensionPath(extension));
|
||||
return ext.fromMarketplace(extension.name, extension.version, extension.metadata)
|
||||
.pipe(rename(p => p.dirname = `${extension.name}/${p.dirname}`))
|
||||
.pipe(vfs.dest('.build/builtInExtensions'))
|
||||
.on('end', () => log(ansiColors.blue('[marketplace]'), extension.name, ansiColors.green('✔︎')));
|
||||
}
|
||||
|
||||
function syncExtension(extension, controlState) {
|
||||
switch (controlState) {
|
||||
case 'disabled':
|
||||
log(ansiColors.blue('[disabled]'), ansiColors.gray(extension.name));
|
||||
return es.readArray([]);
|
||||
|
||||
case 'marketplace':
|
||||
return syncMarketplaceExtension(extension);
|
||||
|
||||
default:
|
||||
if (!fs.existsSync(controlState)) {
|
||||
log(ansiColors.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but that path does not exist.`));
|
||||
return es.readArray([]);
|
||||
|
||||
} else if (!fs.existsSync(path.join(controlState, 'package.json'))) {
|
||||
log(ansiColors.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but there is no 'package.json' file in that directory.`));
|
||||
return es.readArray([]);
|
||||
}
|
||||
|
||||
log(ansiColors.blue('[local]'), `${extension.name}: ${ansiColors.cyan(controlState)}`, ansiColors.green('✔︎'));
|
||||
return es.readArray([]);
|
||||
}
|
||||
if (extension.platforms) {
|
||||
const platforms = new Set(extension.platforms);
|
||||
if (!platforms.has(process.platform)) {
|
||||
log(ansiColors.gray('[skip]'), `${extension.name}@${extension.version}: Platform '${process.platform}' not supported: [${extension.platforms}]`, ansiColors.green('✔︎'));
|
||||
return es.readArray([]);
|
||||
}
|
||||
}
|
||||
switch (controlState) {
|
||||
case 'disabled':
|
||||
log(ansiColors.blue('[disabled]'), ansiColors.gray(extension.name));
|
||||
return es.readArray([]);
|
||||
case 'marketplace':
|
||||
return syncMarketplaceExtension(extension);
|
||||
default:
|
||||
if (!fs.existsSync(controlState)) {
|
||||
log(ansiColors.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but that path does not exist.`));
|
||||
return es.readArray([]);
|
||||
}
|
||||
else if (!fs.existsSync(path.join(controlState, 'package.json'))) {
|
||||
log(ansiColors.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but there is no 'package.json' file in that directory.`));
|
||||
return es.readArray([]);
|
||||
}
|
||||
log(ansiColors.blue('[local]'), `${extension.name}: ${ansiColors.cyan(controlState)}`, ansiColors.green('✔︎'));
|
||||
return es.readArray([]);
|
||||
}
|
||||
}
|
||||
|
||||
function readControlFile() {
|
||||
try {
|
||||
return JSON.parse(fs.readFileSync(controlFilePath, 'utf8'));
|
||||
} catch (err) {
|
||||
return {};
|
||||
}
|
||||
try {
|
||||
return JSON.parse(fs.readFileSync(controlFilePath, 'utf8'));
|
||||
}
|
||||
catch (err) {
|
||||
return {};
|
||||
}
|
||||
}
|
||||
|
||||
function writeControlFile(control) {
|
||||
mkdirp.sync(path.dirname(controlFilePath));
|
||||
fs.writeFileSync(controlFilePath, JSON.stringify(control, null, 2));
|
||||
mkdirp.sync(path.dirname(controlFilePath));
|
||||
fs.writeFileSync(controlFilePath, JSON.stringify(control, null, 2));
|
||||
}
|
||||
|
||||
exports.getBuiltInExtensions = function getBuiltInExtensions() {
|
||||
log('Syncronizing built-in extensions...');
|
||||
log(`You can manage built-in extensions with the ${ansiColors.cyan('--builtin')} flag`);
|
||||
|
||||
const control = readControlFile();
|
||||
const streams = [];
|
||||
|
||||
for (const extension of [...builtInExtensions, ...webBuiltInExtensions]) {
|
||||
let controlState = control[extension.name] || 'marketplace';
|
||||
control[extension.name] = controlState;
|
||||
|
||||
streams.push(syncExtension(extension, controlState));
|
||||
}
|
||||
|
||||
writeControlFile(control);
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
es.merge(streams)
|
||||
.on('error', reject)
|
||||
.on('end', resolve);
|
||||
});
|
||||
};
|
||||
|
||||
function getBuiltInExtensions() {
|
||||
log('Syncronizing built-in extensions...');
|
||||
log(`You can manage built-in extensions with the ${ansiColors.cyan('--builtin')} flag`);
|
||||
const control = readControlFile();
|
||||
const streams = [];
|
||||
for (const extension of [...builtInExtensions, ...webBuiltInExtensions]) {
|
||||
let controlState = control[extension.name] || 'marketplace';
|
||||
control[extension.name] = controlState;
|
||||
streams.push(syncExtension(extension, controlState));
|
||||
}
|
||||
writeControlFile(control);
|
||||
return new Promise((resolve, reject) => {
|
||||
es.merge(streams)
|
||||
.on('error', reject)
|
||||
.on('end', resolve);
|
||||
});
|
||||
}
|
||||
exports.getBuiltInExtensions = getBuiltInExtensions;
|
||||
if (require.main === module) {
|
||||
exports.getBuiltInExtensions().then(() => process.exit(0)).catch(err => {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
getBuiltInExtensions().then(() => process.exit(0)).catch(err => {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
}
|
||||
|
||||
163
build/lib/builtInExtensions.ts
Normal file
163
build/lib/builtInExtensions.ts
Normal file
@@ -0,0 +1,163 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
import * as os from 'os';
|
||||
import * as rimraf from 'rimraf';
|
||||
import * as es from 'event-stream';
|
||||
import * as rename from 'gulp-rename';
|
||||
import * as vfs from 'vinyl-fs';
|
||||
import * as ext from './extensions';
|
||||
import * as fancyLog from 'fancy-log';
|
||||
import * as ansiColors from 'ansi-colors';
|
||||
import { Stream } from 'stream';
|
||||
|
||||
const mkdirp = require('mkdirp');
|
||||
|
||||
interface IExtensionDefinition {
|
||||
name: string;
|
||||
version: string;
|
||||
repo: string;
|
||||
platforms?: string[];
|
||||
metadata: {
|
||||
id: string;
|
||||
publisherId: {
|
||||
publisherId: string;
|
||||
publisherName: string;
|
||||
displayName: string;
|
||||
flags: string;
|
||||
};
|
||||
publisherDisplayName: string;
|
||||
}
|
||||
}
|
||||
|
||||
const root = path.dirname(path.dirname(__dirname));
|
||||
const productjson = JSON.parse(fs.readFileSync(path.join(__dirname, '../../product.json'), 'utf8'));
|
||||
const builtInExtensions = <IExtensionDefinition[]>productjson.builtInExtensions;
|
||||
const webBuiltInExtensions = <IExtensionDefinition[]>productjson.webBuiltInExtensions;
|
||||
const controlFilePath = path.join(os.homedir(), '.vscode-oss-dev', 'extensions', 'control.json');
|
||||
const ENABLE_LOGGING = !process.env['VSCODE_BUILD_BUILTIN_EXTENSIONS_SILENCE_PLEASE'];
|
||||
|
||||
function log(...messages: string[]): void {
|
||||
if (ENABLE_LOGGING) {
|
||||
fancyLog(...messages);
|
||||
}
|
||||
}
|
||||
|
||||
function getExtensionPath(extension: IExtensionDefinition): string {
|
||||
return path.join(root, '.build', 'builtInExtensions', extension.name);
|
||||
}
|
||||
|
||||
function isUpToDate(extension: IExtensionDefinition): boolean {
|
||||
const packagePath = path.join(getExtensionPath(extension), 'package.json');
|
||||
|
||||
if (!fs.existsSync(packagePath)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const packageContents = fs.readFileSync(packagePath, { encoding: 'utf8' });
|
||||
|
||||
try {
|
||||
const diskVersion = JSON.parse(packageContents).version;
|
||||
return (diskVersion === extension.version);
|
||||
} catch (err) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
function syncMarketplaceExtension(extension: IExtensionDefinition): Stream {
|
||||
if (isUpToDate(extension)) {
|
||||
log(ansiColors.blue('[marketplace]'), `${extension.name}@${extension.version}`, ansiColors.green('✔︎'));
|
||||
return es.readArray([]);
|
||||
}
|
||||
|
||||
rimraf.sync(getExtensionPath(extension));
|
||||
|
||||
return ext.fromMarketplace(extension.name, extension.version, extension.metadata)
|
||||
.pipe(rename(p => p.dirname = `${extension.name}/${p.dirname}`))
|
||||
.pipe(vfs.dest('.build/builtInExtensions'))
|
||||
.on('end', () => log(ansiColors.blue('[marketplace]'), extension.name, ansiColors.green('✔︎')));
|
||||
}
|
||||
|
||||
function syncExtension(extension: IExtensionDefinition, controlState: 'disabled' | 'marketplace'): Stream {
|
||||
if (extension.platforms) {
|
||||
const platforms = new Set(extension.platforms);
|
||||
|
||||
if (!platforms.has(process.platform)) {
|
||||
log(ansiColors.gray('[skip]'), `${extension.name}@${extension.version}: Platform '${process.platform}' not supported: [${extension.platforms}]`, ansiColors.green('✔︎'));
|
||||
return es.readArray([]);
|
||||
}
|
||||
}
|
||||
|
||||
switch (controlState) {
|
||||
case 'disabled':
|
||||
log(ansiColors.blue('[disabled]'), ansiColors.gray(extension.name));
|
||||
return es.readArray([]);
|
||||
|
||||
case 'marketplace':
|
||||
return syncMarketplaceExtension(extension);
|
||||
|
||||
default:
|
||||
if (!fs.existsSync(controlState)) {
|
||||
log(ansiColors.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but that path does not exist.`));
|
||||
return es.readArray([]);
|
||||
|
||||
} else if (!fs.existsSync(path.join(controlState, 'package.json'))) {
|
||||
log(ansiColors.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but there is no 'package.json' file in that directory.`));
|
||||
return es.readArray([]);
|
||||
}
|
||||
|
||||
log(ansiColors.blue('[local]'), `${extension.name}: ${ansiColors.cyan(controlState)}`, ansiColors.green('✔︎'));
|
||||
return es.readArray([]);
|
||||
}
|
||||
}
|
||||
|
||||
interface IControlFile {
|
||||
[name: string]: 'disabled' | 'marketplace';
|
||||
}
|
||||
|
||||
function readControlFile(): IControlFile {
|
||||
try {
|
||||
return JSON.parse(fs.readFileSync(controlFilePath, 'utf8'));
|
||||
} catch (err) {
|
||||
return {};
|
||||
}
|
||||
}
|
||||
|
||||
function writeControlFile(control: IControlFile): void {
|
||||
mkdirp.sync(path.dirname(controlFilePath));
|
||||
fs.writeFileSync(controlFilePath, JSON.stringify(control, null, 2));
|
||||
}
|
||||
|
||||
export function getBuiltInExtensions(): Promise<void> {
|
||||
log('Syncronizing built-in extensions...');
|
||||
log(`You can manage built-in extensions with the ${ansiColors.cyan('--builtin')} flag`);
|
||||
|
||||
const control = readControlFile();
|
||||
const streams: Stream[] = [];
|
||||
|
||||
for (const extension of [...builtInExtensions, ...webBuiltInExtensions]) {
|
||||
let controlState = control[extension.name] || 'marketplace';
|
||||
control[extension.name] = controlState;
|
||||
|
||||
streams.push(syncExtension(extension, controlState));
|
||||
}
|
||||
|
||||
writeControlFile(control);
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
es.merge(streams)
|
||||
.on('error', reject)
|
||||
.on('end', resolve);
|
||||
});
|
||||
}
|
||||
|
||||
if (require.main === module) {
|
||||
getBuiltInExtensions().then(() => process.exit(0)).catch(err => {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
}
|
||||
@@ -8,9 +8,6 @@ exports.watchTask = exports.compileTask = void 0;
|
||||
const es = require("event-stream");
|
||||
const fs = require("fs");
|
||||
const gulp = require("gulp");
|
||||
const bom = require("gulp-bom");
|
||||
const sourcemaps = require("gulp-sourcemaps");
|
||||
const tsb = require("gulp-tsb");
|
||||
const path = require("path");
|
||||
const monacodts = require("../monaco/api");
|
||||
const nls = require("./nls");
|
||||
@@ -36,10 +33,13 @@ function getTypeScriptCompilerOptions(src) {
|
||||
return options;
|
||||
}
|
||||
function createCompile(src, build, emitError) {
|
||||
const tsb = require('gulp-tsb');
|
||||
const sourcemaps = require('gulp-sourcemaps');
|
||||
const projectPath = path.join(__dirname, '../../', src, 'tsconfig.json');
|
||||
const overrideOptions = Object.assign(Object.assign({}, getTypeScriptCompilerOptions(src)), { inlineSources: Boolean(build) });
|
||||
const compilation = tsb.create(projectPath, overrideOptions, false, err => reporter(err));
|
||||
function pipeline(token) {
|
||||
const bom = require('gulp-bom');
|
||||
const utf8Filter = util.filter(data => /(\/|\\)test(\/|\\).*utf8/.test(data.path));
|
||||
const tsFilter = util.filter(data => /\.ts$/.test(data.path));
|
||||
const noDeclarationsFilter = util.filter(data => !(/\.d\.ts$/.test(data.path)));
|
||||
@@ -52,7 +52,7 @@ function createCompile(src, build, emitError) {
|
||||
.pipe(util.loadSourcemaps())
|
||||
.pipe(compilation(token))
|
||||
.pipe(noDeclarationsFilter)
|
||||
.pipe(build ? nls() : es.through())
|
||||
.pipe(build ? nls.nls() : es.through())
|
||||
.pipe(noDeclarationsFilter.restore)
|
||||
.pipe(sourcemaps.write('.', {
|
||||
addComment: false,
|
||||
|
||||
@@ -8,9 +8,6 @@
|
||||
import * as es from 'event-stream';
|
||||
import * as fs from 'fs';
|
||||
import * as gulp from 'gulp';
|
||||
import * as bom from 'gulp-bom';
|
||||
import * as sourcemaps from 'gulp-sourcemaps';
|
||||
import * as tsb from 'gulp-tsb';
|
||||
import * as path from 'path';
|
||||
import * as monacodts from '../monaco/api';
|
||||
import * as nls from './nls';
|
||||
@@ -41,12 +38,17 @@ function getTypeScriptCompilerOptions(src: string): ts.CompilerOptions {
|
||||
}
|
||||
|
||||
function createCompile(src: string, build: boolean, emitError?: boolean) {
|
||||
const tsb = require('gulp-tsb') as typeof import('gulp-tsb');
|
||||
const sourcemaps = require('gulp-sourcemaps') as typeof import('gulp-sourcemaps');
|
||||
|
||||
|
||||
const projectPath = path.join(__dirname, '../../', src, 'tsconfig.json');
|
||||
const overrideOptions = { ...getTypeScriptCompilerOptions(src), inlineSources: Boolean(build) };
|
||||
|
||||
const compilation = tsb.create(projectPath, overrideOptions, false, err => reporter(err));
|
||||
|
||||
function pipeline(token?: util.ICancellationToken) {
|
||||
const bom = require('gulp-bom') as typeof import('gulp-bom');
|
||||
|
||||
const utf8Filter = util.filter(data => /(\/|\\)test(\/|\\).*utf8/.test(data.path));
|
||||
const tsFilter = util.filter(data => /\.ts$/.test(data.path));
|
||||
@@ -61,7 +63,7 @@ function createCompile(src: string, build: boolean, emitError?: boolean) {
|
||||
.pipe(util.loadSourcemaps())
|
||||
.pipe(compilation(token))
|
||||
.pipe(noDeclarationsFilter)
|
||||
.pipe(build ? nls() : es.through())
|
||||
.pipe(build ? nls.nls() : es.through())
|
||||
.pipe(noDeclarationsFilter.restore)
|
||||
.pipe(sourcemaps.write('.', {
|
||||
addComment: false,
|
||||
|
||||
60
build/lib/dependencies.js
Normal file
60
build/lib/dependencies.js
Normal file
@@ -0,0 +1,60 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
'use strict';
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.getProductionDependencies = void 0;
|
||||
const path = require("path");
|
||||
const cp = require("child_process");
|
||||
const _ = require("underscore");
|
||||
const parseSemver = require('parse-semver');
|
||||
function asYarnDependency(prefix, tree) {
|
||||
let parseResult;
|
||||
try {
|
||||
parseResult = parseSemver(tree.name);
|
||||
}
|
||||
catch (err) {
|
||||
err.message += `: ${tree.name}`;
|
||||
console.warn(`Could not parse semver: ${tree.name}`);
|
||||
return null;
|
||||
}
|
||||
// not an actual dependency in disk
|
||||
if (parseResult.version !== parseResult.range) {
|
||||
return null;
|
||||
}
|
||||
const name = parseResult.name;
|
||||
const version = parseResult.version;
|
||||
const dependencyPath = path.join(prefix, name);
|
||||
const children = [];
|
||||
for (const child of (tree.children || [])) {
|
||||
const dep = asYarnDependency(path.join(prefix, name, 'node_modules'), child);
|
||||
if (dep) {
|
||||
children.push(dep);
|
||||
}
|
||||
}
|
||||
return { name, version, path: dependencyPath, children };
|
||||
}
|
||||
function getYarnProductionDependencies(cwd) {
|
||||
const raw = cp.execSync('yarn list --json', { cwd, encoding: 'utf8', env: Object.assign(Object.assign({}, process.env), { NODE_ENV: 'production' }), stdio: [null, null, 'inherit'] });
|
||||
const match = /^{"type":"tree".*$/m.exec(raw);
|
||||
if (!match || match.length !== 1) {
|
||||
throw new Error('Could not parse result of `yarn list --json`');
|
||||
}
|
||||
const trees = JSON.parse(match[0]).data.trees;
|
||||
return trees
|
||||
.map(tree => asYarnDependency(path.join(cwd, 'node_modules'), tree))
|
||||
.filter((dep) => !!dep);
|
||||
}
|
||||
function getProductionDependencies(cwd) {
|
||||
const result = [];
|
||||
const deps = getYarnProductionDependencies(cwd);
|
||||
const flatten = (dep) => { result.push({ name: dep.name, version: dep.version, path: dep.path }); dep.children.forEach(flatten); };
|
||||
deps.forEach(flatten);
|
||||
return _.uniq(result);
|
||||
}
|
||||
exports.getProductionDependencies = getProductionDependencies;
|
||||
if (require.main === module) {
|
||||
const root = path.dirname(path.dirname(__dirname));
|
||||
console.log(JSON.stringify(getProductionDependencies(root), null, ' '));
|
||||
}
|
||||
@@ -5,12 +5,27 @@
|
||||
|
||||
'use strict';
|
||||
|
||||
const path = require('path');
|
||||
import * as path from 'path';
|
||||
import * as cp from 'child_process';
|
||||
import * as _ from 'underscore';
|
||||
const parseSemver = require('parse-semver');
|
||||
const cp = require('child_process');
|
||||
const _ = require('underscore');
|
||||
|
||||
function asYarnDependency(prefix, tree) {
|
||||
interface Tree {
|
||||
readonly name: string;
|
||||
readonly children?: Tree[];
|
||||
}
|
||||
|
||||
interface FlatDependency {
|
||||
readonly name: string;
|
||||
readonly version: string;
|
||||
readonly path: string;
|
||||
}
|
||||
|
||||
interface Dependency extends FlatDependency {
|
||||
readonly children: Dependency[];
|
||||
}
|
||||
|
||||
function asYarnDependency(prefix: string, tree: Tree): Dependency | null {
|
||||
let parseResult;
|
||||
|
||||
try {
|
||||
@@ -42,7 +57,7 @@ function asYarnDependency(prefix, tree) {
|
||||
return { name, version, path: dependencyPath, children };
|
||||
}
|
||||
|
||||
function getYarnProductionDependencies(cwd) {
|
||||
function getYarnProductionDependencies(cwd: string): Dependency[] {
|
||||
const raw = cp.execSync('yarn list --json', { cwd, encoding: 'utf8', env: { ...process.env, NODE_ENV: 'production' }, stdio: [null, null, 'inherit'] });
|
||||
const match = /^{"type":"tree".*$/m.exec(raw);
|
||||
|
||||
@@ -50,25 +65,22 @@ function getYarnProductionDependencies(cwd) {
|
||||
throw new Error('Could not parse result of `yarn list --json`');
|
||||
}
|
||||
|
||||
const trees = JSON.parse(match[0]).data.trees;
|
||||
const trees = JSON.parse(match[0]).data.trees as Tree[];
|
||||
|
||||
return trees
|
||||
.map(tree => asYarnDependency(path.join(cwd, 'node_modules'), tree))
|
||||
.filter(dep => !!dep);
|
||||
.filter<Dependency>((dep): dep is Dependency => !!dep);
|
||||
}
|
||||
|
||||
function getProductionDependencies(cwd) {
|
||||
const result = [];
|
||||
export function getProductionDependencies(cwd: string): FlatDependency[] {
|
||||
const result: FlatDependency[] = [];
|
||||
const deps = getYarnProductionDependencies(cwd);
|
||||
const flatten = dep => { result.push({ name: dep.name, version: dep.version, path: dep.path }); dep.children.forEach(flatten); };
|
||||
const flatten = (dep: Dependency) => { result.push({ name: dep.name, version: dep.version, path: dep.path }); dep.children.forEach(flatten); };
|
||||
deps.forEach(flatten);
|
||||
|
||||
return _.uniq(result);
|
||||
}
|
||||
|
||||
module.exports.getProductionDependencies = getProductionDependencies;
|
||||
|
||||
if (require.main === module) {
|
||||
const root = path.dirname(__dirname);
|
||||
const root = path.dirname(path.dirname(__dirname));
|
||||
console.log(JSON.stringify(getProductionDependencies(root), null, ' '));
|
||||
}
|
||||
@@ -9,10 +9,8 @@ const fs = require("fs");
|
||||
const path = require("path");
|
||||
const vfs = require("vinyl-fs");
|
||||
const filter = require("gulp-filter");
|
||||
const json = require("gulp-json-editor");
|
||||
const _ = require("underscore");
|
||||
const util = require("./util");
|
||||
const electron = require('gulp-atom-electron');
|
||||
const root = path.dirname(path.dirname(__dirname));
|
||||
const product = JSON.parse(fs.readFileSync(path.join(root, 'product.json'), 'utf8'));
|
||||
const commit = util.getVersion(root);
|
||||
@@ -53,6 +51,8 @@ exports.config = {
|
||||
};
|
||||
function getElectron(arch) {
|
||||
return () => {
|
||||
const electron = require('gulp-atom-electron');
|
||||
const json = require('gulp-json-editor');
|
||||
const electronOpts = _.extend({}, exports.config, {
|
||||
platform: process.platform,
|
||||
arch: arch === 'armhf' ? 'arm' : arch,
|
||||
|
||||
@@ -9,12 +9,9 @@ import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
import * as vfs from 'vinyl-fs';
|
||||
import * as filter from 'gulp-filter';
|
||||
import * as json from 'gulp-json-editor';
|
||||
import * as _ from 'underscore';
|
||||
import * as util from './util';
|
||||
|
||||
const electron = require('gulp-atom-electron');
|
||||
|
||||
const root = path.dirname(path.dirname(__dirname));
|
||||
const product = JSON.parse(fs.readFileSync(path.join(root, 'product.json'), 'utf8'));
|
||||
const commit = util.getVersion(root);
|
||||
@@ -59,6 +56,9 @@ export const config = {
|
||||
|
||||
function getElectron(arch: string): () => NodeJS.ReadWriteStream {
|
||||
return () => {
|
||||
const electron = require('gulp-atom-electron');
|
||||
const json = require('gulp-json-editor') as typeof import('gulp-json-editor');
|
||||
|
||||
const electronOpts = _.extend({}, config, {
|
||||
platform: process.platform,
|
||||
arch: arch === 'armhf' ? 'arm' : arch,
|
||||
|
||||
33
build/lib/eslint/vscode-dts-cancellation.js
Normal file
33
build/lib/eslint/vscode-dts-cancellation.js
Normal file
@@ -0,0 +1,33 @@
|
||||
"use strict";
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
const experimental_utils_1 = require("@typescript-eslint/experimental-utils");
|
||||
module.exports = new class ApiProviderNaming {
|
||||
constructor() {
|
||||
this.meta = {
|
||||
messages: {
|
||||
noToken: 'Function lacks a cancellation token, preferable as last argument',
|
||||
}
|
||||
};
|
||||
}
|
||||
create(context) {
|
||||
return {
|
||||
['TSInterfaceDeclaration[id.name=/.+Provider/] TSMethodSignature[key.name=/^(provide|resolve).+/]']: (node) => {
|
||||
let found = false;
|
||||
for (let param of node.params) {
|
||||
if (param.type === experimental_utils_1.AST_NODE_TYPES.Identifier) {
|
||||
found = found || param.name === 'token';
|
||||
}
|
||||
}
|
||||
if (!found) {
|
||||
context.report({
|
||||
node,
|
||||
messageId: 'noToken'
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
};
|
||||
38
build/lib/eslint/vscode-dts-cancellation.ts
Normal file
38
build/lib/eslint/vscode-dts-cancellation.ts
Normal file
@@ -0,0 +1,38 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import * as eslint from 'eslint';
|
||||
import { AST_NODE_TYPES, TSESTree } from '@typescript-eslint/experimental-utils';
|
||||
|
||||
export = new class ApiProviderNaming implements eslint.Rule.RuleModule {
|
||||
|
||||
readonly meta: eslint.Rule.RuleMetaData = {
|
||||
messages: {
|
||||
noToken: 'Function lacks a cancellation token, preferable as last argument',
|
||||
}
|
||||
};
|
||||
|
||||
create(context: eslint.Rule.RuleContext): eslint.Rule.RuleListener {
|
||||
|
||||
return {
|
||||
['TSInterfaceDeclaration[id.name=/.+Provider/] TSMethodSignature[key.name=/^(provide|resolve).+/]']: (node: any) => {
|
||||
|
||||
let found = false;
|
||||
for (let param of (<TSESTree.TSMethodSignature>node).params) {
|
||||
if (param.type === AST_NODE_TYPES.Identifier) {
|
||||
found = found || param.name === 'token';
|
||||
}
|
||||
}
|
||||
|
||||
if (!found) {
|
||||
context.report({
|
||||
node,
|
||||
messageId: 'noToken'
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
};
|
||||
38
build/lib/eslint/vscode-dts-provider-naming.js
Normal file
38
build/lib/eslint/vscode-dts-provider-naming.js
Normal file
@@ -0,0 +1,38 @@
|
||||
"use strict";
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
var _a;
|
||||
module.exports = new (_a = class ApiProviderNaming {
|
||||
constructor() {
|
||||
this.meta = {
|
||||
messages: {
|
||||
naming: 'A provider should only have functions like provideXYZ or resolveXYZ',
|
||||
}
|
||||
};
|
||||
}
|
||||
create(context) {
|
||||
const config = context.options[0];
|
||||
const allowed = new Set(config.allowed);
|
||||
return {
|
||||
['TSInterfaceDeclaration[id.name=/.+Provider/] TSMethodSignature']: (node) => {
|
||||
var _a;
|
||||
const interfaceName = ((_a = node.parent) === null || _a === void 0 ? void 0 : _a.parent).id.name;
|
||||
if (allowed.has(interfaceName)) {
|
||||
// allowed
|
||||
return;
|
||||
}
|
||||
const methodName = node.key.name;
|
||||
if (!ApiProviderNaming._providerFunctionNames.test(methodName)) {
|
||||
context.report({
|
||||
node,
|
||||
messageId: 'naming'
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
},
|
||||
_a._providerFunctionNames = /^(provide|resolve|prepare).+/,
|
||||
_a);
|
||||
45
build/lib/eslint/vscode-dts-provider-naming.ts
Normal file
45
build/lib/eslint/vscode-dts-provider-naming.ts
Normal file
@@ -0,0 +1,45 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import * as eslint from 'eslint';
|
||||
import { TSESTree } from '@typescript-eslint/experimental-utils';
|
||||
|
||||
export = new class ApiProviderNaming implements eslint.Rule.RuleModule {
|
||||
|
||||
readonly meta: eslint.Rule.RuleMetaData = {
|
||||
messages: {
|
||||
naming: 'A provider should only have functions like provideXYZ or resolveXYZ',
|
||||
}
|
||||
};
|
||||
|
||||
private static _providerFunctionNames = /^(provide|resolve|prepare).+/;
|
||||
|
||||
create(context: eslint.Rule.RuleContext): eslint.Rule.RuleListener {
|
||||
|
||||
const config = <{ allowed: string[] }>context.options[0];
|
||||
const allowed = new Set(config.allowed);
|
||||
|
||||
return {
|
||||
['TSInterfaceDeclaration[id.name=/.+Provider/] TSMethodSignature']: (node: any) => {
|
||||
|
||||
|
||||
const interfaceName = (<TSESTree.TSInterfaceDeclaration>(<TSESTree.Identifier>node).parent?.parent).id.name;
|
||||
if (allowed.has(interfaceName)) {
|
||||
// allowed
|
||||
return;
|
||||
}
|
||||
|
||||
const methodName = (<any>(<TSESTree.TSMethodSignatureNonComputedName>node).key).name;
|
||||
|
||||
if (!ApiProviderNaming._providerFunctionNames.test(methodName)) {
|
||||
context.report({
|
||||
node,
|
||||
messageId: 'naming'
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
};
|
||||
build/lib/eslint/vscode-dts-region-comments.js (normal file, 35 lines)
@@ -0,0 +1,35 @@
"use strict";
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
module.exports = new class ApiEventNaming {
    constructor() {
        this.meta = {
            messages: {
                comment: 'region comments should start with the GH issue link, e.g #region https://github.com/microsoft/vscode/issues/<number>',
            }
        };
    }
    create(context) {
        const sourceCode = context.getSourceCode();
        return {
            ['Program']: (_node) => {
                for (let comment of sourceCode.getAllComments()) {
                    if (comment.type !== 'Line') {
                        continue;
                    }
                    if (!comment.value.match(/^\s*#region /)) {
                        continue;
                    }
                    if (!comment.value.match(/https:\/\/github.com\/microsoft\/vscode\/issues\/\d+/i)) {
                        context.report({
                            node: comment,
                            messageId: 'comment',
                        });
                    }
                }
            }
        };
    }
};
build/lib/eslint/vscode-dts-region-comments.ts (normal file, 41 lines)
@@ -0,0 +1,41 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as eslint from 'eslint';

export = new class ApiEventNaming implements eslint.Rule.RuleModule {

	readonly meta: eslint.Rule.RuleMetaData = {
		messages: {
			comment: 'region comments should start with the GH issue link, e.g #region https://github.com/microsoft/vscode/issues/<number>',
		}
	};

	create(context: eslint.Rule.RuleContext): eslint.Rule.RuleListener {

		const sourceCode = context.getSourceCode();

		return {
			['Program']: (_node: any) => {

				for (let comment of sourceCode.getAllComments()) {
					if (comment.type !== 'Line') {
						continue;
					}
					if (!comment.value.match(/^\s*#region /)) {
						continue;
					}
					if (!comment.value.match(/https:\/\/github.com\/microsoft\/vscode\/issues\/\d+/i)) {
						context.report({
							node: <any>comment,
							messageId: 'comment',
						});
					}
				}
			}
		};
	}
};
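To illustrate what this rule accepts and reports (not part of the diff; the issue number and interface names below are placeholders): the rule inspects every line comment, and any comment that begins with "#region" must also contain a https://github.com/microsoft/vscode/issues/<number> link, otherwise it is reported with the 'comment' message.

// #region https://github.com/microsoft/vscode/issues/123456
export interface SampleProposedApi { }	// ok: the region comment above carries an issue link
// #endregion

// #region sample experimental api
export interface AnotherProposedApi { }	// the "#region sample experimental api" comment above is reported
// #endregion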
build/lib/eslint/vscode-dts-use-thenable.js (normal file, 24 lines)
@@ -0,0 +1,24 @@
"use strict";
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
module.exports = new class ApiEventNaming {
    constructor() {
        this.meta = {
            messages: {
                usage: 'Use the Thenable-type instead of the Promise type',
            }
        };
    }
    create(context) {
        return {
            ['TSTypeAnnotation TSTypeReference Identifier[name="Promise"]']: (node) => {
                context.report({
                    node,
                    messageId: 'usage',
                });
            }
        };
    }
};
build/lib/eslint/vscode-dts-use-thenable.ts (normal file, 30 lines)
@@ -0,0 +1,30 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as eslint from 'eslint';

export = new class ApiEventNaming implements eslint.Rule.RuleModule {

	readonly meta: eslint.Rule.RuleMetaData = {
		messages: {
			usage: 'Use the Thenable-type instead of the Promise type',
		}
	};

	create(context: eslint.Rule.RuleContext): eslint.Rule.RuleListener {

		return {
			['TSTypeAnnotation TSTypeReference Identifier[name="Promise"]']: (node: any) => {

				context.report({
					node,
					messageId: 'usage',
				});
			}
		};
	}
};
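For illustration (not part of the diff; SampleShape and its methods are made-up names): the selector targets the identifier `Promise` inside any type annotation, so the first signature below is reported with the 'usage' message while the Thenable form is accepted. vscode.d.ts declares Thenable itself; it is redeclared here only so the snippet stands alone.

interface Thenable<T> extends PromiseLike<T> { }

export interface SampleShape {
	openResource(uri: string): Promise<void>;	// reported: Promise used in a type annotation
	saveResource(uri: string): Thenable<void>;	// ok
}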
@@ -11,20 +11,15 @@ const glob = require("glob");
const gulp = require("gulp");
const path = require("path");
const File = require("vinyl");
const vsce = require("vsce");
const stats_1 = require("./stats");
const util2 = require("./util");
const remote = require("gulp-remote-retry-src");
const vzip = require('gulp-vinyl-zip');
const filter = require("gulp-filter");
const rename = require("gulp-rename");
const fancyLog = require("fancy-log");
const ansiColors = require("ansi-colors");
const buffer = require('gulp-buffer');
const json = require("gulp-json-editor");
const jsoncParser = require("jsonc-parser");
const webpack = require('webpack');
const webpackGulp = require('webpack-stream');
const util = require('./util');
const root = path.dirname(path.dirname(__dirname));
const commit = util.getVersion(root);
@@ -88,6 +83,9 @@ function fromLocalWebpack(extensionPath, webpackConfigFileName) {
            }
        }
    }
    const vsce = require('vsce');
    const webpack = require('webpack');
    const webpackGulp = require('webpack-stream');
    vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn, packagedDependencies }).then(fileNames => {
        const files = fileNames
            .map(fileName => path.join(extensionPath, fileName))
@@ -149,6 +147,7 @@ function fromLocalWebpack(extensionPath, webpackConfigFileName) {
}
function fromLocalNormal(extensionPath) {
    const result = es.through();
    const vsce = require('vsce');
    vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn })
        .then(fileNames => {
        const files = fileNames
@@ -170,7 +169,8 @@ const baseHeaders = {
    'X-Market-User-Id': '291C1CD0-051A-4123-9B4B-30D60EF52EE2',
};
function fromMarketplace(extensionName, version, metadata) {
    // {{SQL CARBON EDIT}}
    const remote = require('gulp-remote-retry-src');
    const json = require('gulp-json-editor');
    const [, name] = extensionName.split('.');
    const url = `https://sqlopsextensions.blob.core.windows.net/extensions/${name}/${name}-${version}.vsix`;
    fancyLog('Downloading extension:', ansiColors.yellow(`${extensionName}@${version}`), '...');
@@ -233,9 +233,13 @@ const rebuildExtensions = [
    'big-data-cluster',
    'mssql'
];
const marketplaceWebExtensions = [
    'ms-vscode.references-view'
];
const marketplaceWebExtensionsExclude = new Set([
    'ms-vscode.node-debug',
    'ms-vscode.node-debug2',
    'ms-vscode.js-debug-companion',
    'ms-vscode.js-debug',
    'ms-vscode.vscode-js-profile-table'
]);
const productJson = JSON.parse(fs.readFileSync(path.join(__dirname, '../../product.json'), 'utf8'));
const builtInExtensions = productJson.builtInExtensions || [];
const webBuiltInExtensions = productJson.webBuiltInExtensions || [];
@@ -279,7 +283,7 @@ function packageLocalExtensionsStream(forWeb) {
exports.packageLocalExtensionsStream = packageLocalExtensionsStream;
function packageMarketplaceExtensionsStream(forWeb) {
    const marketplaceExtensionsDescriptions = [
        ...builtInExtensions.filter(({ name }) => (forWeb ? marketplaceWebExtensions.indexOf(name) >= 0 : true)),
        ...builtInExtensions.filter(({ name }) => (forWeb ? !marketplaceWebExtensionsExclude.has(name) : true)),
        ...(forWeb ? webBuiltInExtensions : [])
    ];
    const marketplaceExtensionsStream = minifyExtensionResources(es.merge(...marketplaceExtensionsDescriptions
@@ -10,20 +10,15 @@ import * as gulp from 'gulp';
import * as path from 'path';
import { Stream } from 'stream';
import * as File from 'vinyl';
import * as vsce from 'vsce';
import { createStatsStream } from './stats';
import * as util2 from './util';
import remote = require('gulp-remote-retry-src');
const vzip = require('gulp-vinyl-zip');
import filter = require('gulp-filter');
import rename = require('gulp-rename');
import * as fancyLog from 'fancy-log';
import * as ansiColors from 'ansi-colors';
const buffer = require('gulp-buffer');
import json = require('gulp-json-editor');
import * as jsoncParser from 'jsonc-parser';
const webpack = require('webpack');
const webpackGulp = require('webpack-stream');
const util = require('./util');
const root = path.dirname(path.dirname(__dirname));
const commit = util.getVersion(root);
@@ -97,6 +92,10 @@ function fromLocalWebpack(extensionPath: string, webpackConfigFileName: string):
			}
		}
	}

	const vsce = require('vsce') as typeof import('vsce');
	const webpack = require('webpack');
	const webpackGulp = require('webpack-stream');

	vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn, packagedDependencies }).then(fileNames => {
		const files = fileNames
			.map(fileName => path.join(extensionPath, fileName))
@@ -175,6 +174,8 @@ function fromLocalWebpack(extensionPath: string, webpackConfigFileName: string):
function fromLocalNormal(extensionPath: string): Stream {
	const result = es.through();

	const vsce = require('vsce') as typeof import('vsce');

	vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn })
		.then(fileNames => {
			const files = fileNames
@@ -200,7 +201,8 @@ const baseHeaders = {
};

export function fromMarketplace(extensionName: string, version: string, metadata: any): Stream {
	// {{SQL CARBON EDIT}}
	const json = require('gulp-json-editor') as typeof import('gulp-json-editor');

	const [, name] = extensionName.split('.');
	const url = `https://sqlopsextensions.blob.core.windows.net/extensions/${name}/${name}-${version}.vsix`;

@@ -269,9 +271,13 @@ const rebuildExtensions = [
	'mssql'
];

const marketplaceWebExtensions = [
	'ms-vscode.references-view'
];
const marketplaceWebExtensionsExclude = new Set([
	'ms-vscode.node-debug',
	'ms-vscode.node-debug2',
	'ms-vscode.js-debug-companion',
	'ms-vscode.js-debug',
	'ms-vscode.vscode-js-profile-table'
]);

interface IBuiltInExtension {
	name: string;
@@ -339,7 +345,7 @@ export function packageLocalExtensionsStream(forWeb: boolean): Stream {

export function packageMarketplaceExtensionsStream(forWeb: boolean): Stream {
	const marketplaceExtensionsDescriptions = [
		...builtInExtensions.filter(({ name }) => (forWeb ? marketplaceWebExtensions.indexOf(name) >= 0 : true)),
		...builtInExtensions.filter(({ name }) => (forWeb ? !marketplaceWebExtensionsExclude.has(name) : true)),
		...(forWeb ? webBuiltInExtensions : [])
	];
	const marketplaceExtensionsStream = minifyExtensionResources(
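Taken together, these hunks move heavy, build-only requires (vsce, webpack, webpack-stream, gulp-remote-retry-src, gulp-json-editor) from module scope into the functions that use them, presumably so that merely loading build/lib/extensions stays cheap when those code paths are not exercised. A minimal sketch of the pattern follows; packageOneExtension is a made-up name, while the vsce calls mirror the ones in the diff.

// sketch only — not the repository's actual function
function packageOneExtension(extensionPath: string): void {
	// required lazily, so the dependency is loaded only when this function runs
	const vsce = require('vsce') as typeof import('vsce');
	vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn })
		.then(fileNames => console.log(`${fileNames.length} files under ${extensionPath}`));
}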
Some files were not shown because too many files have changed in this diff.