diff --git a/.devcontainer/README.md b/.devcontainer/README.md
index e5597559f4..827166823d 100644
--- a/.devcontainer/README.md
+++ b/.devcontainer/README.md
@@ -1,14 +1,14 @@
# Code - OSS Development Container
-This repository includes configuration for a development container for working with Code - OSS in an isolated local container or using [GitHub Codespaces](https://github.com/features/codespaces).
+This repository includes configuration for a development container for working with Code - OSS in a local container or using [GitHub Codespaces](https://github.com/features/codespaces).
-> **Tip:** The default VNC password is `vscode`. The VNC server runs on port `5901` with a web client at `6080`. For better performance, we recommend using a [VNC Viewer](https://www.realvnc.com/en/connect/download/viewer/). Applications like the macOS Screen Sharing app will not perform as well.
+> **Tip:** The default VNC password is `vscode`. The VNC server runs on port `5901` and a web client is available on port `6080`.
## Quick start - local
1. Install Docker Desktop or Docker for Linux on your local machine. (See [docs](https://aka.ms/vscode-remote/containers/getting-started) for additional details.)
-2. **Important**: Docker needs at least **4 Cores and 6 GB of RAM (8 GB recommended)** to run full build. If you on macOS, or using the old Hyper-V engine for Windows, update these values for Docker Desktop by right-clicking on the Docker status bar item, going to **Preferences/Settings > Resources > Advanced**.
+2. **Important**: Docker needs at least **4 Cores and 6 GB of RAM (8 GB recommended)** to run a full build. If you are on macOS, or are using the old Hyper-V engine for Windows, update these values for Docker Desktop by right-clicking on the Docker status bar item and going to **Preferences/Settings > Resources > Advanced**.
> **Note:** The [Resource Monitor](https://marketplace.visualstudio.com/items?itemName=mutantdino.resourcemonitor) extension is included in the container so you can keep an eye on CPU/Memory in the status bar.
@@ -16,53 +16,56 @@ This repository includes configuration for a development container for working w

- > Note that the Remote - Containers extension requires the Visual Studio Code distribution of Code - OSS. See the [FAQ](https://aka.ms/vscode-remote/faq/license) for details.
+ > **Note:** The Remote - Containers extension requires the Visual Studio Code distribution of Code - OSS. See the [FAQ](https://aka.ms/vscode-remote/faq/license) for details.
-4. Press Ctrl/Cmd + Shift + P and select **Remote - Containers: Open Repository in Container...**.
+4. Press Ctrl/Cmd + Shift + P or F1 and select **Remote-Containers: Clone Repository in Container Volume...**.
- > **Tip:** While you can use your local source tree instead, operations like `yarn install` can be slow on macOS or using the Hyper-V engine on Windows. We recommend the "open repository" approach instead since it uses "named volume" rather than the local filesystem.
+ > **Tip:** While you can use your local source tree instead, operations like `yarn install` can be slow on macOS or when using the Hyper-V engine on Windows. We recommend the "clone repository in container" approach instead since it uses "named volume" rather than the local filesystem.
5. Type `https://github.com/microsoft/vscode` (or a branch or PR URL) in the input box and press Enter.
-6. After the container is running, open a web browser and go to [http://localhost:6080](http://localhost:6080) or use a [VNC Viewer](https://www.realvnc.com/en/connect/download/viewer/) to connect to `localhost:5901` and enter `vscode` as the password.
+6. After the container is running, open a web browser and go to [http://localhost:6080](http://localhost:6080), or use a [VNC Viewer](https://www.realvnc.com/en/connect/download/viewer/) to connect to `localhost:5901` and enter `vscode` as the password.
-Anything you start in VS Code or the integrated terminal will appear here.
+Anything you start in VS Code, or the integrated terminal, will appear here.
Next: **[Try it out!](#try-it)**
## Quick start - GitHub Codespaces
-> **IMPORTANT:** You need to use a "Standard" sized codespace or larger (4-core, 8GB) since VS Code needs 6GB of RAM to compile. This is now the default for GitHub Codespaces, but do not downgrade to "Basic" unless you do not intend to compile.
+1. From the [microsoft/vscode GitHub repository](https://github.com/microsoft/vscode), click on the **Code** dropdown, select **Open with Codespaces**, and then click on **New codespace**. If prompted, select the **Standard** machine size (which is also the default).
-1. From the [microsoft/vscode GitHub repository](https://github.com/microsoft/vscode), click on the **Code** dropdown, select **Open with Codespaces**, and the **New codespace**
+ > **Note:** You will not see these options within GitHub if you are not in the Codespaces beta.
- > Note that you will not see these options if you are not in the beta yet.
+2. After the codespace is up and running in your browser, press Ctrl/Cmd + Shift + P or F1 and select **Ports: Focus on Ports View**.
-2. After the codespace is up and running in your browser, press F1 and select **Ports: Focus on Ports View**.
+3. You should see **VNC web client (6080)** in the list of ports. Select the line and click on the globe icon to open it in a browser tab.
-3. You should see port `6080` under **Forwarded Ports**. Select the line and click on the globe icon to open it in a browser tab.
-
- > If you do not see port `6080`, press F1, select **Forward a Port** and enter port `6080`.
+ > **Tip:** If you do not see the port, press Ctrl/Cmd + Shift + P or F1, select **Forward a Port** and enter port `6080`.
4. In the new tab, you should see noVNC. Click **Connect** and enter `vscode` as the password.
-Anything you start in VS Code or the integrated terminal will appear here.
+Anything you start in VS Code, or the integrated terminal, will appear here.
Next: **[Try it out!](#try-it)**
### Using VS Code with GitHub Codespaces
-You will likely see better performance when accessing the codespace you created from VS Code since you can use a[VNC Viewer](https://www.realvnc.com/en/connect/download/viewer/). Here's how to do it.
+You may see improved VNC responsiveness when accessing a codespace from VS Code client since you can use a [VNC Viewer](https://www.realvnc.com/en/connect/download/viewer/). Here's how to do it.
-1. [Create a codespace](#quick-start---github-codespaces) if you have not already.
+1. Install [Visual Studio Code Stable](https://code.visualstudio.com/) or [Insiders](https://code.visualstudio.com/insiders/) and the [GitHub Codespaces extension](https://marketplace.visualstudio.com/items?itemName=GitHub.codespaces).
-2. Set up [VS Code for use with GitHub Codespaces](https://docs.github.com/github/developing-online-with-codespaces/using-codespaces-in-visual-studio-code)
+ > **Note:** The GitHub Codespaces extension requires the Visual Studio Code distribution of Code - OSS.
-3. After the VS Code is up and running, press F1, choose **Codespaces: Connect to Codespace**, and select the codespace you created.
+2. After VS Code is up and running, press Ctrl/Cmd + Shift + P or F1, choose **Codespaces: Create New Codespace**, and use the following settings:
+ - `microsoft/vscode` for the repository.
+ - Select any branch (e.g. **main**) - you can select a different one later.
+ - Choose **Standard** (4-core, 8GB) as the size.
-4. After you've connected to the codespace, use a [VNC Viewer](https://www.realvnc.com/en/connect/download/viewer/) to connect to `localhost:5901` and enter `vscode` as the password.
+4. After you have connected to the codespace, you can use a [VNC Viewer](https://www.realvnc.com/en/connect/download/viewer/) to connect to `localhost:5901` and enter `vscode` as the password.
-5. Anything you start in VS Code or the integrated terminal will appear here.
+ > **Tip:** You may also need to change your VNC client's **Picture Quality** setting to **High** to get a full color desktop.
+
+5. Anything you start in VS Code, or the integrated terminal, will appear here.
Next: **[Try it out!](#try-it)**
@@ -70,20 +73,18 @@ Next: **[Try it out!](#try-it)**
This container uses the [Fluxbox](http://fluxbox.org/) window manager to keep things lean. **Right-click on the desktop** to see menu options. It works with GNOME and GTK applications, so other tools can be installed if needed.
-Note you can also set the resolution from the command line by typing `set-resolution`.
+> **Note:** You can also set the resolution from the command line by typing `set-resolution`.
To start working with Code - OSS, follow these steps:
-1. In your local VS Code, open a terminal (Ctrl/Cmd + Shift + \`) and type the following commands:
+1. In your local VS Code client, open a terminal (Ctrl/Cmd + Shift + \`) and type the following commands:
```bash
yarn install
bash scripts/code.sh
```
- Note that a previous run of `yarn install` will already be cached, so this step should simply pick up any recent differences.
-
-2. After the build is complete, open a web browser or a [VNC Viewer](https://www.realvnc.com/en/connect/download/viewer/) to the desktop environnement as described in the quick start and enter `vscode` as the password.
+2. After the build is complete, open a web browser or a [VNC Viewer](https://www.realvnc.com/en/connect/download/viewer/) to connect to the desktop environment as described in the quick start and enter `vscode` as the password.
3. You should now see Code - OSS!
@@ -91,7 +92,7 @@ Next, let's try debugging.
1. Shut down Code - OSS by clicking the box in the upper right corner of the Code - OSS window through your browser or VNC viewer.
-2. Go to your local VS Code client, and use Run / Debug view to launch the **VS Code** configuration. (Typically the default, so you can likely just press F5).
+2. Go to your local VS Code client, and use the **Run / Debug** view to launch the **VS Code** configuration. (Typically the default, so you can likely just press F5).
> **Note:** If launching times out, you can increase the value of `timeout` in the "VS Code", "Attach Main Process", "Attach Extension Host", and "Attach to Shared Process" configurations in [launch.json](../.vscode/launch.json). However, running `scripts/code.sh` first will set up Electron which will usually solve timeout issues.
diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json
index 3b82cd9028..d66344eccf 100644
--- a/.devcontainer/devcontainer.json
+++ b/.devcontainer/devcontainer.json
@@ -3,20 +3,26 @@
// Image contents: https://github.com/microsoft/vscode-dev-containers/blob/master/repository-containers/images/github.com/microsoft/vscode/.devcontainer/base.Dockerfile
"image": "mcr.microsoft.com/vscode/devcontainers/repos/microsoft/vscode:branch-main",
-
- "workspaceMount": "source=${localWorkspaceFolder},target=/home/node/workspace/vscode,type=bind,consistency=cached",
- "workspaceFolder": "/home/node/workspace/vscode",
"overrideCommand": false,
"runArgs": [ "--init", "--security-opt", "seccomp=unconfined"],
"settings": {
- "terminal.integrated.shell.linux": "/bin/bash",
"resmon.show.battery": false,
"resmon.show.cpufreq": false
},
- // noVNC, VNC, debug ports
- "forwardPorts": [6080, 5901, 9222],
+ // noVNC, VNC
+ "forwardPorts": [6080, 5901],
+ "portsAttributes": {
+ "6080": {
+ "label": "VNC web client (noVNC)",
+ "onAutoForward": "silent"
+ },
+ "5901": {
+ "label": "VNC TCP port",
+ "onAutoForward": "silent"
+ }
+ },
"extensions": [
"dbaeumer.vscode-eslint",
diff --git a/.eslintrc.json b/.eslintrc.json
index 4e75422915..dd9e34c2db 100644
--- a/.eslintrc.json
+++ b/.eslintrc.json
@@ -104,6 +104,7 @@
"restrictions": [
"assert",
"sinon",
+ "sinon-test",
"vs/nls",
"**/{vs,sql}/base/common/**",
"**/{vs,sql}/base/test/common/**"
@@ -141,6 +142,7 @@
"restrictions": [
"assert",
"sinon",
+ "sinon-test",
"vs/nls",
"**/{vs,sql}/base/{common,browser}/**",
"**/{vs,sql}/base/test/{common,browser}/**",
@@ -220,6 +222,7 @@
"assert",
"typemoq",
"sinon",
+ "sinon-test",
"vs/nls",
"azdata",
"**/{vs,sql}/base/common/**",
@@ -292,6 +295,7 @@
"typemoq",
"sinon",
"azdata",
+ "sinon-test",
"vs/nls",
"**/{vs,sql}/base/{common,browser}/**",
"**/{vs,sql}/base/test/{common,browser}/**",
@@ -315,6 +319,7 @@
"restrictions": [
"assert",
"sinon",
+ "sinon-test",
"vs/nls",
"**/{vs,sql}/base/common/**",
"**/{vs,sql}/platform/*/common/**",
@@ -338,6 +343,7 @@
"restrictions": [
"assert",
"sinon",
+ "sinon-test",
"vs/nls",
"**/{vs,sql}/base/{common,browser}/**",
"**/{vs,sql}/platform/*/{common,browser}/**",
@@ -361,6 +367,7 @@
"restrictions": [
"assert",
"sinon",
+ "sinon-test",
"vs/nls",
"**/{vs,sql}/base/common/**",
"**/{vs,sql}/platform/*/common/**",
@@ -387,6 +394,7 @@
"restrictions": [
"assert",
"sinon",
+ "sinon-test",
"vs/nls",
"**/{vs,sql}/base/{common,browser}/**",
"**/{vs,sql}/platform/*/{common,browser}/**",
@@ -401,6 +409,7 @@
"restrictions": [
"assert",
"sinon",
+ "sinon-test",
"vs/nls",
"**/{vs,sql}/base/{common,browser}/**",
"**/{vs,sql}/base/test/{common,browser}/**",
@@ -523,7 +532,7 @@
"**/{vs,sql}/platform/**",
"**/{vs,sql}/editor/**",
"**/{vs,sql}/workbench/{common,browser,node,electron-sandbox,electron-browser}/**",
- "vs/workbench/contrib/files/common/editors/fileEditorInput",
+ "vs/workbench/contrib/files/browser/editors/fileEditorInput",
"**/{vs,sql}/workbench/services/**",
"**/{vs,sql}/workbench/test/**",
"*" // node modules
@@ -958,6 +967,7 @@
"**/{vs,sql}/**",
"assert",
"sinon",
+ "sinon-test",
"crypto",
"vscode",
"typemoq",
@@ -993,6 +1003,7 @@
"assert",
"typemoq",
"sinon",
+ "sinon-test",
"crypto",
"xterm*",
"azdata"
@@ -1005,6 +1016,7 @@
"assert",
"typemoq",
"sinon",
+ "sinon-test",
"crypto",
"xterm*"
]
@@ -1042,6 +1054,7 @@
"vscode-dts-cancellation": "warn",
"vscode-dts-use-thenable": "warn",
"vscode-dts-region-comments": "warn",
+ "vscode-dts-vscode-in-comments": "warn",
"vscode-dts-provider-naming": [
"warn",
{
diff --git a/.github/subscribers.json b/.github/subscribers.json
index 7ee6e5cdad..25c676a47c 100644
--- a/.github/subscribers.json
+++ b/.github/subscribers.json
@@ -4,6 +4,7 @@
"rchiodo",
"greazer",
"donjayamanne",
- "jilljac"
+ "jilljac",
+ "IanMatthewHuff"
]
}
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index c74e2dce38..43e455c90a 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -243,7 +243,6 @@ jobs:
with:
path: "**/node_modules"
key: ${{ runner.os }}-cacheNodeModules13-${{ steps.nodeModulesCacheKey.outputs.value }}
- restore-keys: ${{ runner.os }}-cacheNodeModules13-
- name: Get yarn cache directory path
id: yarnCacheDirPath
if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
@@ -279,6 +278,9 @@ jobs:
# - name: Run Monaco Editor Checks {{SQL CARBON EDIT}} Remove Monaco checks
# run: yarn monaco-compile-check
+ - name: Compile /build/
+ run: yarn --cwd build compile
+
- name: Run Trusted Types Checks
run: yarn tsec-compile-check
diff --git a/.vscode/notebooks/api.github-issues b/.vscode/notebooks/api.github-issues
index b9e25a7c91..04d665a4f1 100644
--- a/.vscode/notebooks/api.github-issues
+++ b/.vscode/notebooks/api.github-issues
@@ -2,37 +2,31 @@
{
"kind": 1,
"language": "markdown",
- "value": "#### Config",
- "editable": true
+ "value": "#### Config"
},
{
"kind": 2,
"language": "github-issues",
- "value": "$repo=repo:microsoft/vscode\n$milestone=milestone:\"April 2021\"",
- "editable": true
+ "value": "$repo=repo:microsoft/vscode\n$milestone=milestone:\"June 2021\""
},
{
"kind": 1,
"language": "markdown",
- "value": "### Finalization",
- "editable": true
+ "value": "### Finalization"
},
{
"kind": 2,
"language": "github-issues",
- "value": "$repo $milestone label:api-finalization",
- "editable": true
+ "value": "$repo $milestone label:api-finalization"
},
{
"kind": 1,
"language": "markdown",
- "value": "### Proposals",
- "editable": true
+ "value": "### Proposals"
},
{
"kind": 2,
"language": "github-issues",
- "value": "$repo $milestone is:open label:api-proposal ",
- "editable": true
+ "value": "$repo $milestone is:open label:api-proposal "
}
]
\ No newline at end of file
diff --git a/.vscode/notebooks/endgame.github-issues b/.vscode/notebooks/endgame.github-issues
index 881af2c14b..bc2fba29dd 100644
--- a/.vscode/notebooks/endgame.github-issues
+++ b/.vscode/notebooks/endgame.github-issues
@@ -7,7 +7,7 @@
{
"kind": 2,
"language": "github-issues",
- "value": "$REPOS=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-js-debug repo:microsoft/vscode-remote-release repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-settings-sync-server repo:microsoft/vscode-emmet-helper repo:microsoft/vscode-remotehub\n\n$MILESTONE=milestone:\"April 2021\""
+ "value": "$REPOS=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-js-debug repo:microsoft/vscode-remote-release repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-settings-sync-server repo:microsoft/vscode-emmet-helper repo:microsoft/vscode-remotehub\n\n$MILESTONE=milestone:\"May 2021\""
},
{
"kind": 1,
diff --git a/.vscode/notebooks/my-endgame.github-issues b/.vscode/notebooks/my-endgame.github-issues
index c435ee7750..aad3a8db3a 100644
--- a/.vscode/notebooks/my-endgame.github-issues
+++ b/.vscode/notebooks/my-endgame.github-issues
@@ -7,7 +7,7 @@
{
"kind": 2,
"language": "github-issues",
- "value": "$REPOS=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-js-debug repo:microsoft/vscode-remote-release repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-settings-sync-server repo:microsoft/vscode-remotehub\n\n$MILESTONE=milestone:\"April 2021\"\n\n$MINE=assignee:@me"
+ "value": "$REPOS=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-js-debug repo:microsoft/vscode-remote-release repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-settings-sync-server repo:microsoft/vscode-remotehub\n\n$MILESTONE=milestone:\"May 2021\"\n\n$MINE=assignee:@me"
},
{
"kind": 1,
@@ -157,7 +157,7 @@
{
"kind": 2,
"language": "github-issues",
- "value": "$REPOS $MILESTONE -$MINE is:issue is:closed sort:updated-asc label:bug -label:verified -label:z-author-verified -label:on-testplan -label:*duplicate -label:duplicate -label:invalid -label:*as-designed -label:error-telemetry -label:verification-steps-needed -label:verification-found -author:aeschli -author:alexdima -author:alexr00 -author:AmandaSilver -author:bamurtaugh -author:bpasero -author:btholt -author:chrisdias -author:chrmarti -author:Chuxel -author:connor4312 -author:dbaeumer -author:deepak1556 -author:devinvalenciano -author:digitarald -author:eamodio -author:egamma -author:fiveisprime -author:gregvanl -author:isidorn -author:ItalyPaleAle -author:JacksonKearl -author:joaomoreno -author:jrieken -author:kieferrm -author:lszomoru -author:meganrogge -author:misolori -author:mjbvz -author:ornellaalt -author:orta -author:rebornix -author:RMacfarlane -author:roblourens -author:rzhao271 -author:sana-ajani -author:sandy081 -author:sbatten -author:stevencl -author:Tyriar -author:weinand -author:TylerLeonhardt -author:lramos15"
+ "value": "$REPOS $MILESTONE -$MINE is:issue is:closed sort:updated-asc label:bug -label:verified -label:z-author-verified -label:on-testplan -label:*duplicate -label:duplicate -label:invalid -label:*as-designed -label:error-telemetry -label:verification-steps-needed -label:verification-found -author:aeschli -author:alexdima -author:alexr00 -author:AmandaSilver -author:bamurtaugh -author:bpasero -author:btholt -author:chrisdias -author:chrmarti -author:Chuxel -author:connor4312 -author:dbaeumer -author:deepak1556 -author:devinvalenciano -author:digitarald -author:eamodio -author:egamma -author:fiveisprime -author:gregvanl -author:isidorn -author:ItalyPaleAle -author:JacksonKearl -author:joaomoreno -author:jrieken -author:kieferrm -author:lszomoru -author:meganrogge -author:misolori -author:mjbvz -author:ornellaalt -author:orta -author:rebornix -author:RMacfarlane -author:roblourens -author:rzhao271 -author:sana-ajani -author:sandy081 -author:sbatten -author:stevencl -author:Tyriar -author:weinand -author:TylerLeonhardt -author:lramos15 -author:hediet"
},
{
"kind": 1,
diff --git a/.vscode/notebooks/my-work.github-issues b/.vscode/notebooks/my-work.github-issues
index 4e288133b7..fe57793626 100644
--- a/.vscode/notebooks/my-work.github-issues
+++ b/.vscode/notebooks/my-work.github-issues
@@ -2,20 +2,17 @@
{
"kind": 1,
"language": "markdown",
- "value": "##### `Config`: This should be changed every month/milestone",
- "editable": true
+ "value": "##### `Config`: This should be changed every month/milestone"
},
{
"kind": 2,
"language": "github-issues",
- "value": "// list of repos we work in\n$repos=repo:microsoft/vscode repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks repo:microsoft/vscode-internalbacklog\n\n// current milestone name\n$milestone=milestone:\"April 2021\"",
- "editable": true
+ "value": "// list of repos we work in\n$repos=repo:microsoft/vscode repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks repo:microsoft/vscode-internalbacklog\n\n// current milestone name\n$milestone=milestone:\"June 2021\""
},
{
"kind": 1,
- "language": "github-issues",
- "value": "## Milestone Work",
- "editable": true
+ "language": "markdown",
+ "value": "## Milestone Work"
},
{
"kind": 2,
@@ -25,57 +22,48 @@
},
{
"kind": 1,
- "language": "github-issues",
- "value": "## Bugs, Debt, Features...",
- "editable": true
+ "language": "markdown",
+ "value": "## Bugs, Debt, Features..."
},
{
"kind": 1,
"language": "markdown",
- "value": "#### My Bugs",
- "editable": true
+ "value": "#### My Bugs"
},
{
"kind": 2,
"language": "github-issues",
- "value": "$repos assignee:@me is:open label:bug",
- "editable": true
+ "value": "$repos assignee:@me is:open label:bug"
},
{
"kind": 1,
"language": "markdown",
- "value": "#### Debt & Engineering",
- "editable": true
+ "value": "#### Debt & Engineering"
},
{
"kind": 2,
"language": "github-issues",
- "value": "$repos assignee:@me is:open label:debt OR $repos assignee:@me is:open label:engineering",
- "editable": true
+ "value": "$repos assignee:@me is:open label:debt OR $repos assignee:@me is:open label:engineering"
},
{
"kind": 1,
"language": "markdown",
- "value": "#### Performance 🐌 🔜 🏎",
- "editable": true
+ "value": "#### Performance 🐌 🔜 🏎"
},
{
"kind": 2,
"language": "github-issues",
- "value": "$repos assignee:@me is:open label:perf OR $repos assignee:@me is:open label:perf-startup OR $repos assignee:@me is:open label:perf-bloat OR $repos assignee:@me is:open label:freeze-slow-crash-leak",
- "editable": true
+ "value": "$repos assignee:@me is:open label:perf OR $repos assignee:@me is:open label:perf-startup OR $repos assignee:@me is:open label:perf-bloat OR $repos assignee:@me is:open label:freeze-slow-crash-leak"
},
{
"kind": 1,
"language": "markdown",
- "value": "#### Feature Requests",
- "editable": true
+ "value": "#### Feature Requests"
},
{
"kind": 2,
"language": "github-issues",
- "value": "$repos assignee:@me is:open label:feature-request milestone:Backlog sort:reactions-+1-desc",
- "editable": true
+ "value": "$repos assignee:@me is:open label:feature-request milestone:Backlog sort:reactions-+1-desc"
},
{
"kind": 2,
@@ -86,26 +74,22 @@
{
"kind": 1,
"language": "markdown",
- "value": "### Personal Inbox\n",
- "editable": true
+ "value": "### Personal Inbox\n"
},
{
"kind": 1,
"language": "markdown",
- "value": "\n#### Missing Type label",
- "editable": true
+ "value": "\n#### Missing Type label"
},
{
"kind": 2,
"language": "github-issues",
- "value": "$repos assignee:@me is:open type:issue -label:bug -label:\"needs more info\" -label:feature-request -label:under-discussion -label:debt -label:plan-item -label:upstream",
- "editable": true
+ "value": "$repos assignee:@me is:open type:issue -label:bug -label:\"needs more info\" -label:feature-request -label:under-discussion -label:debt -label:plan-item -label:upstream"
},
{
"kind": 1,
"language": "markdown",
- "value": "#### Not Actionable",
- "editable": true
+ "value": "#### Not Actionable"
},
{
"kind": 2,
diff --git a/.vscode/tasks.json b/.vscode/tasks.json
index 86cfa7a44c..639c9fe520 100644
--- a/.vscode/tasks.json
+++ b/.vscode/tasks.json
@@ -55,39 +55,11 @@
}
}
},
- {
- "type": "npm",
- "script": "watch-extension-mediad",
- "label": "Ext Media - Build",
- "isBackground": true,
- "presentation": {
- "reveal": "never",
- "group": "buildWatchers"
- },
- "problemMatcher": {
- "owner": "typescript",
- "applyTo": "closedDocuments",
- "fileLocation": [
- "absolute"
- ],
- "pattern": {
- "regexp": "Error: ([^(]+)\\((\\d+|\\d+,\\d+|\\d+,\\d+,\\d+,\\d+)\\): (.*)$",
- "file": 1,
- "location": 2,
- "message": 3
- },
- "background": {
- "beginsPattern": "Starting compilation",
- "endsPattern": "Finished compilation"
- }
- }
- },
{
"label": "VS Code - Build",
"dependsOn": [
"Core - Build",
- "Ext - Build",
- "Ext Media - Build",
+ "Ext - Build"
],
"group": {
"kind": "build",
@@ -102,7 +74,8 @@
"group": "build",
"presentation": {
"reveal": "never",
- "group": "buildKillers"
+ "group": "buildKillers",
+ "close": true
},
"problemMatcher": "$tsc"
},
@@ -113,18 +86,8 @@
"group": "build",
"presentation": {
"reveal": "never",
- "group": "buildKillers"
- },
- "problemMatcher": "$tsc"
- },
- {
- "type": "npm",
- "script": "kill-watch-extension-mediad",
- "label": "Kill Ext Media - Build",
- "group": "build",
- "presentation": {
- "reveal": "never",
- "group": "buildKillers"
+ "group": "buildKillers",
+ "close": true
},
"problemMatcher": "$tsc"
},
@@ -132,8 +95,7 @@
"label": "Kill VS Code - Build",
"dependsOn": [
"Kill Core - Build",
- "Kill Ext - Build",
- "Kill Ext Media - Build",
+ "Kill Ext - Build"
],
"group": "build",
"problemMatcher": []
@@ -252,7 +214,8 @@
"command": "node build/lib/preLaunch.js",
"label": "Ensure Prelaunch Dependencies",
"presentation": {
- "reveal": "silent"
+ "reveal": "silent",
+ "close": true
}
},
{
diff --git a/.yarnrc b/.yarnrc
index 0b7e220665..ba29080966 100644
--- a/.yarnrc
+++ b/.yarnrc
@@ -1,3 +1,3 @@
disturl "https://electronjs.org/headers"
-target "12.0.7"
+target "12.0.9"
runtime "electron"
diff --git a/ThirdPartyNotices.txt b/ThirdPartyNotices.txt
index 218692880c..05bbfe95b6 100644
--- a/ThirdPartyNotices.txt
+++ b/ThirdPartyNotices.txt
@@ -86,6 +86,125 @@ expressly granted herein, whether by implication, estoppel or otherwise.
Microsoft PROSE SDK: https://microsoft.github.io/prose
+ atom/language-clojure version 0.22.7 (https://github.com/atom/language-clojure)
+ atom/language-coffee-script version 0.49.3 (https://github.com/atom/language-coffee-script)
+ atom/language-css version 0.44.4 (https://github.com/atom/language-css)
+ atom/language-java version 0.32.1 (https://github.com/atom/language-java)
+ atom/language-sass version 0.62.1 (https://github.com/atom/language-sass)
+ atom/language-shellscript version 0.26.0 (https://github.com/atom/language-shellscript)
+ atom/language-xml version 0.35.2 (https://github.com/atom/language-xml)
+ better-go-syntax version 1.0.0 (https://github.com/jeff-hykin/better-go-syntax/ )
+ Colorsublime-Themes version 0.1.0 (https://github.com/Colorsublime/Colorsublime-Themes)
+ daaain/Handlebars version 1.8.0 (https://github.com/daaain/Handlebars)
+ dart-lang/dart-syntax-highlight (https://github.com/dart-lang/dart-syntax-highlight)
+ davidrios/pug-tmbundle (https://github.com/davidrios/pug-tmbundle)
+ definitelytyped (https://github.com/DefinitelyTyped/DefinitelyTyped)
+ demyte/language-cshtml version 0.3.0 (https://github.com/demyte/language-cshtml)
+ Document Object Model version 4.0.0 (https://www.w3.org/DOM/)
+ dotnet/csharp-tmLanguage version 0.1.0 (https://github.com/dotnet/csharp-tmLanguage)
+ expand-abbreviation version 0.5.8 (https://github.com/emmetio/expand-abbreviation)
+ fadeevab/make.tmbundle (https://github.com/fadeevab/make.tmbundle)
+ freebroccolo/atom-language-swift (https://github.com/freebroccolo/atom-language-swift)
+ HTML 5.1 W3C Working Draft version 08 October 2015 (http://www.w3.org/TR/2015/WD-html51-20151008/)
+ Ikuyadeu/vscode-R version 1.3.0 (https://github.com/Ikuyadeu/vscode-R)
+ insane version 2.6.2 (https://github.com/bevacqua/insane)
+ Ionic documentation version 1.2.4 (https://github.com/ionic-team/ionic-site)
+ ionide/ionide-fsgrammar (https://github.com/ionide/ionide-fsgrammar)
+ jeff-hykin/cpp-textmate-grammar version 1.12.11 (https://github.com/jeff-hykin/cpp-textmate-grammar)
+ jeff-hykin/cpp-textmate-grammar version 1.15.5 (https://github.com/jeff-hykin/cpp-textmate-grammar)
+ js-beautify version 1.6.8 (https://github.com/beautify-web/js-beautify)
+ JuliaEditorSupport/atom-language-julia version 0.21.0 (https://github.com/JuliaEditorSupport/atom-language-julia)
+ Jxck/assert version 1.0.0 (https://github.com/Jxck/assert)
+ language-docker (https://github.com/moby/moby)
+ language-less version 0.34.2 (https://github.com/atom/language-less)
+ language-php version 0.46.2 (https://github.com/atom/language-php)
+ MagicStack/MagicPython version 1.1.1 (https://github.com/MagicStack/MagicPython)
+ marked version 1.1.0 (https://github.com/markedjs/marked)
+ mdn-data version 1.1.12 (https://github.com/mdn/data)
+ microsoft/TypeScript-TmLanguage version 0.0.1 (https://github.com/microsoft/TypeScript-TmLanguage)
+ microsoft/vscode-JSON.tmLanguage (https://github.com/microsoft/vscode-JSON.tmLanguage)
+ microsoft/vscode-markdown-tm-grammar version 1.0.0 (https://github.com/microsoft/vscode-markdown-tm-grammar)
+ microsoft/vscode-mssql version 1.9.0 (https://github.com/microsoft/vscode-mssql)
+ mmims/language-batchfile version 0.7.6 (https://github.com/mmims/language-batchfile)
+ NVIDIA/cuda-cpp-grammar (https://github.com/NVIDIA/cuda-cpp-grammar)
+ PowerShell/EditorSyntax version 1.0.0 (https://github.com/PowerShell/EditorSyntax)
+ rust-syntax version 0.4.3 (https://github.com/dustypomerleau/rust-syntax)
+ seti-ui version 0.1.0 (https://github.com/jesseweed/seti-ui)
+ shaders-tmLanguage version 0.1.0 (https://github.com/tgjones/shaders-tmLanguage)
+ textmate/asp.vb.net.tmbundle (https://github.com/textmate/asp.vb.net.tmbundle)
+ textmate/c.tmbundle (https://github.com/textmate/c.tmbundle)
+ textmate/diff.tmbundle (https://github.com/textmate/diff.tmbundle)
+ textmate/git.tmbundle (https://github.com/textmate/git.tmbundle)
+ textmate/groovy.tmbundle (https://github.com/textmate/groovy.tmbundle)
+ textmate/html.tmbundle (https://github.com/textmate/html.tmbundle)
+ textmate/ini.tmbundle (https://github.com/textmate/ini.tmbundle)
+ textmate/javascript.tmbundle (https://github.com/textmate/javascript.tmbundle)
+ textmate/lua.tmbundle (https://github.com/textmate/lua.tmbundle)
+ textmate/markdown.tmbundle (https://github.com/textmate/markdown.tmbundle)
+ textmate/perl.tmbundle (https://github.com/textmate/perl.tmbundle)
+ textmate/ruby.tmbundle (https://github.com/textmate/ruby.tmbundle)
+ textmate/yaml.tmbundle (https://github.com/textmate/yaml.tmbundle)
+ TypeScript-TmLanguage version 0.1.8 (https://github.com/microsoft/TypeScript-TmLanguage)
+ TypeScript-TmLanguage version 1.0.0 (https://github.com/microsoft/TypeScript-TmLanguage)
+ Unicode version 12.0.0 (https://home.unicode.org/)
+ vscode-codicons version 0.0.14 (https://github.com/microsoft/vscode-codicons)
+ vscode-logfile-highlighter version 2.11.0 (https://github.com/emilast/vscode-logfile-highlighter)
+ vscode-swift version 0.0.1 (https://github.com/owensd/vscode-swift)
+ Web Background Synchronization (https://github.com/WICG/background-sync)
+
+
+%% atom/language-clojure NOTICES AND INFORMATION BEGIN HERE
+=========================================
+Copyright (c) 2014 GitHub Inc.
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+
+This package was derived from a TextMate bundle located at
+https://github.com/mmcgrana/textmate-clojure and distributed under the
+following license, located in `LICENSE.md`:
+
+The MIT License (MIT)
+
+Copyright (c) 2010- Mark McGranaghan
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+=========================================
+END OF atom/language-clojure NOTICES AND INFORMATION
+
%% angular NOTICES AND INFORMATION BEGIN HERE
Copyright (c) 2014-2017 Google, Inc. http://angular.io
@@ -560,6 +679,63 @@ THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLI
=========================================
END OF http-proxy-agent NOTICES AND INFORMATION
+%% dart-lang/dart-syntax-highlight NOTICES AND INFORMATION BEGIN HERE
+=========================================
+Copyright 2020, the Dart project authors.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of Google LLC nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+=========================================
+END OF dart-lang/dart-syntax-highlight NOTICES AND INFORMATION
+
+%% davidrios/pug-tmbundle NOTICES AND INFORMATION BEGIN HERE
+=========================================
+The MIT License (MIT)
+
+Copyright (c) 2016 David Rios
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+=========================================
+END OF davidrios/pug-tmbundle NOTICES AND INFORMATION
+
%% iconv-lite NOTICES AND INFORMATION BEGIN HERE
=========================================
Copyright (c) 2011 Alexander Shtuchkin
@@ -1486,6 +1662,61 @@ THE SOFTWARE.
=========================================
END OF node-pty NOTICES AND INFORMATION
+%% JuliaEditorSupport/atom-language-julia NOTICES AND INFORMATION BEGIN HERE
+=========================================
+The atom-language-julia package is licensed under the MIT "Expat" License:
+
+> Copyright (c) 2015
+>
+> Permission is hereby granted, free of charge, to any person obtaining
+> a copy of this software and associated documentation files (the
+> "Software"), to deal in the Software without restriction, including
+> without limitation the rights to use, copy, modify, merge, publish,
+> distribute, sublicense, and/or sell copies of the Software, and to
+> permit persons to whom the Software is furnished to do so, subject to
+> the following conditions:
+>
+> The above copyright notice and this permission notice shall be
+> included in all copies or substantial portions of the Software.
+>
+> THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+> EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+> MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+> IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+> CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+> TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+> SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+=========================================
+END OF JuliaEditorSupport/atom-language-julia NOTICES AND INFORMATION
+
+%% Jxck/assert NOTICES AND INFORMATION BEGIN HERE
+=========================================
+The MIT License (MIT)
+
+Copyright (c) 2011 Jxck
+
+Originally from node.js (http://nodejs.org)
+Copyright Joyent, Inc.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+=========================================
+END OF Jxck/assert NOTICES AND INFORMATION
+
%% nsfw NOTICES AND INFORMATION BEGIN HERE
=========================================
The MIT License (MIT)
diff --git a/build/.cachesalt b/build/.cachesalt
index 013244143e..4ec7190dfc 100644
--- a/build/.cachesalt
+++ b/build/.cachesalt
@@ -1 +1 @@
-2021-04-07T03:52:18.011Z
+2021-08-23T03:52:18.011Z
diff --git a/build/azure-pipelines/common/createAsset.js b/build/azure-pipelines/common/createAsset.js
index c972cdf3c2..d197cf7c25 100644
--- a/build/azure-pipelines/common/createAsset.js
+++ b/build/azure-pipelines/common/createAsset.js
@@ -5,15 +5,101 @@
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const fs = require("fs");
+const url = require("url");
const crypto = require("crypto");
const azure = require("azure-storage");
const mime = require("mime");
const cosmos_1 = require("@azure/cosmos");
const retry_1 = require("./retry");
-if (process.argv.length !== 6) {
- console.error('Usage: node createAsset.js PLATFORM TYPE NAME FILE');
+if (process.argv.length !== 8) {
+ console.error('Usage: node createAsset.js PRODUCT OS ARCH TYPE NAME FILE');
process.exit(-1);
}
+// Contains all of the logic for mapping details to our actual product names in CosmosDB
+function getPlatform(product, os, arch, type) {
+ switch (os) {
+ case 'win32':
+ switch (product) {
+ case 'client':
+ const asset = arch === 'ia32' ? 'win32' : `win32-${arch}`;
+ switch (type) {
+ case 'archive':
+ return `${asset}-archive`;
+ case 'setup':
+ return asset;
+ case 'user-setup':
+ return `${asset}-user`;
+ default:
+ throw `Unrecognized: ${product} ${os} ${arch} ${type}`;
+ }
+ case 'server':
+ if (arch === 'arm64') {
+ throw `Unrecognized: ${product} ${os} ${arch} ${type}`;
+ }
+ return arch === 'ia32' ? 'server-win32' : `server-win32-${arch}`;
+ case 'web':
+ if (arch === 'arm64') {
+ throw `Unrecognized: ${product} ${os} ${arch} ${type}`;
+ }
+ return arch === 'ia32' ? 'server-win32-web' : `server-win32-${arch}-web`;
+ default:
+ throw `Unrecognized: ${product} ${os} ${arch} ${type}`;
+ }
+ case 'linux':
+ switch (type) {
+ case 'snap':
+ return `linux-snap-${arch}`;
+ case 'archive-unsigned':
+ switch (product) {
+ case 'client':
+ return `linux-${arch}`;
+ case 'server':
+ return `server-linux-${arch}`;
+ case 'web':
+ return arch === 'standalone' ? 'web-standalone' : `server-linux-${arch}-web`;
+ default:
+ throw `Unrecognized: ${product} ${os} ${arch} ${type}`;
+ }
+ case 'deb-package':
+ return `linux-deb-${arch}`;
+ case 'rpm-package':
+ return `linux-rpm-${arch}`;
+ default:
+ throw `Unrecognized: ${product} ${os} ${arch} ${type}`;
+ }
+ case 'darwin':
+ switch (product) {
+ case 'client':
+ if (arch === 'x64') {
+ return 'darwin';
+ }
+ return `darwin-${arch}`;
+ case 'server':
+ return 'server-darwin';
+ case 'web':
+ if (arch !== 'x64') {
+ throw `What should the platform be?: ${product} ${os} ${arch} ${type}`;
+ }
+ return 'server-darwin-web';
+ default:
+ throw `Unrecognized: ${product} ${os} ${arch} ${type}`;
+ }
+ default:
+ throw `Unrecognized: ${product} ${os} ${arch} ${type}`;
+ }
+}
+// Contains all of the logic for mapping types to our actual types in CosmosDB
+function getRealType(type) {
+ switch (type) {
+ case 'user-setup':
+ return 'setup';
+ case 'deb-package':
+ case 'rpm-package':
+ return 'package';
+ default:
+ return type;
+ }
+}
function hashStream(hashName, stream) {
return new Promise((c, e) => {
const shasum = crypto.createHash(hashName);
@@ -45,7 +131,10 @@ function getEnv(name) {
return result;
}
async function main() {
- const [, , platform, type, fileName, filePath] = process.argv;
+ const [, , product, os, arch, unprocessedType, fileName, filePath] = process.argv;
+ // getPlatform needs the unprocessedType
+ const platform = getPlatform(product, os, arch, unprocessedType);
+ const type = getRealType(unprocessedType);
const quality = getEnv('VSCODE_QUALITY');
const commit = getEnv('BUILD_SOURCEVERSION');
console.log('Creating asset...');
@@ -65,14 +154,27 @@ async function main() {
console.log(`Blob ${quality}, ${blobName} already exists, not publishing again.`);
return;
}
- console.log('Uploading blobs to Azure storage...');
- await uploadBlob(blobService, quality, blobName, filePath, fileName);
+ const mooncakeBlobService = azure.createBlobService(storageAccount, process.env['MOONCAKE_STORAGE_ACCESS_KEY'], `${storageAccount}.blob.core.chinacloudapi.cn`)
+ .withFilter(new azure.ExponentialRetryPolicyFilter(20));
+ // mooncake is fussy and far away, this is needed!
+ blobService.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;
+ mooncakeBlobService.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;
+ console.log('Uploading blobs to Azure storage and Mooncake Azure storage...');
+ await retry_1.retry(() => Promise.all([
+ uploadBlob(blobService, quality, blobName, filePath, fileName),
+ uploadBlob(mooncakeBlobService, quality, blobName, filePath, fileName)
+ ]));
console.log('Blobs successfully uploaded.');
+ // TODO: Understand if blobName and blobPath are the same and replace blobPath with blobName if so.
+ const assetUrl = `${process.env['AZURE_CDN_URL']}/${quality}/${blobName}`;
+ const blobPath = url.parse(assetUrl).path;
+ const mooncakeUrl = `${process.env['MOONCAKE_CDN_URL']}${blobPath}`;
const asset = {
platform,
type,
- url: `${process.env['AZURE_CDN_URL']}/${quality}/${blobName}`,
+ url: assetUrl,
hash: sha1hash,
+ mooncakeUrl,
sha256hash,
size
};
@@ -83,7 +185,8 @@ async function main() {
console.log('Asset:', JSON.stringify(asset, null, ' '));
const client = new cosmos_1.CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT'], key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const scripts = client.database('builds').container(quality).scripts;
- await (0, retry_1.retry)(() => scripts.storedProcedure('createAsset').execute('', [commit, asset, true]));
+ await retry_1.retry(() => scripts.storedProcedure('createAsset').execute('', [commit, asset, true]));
+ console.log(` Done ✔️`);
}
main().then(() => {
console.log('Asset successfully created');
diff --git a/build/azure-pipelines/common/createAsset.ts b/build/azure-pipelines/common/createAsset.ts
index 4fee172297..37a49bd237 100644
--- a/build/azure-pipelines/common/createAsset.ts
+++ b/build/azure-pipelines/common/createAsset.ts
@@ -6,6 +6,7 @@
'use strict';
import * as fs from 'fs';
+import * as url from 'url';
import { Readable } from 'stream';
import * as crypto from 'crypto';
import * as azure from 'azure-storage';
@@ -24,11 +25,98 @@ interface Asset {
supportsFastUpdate?: boolean;
}
-if (process.argv.length !== 6) {
- console.error('Usage: node createAsset.js PLATFORM TYPE NAME FILE');
+if (process.argv.length !== 8) {
+ console.error('Usage: node createAsset.js PRODUCT OS ARCH TYPE NAME FILE');
process.exit(-1);
}
+// Contains all of the logic for mapping details to our actual product names in CosmosDB
+function getPlatform(product: string, os: string, arch: string, type: string): string {
+ switch (os) {
+ case 'win32':
+ switch (product) {
+ case 'client':
+ const asset = arch === 'ia32' ? 'win32' : `win32-${arch}`;
+ switch (type) {
+ case 'archive':
+ return `${asset}-archive`;
+ case 'setup':
+ return asset;
+ case 'user-setup':
+ return `${asset}-user`;
+ default:
+ throw `Unrecognized: ${product} ${os} ${arch} ${type}`;
+ }
+ case 'server':
+ if (arch === 'arm64') {
+ throw `Unrecognized: ${product} ${os} ${arch} ${type}`;
+ }
+ return arch === 'ia32' ? 'server-win32' : `server-win32-${arch}`;
+ case 'web':
+ if (arch === 'arm64') {
+ throw `Unrecognized: ${product} ${os} ${arch} ${type}`;
+ }
+ return arch === 'ia32' ? 'server-win32-web' : `server-win32-${arch}-web`;
+ default:
+ throw `Unrecognized: ${product} ${os} ${arch} ${type}`;
+ }
+ case 'linux':
+ switch (type) {
+ case 'snap':
+ return `linux-snap-${arch}`;
+ case 'archive-unsigned':
+ switch (product) {
+ case 'client':
+ return `linux-${arch}`;
+ case 'server':
+ return `server-linux-${arch}`;
+ case 'web':
+ return arch === 'standalone' ? 'web-standalone' : `server-linux-${arch}-web`;
+ default:
+ throw `Unrecognized: ${product} ${os} ${arch} ${type}`;
+ }
+ case 'deb-package':
+ return `linux-deb-${arch}`;
+ case 'rpm-package':
+ return `linux-rpm-${arch}`;
+ default:
+ throw `Unrecognized: ${product} ${os} ${arch} ${type}`;
+ }
+ case 'darwin':
+ switch (product) {
+ case 'client':
+ if (arch === 'x64') {
+ return 'darwin';
+ }
+ return `darwin-${arch}`;
+ case 'server':
+ return 'server-darwin';
+ case 'web':
+ if (arch !== 'x64') {
+ throw `What should the platform be?: ${product} ${os} ${arch} ${type}`;
+ }
+ return 'server-darwin-web';
+ default:
+ throw `Unrecognized: ${product} ${os} ${arch} ${type}`;
+ }
+ default:
+ throw `Unrecognized: ${product} ${os} ${arch} ${type}`;
+ }
+}
+
+// Contains all of the logic for mapping types to our actual types in CosmosDB
+function getRealType(type: string) {
+ switch (type) {
+ case 'user-setup':
+ return 'setup';
+ case 'deb-package':
+ case 'rpm-package':
+ return 'package';
+ default:
+ return type;
+ }
+}
+
function hashStream(hashName: string, stream: Readable): Promise {
return new Promise((c, e) => {
const shasum = crypto.createHash(hashName);
@@ -68,7 +156,10 @@ function getEnv(name: string): string {
}
async function main(): Promise {
- const [, , platform, type, fileName, filePath] = process.argv;
+ const [, , product, os, arch, unprocessedType, fileName, filePath] = process.argv;
+ // getPlatform needs the unprocessedType
+ const platform = getPlatform(product, os, arch, unprocessedType);
+ const type = getRealType(unprocessedType);
const quality = getEnv('VSCODE_QUALITY');
const commit = getEnv('BUILD_SOURCEVERSION');
@@ -98,17 +189,33 @@ async function main(): Promise {
return;
}
- console.log('Uploading blobs to Azure storage...');
+ const mooncakeBlobService = azure.createBlobService(storageAccount, process.env['MOONCAKE_STORAGE_ACCESS_KEY']!, `${storageAccount}.blob.core.chinacloudapi.cn`)
+ .withFilter(new azure.ExponentialRetryPolicyFilter(20));
- await uploadBlob(blobService, quality, blobName, filePath, fileName);
+ // mooncake is fussy and far away, this is needed!
+ blobService.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;
+ mooncakeBlobService.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;
+
+ console.log('Uploading blobs to Azure storage and Mooncake Azure storage...');
+
+ await retry(() => Promise.all([
+ uploadBlob(blobService, quality, blobName, filePath, fileName),
+ uploadBlob(mooncakeBlobService, quality, blobName, filePath, fileName)
+ ]));
console.log('Blobs successfully uploaded.');
+ // TODO: Understand if blobName and blobPath are the same and replace blobPath with blobName if so.
+ const assetUrl = `${process.env['AZURE_CDN_URL']}/${quality}/${blobName}`;
+ const blobPath = url.parse(assetUrl).path;
+ const mooncakeUrl = `${process.env['MOONCAKE_CDN_URL']}${blobPath}`;
+
const asset: Asset = {
platform,
type,
- url: `${process.env['AZURE_CDN_URL']}/${quality}/${blobName}`,
+ url: assetUrl,
hash: sha1hash,
+ mooncakeUrl,
sha256hash,
size
};
@@ -123,6 +230,8 @@ async function main(): Promise {
const client = new CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT']!, key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const scripts = client.database('builds').container(quality).scripts;
await retry(() => scripts.storedProcedure('createAsset').execute('', [commit, asset, true]));
+
+ console.log(` Done ✔️`);
}
main().then(() => {
diff --git a/build/azure-pipelines/common/createBuild.js b/build/azure-pipelines/common/createBuild.js
index 15e06b1331..2165a62b8c 100644
--- a/build/azure-pipelines/common/createBuild.js
+++ b/build/azure-pipelines/common/createBuild.js
@@ -40,7 +40,7 @@ async function main() {
};
const client = new cosmos_1.CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT'], key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const scripts = client.database('builds').container(quality).scripts;
- await (0, retry_1.retry)(() => scripts.storedProcedure('createBuild').execute('', [Object.assign(Object.assign({}, build), { _partitionKey: '' })]));
+ await retry_1.retry(() => scripts.storedProcedure('createBuild').execute('', [Object.assign(Object.assign({}, build), { _partitionKey: '' })]));
}
main().then(() => {
console.log('Build successfully created');
diff --git a/build/azure-pipelines/common/extract-telemetry.sh b/build/azure-pipelines/common/extract-telemetry.sh
index 4abade1e7b..9cebe22bfd 100755
--- a/build/azure-pipelines/common/extract-telemetry.sh
+++ b/build/azure-pipelines/common/extract-telemetry.sh
@@ -4,12 +4,12 @@ set -e
cd $BUILD_STAGINGDIRECTORY
mkdir extraction
cd extraction
-git clone --depth 1 https://github.com/Microsoft/vscode-extension-telemetry.git
-git clone --depth 1 https://github.com/Microsoft/vscode-chrome-debug-core.git
-git clone --depth 1 https://github.com/Microsoft/vscode-node-debug2.git
-git clone --depth 1 https://github.com/Microsoft/vscode-node-debug.git
-git clone --depth 1 https://github.com/Microsoft/vscode-html-languageservice.git
-git clone --depth 1 https://github.com/Microsoft/vscode-json-languageservice.git
+git clone --depth 1 https://github.com/microsoft/vscode-extension-telemetry.git
+git clone --depth 1 https://github.com/microsoft/vscode-chrome-debug-core.git
+git clone --depth 1 https://github.com/microsoft/vscode-node-debug2.git
+git clone --depth 1 https://github.com/microsoft/vscode-node-debug.git
+git clone --depth 1 https://github.com/microsoft/vscode-html-languageservice.git
+git clone --depth 1 https://github.com/microsoft/vscode-json-languageservice.git
node $BUILD_SOURCESDIRECTORY/node_modules/.bin/vscode-telemetry-extractor --sourceDir $BUILD_SOURCESDIRECTORY --excludedDir $BUILD_SOURCESDIRECTORY/extensions --outputDir . --applyEndpoints
node $BUILD_SOURCESDIRECTORY/node_modules/.bin/vscode-telemetry-extractor --config $BUILD_SOURCESDIRECTORY/build/azure-pipelines/common/telemetry-config.json -o .
mkdir -p $BUILD_SOURCESDIRECTORY/.build/telemetry
diff --git a/build/azure-pipelines/common/publish-webview.js b/build/azure-pipelines/common/publish-webview.js
index f50e50277d..bf0c3d30c0 100644
--- a/build/azure-pipelines/common/publish-webview.js
+++ b/build/azure-pipelines/common/publish-webview.js
@@ -39,7 +39,7 @@ async function publish(commit, files) {
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
await assertContainer(blobService, commit);
for (const file of files) {
- const blobName = (0, path_1.basename)(file);
+ const blobName = path_1.basename(file);
const blobExists = await doesBlobExist(blobService, commit, blobName);
if (blobExists) {
console.log(`Blob ${commit}, ${blobName} already exists, not publishing again.`);
@@ -58,7 +58,7 @@ function main() {
}
const opts = minimist(process.argv.slice(2));
const [directory] = opts._;
- const files = fileNames.map(fileName => (0, path_1.join)(directory, fileName));
+ const files = fileNames.map(fileName => path_1.join(directory, fileName));
publish(commit, files).catch(err => {
console.error(err);
process.exit(1);
diff --git a/build/azure-pipelines/common/releaseBuild.js b/build/azure-pipelines/common/releaseBuild.js
index ef44e03189..6932aed3bd 100644
--- a/build/azure-pipelines/common/releaseBuild.js
+++ b/build/azure-pipelines/common/releaseBuild.js
@@ -39,7 +39,7 @@ async function main() {
}
console.log(`Releasing build ${commit}...`);
const scripts = client.database('builds').container(quality).scripts;
- await (0, retry_1.retry)(() => scripts.storedProcedure('releaseBuild').execute('', [commit]));
+ await retry_1.retry(() => scripts.storedProcedure('releaseBuild').execute('', [commit]));
}
main().then(() => {
console.log('Build successfully released');
diff --git a/build/azure-pipelines/common/sync-mooncake.js b/build/azure-pipelines/common/sync-mooncake.js
deleted file mode 100644
index cf7c41e57b..0000000000
--- a/build/azure-pipelines/common/sync-mooncake.js
+++ /dev/null
@@ -1,87 +0,0 @@
-/*---------------------------------------------------------------------------------------------
- * Copyright (c) Microsoft Corporation. All rights reserved.
- * Licensed under the Source EULA. See License.txt in the project root for license information.
- *--------------------------------------------------------------------------------------------*/
-'use strict';
-Object.defineProperty(exports, "__esModule", { value: true });
-const url = require("url");
-const azure = require("azure-storage");
-const mime = require("mime");
-const cosmos_1 = require("@azure/cosmos");
-const retry_1 = require("./retry");
-function log(...args) {
- console.log(...[`[${new Date().toISOString()}]`, ...args]);
-}
-function error(...args) {
- console.error(...[`[${new Date().toISOString()}]`, ...args]);
-}
-if (process.argv.length < 3) {
- error('Usage: node sync-mooncake.js ');
- process.exit(-1);
-}
-async function sync(commit, quality) {
- log(`Synchronizing Mooncake assets for ${quality}, ${commit}...`);
- const client = new cosmos_1.CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT'], key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
- const container = client.database('builds').container(quality);
- const query = `SELECT TOP 1 * FROM c WHERE c.id = "${commit}"`;
- const res = await container.items.query(query, {}).fetchAll();
- if (res.resources.length !== 1) {
- throw new Error(`No builds found for ${commit}`);
- }
- const build = res.resources[0];
- log(`Found build for ${commit}, with ${build.assets.length} assets`);
- const storageAccount = process.env['AZURE_STORAGE_ACCOUNT_2'];
- const blobService = azure.createBlobService(storageAccount, process.env['AZURE_STORAGE_ACCESS_KEY_2'])
- .withFilter(new azure.ExponentialRetryPolicyFilter(20));
- const mooncakeBlobService = azure.createBlobService(storageAccount, process.env['MOONCAKE_STORAGE_ACCESS_KEY'], `${storageAccount}.blob.core.chinacloudapi.cn`)
- .withFilter(new azure.ExponentialRetryPolicyFilter(20));
- // mooncake is fussy and far away, this is needed!
- blobService.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;
- mooncakeBlobService.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;
- for (const asset of build.assets) {
- try {
- const blobPath = url.parse(asset.url).path;
- if (!blobPath) {
- throw new Error(`Failed to parse URL: ${asset.url}`);
- }
- const blobName = blobPath.replace(/^\/\w+\//, '');
- log(`Found ${blobName}`);
- if (asset.mooncakeUrl) {
- log(` Already in Mooncake ✔️`);
- continue;
- }
- const readStream = blobService.createReadStream(quality, blobName, undefined);
- const blobOptions = {
- contentSettings: {
- contentType: mime.lookup(blobPath),
- cacheControl: 'max-age=31536000, public'
- }
- };
- const writeStream = mooncakeBlobService.createWriteStreamToBlockBlob(quality, blobName, blobOptions, undefined);
- log(` Uploading to Mooncake...`);
- await new Promise((c, e) => readStream.pipe(writeStream).on('finish', c).on('error', e));
- log(` Updating build in DB...`);
- const mooncakeUrl = `${process.env['MOONCAKE_CDN_URL']}${blobPath}`;
- await (0, retry_1.retry)(() => container.scripts.storedProcedure('setAssetMooncakeUrl')
- .execute('', [commit, asset.platform, asset.type, mooncakeUrl]));
- log(` Done ✔️`);
- }
- catch (err) {
- error(err);
- }
- }
- log(`All done ✔️`);
-}
-function main() {
- const commit = process.env['BUILD_SOURCEVERSION'];
- if (!commit) {
- error('Skipping publish due to missing BUILD_SOURCEVERSION');
- return;
- }
- const quality = process.argv[2];
- sync(commit, quality).catch(err => {
- error(err);
- process.exit(1);
- });
-}
-main();
diff --git a/build/azure-pipelines/common/sync-mooncake.ts b/build/azure-pipelines/common/sync-mooncake.ts
deleted file mode 100644
index aa645a8861..0000000000
--- a/build/azure-pipelines/common/sync-mooncake.ts
+++ /dev/null
@@ -1,131 +0,0 @@
-/*---------------------------------------------------------------------------------------------
- * Copyright (c) Microsoft Corporation. All rights reserved.
- * Licensed under the Source EULA. See License.txt in the project root for license information.
- *--------------------------------------------------------------------------------------------*/
-
-'use strict';
-
-import * as url from 'url';
-import * as azure from 'azure-storage';
-import * as mime from 'mime';
-import { CosmosClient } from '@azure/cosmos';
-import { retry } from './retry';
-
-function log(...args: any[]) {
- console.log(...[`[${new Date().toISOString()}]`, ...args]);
-}
-
-function error(...args: any[]) {
- console.error(...[`[${new Date().toISOString()}]`, ...args]);
-}
-
-if (process.argv.length < 3) {
- error('Usage: node sync-mooncake.js ');
- process.exit(-1);
-}
-
-interface Build {
- assets: Asset[];
-}
-
-interface Asset {
- platform: string;
- type: string;
- url: string;
- mooncakeUrl: string;
- hash: string;
- sha256hash: string;
- size: number;
- supportsFastUpdate?: boolean;
-}
-
-async function sync(commit: string, quality: string): Promise {
- log(`Synchronizing Mooncake assets for ${quality}, ${commit}...`);
-
- const client = new CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT']!, key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
- const container = client.database('builds').container(quality);
-
- const query = `SELECT TOP 1 * FROM c WHERE c.id = "${commit}"`;
- const res = await container.items.query(query, {}).fetchAll();
-
- if (res.resources.length !== 1) {
- throw new Error(`No builds found for ${commit}`);
- }
-
- const build = res.resources[0];
-
- log(`Found build for ${commit}, with ${build.assets.length} assets`);
-
- const storageAccount = process.env['AZURE_STORAGE_ACCOUNT_2']!;
-
- const blobService = azure.createBlobService(storageAccount, process.env['AZURE_STORAGE_ACCESS_KEY_2']!)
- .withFilter(new azure.ExponentialRetryPolicyFilter(20));
-
- const mooncakeBlobService = azure.createBlobService(storageAccount, process.env['MOONCAKE_STORAGE_ACCESS_KEY']!, `${storageAccount}.blob.core.chinacloudapi.cn`)
- .withFilter(new azure.ExponentialRetryPolicyFilter(20));
-
- // mooncake is fussy and far away, this is needed!
- blobService.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;
- mooncakeBlobService.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;
-
- for (const asset of build.assets) {
- try {
- const blobPath = url.parse(asset.url).path;
-
- if (!blobPath) {
- throw new Error(`Failed to parse URL: ${asset.url}`);
- }
-
- const blobName = blobPath.replace(/^\/\w+\//, '');
-
- log(`Found ${blobName}`);
-
- if (asset.mooncakeUrl) {
- log(` Already in Mooncake ✔️`);
- continue;
- }
-
- const readStream = blobService.createReadStream(quality, blobName, undefined!);
- const blobOptions: azure.BlobService.CreateBlockBlobRequestOptions = {
- contentSettings: {
- contentType: mime.lookup(blobPath),
- cacheControl: 'max-age=31536000, public'
- }
- };
-
- const writeStream = mooncakeBlobService.createWriteStreamToBlockBlob(quality, blobName, blobOptions, undefined);
-
- log(` Uploading to Mooncake...`);
- await new Promise((c, e) => readStream.pipe(writeStream).on('finish', c).on('error', e));
-
- log(` Updating build in DB...`);
- const mooncakeUrl = `${process.env['MOONCAKE_CDN_URL']}${blobPath}`;
- await retry(() => container.scripts.storedProcedure('setAssetMooncakeUrl')
- .execute('', [commit, asset.platform, asset.type, mooncakeUrl]));
-
- log(` Done ✔️`);
- } catch (err) {
- error(err);
- }
- }
-
- log(`All done ✔️`);
-}
-
-function main(): void {
- const commit = process.env['BUILD_SOURCEVERSION'];
-
- if (!commit) {
- error('Skipping publish due to missing BUILD_SOURCEVERSION');
- return;
- }
-
- const quality = process.argv[2];
-
- sync(commit, quality).catch(err => {
- error(err);
- process.exit(1);
- });
-}
-
-main();
diff --git a/build/azure-pipelines/darwin/product-build-darwin-sign.yml b/build/azure-pipelines/darwin/product-build-darwin-sign.yml
index 4ad8349c51..49f74b55c9 100644
--- a/build/azure-pipelines/darwin/product-build-darwin-sign.yml
+++ b/build/azure-pipelines/darwin/product-build-darwin-sign.yml
@@ -35,13 +35,13 @@ steps:
displayName: Restore modules for just build folder and compile it
- download: current
- artifact: vscode-darwin-$(VSCODE_ARCH)
+ artifact: unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive
displayName: Download $(VSCODE_ARCH) artifact
- script: |
set -e
- unzip $(Pipeline.Workspace)/vscode-darwin-$(VSCODE_ARCH)/VSCode-darwin-$(VSCODE_ARCH).zip -d $(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH)
- mv $(Pipeline.Workspace)/vscode-darwin-$(VSCODE_ARCH)/VSCode-darwin-$(VSCODE_ARCH).zip $(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH).zip
+ unzip $(Pipeline.Workspace)/unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive/VSCode-darwin-$(VSCODE_ARCH).zip -d $(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH)
+ mv $(Pipeline.Workspace)/unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive/VSCode-darwin-$(VSCODE_ARCH).zip $(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH).zip
displayName: Unzip & move
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
@@ -108,22 +108,18 @@ steps:
condition: and(succeeded(), ne(variables['VSCODE_ARCH'], 'arm64'))
- script: |
- set -e
-
# For legacy purposes, arch for x64 is just 'darwin'
case $VSCODE_ARCH in
x64) ASSET_ID="darwin" ;;
arm64) ASSET_ID="darwin-arm64" ;;
universal) ASSET_ID="darwin-universal" ;;
esac
+ echo "##vso[task.setvariable variable=ASSET_ID]$ASSET_ID"
+ displayName: Set asset id variable
- VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
- AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
- AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \
- AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
- node build/azure-pipelines/common/createAsset.js \
- "$ASSET_ID" \
- archive \
- "VSCode-$ASSET_ID.zip" \
- ../VSCode-darwin-$(VSCODE_ARCH).zip
- displayName: Publish Clients
+ - script: mv $(agent.builddirectory)/VSCode-darwin-x64.zip $(agent.builddirectory)/VSCode-darwin.zip
+ displayName: Rename x64 build to its legacy name
+ condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'))
+
+ - publish: $(Agent.BuildDirectory)/VSCode-$(ASSET_ID).zip
+ artifact: vscode_client_darwin_$(VSCODE_ARCH)_archive
diff --git a/build/azure-pipelines/darwin/product-build-darwin.yml b/build/azure-pipelines/darwin/product-build-darwin.yml
index 186920fe96..566eeb8052 100644
--- a/build/azure-pipelines/darwin/product-build-darwin.yml
+++ b/build/azure-pipelines/darwin/product-build-darwin.yml
@@ -138,19 +138,19 @@ steps:
condition: and(succeeded(), ne(variables['VSCODE_ARCH'], 'universal'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- download: current
- artifact: vscode-darwin-x64
+ artifact: unsigned_vscode_client_darwin_x64_archive
displayName: Download x64 artifact
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'universal'))
- download: current
- artifact: vscode-darwin-arm64
+ artifact: unsigned_vscode_client_darwin_arm64_archive
displayName: Download arm64 artifact
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'universal'))
- script: |
set -e
- cp $(Pipeline.Workspace)/vscode-darwin-x64/VSCode-darwin-x64.zip $(agent.builddirectory)/VSCode-darwin-x64.zip
- cp $(Pipeline.Workspace)/vscode-darwin-arm64/VSCode-darwin-arm64.zip $(agent.builddirectory)/VSCode-darwin-arm64.zip
+ cp $(Pipeline.Workspace)/unsigned_vscode_client_darwin_x64_archive/VSCode-darwin-x64.zip $(agent.builddirectory)/VSCode-darwin-x64.zip
+ cp $(Pipeline.Workspace)/unsigned_vscode_client_darwin_arm64_archive/VSCode-darwin-arm64.zip $(agent.builddirectory)/VSCode-darwin-arm64.zip
unzip $(agent.builddirectory)/VSCode-darwin-x64.zip -d $(agent.builddirectory)/VSCode-darwin-x64
unzip $(agent.builddirectory)/VSCode-darwin-arm64.zip -d $(agent.builddirectory)/VSCode-darwin-arm64
DEBUG=* node build/darwin/create-universal-app.js
@@ -280,26 +280,27 @@ steps:
- script: |
set -e
- VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
- AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
- AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \
- AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
- VSCODE_ARCH="$(VSCODE_ARCH)" ./build/azure-pipelines/darwin/publish-server.sh
- displayName: Publish Servers
+
+ # package Remote Extension Host
+ pushd .. && mv vscode-reh-darwin vscode-server-darwin && zip -Xry vscode-server-darwin.zip vscode-server-darwin && popd
+
+ # package Remote Extension Host (Web)
+ pushd .. && mv vscode-reh-web-darwin vscode-server-darwin-web && zip -Xry vscode-server-darwin-web.zip vscode-server-darwin-web && popd
+ displayName: Prepare to publish servers
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), ne(variables['VSCODE_PUBLISH'], 'false'))
- publish: $(Agent.BuildDirectory)/VSCode-darwin-$(VSCODE_ARCH).zip
- artifact: vscode-darwin-$(VSCODE_ARCH)
+ artifact: unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive
displayName: Publish client archive
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- publish: $(Agent.BuildDirectory)/vscode-server-darwin.zip
- artifact: vscode-server-darwin-$(VSCODE_ARCH)
+ artifact: vscode_server_darwin_$(VSCODE_ARCH)_archive-unsigned
displayName: Publish server archive
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), ne(variables['VSCODE_PUBLISH'], 'false'))
- publish: $(Agent.BuildDirectory)/vscode-server-darwin-web.zip
- artifact: vscode-server-darwin-$(VSCODE_ARCH)-web
+ artifact: vscode_web_darwin_$(VSCODE_ARCH)_archive-unsigned
displayName: Publish web server archive
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), ne(variables['VSCODE_PUBLISH'], 'false'))
@@ -308,5 +309,5 @@ steps:
VSCODE_ARCH="$(VSCODE_ARCH)" \
yarn gulp upload-vscode-configuration
displayName: Upload configuration (for Bing settings search)
- condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'))
+ condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), ne(variables['VSCODE_PUBLISH'], 'false'))
continueOnError: true
diff --git a/build/azure-pipelines/darwin/publish-server.sh b/build/azure-pipelines/darwin/publish-server.sh
deleted file mode 100755
index 72a85942d5..0000000000
--- a/build/azure-pipelines/darwin/publish-server.sh
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/usr/bin/env bash
-set -e
-
-if [ "$VSCODE_ARCH" == "x64" ]; then
- # package Remote Extension Host
- pushd .. && mv vscode-reh-darwin vscode-server-darwin && zip -Xry vscode-server-darwin.zip vscode-server-darwin && popd
-
- # publish Remote Extension Host
- node build/azure-pipelines/common/createAsset.js \
- server-darwin \
- archive-unsigned \
- "vscode-server-darwin.zip" \
- ../vscode-server-darwin.zip
-fi
diff --git a/build/azure-pipelines/linux/alpine/publish.sh b/build/azure-pipelines/linux/alpine/publish.sh
deleted file mode 100755
index 2f5647d1ea..0000000000
--- a/build/azure-pipelines/linux/alpine/publish.sh
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/usr/bin/env bash
-set -e
-REPO="$(pwd)"
-ROOT="$REPO/.."
-
-PLATFORM_LINUX="linux-alpine"
-
-# Publish Remote Extension Host
-LEGACY_SERVER_BUILD_NAME="vscode-reh-$PLATFORM_LINUX"
-SERVER_BUILD_NAME="vscode-server-$PLATFORM_LINUX"
-SERVER_TARBALL_FILENAME="vscode-server-$PLATFORM_LINUX.tar.gz"
-SERVER_TARBALL_PATH="$ROOT/$SERVER_TARBALL_FILENAME"
-
-rm -rf $ROOT/vscode-server-*.tar.*
-(cd $ROOT && mv $LEGACY_SERVER_BUILD_NAME $SERVER_BUILD_NAME && tar --owner=0 --group=0 -czf $SERVER_TARBALL_PATH $SERVER_BUILD_NAME)
-
-node build/azure-pipelines/common/createAsset.js "server-$PLATFORM_LINUX" archive-unsigned "$SERVER_TARBALL_FILENAME" "$SERVER_TARBALL_PATH"
-
-# Publish Remote Extension Host (Web)
-LEGACY_SERVER_BUILD_NAME="vscode-reh-web-$PLATFORM_LINUX"
-SERVER_BUILD_NAME="vscode-server-$PLATFORM_LINUX-web"
-SERVER_TARBALL_FILENAME="vscode-server-$PLATFORM_LINUX-web.tar.gz"
-SERVER_TARBALL_PATH="$ROOT/$SERVER_TARBALL_FILENAME"
-
-rm -rf $ROOT/vscode-server-*-web.tar.*
-(cd $ROOT && mv $LEGACY_SERVER_BUILD_NAME $SERVER_BUILD_NAME && tar --owner=0 --group=0 -czf $SERVER_TARBALL_PATH $SERVER_BUILD_NAME)
-
-node build/azure-pipelines/common/createAsset.js "server-$PLATFORM_LINUX-web" archive-unsigned "$SERVER_TARBALL_FILENAME" "$SERVER_TARBALL_PATH"
diff --git a/build/azure-pipelines/linux/publish.sh b/build/azure-pipelines/linux/prepare-publish.sh
similarity index 79%
rename from build/azure-pipelines/linux/publish.sh
rename to build/azure-pipelines/linux/prepare-publish.sh
index 6d748c6e34..891fa8024e 100755
--- a/build/azure-pipelines/linux/publish.sh
+++ b/build/azure-pipelines/linux/prepare-publish.sh
@@ -13,8 +13,6 @@ TARBALL_PATH="$ROOT/$TARBALL_FILENAME"
rm -rf $ROOT/code-*.tar.*
(cd $ROOT && tar -czf $TARBALL_PATH $BUILDNAME)
-node build/azure-pipelines/common/createAsset.js "$PLATFORM_LINUX" archive-unsigned "$TARBALL_FILENAME" "$TARBALL_PATH"
-
# Publish Remote Extension Host
LEGACY_SERVER_BUILD_NAME="vscode-reh-$PLATFORM_LINUX"
SERVER_BUILD_NAME="vscode-server-$PLATFORM_LINUX"
@@ -24,8 +22,6 @@ SERVER_TARBALL_PATH="$ROOT/$SERVER_TARBALL_FILENAME"
rm -rf $ROOT/vscode-server-*.tar.*
(cd $ROOT && mv $LEGACY_SERVER_BUILD_NAME $SERVER_BUILD_NAME && tar --owner=0 --group=0 -czf $SERVER_TARBALL_PATH $SERVER_BUILD_NAME)
-node build/azure-pipelines/common/createAsset.js "server-$PLATFORM_LINUX" archive-unsigned "$SERVER_TARBALL_FILENAME" "$SERVER_TARBALL_PATH"
-
# Publish Remote Extension Host (Web)
LEGACY_SERVER_BUILD_NAME="vscode-reh-web-$PLATFORM_LINUX"
SERVER_BUILD_NAME="vscode-server-$PLATFORM_LINUX-web"
@@ -35,8 +31,6 @@ SERVER_TARBALL_PATH="$ROOT/$SERVER_TARBALL_FILENAME"
rm -rf $ROOT/vscode-server-*-web.tar.*
(cd $ROOT && mv $LEGACY_SERVER_BUILD_NAME $SERVER_BUILD_NAME && tar --owner=0 --group=0 -czf $SERVER_TARBALL_PATH $SERVER_BUILD_NAME)
-node build/azure-pipelines/common/createAsset.js "server-$PLATFORM_LINUX-web" archive-unsigned "$SERVER_TARBALL_FILENAME" "$SERVER_TARBALL_PATH"
-
# Publish DEB
case $VSCODE_ARCH in
x64) DEB_ARCH="amd64" ;;
@@ -47,8 +41,6 @@ PLATFORM_DEB="linux-deb-$VSCODE_ARCH"
DEB_FILENAME="$(ls $REPO/.build/linux/deb/$DEB_ARCH/deb/)"
DEB_PATH="$REPO/.build/linux/deb/$DEB_ARCH/deb/$DEB_FILENAME"
-node build/azure-pipelines/common/createAsset.js "$PLATFORM_DEB" package "$DEB_FILENAME" "$DEB_PATH"
-
# Publish RPM
case $VSCODE_ARCH in
x64) RPM_ARCH="x86_64" ;;
@@ -61,8 +53,6 @@ PLATFORM_RPM="linux-rpm-$VSCODE_ARCH"
RPM_FILENAME="$(ls $REPO/.build/linux/rpm/$RPM_ARCH/ | grep .rpm)"
RPM_PATH="$REPO/.build/linux/rpm/$RPM_ARCH/$RPM_FILENAME"
-node build/azure-pipelines/common/createAsset.js "$PLATFORM_RPM" package "$RPM_FILENAME" "$RPM_PATH"
-
# Publish Snap
# Pack snap tarball artifact, in order to preserve file perms
mkdir -p $REPO/.build/linux/snap-tarball
@@ -73,3 +63,4 @@ rm -rf $SNAP_TARBALL_PATH
# Export DEB_PATH, RPM_PATH
echo "##vso[task.setvariable variable=DEB_PATH]$DEB_PATH"
echo "##vso[task.setvariable variable=RPM_PATH]$RPM_PATH"
+echo "##vso[task.setvariable variable=TARBALL_PATH]$TARBALL_PATH"
diff --git a/build/azure-pipelines/linux/product-build-alpine.yml b/build/azure-pipelines/linux/product-build-alpine.yml
index 8376c079ce..ed0c35346c 100644
--- a/build/azure-pipelines/linux/product-build-alpine.yml
+++ b/build/azure-pipelines/linux/product-build-alpine.yml
@@ -117,19 +117,37 @@ steps:
- script: |
set -e
- AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
- AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
- VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
- ./build/azure-pipelines/linux/alpine/publish.sh
- displayName: Publish
+ REPO="$(pwd)"
+ ROOT="$REPO/.."
+
+ PLATFORM_LINUX="linux-alpine"
+
+ # Publish Remote Extension Host
+ LEGACY_SERVER_BUILD_NAME="vscode-reh-$PLATFORM_LINUX"
+ SERVER_BUILD_NAME="vscode-server-$PLATFORM_LINUX"
+ SERVER_TARBALL_FILENAME="vscode-server-$PLATFORM_LINUX.tar.gz"
+ SERVER_TARBALL_PATH="$ROOT/$SERVER_TARBALL_FILENAME"
+
+ rm -rf $ROOT/vscode-server-*.tar.*
+ (cd $ROOT && mv $LEGACY_SERVER_BUILD_NAME $SERVER_BUILD_NAME && tar --owner=0 --group=0 -czf $SERVER_TARBALL_PATH $SERVER_BUILD_NAME)
+
+ # Publish Remote Extension Host (Web)
+ LEGACY_SERVER_BUILD_NAME="vscode-reh-web-$PLATFORM_LINUX"
+ SERVER_BUILD_NAME="vscode-server-$PLATFORM_LINUX-web"
+ SERVER_TARBALL_FILENAME="vscode-server-$PLATFORM_LINUX-web.tar.gz"
+ SERVER_TARBALL_PATH="$ROOT/$SERVER_TARBALL_FILENAME"
+
+ rm -rf $ROOT/vscode-server-*-web.tar.*
+ (cd $ROOT && mv $LEGACY_SERVER_BUILD_NAME $SERVER_BUILD_NAME && tar --owner=0 --group=0 -czf $SERVER_TARBALL_PATH $SERVER_BUILD_NAME)
+ displayName: Prepare for publish
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- publish: $(Agent.BuildDirectory)/vscode-server-linux-alpine.tar.gz
- artifact: vscode-server-linux-alpine
+ artifact: vscode_server_linux_alpine_archive-unsigned
displayName: Publish server archive
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- publish: $(Agent.BuildDirectory)/vscode-server-linux-alpine-web.tar.gz
- artifact: vscode-server-linux-alpine-web
+ artifact: vscode_web_linux_alpine_archive-unsigned
displayName: Publish web server archive
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
diff --git a/build/azure-pipelines/linux/product-build-linux.yml b/build/azure-pipelines/linux/product-build-linux.yml
index cb06bf6a72..8181083d1f 100644
--- a/build/azure-pipelines/linux/product-build-linux.yml
+++ b/build/azure-pipelines/linux/product-build-linux.yml
@@ -245,27 +245,32 @@ steps:
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
VSCODE_ARCH="$(VSCODE_ARCH)" \
- ./build/azure-pipelines/linux/publish.sh
- displayName: Publish
+ ./build/azure-pipelines/linux/prepare-publish.sh
+ displayName: Prepare for Publish
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- publish: $(DEB_PATH)
- artifact: vscode-linux-deb-$(VSCODE_ARCH)
+ artifact: vscode_client_linux_$(VSCODE_ARCH)_deb-package
displayName: Publish deb package
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- publish: $(RPM_PATH)
- artifact: vscode-linux-rpm-$(VSCODE_ARCH)
+ artifact: vscode_client_linux_$(VSCODE_ARCH)_rpm-package
displayName: Publish rpm package
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
+ - publish: $(TARBALL_PATH)
+ artifact: vscode_client_linux_$(VSCODE_ARCH)_archive-unsigned
+ displayName: Publish client archive
+ condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
+
- publish: $(Agent.BuildDirectory)/vscode-server-linux-$(VSCODE_ARCH).tar.gz
- artifact: vscode-server-linux-$(VSCODE_ARCH)
+ artifact: vscode_server_linux_$(VSCODE_ARCH)_archive-unsigned
displayName: Publish server archive
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- publish: $(Agent.BuildDirectory)/vscode-server-linux-$(VSCODE_ARCH)-web.tar.gz
- artifact: vscode-server-linux-$(VSCODE_ARCH)-web
+ artifact: vscode_web_linux_$(VSCODE_ARCH)_archive-unsigned
displayName: Publish web server archive
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
diff --git a/build/azure-pipelines/linux/snap-build-linux.yml b/build/azure-pipelines/linux/snap-build-linux.yml
index f5e0288f0b..f7af900e1d 100644
--- a/build/azure-pipelines/linux/snap-build-linux.yml
+++ b/build/azure-pipelines/linux/snap-build-linux.yml
@@ -50,15 +50,11 @@ steps:
esac
(cd $SNAP_ROOT/code-* && sudo --preserve-env snapcraft prime $SNAPCRAFT_TARGET_ARGS && snap pack prime --compression=lzo --filename="$SNAP_PATH")
- # Publish snap package
- AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
- AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
- node build/azure-pipelines/common/createAsset.js "linux-snap-$(VSCODE_ARCH)" package "$SNAP_FILENAME" "$SNAP_PATH"
-
# Export SNAP_PATH
echo "##vso[task.setvariable variable=SNAP_PATH]$SNAP_PATH"
+ displayName: Prepare for publish
- publish: $(SNAP_PATH)
- artifact: vscode-linux-snap-$(VSCODE_ARCH)
+ artifact: vscode_client_linux_$(VSCODE_ARCH)_snap
displayName: Publish snap package
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
diff --git a/build/azure-pipelines/product-build.yml b/build/azure-pipelines/product-build.yml
index fd698a0e7d..2c475b9ded 100644
--- a/build/azure-pipelines/product-build.yml
+++ b/build/azure-pipelines/product-build.yml
@@ -86,6 +86,8 @@ variables:
value: ${{ eq(parameters.ENABLE_TERRAPIN, true) }}
- name: VSCODE_QUALITY
value: ${{ parameters.VSCODE_QUALITY }}
+ - name: VSCODE_RELEASE
+ value: ${{ parameters.VSCODE_RELEASE }}
- name: VSCODE_BUILD_STAGE_WINDOWS
value: ${{ or(eq(parameters.VSCODE_BUILD_WIN32, true), eq(parameters.VSCODE_BUILD_WIN32_32BIT, true), eq(parameters.VSCODE_BUILD_WIN32_ARM64, true)) }}
- name: VSCODE_BUILD_STAGE_LINUX
@@ -301,37 +303,30 @@ stages:
steps:
- template: darwin/product-build-darwin-sign.yml
- - ${{ if and(eq(variables['VSCODE_PUBLISH'], true), eq(parameters.VSCODE_COMPILE_ONLY, false)) }}:
- - stage: Mooncake
+ - ${{ if and(eq(parameters.VSCODE_COMPILE_ONLY, false), ne(variables['VSCODE_PUBLISH'], 'false')) }}:
+ - stage: Publish
dependsOn:
- - ${{ if eq(variables['VSCODE_BUILD_STAGE_WINDOWS'], true) }}:
- - Windows
- - ${{ if eq(variables['VSCODE_BUILD_STAGE_LINUX'], true) }}:
- - Linux
- - ${{ if eq(variables['VSCODE_BUILD_STAGE_MACOS'], true) }}:
- - macOS
- condition: succeededOrFailed()
+ - Compile
pool:
vmImage: "Ubuntu-18.04"
+ variables:
+ - name: BUILDS_API_URL
+ value: $(System.CollectionUri)$(System.TeamProject)/_apis/build/builds/$(Build.BuildId)/
jobs:
- - job: SyncMooncake
- displayName: Sync Mooncake
+ - job: PublishBuild
+ timeoutInMinutes: 180
+ displayName: Publish Build
steps:
- - template: sync-mooncake.yml
+ - template: product-publish.yml
- - ${{ if and(eq(parameters.VSCODE_COMPILE_ONLY, false), or(eq(parameters.VSCODE_RELEASE, true), and(in(parameters.VSCODE_QUALITY, 'insider', 'exploration'), eq(variables['VSCODE_SCHEDULEDBUILD'], true)))) }}:
- - stage: Release
- dependsOn:
- - ${{ if eq(variables['VSCODE_BUILD_STAGE_WINDOWS'], true) }}:
- - Windows
- - ${{ if eq(variables['VSCODE_BUILD_STAGE_LINUX'], true) }}:
- - Linux
- - ${{ if eq(variables['VSCODE_BUILD_STAGE_MACOS'], true) }}:
- - macOS
- pool:
- vmImage: "Ubuntu-18.04"
- jobs:
- - job: ReleaseBuild
- displayName: Release Build
- steps:
- - template: release.yml
+ - ${{ if or(eq(parameters.VSCODE_RELEASE, true), and(in(parameters.VSCODE_QUALITY, 'insider', 'exploration'), eq(variables['VSCODE_SCHEDULEDBUILD'], true))) }}:
+ - stage: Release
+ dependsOn:
+ - Publish
+ pool:
+ vmImage: "Ubuntu-18.04"
+ jobs:
+ - job: ReleaseBuild
+ displayName: Release Build
+ steps:
+ - template: product-release.yml
diff --git a/build/azure-pipelines/product-compile.yml b/build/azure-pipelines/product-compile.yml
index 52c7758cfd..18c17639b8 100644
--- a/build/azure-pipelines/product-compile.yml
+++ b/build/azure-pipelines/product-compile.yml
@@ -118,14 +118,6 @@ steps:
displayName: Publish Webview
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- - script: |
- set -e
- VERSION=`node -p "require(\"./package.json\").version"`
- AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
- node build/azure-pipelines/common/createBuild.js $VERSION
- displayName: Create build
- condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
-
# we gotta tarball everything in order to preserve file permissions
- script: |
set -e
diff --git a/build/azure-pipelines/product-publish.ps1 b/build/azure-pipelines/product-publish.ps1
new file mode 100644
index 0000000000..339002ab0c
--- /dev/null
+++ b/build/azure-pipelines/product-publish.ps1
@@ -0,0 +1,114 @@
+. build/azure-pipelines/win32/exec.ps1
+$ErrorActionPreference = 'Stop'
+$ProgressPreference = 'SilentlyContinue'
+$ARTIFACT_PROCESSED_WILDCARD_PATH = "$env:PIPELINE_WORKSPACE/artifacts_processed_*/artifacts_processed_*"
+$ARTIFACT_PROCESSED_FILE_PATH = "$env:PIPELINE_WORKSPACE/artifacts_processed_$env:SYSTEM_STAGEATTEMPT/artifacts_processed_$env:SYSTEM_STAGEATTEMPT.txt"
+
+function Get-PipelineArtifact {
+ param($Name = '*')
+ try {
+ $res = Invoke-RestMethod "$($env:BUILDS_API_URL)artifacts?api-version=6.0" -Headers @{
+ Authorization = "Bearer $env:SYSTEM_ACCESSTOKEN"
+ } -MaximumRetryCount 5 -RetryIntervalSec 1
+
+ if (!$res) {
+ return
+ }
+
+ $res.value | Where-Object { $_.name -Like $Name }
+ } catch {
+ Write-Warning $_
+ }
+}
+
+# This set will keep track of which artifacts have already been processed
+$set = [System.Collections.Generic.HashSet[string]]::new()
+
+if (Test-Path $ARTIFACT_PROCESSED_WILDCARD_PATH) {
+ # Grab the latest artifact_processed text file and load all assets already processed from that.
+ # This means that the latest artifact_processed_*.txt file has all of the contents of the previous ones.
+ # Note: The kusto-like syntax only works in PS7+ and only in scripts, not at the REPL.
+ Get-ChildItem $ARTIFACT_PROCESSED_WILDCARD_PATH
+ | Sort-Object
+ | Select-Object -Last 1
+ | Get-Content
+ | ForEach-Object {
+ $set.Add($_) | Out-Null
+ Write-Host "Already processed artifact: $_"
+ }
+}
+
+# Create the artifact file that will be used for this run
+New-Item -Path $ARTIFACT_PROCESSED_FILE_PATH -Force | Out-Null
+
+# Determine which stages we need to watch
+$stages = @(
+ if ($env:VSCODE_BUILD_STAGE_WINDOWS -eq 'True') { 'Windows' }
+ if ($env:VSCODE_BUILD_STAGE_LINUX -eq 'True') { 'Linux' }
+ if ($env:VSCODE_BUILD_STAGE_MACOS -eq 'True') { 'macOS' }
+)
+
+do {
+ Start-Sleep -Seconds 10
+
+ $artifacts = Get-PipelineArtifact -Name 'vscode_*'
+ if (!$artifacts) {
+ continue
+ }
+
+ $artifacts | ForEach-Object {
+ $artifactName = $_.name
+ if($set.Add($artifactName)) {
+            Write-Host "Processing artifact: '$artifactName'. Downloading from: $($_.resource.downloadUrl)"
+
+ try {
+ Invoke-RestMethod $_.resource.downloadUrl -OutFile "$env:AGENT_TEMPDIRECTORY/$artifactName.zip" -Headers @{
+ Authorization = "Bearer $env:SYSTEM_ACCESSTOKEN"
+ } -MaximumRetryCount 5 -RetryIntervalSec 1 | Out-Null
+
+ Expand-Archive -Path "$env:AGENT_TEMPDIRECTORY/$artifactName.zip" -DestinationPath $env:AGENT_TEMPDIRECTORY | Out-Null
+ } catch {
+ Write-Warning $_
+ $set.Remove($artifactName) | Out-Null
+ continue
+ }
+
+ $null,$product,$os,$arch,$type = $artifactName -split '_'
+ $asset = Get-ChildItem -rec "$env:AGENT_TEMPDIRECTORY/$artifactName"
+ Write-Host "Processing artifact with the following values:"
+            # turning it into an object just to log nicely
+ @{
+ product = $product
+ os = $os
+ arch = $arch
+ type = $type
+ asset = $asset.Name
+ } | Format-Table
+
+ exec { node build/azure-pipelines/common/createAsset.js $product $os $arch $type $asset.Name $asset.FullName }
+ $artifactName >> $ARTIFACT_PROCESSED_FILE_PATH
+ }
+ }
+
+ # Get the timeline and see if it says the other stage completed
+ try {
+ $timeline = Invoke-RestMethod "$($env:BUILDS_API_URL)timeline?api-version=6.0" -Headers @{
+ Authorization = "Bearer $env:SYSTEM_ACCESSTOKEN"
+ } -MaximumRetryCount 5 -RetryIntervalSec 1
+ } catch {
+ Write-Warning $_
+ continue
+ }
+
+ foreach ($stage in $stages) {
+ $otherStageFinished = $timeline.records | Where-Object { $_.name -eq $stage -and $_.type -eq 'stage' -and $_.state -eq 'completed' }
+ if (!$otherStageFinished) {
+ break
+ }
+ }
+
+ $artifacts = Get-PipelineArtifact -Name 'vscode_*'
+ $artifactsStillToProcess = $artifacts.Count -ne $set.Count
+} while (!$otherStageFinished -or $artifactsStillToProcess)
+
+Write-Host "Processed $($set.Count) artifacts."
diff --git a/build/azure-pipelines/product-publish.yml b/build/azure-pipelines/product-publish.yml
new file mode 100644
index 0000000000..de8cb216b8
--- /dev/null
+++ b/build/azure-pipelines/product-publish.yml
@@ -0,0 +1,89 @@
+steps:
+ - task: NodeTool@0
+ inputs:
+ versionSpec: "12.x"
+
+ - task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
+ inputs:
+ versionSpec: "1.x"
+
+ - task: AzureKeyVault@1
+ displayName: "Azure Key Vault: Get Secrets"
+ inputs:
+ azureSubscription: "vscode-builds-subscription"
+ KeyVaultName: vscode
+
+ - pwsh: |
+ . build/azure-pipelines/win32/exec.ps1
+ cd build
+ exec { yarn }
+ displayName: Install dependencies
+
+ - download: current
+ patterns: '**/artifacts_processed_*.txt'
+ displayName: Download all artifacts_processed text files
+
+ - pwsh: |
+ . build/azure-pipelines/win32/exec.ps1
+
+ if (Test-Path "$(Pipeline.Workspace)/artifacts_processed_*/artifacts_processed_*.txt") {
+ Write-Host "Artifacts already processed so a build must have already been created."
+ return
+ }
+
+ $env:AZURE_DOCUMENTDB_MASTERKEY = "$(builds-docdb-key-readwrite)"
+ $VERSION = node -p "require('./package.json').version"
+ Write-Host "Creating build with version: $VERSION"
+ exec { node build/azure-pipelines/common/createBuild.js $VERSION }
+ displayName: Create build if it hasn't been created before
+
+ - pwsh: |
+ $env:VSCODE_MIXIN_PASSWORD = "$(github-distro-mixin-password)"
+ $env:AZURE_DOCUMENTDB_MASTERKEY = "$(builds-docdb-key-readwrite)"
+ $env:AZURE_STORAGE_ACCESS_KEY = "$(ticino-storage-key)"
+ $env:AZURE_STORAGE_ACCESS_KEY_2 = "$(vscode-storage-key)"
+ $env:MOONCAKE_STORAGE_ACCESS_KEY = "$(vscode-mooncake-storage-key)"
+ build/azure-pipelines/product-publish.ps1
+ env:
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+ displayName: Process artifacts
+
+ - publish: $(Pipeline.Workspace)/artifacts_processed_$(System.StageAttempt)/artifacts_processed_$(System.StageAttempt).txt
+ artifact: artifacts_processed_$(System.StageAttempt)
+ displayName: Publish what artifacts were published for this stage attempt
+
+ - pwsh: |
+ $ErrorActionPreference = 'Stop'
+
+ # Determine which stages we need to watch
+ $stages = @(
+ if ($env:VSCODE_BUILD_STAGE_WINDOWS -eq 'True') { 'Windows' }
+ if ($env:VSCODE_BUILD_STAGE_LINUX -eq 'True') { 'Linux' }
+ if ($env:VSCODE_BUILD_STAGE_MACOS -eq 'True') { 'macOS' }
+ )
+ Write-Host "Stages to check: $stages"
+
+ # Get the timeline and see if it says the other stage completed
+ $timeline = Invoke-RestMethod "$($env:BUILDS_API_URL)timeline?api-version=6.0" -Headers @{
+ Authorization = "Bearer $env:SYSTEM_ACCESSTOKEN"
+ } -MaximumRetryCount 5 -RetryIntervalSec 1
+
+ $failedStages = @()
+ foreach ($stage in $stages) {
+ $didStageFail = $timeline.records | Where-Object {
+ $_.name -eq $stage -and $_.type -eq 'stage' -and $_.result -ne 'succeeded' -and $_.result -ne 'succeededWithIssues'
+ }
+
+ if($didStageFail) {
+ $failedStages += $stage
+ } else {
+ Write-Host "'$stage' did not fail."
+ }
+ }
+
+ if ($failedStages.Length) {
+ throw "Failed stages: $($failedStages -join ', '). This stage will now fail so that it is easier to retry failed jobs."
+ }
+ env:
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+ displayName: Determine if stage should succeed
diff --git a/build/azure-pipelines/release.yml b/build/azure-pipelines/product-release.yml
similarity index 100%
rename from build/azure-pipelines/release.yml
rename to build/azure-pipelines/product-release.yml
diff --git a/build/azure-pipelines/publish-types/update-types.js b/build/azure-pipelines/publish-types/update-types.js
index 6fc1d6b990..2da8ae32e9 100644
--- a/build/azure-pipelines/publish-types/update-types.js
+++ b/build/azure-pipelines/publish-types/update-types.js
@@ -63,8 +63,8 @@ function getNewFileHeader(tag) {
``,
`/*---------------------------------------------------------------------------------------------`,
` * Copyright (c) Microsoft Corporation. All rights reserved.`,
- ` * Licensed under the MIT License.`,
- ` * See https://github.com/Microsoft/azuredatastudio/blob/main/LICENSE.txt for license information.`,
+ ` * Licensed under the Source EULA.`,
+ ` * See https://github.com/microsoft/azuredatastudio/blob/main/LICENSE.txt for license information.`,
` *--------------------------------------------------------------------------------------------*/`,
``,
`/**`,
diff --git a/build/azure-pipelines/publish-types/update-types.ts b/build/azure-pipelines/publish-types/update-types.ts
index fd1ab41301..c3ed3324a7 100644
--- a/build/azure-pipelines/publish-types/update-types.ts
+++ b/build/azure-pipelines/publish-types/update-types.ts
@@ -75,8 +75,8 @@ function getNewFileHeader(tag: string) {
``,
`/*---------------------------------------------------------------------------------------------`,
` * Copyright (c) Microsoft Corporation. All rights reserved.`,
- ` * Licensed under the MIT License.`,
- ` * See https://github.com/Microsoft/azuredatastudio/blob/main/LICENSE.txt for license information.`,
+ ` * Licensed under the Source EULA.`,
+ ` * See https://github.com/microsoft/azuredatastudio/blob/main/LICENSE.txt for license information.`,
` *--------------------------------------------------------------------------------------------*/`,
``,
`/**`,
diff --git a/build/azure-pipelines/sync-mooncake.yml b/build/azure-pipelines/sync-mooncake.yml
deleted file mode 100644
index 6e379754f2..0000000000
--- a/build/azure-pipelines/sync-mooncake.yml
+++ /dev/null
@@ -1,24 +0,0 @@
-steps:
- - task: NodeTool@0
- inputs:
- versionSpec: "14.x"
-
- - task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
- inputs:
- versionSpec: "1.x"
-
- - task: AzureKeyVault@1
- displayName: "Azure Key Vault: Get Secrets"
- inputs:
- azureSubscription: "vscode-builds-subscription"
- KeyVaultName: vscode
-
- - script: |
- set -e
-
- (cd build ; yarn)
-
- AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
- AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
- MOONCAKE_STORAGE_ACCESS_KEY="$(vscode-mooncake-storage-key)" \
- node build/azure-pipelines/common/sync-mooncake.js "$VSCODE_QUALITY"
diff --git a/build/azure-pipelines/web/product-build-web.yml b/build/azure-pipelines/web/product-build-web.yml
index 772fe1c05a..45dedea1b4 100644
--- a/build/azure-pipelines/web/product-build-web.yml
+++ b/build/azure-pipelines/web/product-build-web.yml
@@ -119,13 +119,19 @@ steps:
- script: |
set -e
- AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
- AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
- VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
- ./build/azure-pipelines/web/publish.sh
- displayName: Publish
+ REPO="$(pwd)"
+ ROOT="$REPO/.."
+
+ WEB_BUILD_NAME="vscode-web"
+ WEB_TARBALL_FILENAME="vscode-web.tar.gz"
+ WEB_TARBALL_PATH="$ROOT/$WEB_TARBALL_FILENAME"
+
+ rm -rf $ROOT/vscode-web.tar.*
+
+ cd $ROOT && tar --owner=0 --group=0 -czf $WEB_TARBALL_PATH $WEB_BUILD_NAME
+ displayName: Prepare for publish
- publish: $(Agent.BuildDirectory)/vscode-web.tar.gz
- artifact: vscode-web-standalone
+ artifact: vscode_web_linux_standalone_archive-unsigned
displayName: Publish web archive
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
diff --git a/build/azure-pipelines/web/publish.sh b/build/azure-pipelines/web/publish.sh
deleted file mode 100755
index 827edc2661..0000000000
--- a/build/azure-pipelines/web/publish.sh
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/usr/bin/env bash
-set -e
-REPO="$(pwd)"
-ROOT="$REPO/.."
-
-# Publish Web Client
-WEB_BUILD_NAME="vscode-web"
-WEB_TARBALL_FILENAME="vscode-web.tar.gz"
-WEB_TARBALL_PATH="$ROOT/$WEB_TARBALL_FILENAME"
-
-rm -rf $ROOT/vscode-web.tar.*
-
-(cd $ROOT && tar --owner=0 --group=0 -czf $WEB_TARBALL_PATH $WEB_BUILD_NAME)
-
-node build/azure-pipelines/common/createAsset.js web-standalone archive-unsigned "$WEB_TARBALL_FILENAME" "$WEB_TARBALL_PATH"
diff --git a/build/azure-pipelines/win32/publish.ps1 b/build/azure-pipelines/win32/prepare-publish.ps1
similarity index 51%
rename from build/azure-pipelines/win32/publish.ps1
rename to build/azure-pipelines/win32/prepare-publish.ps1
index a225f9d5fd..f80e1ca0ce 100644
--- a/build/azure-pipelines/win32/publish.ps1
+++ b/build/azure-pipelines/win32/prepare-publish.ps1
@@ -13,24 +13,31 @@ $Zip = "$Repo\.build\win32-$Arch\archive\VSCode-win32-$Arch.zip"
$LegacyServer = "$Root\vscode-reh-win32-$Arch"
$Server = "$Root\vscode-server-win32-$Arch"
$ServerZip = "$Repo\.build\vscode-server-win32-$Arch.zip"
+$LegacyWeb = "$Root\vscode-reh-web-win32-$Arch"
+$Web = "$Root\vscode-server-win32-$Arch-web"
+$WebZip = "$Repo\.build\vscode-server-win32-$Arch-web.zip"
$Build = "$Root\VSCode-win32-$Arch"
# Create server archive
if ("$Arch" -ne "arm64") {
exec { xcopy $LegacyServer $Server /H /E /I }
exec { .\node_modules\7zip\7zip-lite\7z.exe a -tzip $ServerZip $Server -r }
+ exec { xcopy $LegacyWeb $Web /H /E /I }
+ exec { .\node_modules\7zip\7zip-lite\7z.exe a -tzip $WebZip $Web -r }
}
# get version
$PackageJson = Get-Content -Raw -Path "$Build\resources\app\package.json" | ConvertFrom-Json
$Version = $PackageJson.version
-$AssetPlatform = if ("$Arch" -eq "ia32") { "win32" } else { "win32-$Arch" }
+$ARCHIVE_NAME = "VSCode-win32-$Arch-$Version.zip"
+$SYSTEM_SETUP_NAME = "VSCodeSetup-$Arch-$Version.exe"
+$USER_SETUP_NAME = "VSCodeUserSetup-$Arch-$Version.exe"
-exec { node build/azure-pipelines/common/createAsset.js "$AssetPlatform-archive" archive "VSCode-win32-$Arch-$Version.zip" $Zip }
-exec { node build/azure-pipelines/common/createAsset.js "$AssetPlatform" setup "VSCodeSetup-$Arch-$Version.exe" $SystemExe }
-exec { node build/azure-pipelines/common/createAsset.js "$AssetPlatform-user" setup "VSCodeUserSetup-$Arch-$Version.exe" $UserExe }
-
-if ("$Arch" -ne "arm64") {
- exec { node build/azure-pipelines/common/createAsset.js "server-$AssetPlatform" archive "vscode-server-win32-$Arch.zip" $ServerZip }
-}
+# Set variables for upload
+Move-Item $Zip "$Repo\.build\win32-$Arch\archive\$ARCHIVE_NAME"
+Write-Host "##vso[task.setvariable variable=ARCHIVE_NAME]$ARCHIVE_NAME"
+Move-Item $SystemExe "$Repo\.build\win32-$Arch\system-setup\$SYSTEM_SETUP_NAME"
+Write-Host "##vso[task.setvariable variable=SYSTEM_SETUP_NAME]$SYSTEM_SETUP_NAME"
+Move-Item $UserExe "$Repo\.build\win32-$Arch\user-setup\$USER_SETUP_NAME"
+Write-Host "##vso[task.setvariable variable=USER_SETUP_NAME]$USER_SETUP_NAME"
diff --git a/build/azure-pipelines/win32/product-build-win32.yml b/build/azure-pipelines/win32/product-build-win32.yml
index 2dcaf8b2e0..1f8514ae7e 100644
--- a/build/azure-pipelines/win32/product-build-win32.yml
+++ b/build/azure-pipelines/win32/product-build-win32.yml
@@ -295,31 +295,31 @@ steps:
$env:AZURE_STORAGE_ACCESS_KEY_2 = "$(vscode-storage-key)"
$env:AZURE_DOCUMENTDB_MASTERKEY = "$(builds-docdb-key-readwrite)"
$env:VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)"
- .\build\azure-pipelines\win32\publish.ps1
+ .\build\azure-pipelines\win32\prepare-publish.ps1
displayName: Publish
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- - publish: $(System.DefaultWorkingDirectory)\.build\win32-$(VSCODE_ARCH)\archive\VSCode-win32-$(VSCODE_ARCH).zip
- artifact: vscode-win32-$(VSCODE_ARCH)
+ - publish: $(System.DefaultWorkingDirectory)\.build\win32-$(VSCODE_ARCH)\archive\$(ARCHIVE_NAME)
+ artifact: vscode_client_win32_$(VSCODE_ARCH)_archive
displayName: Publish archive
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- - publish: $(System.DefaultWorkingDirectory)\.build\win32-$(VSCODE_ARCH)\system-setup\VSCodeSetup.exe
- artifact: vscode-win32-$(VSCODE_ARCH)-setup
+ - publish: $(System.DefaultWorkingDirectory)\.build\win32-$(VSCODE_ARCH)\system-setup\$(SYSTEM_SETUP_NAME)
+ artifact: vscode_client_win32_$(VSCODE_ARCH)_setup
displayName: Publish system setup
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- - publish: $(System.DefaultWorkingDirectory)\.build\win32-$(VSCODE_ARCH)\user-setup\VSCodeSetup.exe
- artifact: vscode-win32-$(VSCODE_ARCH)-user-setup
+ - publish: $(System.DefaultWorkingDirectory)\.build\win32-$(VSCODE_ARCH)\user-setup\$(USER_SETUP_NAME)
+ artifact: vscode_client_win32_$(VSCODE_ARCH)_user-setup
displayName: Publish user setup
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- publish: $(System.DefaultWorkingDirectory)\.build\vscode-server-win32-$(VSCODE_ARCH).zip
- artifact: vscode-server-win32-$(VSCODE_ARCH)
+ artifact: vscode_server_win32_$(VSCODE_ARCH)_archive
displayName: Publish server archive
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
- publish: $(System.DefaultWorkingDirectory)\.build\vscode-server-win32-$(VSCODE_ARCH)-web.zip
- artifact: vscode-server-win32-$(VSCODE_ARCH)-web
+ artifact: vscode_web_win32_$(VSCODE_ARCH)_archive
displayName: Publish web server archive
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
diff --git a/build/darwin/create-universal-app.js b/build/darwin/create-universal-app.js
index 5e4ba3e611..44600c28e1 100644
--- a/build/darwin/create-universal-app.js
+++ b/build/darwin/create-universal-app.js
@@ -23,7 +23,7 @@ async function main() {
const outAppPath = path.join(buildDir, `VSCode-darwin-${arch}`, appName);
const productJsonPath = path.resolve(outAppPath, 'Contents', 'Resources', 'app', 'product.json');
const infoPlistPath = path.resolve(outAppPath, 'Contents', 'Info.plist');
- await (0, vscode_universal_1.makeUniversalApp)({
+ await vscode_universal_1.makeUniversalApp({
x64AppPath,
arm64AppPath,
x64AsarPath,
diff --git a/build/filters.js b/build/filters.js
index 3dc3db6a46..665d4ed372 100644
--- a/build/filters.js
+++ b/build/filters.js
@@ -51,7 +51,7 @@ module.exports.indentationFilter = [
'!test/monaco/out/**',
'!test/smoke/out/**',
'!extensions/typescript-language-features/test-workspace/**',
- '!extensions/notebook-markdown-extensions/notebook-out/**',
+ '!extensions/markdown-math/notebook-out/**',
'!extensions/vscode-api-tests/testWorkspace/**',
'!extensions/vscode-api-tests/testWorkspace2/**',
'!extensions/vscode-custom-editor-tests/test-workspace/**',
@@ -89,7 +89,7 @@ module.exports.indentationFilter = [
'!**/*.dockerfile',
'!extensions/markdown-language-features/media/*.js',
'!extensions/markdown-language-features/notebook-out/*.js',
- '!extensions/notebook-markdown-extensions/notebook-out/*.js',
+ '!extensions/markdown-math/notebook-out/*.js',
'!extensions/simple-browser/media/*.js',
];
@@ -119,7 +119,7 @@ module.exports.copyrightFilter = [
'!resources/completions/**',
'!extensions/configuration-editing/build/inline-allOf.ts',
'!extensions/markdown-language-features/media/highlight.css',
- '!extensions/notebook-markdown-extensions/notebook-out/**',
+ '!extensions/markdown-math/notebook-out/**',
'!extensions/html-language-features/server/src/modes/typescript/*',
'!extensions/*/server/bin/*',
'!src/vs/editor/test/node/classification/typescript-test.ts',
diff --git a/build/gulpfile.editor.js b/build/gulpfile.editor.js
index 0e56c448f0..1706287c84 100644
--- a/build/gulpfile.editor.js
+++ b/build/gulpfile.editor.js
@@ -14,7 +14,7 @@ const i18n = require('./lib/i18n');
const standalone = require('./lib/standalone');
const cp = require('child_process');
const compilation = require('./lib/compilation');
-const monacoapi = require('./monaco/api');
+const monacoapi = require('./lib/monaco-api');
const fs = require('fs');
let root = path.dirname(__dirname);
@@ -49,7 +49,7 @@ let BUNDLED_FILE_HEADER = [
' * Copyright (c) Microsoft Corporation. All rights reserved.',
' * Version: ' + headerVersion,
' * Released under the Source EULA',
- ' * https://github.com/Microsoft/vscode/blob/master/LICENSE.txt',
+ ' * https://github.com/microsoft/vscode/blob/main/LICENSE.txt',
' *-----------------------------------------------------------*/',
''
].join('\n');
@@ -279,7 +279,7 @@ const finalEditorResourcesTask = task.define('final-editor-resources', () => {
// version.txt
gulp.src('build/monaco/version.txt')
.pipe(es.through(function (data) {
- data.contents = Buffer.from(`monaco-editor-core: https://github.com/Microsoft/vscode/tree/${sha1}`);
+ data.contents = Buffer.from(`monaco-editor-core: https://github.com/microsoft/vscode/tree/${sha1}`);
this.emit('data', data);
}))
.pipe(gulp.dest('out-monaco-editor-core')),
diff --git a/build/gulpfile.extensions.js b/build/gulpfile.extensions.js
index c03b0c4f0f..617cc47a96 100644
--- a/build/gulpfile.extensions.js
+++ b/build/gulpfile.extensions.js
@@ -8,7 +8,6 @@ require('events').EventEmitter.defaultMaxListeners = 100;
const gulp = require('gulp');
const path = require('path');
-const child_process = require('child_process');
const nodeUtil = require('util');
const es = require('event-stream');
const filter = require('gulp-filter');
@@ -20,8 +19,6 @@ const glob = require('glob');
const root = path.dirname(__dirname);
const commit = util.getVersion(root);
const plumber = require('gulp-plumber');
-const fancyLog = require('fancy-log');
-const ansiColors = require('ansi-colors');
const ext = require('./lib/extensions');
const extensionsPath = path.join(path.dirname(__dirname), 'extensions');
@@ -59,6 +56,7 @@ const compilations = glob.sync('**/tsconfig.json', {
// 'json-language-features/server/tsconfig.json',
// 'markdown-language-features/preview-src/tsconfig.json',
// 'markdown-language-features/tsconfig.json',
+// 'markdown-math/tsconfig.json',
// 'merge-conflict/tsconfig.json',
// 'microsoft-authentication/tsconfig.json',
// 'npm/tsconfig.json',
@@ -207,45 +205,17 @@ gulp.task(compileExtensionsBuildLegacyTask);
//#region Extension media
-// Additional projects to webpack. These typically build code for webviews
-const webpackMediaConfigFiles = [
- 'markdown-language-features/webpack.config.js',
- 'simple-browser/webpack.config.js',
-];
-
-// Additional projects to run esbuild on. These typically build code for webviews
-const esbuildMediaScripts = [
- 'markdown-language-features/esbuild.js',
- 'notebook-markdown-extensions/esbuild.js',
-];
-
-const compileExtensionMediaTask = task.define('compile-extension-media', () => buildExtensionMedia(false));
+const compileExtensionMediaTask = task.define('compile-extension-media', () => ext.buildExtensionMedia(false));
gulp.task(compileExtensionMediaTask);
exports.compileExtensionMediaTask = compileExtensionMediaTask;
-const watchExtensionMedia = task.define('watch-extension-media', () => buildExtensionMedia(true));
+const watchExtensionMedia = task.define('watch-extension-media', () => ext.buildExtensionMedia(true));
gulp.task(watchExtensionMedia);
exports.watchExtensionMedia = watchExtensionMedia;
-const compileExtensionMediaBuildTask = task.define('compile-extension-media-build', () => buildExtensionMedia(false, '.build/extensions'));
+const compileExtensionMediaBuildTask = task.define('compile-extension-media-build', () => ext.buildExtensionMedia(false, '.build/extensions'));
gulp.task(compileExtensionMediaBuildTask);
-async function buildExtensionMedia(isWatch, outputRoot) {
- const webpackConfigLocations = webpackMediaConfigFiles.map(p => {
- return {
- configPath: path.join(extensionsPath, p),
- outputRoot: outputRoot ? path.join(root, outputRoot, path.dirname(p)) : undefined
- };
- });
- return Promise.all([
- webpackExtensions('webpacking extension media', isWatch, webpackConfigLocations),
- esbuildExtensions('esbuilding extension media', isWatch, esbuildMediaScripts.map(p => ({
- script: path.join(extensionsPath, p),
- outputRoot: outputRoot ? path.join(root, outputRoot, path.dirname(p)) : undefined
- }))),
- ]);
-}
-
//#endregion
//#region Azure Pipelines
@@ -320,121 +290,5 @@ async function buildWebExtensions(isWatch) {
path.join(extensionsPath, '**', 'extension-browser.webpack.config.js'),
{ ignore: ['**/node_modules'] }
);
- return webpackExtensions('packaging web extension', isWatch, webpackConfigLocations.map(configPath => ({ configPath })));
-}
-
-/**
- * @param {string} taskName
- * @param {boolean} isWatch
- * @param {{ configPath: string, outputRoot?: boolean}} webpackConfigLocations
- */
-async function webpackExtensions(taskName, isWatch, webpackConfigLocations) {
- const webpack = require('webpack');
-
- const webpackConfigs = [];
-
- for (const { configPath, outputRoot } of webpackConfigLocations) {
- const configOrFnOrArray = require(configPath);
- function addConfig(configOrFn) {
- let config;
- if (typeof configOrFn === 'function') {
- config = configOrFn({}, {});
- webpackConfigs.push(config);
- } else {
- config = configOrFn;
- }
-
- if (outputRoot) {
- config.output.path = path.join(outputRoot, path.relative(path.dirname(configPath), config.output.path));
- }
-
- webpackConfigs.push(configOrFn);
- }
- addConfig(configOrFnOrArray);
- }
- function reporter(fullStats) {
- if (Array.isArray(fullStats.children)) {
- for (const stats of fullStats.children) {
- const outputPath = stats.outputPath;
- if (outputPath) {
- const relativePath = path.relative(extensionsPath, outputPath).replace(/\\/g, '/');
- const match = relativePath.match(/[^\/]+(\/server|\/client)?/);
- fancyLog(`Finished ${ansiColors.green(taskName)} ${ansiColors.cyan(match[0])} with ${stats.errors.length} errors.`);
- }
- if (Array.isArray(stats.errors)) {
- stats.errors.forEach(error => {
- fancyLog.error(error);
- });
- }
- if (Array.isArray(stats.warnings)) {
- stats.warnings.forEach(warning => {
- fancyLog.warn(warning);
- });
- }
- }
- }
- }
- return new Promise((resolve, reject) => {
- if (isWatch) {
- webpack(webpackConfigs).watch({}, (err, stats) => {
- if (err) {
- reject();
- } else {
- reporter(stats.toJson());
- }
- });
- } else {
- webpack(webpackConfigs).run((err, stats) => {
- if (err) {
- fancyLog.error(err);
- reject();
- } else {
- reporter(stats.toJson());
- resolve();
- }
- });
- }
- });
-}
-
-/**
- * @param {string} taskName
- * @param {boolean} isWatch
- * @param {{ script: string, outputRoot?: string }}} scripts
- */
-async function esbuildExtensions(taskName, isWatch, scripts) {
- function reporter(/** @type {string} */ stdError, /** @type {string} */script) {
- const matches = (stdError || '').match(/\> (.+): error: (.+)?/g);
- fancyLog(`Finished ${ansiColors.green(taskName)} ${script} with ${matches ? matches.length : 0} errors.`);
- for (const match of matches || []) {
- fancyLog.error(match);
- }
- }
-
- const tasks = scripts.map(({ script, outputRoot }) => {
- return new Promise((resolve, reject) => {
- const args = [script];
- if (isWatch) {
- args.push('--watch');
- }
- if (outputRoot) {
- args.push('--outputRoot', outputRoot);
- }
- const proc = child_process.execFile(process.argv[0], args, {}, (error, _stdout, stderr) => {
- if (error) {
- return reject(error);
- }
- reporter(stderr, script);
- if (stderr) {
- return reject();
- }
- return resolve();
- });
-
- proc.stdout.on('data', (data) => {
- fancyLog(`${ansiColors.green(taskName)}: ${data.toString('utf8')}`);
- });
- });
- });
- return Promise.all(tasks);
+ return ext.webpackExtensions('packaging web extension', isWatch, webpackConfigLocations.map(configPath => ({ configPath })));
}
diff --git a/build/gulpfile.vscode.js b/build/gulpfile.vscode.js
index 8c405c4520..71eea44795 100644
--- a/build/gulpfile.vscode.js
+++ b/build/gulpfile.vscode.js
@@ -283,7 +283,14 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op
.pipe(jsFilter)
.pipe(util.rewriteSourceMappingURL(sourceMappingURLBase))
.pipe(jsFilter.restore)
- .pipe(createAsar(path.join(process.cwd(), 'node_modules'), ['**/*.node', '**/vscode-ripgrep/bin/*', '**/node-pty/build/Release/*', '**/*.wasm'], 'node_modules.asar'));
+ .pipe(createAsar(path.join(process.cwd(), 'node_modules'), [
+ '**/*.node',
+ '**/vscode-ripgrep/bin/*',
+ '**/node-pty/build/Release/*',
+ '**/node-pty/lib/worker/conoutSocketWorker.js',
+ '**/node-pty/lib/shared/conout.js',
+ '**/*.wasm'
+ ], 'node_modules.asar'));
let all = es.merge(
packageJsonStream,
@@ -439,8 +446,6 @@ BUILD_TARGETS.forEach(buildTarget => {
}
});
-// Transifex Localizations
-
const innoSetupConfig = {
'zh-cn': { codePage: 'CP936', defaultInfo: { name: 'Simplified Chinese', id: '$0804', } },
'zh-tw': { codePage: 'CP950', defaultInfo: { name: 'Traditional Chinese', id: '$0404' } },
@@ -456,6 +461,8 @@ const innoSetupConfig = {
'tr': { codePage: 'CP1254' }
};
+// Transifex Localizations
+
const apiHostname = process.env.TRANSIFEX_API_URL;
const apiName = process.env.TRANSIFEX_API_NAME;
const apiToken = process.env.TRANSIFEX_API_TOKEN;
@@ -491,7 +498,7 @@ const vscodeTranslationsExport = task.define(
function () {
const pathToMetadata = './out-vscode/nls.metadata.json';
const pathToExtensions = '.build/extensions/*';
- const pathToSetup = 'build/win32/**/{Default.isl,messages.en.isl}';
+ const pathToSetup = 'build/win32/i18n/messages.en.isl';
return es.merge(
gulp.src(pathToMetadata).pipe(i18n.createXlfFilesForCoreBundle()),
diff --git a/build/lib/builtInExtensions.js b/build/lib/builtInExtensions.js
index 7495f0595a..a01b233d0f 100644
--- a/build/lib/builtInExtensions.js
+++ b/build/lib/builtInExtensions.js
@@ -18,8 +18,8 @@ const ansiColors = require("ansi-colors");
const mkdirp = require('mkdirp');
const root = path.dirname(path.dirname(__dirname));
const productjson = JSON.parse(fs.readFileSync(path.join(__dirname, '../../product.json'), 'utf8'));
-const builtInExtensions = productjson.builtInExtensions;
-const webBuiltInExtensions = productjson.webBuiltInExtensions;
+const builtInExtensions = productjson.builtInExtensions || [];
+const webBuiltInExtensions = productjson.webBuiltInExtensions || [];
const controlFilePath = path.join(os.homedir(), '.vscode-oss-dev', 'extensions', 'control.json');
const ENABLE_LOGGING = !process.env['VSCODE_BUILD_BUILTIN_EXTENSIONS_SILENCE_PLEASE'];
function log(...messages) {
diff --git a/build/lib/builtInExtensions.ts b/build/lib/builtInExtensions.ts
index 4c9fc5d22f..eee9f99158 100644
--- a/build/lib/builtInExtensions.ts
+++ b/build/lib/builtInExtensions.ts
@@ -36,8 +36,8 @@ export interface IExtensionDefinition {
const root = path.dirname(path.dirname(__dirname));
const productjson = JSON.parse(fs.readFileSync(path.join(__dirname, '../../product.json'), 'utf8'));
-const builtInExtensions = productjson.builtInExtensions;
-const webBuiltInExtensions = productjson.webBuiltInExtensions;
+const builtInExtensions = productjson.builtInExtensions || [];
+const webBuiltInExtensions = productjson.webBuiltInExtensions || [];
const controlFilePath = path.join(os.homedir(), '.vscode-oss-dev', 'extensions', 'control.json');
const ENABLE_LOGGING = !process.env['VSCODE_BUILD_BUILTIN_EXTENSIONS_SILENCE_PLEASE'];
diff --git a/build/lib/builtInExtensionsCG.js b/build/lib/builtInExtensionsCG.js
index 64b9064c8e..a08b72b3ec 100644
--- a/build/lib/builtInExtensionsCG.js
+++ b/build/lib/builtInExtensionsCG.js
@@ -1,7 +1,7 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
- * Licensed under the MIT License. See License.txt in the project root for license information.
+ * Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const got_1 = require("got");
@@ -12,8 +12,8 @@ const ansiColors = require("ansi-colors");
const root = path.dirname(path.dirname(__dirname));
const rootCG = path.join(root, 'extensionsCG');
const productjson = JSON.parse(fs.readFileSync(path.join(__dirname, '../../product.json'), 'utf8'));
-const builtInExtensions = productjson.builtInExtensions;
-const webBuiltInExtensions = productjson.webBuiltInExtensions;
+const builtInExtensions = productjson.builtInExtensions || [];
+const webBuiltInExtensions = productjson.webBuiltInExtensions || [];
const token = process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'] || undefined;
const contentBasePath = 'raw.githubusercontent.com';
const contentFileNames = ['package.json', 'package-lock.json', 'yarn.lock'];
@@ -25,7 +25,7 @@ async function downloadExtensionDetails(extension) {
const promises = [];
for (const fileName of contentFileNames) {
promises.push(new Promise(resolve => {
- (0, got_1.default)(`${repositoryContentBaseUrl}/${fileName}`)
+ got_1.default(`${repositoryContentBaseUrl}/${fileName}`)
.then(response => {
resolve({ fileName, body: response.rawBody });
})
diff --git a/build/lib/builtInExtensionsCG.ts b/build/lib/builtInExtensionsCG.ts
index 45785529b6..21c970e5f7 100644
--- a/build/lib/builtInExtensionsCG.ts
+++ b/build/lib/builtInExtensionsCG.ts
@@ -1,6 +1,6 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
- * Licensed under the MIT License. See License.txt in the project root for license information.
+ * Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import got from 'got';
@@ -13,8 +13,8 @@ import { IExtensionDefinition } from './builtInExtensions';
const root = path.dirname(path.dirname(__dirname));
const rootCG = path.join(root, 'extensionsCG');
const productjson = JSON.parse(fs.readFileSync(path.join(__dirname, '../../product.json'), 'utf8'));
-const builtInExtensions = productjson.builtInExtensions;
-const webBuiltInExtensions = productjson.webBuiltInExtensions;
+const builtInExtensions = productjson.builtInExtensions || [];
+const webBuiltInExtensions = productjson.webBuiltInExtensions || [];
const token = process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'] || undefined;
const contentBasePath = 'raw.githubusercontent.com';
diff --git a/build/lib/compilation.js b/build/lib/compilation.js
index b545199e78..22a8ee8be1 100644
--- a/build/lib/compilation.js
+++ b/build/lib/compilation.js
@@ -9,7 +9,7 @@ const es = require("event-stream");
const fs = require("fs");
const gulp = require("gulp");
const path = require("path");
-const monacodts = require("../monaco/api");
+const monacodts = require("./monaco-api");
const nls = require("./nls");
const reporter_1 = require("./reporter");
const util = require("./util");
@@ -17,7 +17,7 @@ const fancyLog = require("fancy-log");
const ansiColors = require("ansi-colors");
const os = require("os");
const watch = require('./watch');
-const reporter = (0, reporter_1.createReporter)();
+const reporter = reporter_1.createReporter();
function getTypeScriptCompilerOptions(src) {
const rootDir = path.join(__dirname, `../../${src}`);
let options = {};
diff --git a/build/lib/compilation.ts b/build/lib/compilation.ts
index 282dae530e..610a7999f5 100644
--- a/build/lib/compilation.ts
+++ b/build/lib/compilation.ts
@@ -9,7 +9,7 @@ import * as es from 'event-stream';
import * as fs from 'fs';
import * as gulp from 'gulp';
import * as path from 'path';
-import * as monacodts from '../monaco/api';
+import * as monacodts from './monaco-api';
import * as nls from './nls';
import { createReporter } from './reporter';
import * as util from './util';
diff --git a/build/lib/eslint/code-import-patterns.js b/build/lib/eslint/code-import-patterns.js
index 52adf71a64..5babda400c 100644
--- a/build/lib/eslint/code-import-patterns.js
+++ b/build/lib/eslint/code-import-patterns.js
@@ -21,7 +21,7 @@ module.exports = new class {
const configs = context.options;
for (const config of configs) {
if (minimatch(context.getFilename(), config.target)) {
- return (0, utils_1.createImportRuleListener)((node, value) => this._checkImport(context, config, node, value));
+ return utils_1.createImportRuleListener((node, value) => this._checkImport(context, config, node, value));
}
}
return {};
@@ -29,7 +29,7 @@ module.exports = new class {
_checkImport(context, config, node, path) {
// resolve relative paths
if (path[0] === '.') {
- path = (0, path_1.join)(context.getFilename(), path);
+ path = path_1.join(context.getFilename(), path);
}
let restrictions;
if (typeof config.restrictions === 'string') {
diff --git a/build/lib/eslint/code-layering.js b/build/lib/eslint/code-layering.js
index d8b70f5ac2..bac676755b 100644
--- a/build/lib/eslint/code-layering.js
+++ b/build/lib/eslint/code-layering.js
@@ -17,7 +17,7 @@ module.exports = new class {
};
}
create(context) {
- const fileDirname = (0, path_1.dirname)(context.getFilename());
+ const fileDirname = path_1.dirname(context.getFilename());
const parts = fileDirname.split(/\\|\//);
const ruleArgs = context.options[0];
let config;
@@ -39,11 +39,11 @@ module.exports = new class {
// nothing
return {};
}
- return (0, utils_1.createImportRuleListener)((node, path) => {
+ return utils_1.createImportRuleListener((node, path) => {
if (path[0] === '.') {
- path = (0, path_1.join)((0, path_1.dirname)(context.getFilename()), path);
+ path = path_1.join(path_1.dirname(context.getFilename()), path);
}
- const parts = (0, path_1.dirname)(path).split(/\\|\//);
+ const parts = path_1.dirname(path).split(/\\|\//);
for (let i = parts.length - 1; i >= 0; i--) {
const part = parts[i];
if (config.allowed.has(part)) {
diff --git a/build/lib/eslint/code-no-nls-in-standalone-editor.js b/build/lib/eslint/code-no-nls-in-standalone-editor.js
index 5d508810d1..1f1eabfcba 100644
--- a/build/lib/eslint/code-no-nls-in-standalone-editor.js
+++ b/build/lib/eslint/code-no-nls-in-standalone-editor.js
@@ -20,10 +20,10 @@ module.exports = new class NoNlsInStandaloneEditorRule {
|| /vs(\/|\\)editor(\/|\\)editor.api/.test(fileName)
|| /vs(\/|\\)editor(\/|\\)editor.main/.test(fileName)
|| /vs(\/|\\)editor(\/|\\)editor.worker/.test(fileName)) {
- return (0, utils_1.createImportRuleListener)((node, path) => {
+ return utils_1.createImportRuleListener((node, path) => {
// resolve relative paths
if (path[0] === '.') {
- path = (0, path_1.join)(context.getFilename(), path);
+ path = path_1.join(context.getFilename(), path);
}
if (/vs(\/|\\)nls/.test(path)) {
context.report({
diff --git a/build/lib/eslint/code-no-standalone-editor.js b/build/lib/eslint/code-no-standalone-editor.js
index 5812f1a1cc..df97c4d7e0 100644
--- a/build/lib/eslint/code-no-standalone-editor.js
+++ b/build/lib/eslint/code-no-standalone-editor.js
@@ -21,10 +21,10 @@ module.exports = new class NoNlsInStandaloneEditorRule {
// the vs/editor folder is allowed to use the standalone editor
return {};
}
- return (0, utils_1.createImportRuleListener)((node, path) => {
+ return utils_1.createImportRuleListener((node, path) => {
// resolve relative paths
if (path[0] === '.') {
- path = (0, path_1.join)(context.getFilename(), path);
+ path = path_1.join(context.getFilename(), path);
}
if (/vs(\/|\\)editor(\/|\\)standalone(\/|\\)/.test(path)
|| /vs(\/|\\)editor(\/|\\)common(\/|\\)standalone(\/|\\)/.test(path)
diff --git a/build/lib/eslint/code-no-unused-expressions.js b/build/lib/eslint/code-no-unused-expressions.js
index 21a29c94ea..bc6b7519a7 100644
--- a/build/lib/eslint/code-no-unused-expressions.js
+++ b/build/lib/eslint/code-no-unused-expressions.js
@@ -2,144 +2,124 @@
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
-
// FORKED FROM https://github.com/eslint/eslint/blob/b23ad0d789a909baf8d7c41a35bc53df932eaf30/lib/rules/no-unused-expressions.js
// and added support for `OptionalCallExpression`, see https://github.com/facebook/create-react-app/issues/8107 and https://github.com/eslint/eslint/issues/12642
-
/**
* @fileoverview Flag expressions in statement position that do not side effect
* @author Michael Ficarra
*/
-
'use strict';
-
+Object.defineProperty(exports, "__esModule", { value: true });
//------------------------------------------------------------------------------
// Rule Definition
//------------------------------------------------------------------------------
-
module.exports = {
- meta: {
- type: 'suggestion',
-
- docs: {
- description: 'disallow unused expressions',
- category: 'Best Practices',
- recommended: false,
- url: 'https://eslint.org/docs/rules/no-unused-expressions'
- },
-
- schema: [
- {
- type: 'object',
- properties: {
- allowShortCircuit: {
- type: 'boolean',
- default: false
- },
- allowTernary: {
- type: 'boolean',
- default: false
- },
- allowTaggedTemplates: {
- type: 'boolean',
- default: false
- }
- },
- additionalProperties: false
- }
- ]
- },
-
- create(context) {
- const config = context.options[0] || {},
- allowShortCircuit = config.allowShortCircuit || false,
- allowTernary = config.allowTernary || false,
- allowTaggedTemplates = config.allowTaggedTemplates || false;
-
- // eslint-disable-next-line jsdoc/require-description
+ meta: {
+ type: 'suggestion',
+ docs: {
+ description: 'disallow unused expressions',
+ category: 'Best Practices',
+ recommended: false,
+ url: 'https://eslint.org/docs/rules/no-unused-expressions'
+ },
+ schema: [
+ {
+ type: 'object',
+ properties: {
+ allowShortCircuit: {
+ type: 'boolean',
+ default: false
+ },
+ allowTernary: {
+ type: 'boolean',
+ default: false
+ },
+ allowTaggedTemplates: {
+ type: 'boolean',
+ default: false
+ }
+ },
+ additionalProperties: false
+ }
+ ]
+ },
+ create(context) {
+ const config = context.options[0] || {},
+ allowShortCircuit = config.allowShortCircuit || false,
+ allowTernary = config.allowTernary || false,
+ allowTaggedTemplates = config.allowTaggedTemplates || false;
+ // eslint-disable-next-line jsdoc/require-description
/**
- * @param {ASTNode} node any node
- * @returns {boolean} whether the given node structurally represents a directive
+ * @param node any node
+ * @returns whether the given node structurally represents a directive
*/
- function looksLikeDirective(node) {
- return node.type === 'ExpressionStatement' &&
- node.expression.type === 'Literal' && typeof node.expression.value === 'string';
- }
-
- // eslint-disable-next-line jsdoc/require-description
+ function looksLikeDirective(node) {
+ return node.type === 'ExpressionStatement' &&
+ node.expression.type === 'Literal' && typeof node.expression.value === 'string';
+ }
+ // eslint-disable-next-line jsdoc/require-description
/**
- * @param {Function} predicate ([a] -> Boolean) the function used to make the determination
- * @param {a[]} list the input list
- * @returns {a[]} the leading sequence of members in the given list that pass the given predicate
+ * @param predicate ([a] -> Boolean) the function used to make the determination
+ * @param list the input list
+ * @returns the leading sequence of members in the given list that pass the given predicate
*/
- function takeWhile(predicate, list) {
- for (let i = 0; i < list.length; ++i) {
- if (!predicate(list[i])) {
- return list.slice(0, i);
- }
- }
- return list.slice();
- }
-
- // eslint-disable-next-line jsdoc/require-description
+ function takeWhile(predicate, list) {
+ for (let i = 0; i < list.length; ++i) {
+ if (!predicate(list[i])) {
+ return list.slice(0, i);
+ }
+ }
+ return list.slice();
+ }
+ // eslint-disable-next-line jsdoc/require-description
/**
- * @param {ASTNode} node a Program or BlockStatement node
- * @returns {ASTNode[]} the leading sequence of directive nodes in the given node's body
+ * @param node a Program or BlockStatement node
+ * @returns the leading sequence of directive nodes in the given node's body
*/
- function directives(node) {
- return takeWhile(looksLikeDirective, node.body);
- }
-
- // eslint-disable-next-line jsdoc/require-description
+ function directives(node) {
+ return takeWhile(looksLikeDirective, node.body);
+ }
+ // eslint-disable-next-line jsdoc/require-description
/**
- * @param {ASTNode} node any node
- * @param {ASTNode[]} ancestors the given node's ancestors
- * @returns {boolean} whether the given node is considered a directive in its current position
+ * @param node any node
+ * @param ancestors the given node's ancestors
+ * @returns whether the given node is considered a directive in its current position
*/
- function isDirective(node, ancestors) {
- const parent = ancestors[ancestors.length - 1],
- grandparent = ancestors[ancestors.length - 2];
-
- return (parent.type === 'Program' || parent.type === 'BlockStatement' &&
- (/Function/u.test(grandparent.type))) &&
- directives(parent).indexOf(node) >= 0;
- }
-
+ function isDirective(node, ancestors) {
+ const parent = ancestors[ancestors.length - 1], grandparent = ancestors[ancestors.length - 2];
+ return (parent.type === 'Program' || parent.type === 'BlockStatement' &&
+ (/Function/u.test(grandparent.type))) &&
+ directives(parent).indexOf(node) >= 0;
+ }
/**
* Determines whether or not a given node is a valid expression. Recurses on short circuit eval and ternary nodes if enabled by flags.
- * @param {ASTNode} node any node
- * @returns {boolean} whether the given node is a valid expression
+ * @param node any node
+ * @returns whether the given node is a valid expression
*/
- function isValidExpression(node) {
- if (allowTernary) {
-
- // Recursive check for ternary and logical expressions
- if (node.type === 'ConditionalExpression') {
- return isValidExpression(node.consequent) && isValidExpression(node.alternate);
- }
- }
-
- if (allowShortCircuit) {
- if (node.type === 'LogicalExpression') {
- return isValidExpression(node.right);
- }
- }
-
- if (allowTaggedTemplates && node.type === 'TaggedTemplateExpression') {
- return true;
- }
-
- return /^(?:Assignment|OptionalCall|Call|New|Update|Yield|Await)Expression$/u.test(node.type) ||
- (node.type === 'UnaryExpression' && ['delete', 'void'].indexOf(node.operator) >= 0);
- }
-
- return {
- ExpressionStatement(node) {
- if (!isValidExpression(node.expression) && !isDirective(node, context.getAncestors())) {
- context.report({ node, message: 'Expected an assignment or function call and instead saw an expression.' });
- }
- }
- };
-
- }
+ function isValidExpression(node) {
+ if (allowTernary) {
+ // Recursive check for ternary and logical expressions
+ if (node.type === 'ConditionalExpression') {
+ return isValidExpression(node.consequent) && isValidExpression(node.alternate);
+ }
+ }
+ if (allowShortCircuit) {
+ if (node.type === 'LogicalExpression') {
+ return isValidExpression(node.right);
+ }
+ }
+ if (allowTaggedTemplates && node.type === 'TaggedTemplateExpression') {
+ return true;
+ }
+ return /^(?:Assignment|OptionalCall|Call|New|Update|Yield|Await)Expression$/u.test(node.type) ||
+ (node.type === 'UnaryExpression' && ['delete', 'void'].indexOf(node.operator) >= 0);
+ }
+ return {
+ ExpressionStatement(node) {
+ if (!isValidExpression(node.expression) && !isDirective(node, context.getAncestors())) {
+ context.report({ node: node, message: 'Expected an assignment or function call and instead saw an expression.' });
+ }
+ }
+ };
+ }
};
diff --git a/build/lib/eslint/code-translation-remind.js b/build/lib/eslint/code-translation-remind.js
index 4107285d76..01a39c82bb 100644
--- a/build/lib/eslint/code-translation-remind.js
+++ b/build/lib/eslint/code-translation-remind.js
@@ -15,7 +15,7 @@ module.exports = new (_a = class TranslationRemind {
};
}
create(context) {
- return (0, utils_1.createImportRuleListener)((node, path) => this._checkImport(context, node, path));
+ return utils_1.createImportRuleListener((node, path) => this._checkImport(context, node, path));
}
_checkImport(context, node, path) {
if (path !== TranslationRemind.NLS_MODULE) {
@@ -31,7 +31,7 @@ module.exports = new (_a = class TranslationRemind {
let resourceDefined = false;
let json;
try {
- json = (0, fs_1.readFileSync)('./build/lib/i18n.resources.json', 'utf8');
+ json = fs_1.readFileSync('./build/lib/i18n.resources.json', 'utf8');
}
catch (e) {
console.error('[translation-remind rule]: File with resources to pull from Transifex was not found. Aborting translation resource check for newly defined workbench part/service.');
diff --git a/build/lib/eslint/vscode-dts-vscode-in-comments.js b/build/lib/eslint/vscode-dts-vscode-in-comments.js
new file mode 100644
index 0000000000..8f9a13fb01
--- /dev/null
+++ b/build/lib/eslint/vscode-dts-vscode-in-comments.js
@@ -0,0 +1,45 @@
+"use strict";
+/*---------------------------------------------------------------------------------------------
+ * Copyright (c) Microsoft Corporation. All rights reserved.
+ * Licensed under the MIT License. See License.txt in the project root for license information.
+ *--------------------------------------------------------------------------------------------*/
+module.exports = new class ApiVsCodeInComments {
+ constructor() {
+ this.meta = {
+ messages: {
+ comment: `Don't use the term 'vs code' in comments`
+ }
+ };
+ }
+ create(context) {
+ const sourceCode = context.getSourceCode();
+ return {
+ ['Program']: (_node) => {
+ for (const comment of sourceCode.getAllComments()) {
+ if (comment.type !== 'Block') {
+ continue;
+ }
+ if (!comment.range) {
+ continue;
+ }
+ const startIndex = comment.range[0] + '/*'.length;
+ const re = /vs code/ig;
+ let match;
+ while ((match = re.exec(comment.value))) {
+ // Allow using 'VS Code' in quotes
+ if (comment.value[match.index - 1] === `'` && comment.value[match.index + match[0].length] === `'`) {
+ continue;
+ }
+ // Types for eslint seem incorrect
+ const start = sourceCode.getLocFromIndex(startIndex + match.index);
+ const end = sourceCode.getLocFromIndex(startIndex + match.index + match[0].length);
+ context.report({
+ messageId: 'comment',
+ loc: { start, end }
+ });
+ }
+ }
+ }
+ };
+ }
+};
diff --git a/build/lib/eslint/vscode-dts-vscode-in-comments.ts b/build/lib/eslint/vscode-dts-vscode-in-comments.ts
new file mode 100644
index 0000000000..1410fc2f42
--- /dev/null
+++ b/build/lib/eslint/vscode-dts-vscode-in-comments.ts
@@ -0,0 +1,53 @@
+/*---------------------------------------------------------------------------------------------
+ * Copyright (c) Microsoft Corporation. All rights reserved.
+ * Licensed under the Source EULA. See License.txt in the project root for license information.
+ *--------------------------------------------------------------------------------------------*/
+
+import * as eslint from 'eslint';
+import type * as estree from 'estree';
+
+export = new class ApiVsCodeInComments implements eslint.Rule.RuleModule {
+
+ readonly meta: eslint.Rule.RuleMetaData = {
+ messages: {
+ comment: `Don't use the term 'vs code' in comments`
+ }
+ };
+
+ create(context: eslint.Rule.RuleContext): eslint.Rule.RuleListener {
+
+ const sourceCode = context.getSourceCode();
+
+ return {
+ ['Program']: (_node: any) => {
+
+ for (const comment of sourceCode.getAllComments()) {
+ if (comment.type !== 'Block') {
+ continue;
+ }
+ if (!comment.range) {
+ continue;
+ }
+
+ const startIndex = comment.range[0] + '/*'.length;
+ const re = /vs code/ig;
+ let match: RegExpExecArray | null;
+ while ((match = re.exec(comment.value))) {
+ // Allow using 'VS Code' in quotes
+ if (comment.value[match.index - 1] === `'` && comment.value[match.index + match[0].length] === `'`) {
+ continue;
+ }
+
+ // Types for eslint seem incorrect
+ const start = sourceCode.getLocFromIndex(startIndex + match.index) as any as estree.Position;
+ const end = sourceCode.getLocFromIndex(startIndex + match.index + match[0].length) as any as estree.Position;
+ context.report({
+ messageId: 'comment',
+ loc: { start, end }
+ });
+ }
+ }
+ }
+ };
+ }
+};
diff --git a/build/lib/extensions.js b/build/lib/extensions.js
index a13ad382b1..20de293871 100644
--- a/build/lib/extensions.js
+++ b/build/lib/extensions.js
@@ -4,9 +4,10 @@
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
-exports.translatePackageJSON = exports.packageRebuildExtensionsStream = exports.cleanRebuildExtensions = exports.packageExternalExtensionsStream = exports.scanBuiltinExtensions = exports.packageMarketplaceExtensionsStream = exports.packageLocalExtensionsStream = exports.vscodeExternalExtensions = exports.fromMarketplace = exports.fromLocalNormal = exports.fromLocal = void 0;
+exports.buildExtensionMedia = exports.webpackExtensions = exports.translatePackageJSON = exports.packageRebuildExtensionsStream = exports.cleanRebuildExtensions = exports.packageExternalExtensionsStream = exports.scanBuiltinExtensions = exports.packageMarketplaceExtensionsStream = exports.packageLocalExtensionsStream = exports.vscodeExternalExtensions = exports.fromMarketplace = exports.fromLocalNormal = exports.fromLocal = void 0;
const es = require("event-stream");
const fs = require("fs");
+const cp = require("child_process");
const glob = require("glob");
const gulp = require("gulp");
const path = require("path");
@@ -23,7 +24,7 @@ const jsoncParser = require("jsonc-parser");
const util = require('./util');
const root = path.dirname(path.dirname(__dirname));
const commit = util.getVersion(root);
-const sourceMappingURLBase = `https://sqlopsbuilds.blob.core.windows.net/sourcemaps/${commit}`;
+const sourceMappingURLBase = `https://sqlopsbuilds.blob.core.windows.net/sourcemaps/${commit}`; // {{SQL CARBON EDIT}}
function minifyExtensionResources(input) {
const jsonFilter = filter(['**/*.json', '**/*.code-snippets'], { restore: true });
return input
@@ -144,7 +145,7 @@ function fromLocalWebpack(extensionPath, webpackConfigFileName) {
console.error(packagedDependencies);
result.emit('error', err);
});
- return result.pipe((0, stats_1.createStatsStream)(path.basename(extensionPath)));
+ return result.pipe(stats_1.createStatsStream(path.basename(extensionPath)));
}
function fromLocalNormal(extensionPath) {
const result = es.through();
@@ -162,7 +163,7 @@ function fromLocalNormal(extensionPath) {
es.readArray(files).pipe(result);
})
.catch(err => result.emit('error', err));
- return result.pipe((0, stats_1.createStatsStream)(path.basename(extensionPath)));
+ return result.pipe(stats_1.createStatsStream(path.basename(extensionPath)));
}
exports.fromLocalNormal = fromLocalNormal;
const baseHeaders = {
@@ -174,7 +175,7 @@ function fromMarketplace(extensionName, version, metadata) {
const remote = require('gulp-remote-retry-src');
const json = require('gulp-json-editor');
const [, name] = extensionName.split('.');
- const url = `https://sqlopsextensions.blob.core.windows.net/extensions/${name}/${name}-${version}.vsix`;
+ const url = `https://sqlopsextensions.blob.core.windows.net/extensions/${name}/${name}-${version}.vsix`; // {{SQL CARBON EDIT}}
fancyLog('Downloading extension:', ansiColors.yellow(`${extensionName}@${version}`), '...');
const options = {
base: url,
@@ -346,6 +347,7 @@ function scanBuiltinExtensions(extensionsRoot, exclude = []) {
}
}
exports.scanBuiltinExtensions = scanBuiltinExtensions;
+// {{SQL CARBON EDIT}} start
function packageExternalExtensionsStream() {
const extenalExtensionDescriptions = glob.sync('extensions/*/package.json')
.map(manifestPath => {
@@ -361,7 +363,6 @@ function packageExternalExtensionsStream() {
return es.merge(builtExtensions);
}
exports.packageExternalExtensionsStream = packageExternalExtensionsStream;
-// {{SQL CARBON EDIT}} start
function cleanRebuildExtensions(root) {
return Promise.all(rebuildExtensions.map(async (e) => {
await util2.rimraf(path.join(root, e))();
@@ -408,3 +409,132 @@ function translatePackageJSON(packageJSON, packageNLSPath) {
return packageJSON;
}
exports.translatePackageJSON = translatePackageJSON;
+const extensionsPath = path.join(root, 'extensions');
+// Additional projects to webpack. These typically build code for webviews
+const webpackMediaConfigFiles = [
+ 'markdown-language-features/webpack.config.js',
+ 'simple-browser/webpack.config.js',
+];
+// Additional projects to run esbuild on. These typically build code for webviews
+const esbuildMediaScripts = [
+ 'markdown-language-features/esbuild.js',
+ 'markdown-math/esbuild.js',
+];
+async function webpackExtensions(taskName, isWatch, webpackConfigLocations) {
+ const webpack = require('webpack');
+ const webpackConfigs = [];
+ for (const { configPath, outputRoot } of webpackConfigLocations) {
+ const configOrFnOrArray = require(configPath);
+ function addConfig(configOrFn) {
+ let config;
+ if (typeof configOrFn === 'function') {
+ config = configOrFn({}, {});
+ webpackConfigs.push(config);
+ }
+ else {
+ config = configOrFn;
+ }
+ if (outputRoot) {
+ config.output.path = path.join(outputRoot, path.relative(path.dirname(configPath), config.output.path));
+ }
+ webpackConfigs.push(configOrFn);
+ }
+ addConfig(configOrFnOrArray);
+ }
+ function reporter(fullStats) {
+ if (Array.isArray(fullStats.children)) {
+ for (const stats of fullStats.children) {
+ const outputPath = stats.outputPath;
+ if (outputPath) {
+ const relativePath = path.relative(extensionsPath, outputPath).replace(/\\/g, '/');
+ const match = relativePath.match(/[^\/]+(\/server|\/client)?/);
+ fancyLog(`Finished ${ansiColors.green(taskName)} ${ansiColors.cyan(match[0])} with ${stats.errors.length} errors.`);
+ }
+ if (Array.isArray(stats.errors)) {
+ stats.errors.forEach((error) => {
+ fancyLog.error(error);
+ });
+ }
+ if (Array.isArray(stats.warnings)) {
+ stats.warnings.forEach((warning) => {
+ fancyLog.warn(warning);
+ });
+ }
+ }
+ }
+ }
+ return new Promise((resolve, reject) => {
+ if (isWatch) {
+ webpack(webpackConfigs).watch({}, (err, stats) => {
+ if (err) {
+ reject();
+ }
+ else {
+ reporter(stats.toJson());
+ }
+ });
+ }
+ else {
+ webpack(webpackConfigs).run((err, stats) => {
+ if (err) {
+ fancyLog.error(err);
+ reject();
+ }
+ else {
+ reporter(stats.toJson());
+ resolve();
+ }
+ });
+ }
+ });
+}
+exports.webpackExtensions = webpackExtensions;
+async function esbuildExtensions(taskName, isWatch, scripts) {
+ function reporter(stdError, script) {
+ const matches = (stdError || '').match(/\> (.+): error: (.+)?/g);
+ fancyLog(`Finished ${ansiColors.green(taskName)} ${script} with ${matches ? matches.length : 0} errors.`);
+ for (const match of matches || []) {
+ fancyLog.error(match);
+ }
+ }
+ const tasks = scripts.map(({ script, outputRoot }) => {
+ return new Promise((resolve, reject) => {
+ const args = [script];
+ if (isWatch) {
+ args.push('--watch');
+ }
+ if (outputRoot) {
+ args.push('--outputRoot', outputRoot);
+ }
+ const proc = cp.execFile(process.argv[0], args, {}, (error, _stdout, stderr) => {
+ if (error) {
+ return reject(error);
+ }
+ reporter(stderr, script);
+ if (stderr) {
+ return reject();
+ }
+ return resolve();
+ });
+ proc.stdout.on('data', (data) => {
+ fancyLog(`${ansiColors.green(taskName)}: ${data.toString('utf8')}`);
+ });
+ });
+ });
+ return Promise.all(tasks);
+}
+async function buildExtensionMedia(isWatch, outputRoot) {
+ return Promise.all([
+ webpackExtensions('webpacking extension media', isWatch, webpackMediaConfigFiles.map(p => {
+ return {
+ configPath: path.join(extensionsPath, p),
+ outputRoot: outputRoot ? path.join(root, outputRoot, path.dirname(p)) : undefined
+ };
+ })),
+ esbuildExtensions('esbuilding extension media', isWatch, esbuildMediaScripts.map(p => ({
+ script: path.join(extensionsPath, p),
+ outputRoot: outputRoot ? path.join(root, outputRoot, path.dirname(p)) : undefined
+ }))),
+ ]);
+}
+exports.buildExtensionMedia = buildExtensionMedia;
diff --git a/build/lib/extensions.ts b/build/lib/extensions.ts
index fa50c88117..1b01a0bea8 100644
--- a/build/lib/extensions.ts
+++ b/build/lib/extensions.ts
@@ -5,6 +5,7 @@
import * as es from 'event-stream';
import * as fs from 'fs';
+import * as cp from 'child_process';
import * as glob from 'glob';
import * as gulp from 'gulp';
import * as path from 'path';
@@ -19,10 +20,11 @@ import * as fancyLog from 'fancy-log';
import * as ansiColors from 'ansi-colors';
const buffer = require('gulp-buffer');
import * as jsoncParser from 'jsonc-parser';
+import webpack = require('webpack');
const util = require('./util');
const root = path.dirname(path.dirname(__dirname));
const commit = util.getVersion(root);
-const sourceMappingURLBase = `https://sqlopsbuilds.blob.core.windows.net/sourcemaps/${commit}`;
+const sourceMappingURLBase = `https://sqlopsbuilds.blob.core.windows.net/sourcemaps/${commit}`; // {{SQL CARBON EDIT}}
function minifyExtensionResources(input: Stream): Stream {
const jsonFilter = filter(['**/*.json', '**/*.code-snippets'], { restore: true });
@@ -205,7 +207,7 @@ export function fromMarketplace(extensionName: string, version: string, metadata
const json = require('gulp-json-editor') as typeof import('gulp-json-editor');
const [, name] = extensionName.split('.');
- const url = `https://sqlopsextensions.blob.core.windows.net/extensions/${name}/${name}-${version}.vsix`;
+ const url = `https://sqlopsextensions.blob.core.windows.net/extensions/${name}/${name}-${version}.vsix`; // {{SQL CARBON EDIT}}
fancyLog('Downloading extension:', ansiColors.yellow(`${extensionName}@${version}`), '...');
@@ -424,6 +426,7 @@ export function scanBuiltinExtensions(extensionsRoot: string, exclude: string[]
}
}
+// {{SQL CARBON EDIT}} start
export function packageExternalExtensionsStream(): NodeJS.ReadWriteStream {
const extenalExtensionDescriptions = (glob.sync('extensions/*/package.json'))
.map(manifestPath => {
@@ -441,7 +444,6 @@ export function packageExternalExtensionsStream(): NodeJS.ReadWriteStream {
return es.merge(builtExtensions);
}
-// {{SQL CARBON EDIT}} start
export function cleanRebuildExtensions(root: string): Promise {
return Promise.all(rebuildExtensions.map(async e => {
await util2.rimraf(path.join(root, e))();
@@ -487,3 +489,138 @@ export function translatePackageJSON(packageJSON: string, packageNLSPath: string
translate(packageJSON);
return packageJSON;
}
+
+const extensionsPath = path.join(root, 'extensions');
+
+// Additional projects to webpack. These typically build code for webviews
+const webpackMediaConfigFiles = [
+ 'markdown-language-features/webpack.config.js',
+ 'simple-browser/webpack.config.js',
+];
+
+// Additional projects to run esbuild on. These typically build code for webviews
+const esbuildMediaScripts = [
+ 'markdown-language-features/esbuild.js',
+ 'markdown-math/esbuild.js',
+];
+
+export async function webpackExtensions(taskName: string, isWatch: boolean, webpackConfigLocations: { configPath: string, outputRoot?: string }[]) {
+ const webpack = require('webpack') as typeof import('webpack');
+
+ const webpackConfigs: webpack.Configuration[] = [];
+
+ for (const { configPath, outputRoot } of webpackConfigLocations) {
+ const configOrFnOrArray = require(configPath);
+ function addConfig(configOrFn: webpack.Configuration | Function) {
+ let config;
+ if (typeof configOrFn === 'function') {
+ config = configOrFn({}, {});
+ webpackConfigs.push(config);
+ } else {
+ config = configOrFn;
+ }
+
+ if (outputRoot) {
+ config.output.path = path.join(outputRoot, path.relative(path.dirname(configPath), config.output.path));
+ }
+
+ webpackConfigs.push(configOrFn);
+ }
+ addConfig(configOrFnOrArray);
+ }
+ function reporter(fullStats: any) {
+ if (Array.isArray(fullStats.children)) {
+ for (const stats of fullStats.children) {
+ const outputPath = stats.outputPath;
+ if (outputPath) {
+ const relativePath = path.relative(extensionsPath, outputPath).replace(/\\/g, '/');
+ const match = relativePath.match(/[^\/]+(\/server|\/client)?/);
+ fancyLog(`Finished ${ansiColors.green(taskName)} ${ansiColors.cyan(match![0])} with ${stats.errors.length} errors.`);
+ }
+ if (Array.isArray(stats.errors)) {
+ stats.errors.forEach((error: any) => {
+ fancyLog.error(error);
+ });
+ }
+ if (Array.isArray(stats.warnings)) {
+ stats.warnings.forEach((warning: any) => {
+ fancyLog.warn(warning);
+ });
+ }
+ }
+ }
+ }
+ return new Promise((resolve, reject) => {
+ if (isWatch) {
+ webpack(webpackConfigs).watch({}, (err, stats) => {
+ if (err) {
+ reject();
+ } else {
+ reporter(stats.toJson());
+ }
+ });
+ } else {
+ webpack(webpackConfigs).run((err, stats) => {
+ if (err) {
+ fancyLog.error(err);
+ reject();
+ } else {
+ reporter(stats.toJson());
+ resolve();
+ }
+ });
+ }
+ });
+}
+
+async function esbuildExtensions(taskName: string, isWatch: boolean, scripts: { script: string, outputRoot?: string }[]) {
+ function reporter(stdError: string, script: string) {
+ const matches = (stdError || '').match(/\> (.+): error: (.+)?/g);
+ fancyLog(`Finished ${ansiColors.green(taskName)} ${script} with ${matches ? matches.length : 0} errors.`);
+ for (const match of matches || []) {
+ fancyLog.error(match);
+ }
+ }
+
+ const tasks = scripts.map(({ script, outputRoot }) => {
+ return new Promise((resolve, reject) => {
+ const args = [script];
+ if (isWatch) {
+ args.push('--watch');
+ }
+ if (outputRoot) {
+ args.push('--outputRoot', outputRoot);
+ }
+ const proc = cp.execFile(process.argv[0], args, {}, (error, _stdout, stderr) => {
+ if (error) {
+ return reject(error);
+ }
+ reporter(stderr, script);
+ if (stderr) {
+ return reject();
+ }
+ return resolve();
+ });
+
+ proc.stdout!.on('data', (data) => {
+ fancyLog(`${ansiColors.green(taskName)}: ${data.toString('utf8')}`);
+ });
+ });
+ });
+ return Promise.all(tasks);
+}
+
+export async function buildExtensionMedia(isWatch: boolean, outputRoot?: string) {
+ return Promise.all([
+ webpackExtensions('webpacking extension media', isWatch, webpackMediaConfigFiles.map(p => {
+ return {
+ configPath: path.join(extensionsPath, p),
+ outputRoot: outputRoot ? path.join(root, outputRoot, path.dirname(p)) : undefined
+ };
+ })),
+ esbuildExtensions('esbuilding extension media', isWatch, esbuildMediaScripts.map(p => ({
+ script: path.join(extensionsPath, p),
+ outputRoot: outputRoot ? path.join(root, outputRoot, path.dirname(p)) : undefined
+ }))),
+ ]);
+}
diff --git a/build/lib/i18n.js b/build/lib/i18n.js
index ad14ef33a8..5c4ce69ff9 100644
--- a/build/lib/i18n.js
+++ b/build/lib/i18n.js
@@ -4,14 +4,13 @@
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
-exports.prepareIslFiles = exports.prepareI18nPackFiles = exports.pullI18nPackFiles = exports.i18nPackVersion = exports.createI18nFile = exports.prepareI18nFiles = exports.pullSetupXlfFiles = exports.pullCoreAndExtensionsXlfFiles = exports.findObsoleteResources = exports.pushXlfFiles = exports.createXlfFilesForIsl = exports.createXlfFilesForExtensions = exports.createXlfFilesForCoreBundle = exports.getResource = exports.processNlsFiles = exports.Limiter = exports.XLF = exports.Line = exports.externalExtensionsWithTranslations = exports.extraLanguages = exports.defaultLanguages = void 0;
+exports.prepareIslFiles = exports.prepareI18nPackFiles = exports.i18nPackVersion = exports.createI18nFile = exports.prepareI18nFiles = exports.pullSetupXlfFiles = exports.findObsoleteResources = exports.pushXlfFiles = exports.createXlfFilesForIsl = exports.createXlfFilesForExtensions = exports.createXlfFilesForCoreBundle = exports.getResource = exports.processNlsFiles = exports.Limiter = exports.XLF = exports.Line = exports.externalExtensionsWithTranslations = exports.extraLanguages = exports.defaultLanguages = void 0;
const path = require("path");
const fs = require("fs");
const event_stream_1 = require("event-stream");
const File = require("vinyl");
const Is = require("is");
const xml2js = require("xml2js");
-const glob = require("glob");
const https = require("https");
const gulp = require("gulp");
const fancyLog = require("fancy-log");
@@ -110,12 +109,16 @@ class XLF {
}
toString() {
this.appendHeader();
- for (let file in this.files) {
+ const files = Object.keys(this.files).sort();
+ for (const file of files) {
this.appendNewLine(``, 2);
- for (let item of this.files[file]) {
+ const items = this.files[file].sort((a, b) => {
+ return a.id < b.id ? -1 : a.id > b.id ? 1 : 0;
+ });
+ for (const item of items) {
this.addStringItem(file, item);
}
- this.appendNewLine('', 2);
+ this.appendNewLine('