Compare commits


13 Commits

Author SHA1 Message Date
Cory Rivera
78a42e1d11 Check if python executable exists before querying user package directory. (#6345) 2019-07-09 18:43:32 -07:00
Cory Rivera
d2e758c0d7 Fix python install issues caused by other preexisting Python versions. (#6294)
* Remove --user option when doing pip installs for our standalone Python version.

* Use force-reinstall option when installing sparkmagic since we use a custom version.

* Use force-reinstall when installing pip packages from Manage Packages dialog so that dependencies don't get split across multiple locations.

* Update PATH after install to include additional package directories.
2019-07-09 17:13:47 -07:00
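The pip behavior described in the bullets above amounts to dropping `--user` and forcing reinstalls against the bundled interpreter. A minimal shell sketch, assuming a hypothetical path for the standalone Python that ships with notebook support (the real commands live in the extension's install code):

```bash
# Illustrative sketch only; the interpreter path and package names are assumptions.
PYTHON="$HOME/azuredatastudio-python/bin/python"   # bundled standalone Python

# No --user flag: packages stay inside the standalone install instead of the
# user site-packages of some other preexisting Python.
# --force-reinstall: the custom sparkmagic build replaces any copy pulled in
# earlier, and Manage Packages installs don't split dependencies across locations.
"$PYTHON" -m pip install --force-reinstall sparkmagic
"$PYTHON" -m pip install --force-reinstall some-package  # e.g. a package chosen in the Manage Packages dialog

# After installing, PATH is extended so the new package directories are found.
export PATH="$(dirname "$PYTHON"):$PATH"
```
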
Chris LaFreniere
6f5ad3a8a3 Use in proc markdown by default (#6315) 2019-07-09 15:27:00 -07:00
Charles Gagnon
3e446980df Update CMS version for July release (#6312) 2019-07-09 14:16:46 -07:00
Alan Ren
053636af9c Update product.json (#6307) 2019-07-09 11:02:51 -07:00
Cory Rivera
e3b166846d Manually define JupyterServerInstallation execOptions field in notebook unit tests. (#6292) 2019-07-09 10:44:31 -07:00
Charles Gagnon
fcba0d1558 Bump Agent and Import package versions for July release (#6275) 2019-07-09 10:42:48 -07:00
Karl Burtram
35e3a42017 Fix inconsistencies in langpack readme files (#6285) 2019-07-09 10:41:18 -07:00
Karl Burtram
401d4b2211 Update localization resource files (#6283)
* Update localization resource files

* Remove extra space from readme headers
2019-07-09 10:40:28 -07:00
Zbyněk Sailer
be3e7e3dc1 LOC CHECKIN | Microsoft/azuredatastudio master | 20190708 (#6274) 2019-07-09 10:39:51 -07:00
Kevin Cunnane
1e12e61243 Fix #6221 notebook shortcut and use new grid in stable (#6268)
- Fix #6221 Notebooks: Keyboard Shortcut for New Notebook has Changed.
  - Use Win+Alt+N instead of Win+Shift+N
- New Grid is now "stable" (forgot to do this in last PR)
2019-07-09 10:35:42 -07:00
Kevin Cunnane
f3b12dd5ac Fix #6287 Notebook editor deserialize on reload/save is broken (#6288)
- Notebook editors have their own mode
2019-07-09 10:35:15 -07:00
Charles Gagnon
73bb5501bd Fix Agent tabs not switching
(cherry picked from commit 6f15ebcf6fe1f4976e82e3ee71cfba0d35fa2b7c)
2019-07-09 10:30:49 -07:00
4886 changed files with 195085 additions and 319600 deletions


@@ -1,34 +1,49 @@
{
perform: true,
perform: false,
alwaysRequireAssignee: false,
labelsRequiringAssignee: [],
autoAssignees: {
Area - Acquisition: [],
Area - Azure: [],
Area - Backup\Restore: [],
Area - Charting\Insights: [],
Area - Connection: [],
Area - DacFX: [],
Area - Dashboard: [],
Area - Data Explorer: [],
Area - Edit Data: [],
Area - Extensibility: [],
Area - External Table: [],
Area - Fundamentals: [],
Area - Language Service: [],
Area - Localization: [],
Area - Notebooks: [],
Area - Performance: [],
Area - Query Editor: [ anthonydresser ],
Area - Query Plan: [],
Area - Reliability: [],
Area - Resource Deployment: [],
Area - Schema Compare: [],
Area - Shell: [],
Area - SQL Agent: [],
Area - SQL Import: [],
Area - SQL Profiler: [],
Area - SQL 2019: [],
Area - SSMS Integration: []
accessibility: [],
acquisition: [],
agent: [],
azure: [],
backup: [],
bcdr: [],
'chart viewer': [],
connection: [],
dacfx: [],
dashboard: [],
'data explorer': [],
documentation: [],
'edit data': [],
export: [],
extensibility: [],
extensionManager: [],
globalization: [],
grid: [],
import: [],
insights: [],
intellisense: [],
localization: [],
'managed instance': [],
notebooks: [],
'object explorer': [],
performance: [],
profiler: [],
'query editor': [],
'query execution': [],
reliability: [],
restore: [],
scripting: [],
'server group': [],
settings: [],
setup: [],
shell: [],
showplan: [],
snippet: [],
sql2019Preview: [],
sqldw: [],
supportability: [],
ux: []
}
}


@@ -1,9 +0,0 @@
<!-- Thank you for submitting a Pull Request. Please:
* Read our Pull Request guidelines:
https://github.com/Microsoft/azuredatastudio/wiki/How-to-Contribute#pull-requests.
* Associate an issue with the Pull Request.
* Ensure that the code is up-to-date with the `master` branch.
* Include a description of the proposed changes and how to test them.
-->
This PR fixes #


@@ -1,118 +0,0 @@
name: CI
on:
push:
branches:
- master
- release/*
pull_request:
branches:
- master
- release/*
jobs:
linux:
runs-on: ubuntu-latest
env:
CHILD_CONCURRENCY: "1"
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@v1
# TODO: rename azure-pipelines/linux/xvfb.init to github-actions
- run: |
sudo apt-get update
sudo apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0 libkrb5-dev # {{SQL CARBON EDIT}} add kerberos dep
sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
sudo chmod +x /etc/init.d/xvfb
sudo update-rc.d xvfb defaults
sudo service xvfb start
name: Setup Build Environment
- uses: actions/setup-node@v1
with:
node-version: 10
# TODO: cache node modules
- run: yarn --frozen-lockfile
name: Install Dependencies
- run: yarn electron x64
name: Download Electron
- run: yarn gulp hygiene --skip-tslint
name: Run Hygiene Checks
- run: yarn gulp tslint
name: Run TSLint Checks
- run: yarn strict-null-check # {{SQL CARBON EDIT}} add step
name: Run Strict Null Check
# - run: yarn monaco-compile-check {{SQL CARBON EDIT}} remove step
# name: Run Monaco Editor Checks
- run: yarn compile
name: Compile Sources
# - run: yarn download-builtin-extensions {{SQL CARBON EDIT}} remove step
# name: Download Built-in Extensions
- run: DISPLAY=:10 ./scripts/test.sh --tfs "Unit Tests"
name: Run Unit Tests
# - run: DISPLAY=:10 ./scripts/test-integration.sh --tfs "Integration Tests" {{SQL CARBON EDIT}} remove step
# name: Run Integration Tests
windows:
runs-on: windows-2016
env:
CHILD_CONCURRENCY: "1"
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@v1
- uses: actions/setup-node@v1
with:
node-version: 10
- uses: actions/setup-python@v1
with:
python-version: '2.x'
- run: yarn --frozen-lockfile
name: Install Dependencies
- run: yarn electron
name: Download Electron
- run: yarn gulp hygiene --skip-tslint
name: Run Hygiene Checks
- run: yarn gulp tslint
name: Run TSLint Checks
- run: yarn strict-null-check # {{SQL CARBON EDIT}} add step
name: Run Strict Null Check
# - run: yarn monaco-compile-check {{SQL CARBON EDIT}} remove step
# name: Run Monaco Editor Checks
- run: yarn compile
name: Compile Sources
# - run: yarn download-builtin-extensions {{SQL CARBON EDIT}} remove step
# name: Download Built-in Extensions
- run: .\scripts\test.bat --tfs "Unit Tests"
name: Run Unit Tests
# - run: .\scripts\test-integration.bat --tfs "Integration Tests" {{SQL CARBON EDIT}} remove step
# name: Run Integration Tests
darwin:
runs-on: macos-latest
env:
CHILD_CONCURRENCY: "1"
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@v1
- uses: actions/setup-node@v1
with:
node-version: 10
- run: yarn --frozen-lockfile
name: Install Dependencies
- run: yarn electron x64
name: Download Electron
- run: yarn gulp hygiene --skip-tslint
name: Run Hygiene Checks
- run: yarn gulp tslint
name: Run TSLint Checks
- run: yarn strict-null-check # {{SQL CARBON EDIT}} add step
name: Run Strict Null Check
# - run: yarn monaco-compile-check {{SQL CARBON EDIT}} remove step
# name: Run Monaco Editor Checks
- run: yarn compile
name: Compile Sources
# - run: yarn download-builtin-extensions {{SQL CARBON EDIT}} remove step
# name: Download Built-in Extensions
- run: ./scripts/test.sh --tfs "Unit Tests"
name: Run Unit Tests
# - run: ./scripts/test-integration.sh --tfs "Integration Tests" {{SQL CARBON EDIT}} remove step
# name: Run Integration Tests


@@ -1,13 +0,0 @@
name: TSLint Enforcement
on: [pull_request]
jobs:
job:
runs-on: ubuntu-latest
timeout-minutes: 5
steps:
- uses: actions/checkout@v1
- name: TSLint
uses: aaomidi/gh-action-tslint@master
with:
token: ${{ secrets.GITHUB_TOKEN }}
tslint_config: 'tslint-sql.json'

.gitignore

@@ -18,16 +18,8 @@ out-vscode-min/
out-vscode-reh/
out-vscode-reh-min/
out-vscode-reh-pkg/
out-vscode-reh-web/
out-vscode-reh-web-min/
out-vscode-reh-web-pkg/
out-vscode-web/
out-vscode-web-min/
src/vs/server
resources/server
build/node_modules
**/node_modules
coverage/
test_data/
test-results/
yarn-error.log
*.vsix


@@ -1,33 +0,0 @@
/**
* @name No floating promises
* @kind problem
* @problem.severity error
* @id js/experimental/floating-promise
*/
import javascript
private predicate isEscapingPromise(PromiseDefinition promise) {
exists (DataFlow::Node escape | promise.flowsTo(escape) |
escape = any(DataFlow::InvokeNode invk).getAnArgument()
or
escape = any(DataFlow::FunctionNode fun).getAReturn()
or
escape = any(ThrowStmt t).getExpr().flow()
or
escape = any(GlobalVariable v).getAnAssignedExpr().flow()
or
escape = any(DataFlow::PropWrite write).getRhs()
or
exists(WithStmt with, Assignment assign |
with.mayAffect(assign.getLhs()) and
assign.getRhs().flow() = escape
)
)
}
from PromiseDefinition promise
where
not exists(promise.getAMethodCall(any(string m | m = "then" or m = "catch" or m = "finally"))) and
not exists (AwaitExpr e | promise.flowsTo(e.getOperand().flow())) and
not isEscapingPromise(promise)
select promise, "This promise appears to be a floating promise"


@@ -1,6 +0,0 @@
{
"useTabs": true,
"printWidth": 120,
"semi": true,
"singleQuote": true
}


@@ -1,61 +1,23 @@
{
"type": "array",
"items": {
"oneOf": [
{
"type": "object",
"required": [
"name",
"prependLicenseText"
],
"properties": {
"name": {
"type": "string",
"description": "The name of the dependency"
},
"fullLicenseText": {
"type": "array",
"description": "The complete license text of the dependency",
"items": {
"type": "string"
}
},
"prependLicenseText": {
"type": "array",
"description": "A piece of text to prepend to the auto-detected license text of the dependency",
"items": {
"type": "string"
}
}
}
"type": "object",
"required": [
"name",
"licenseDetail"
],
"properties": {
"name": {
"type": "string",
"description": "The name of the dependency"
},
{
"type": "object",
"required": [
"name",
"fullLicenseText"
],
"properties": {
"name": {
"type": "string",
"description": "The name of the dependency"
},
"fullLicenseText": {
"type": "array",
"description": "The complete license text of the dependency",
"items": {
"type": "string"
}
},
"prependLicenseText": {
"type": "array",
"description": "A piece of text to prepend to the auto-detected license text of the dependency",
"items": {
"type": "string"
}
}
"licenseDetail": {
"type": "array",
"description": "The complete license text of the dependency",
"items": {
"type": "string"
}
}
]
}
}
}
}


@@ -4,7 +4,6 @@
"recommendations": [
"ms-vscode.vscode-typescript-tslint-plugin",
"dbaeumer.vscode-eslint",
"EditorConfig.EditorConfig",
"msjsdiag.debugger-for-chrome"
]
}

.vscode/launch.json

@@ -16,7 +16,6 @@
"request": "attach",
"name": "Attach to Extension Host",
"port": 5870,
"timeout": 30000,
"restart": true,
"outFiles": [
"${workspaceFolder}/out/**/*.js"
@@ -68,15 +67,15 @@
"name": "Launch azuredatastudio",
"windows": {
"runtimeExecutable": "${workspaceFolder}/scripts/sql.bat",
"timeout": 45000
"timeout": 20000
},
"osx": {
"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh",
"timeout": 45000
"timeout": 20000
},
"linux": {
"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh",
"timeout": 45000
"timeout": 20000
},
"env": {
"VSCODE_EXTHOST_WILL_SEND_SOCKET": null
@@ -94,9 +93,6 @@
"request": "launch",
"name": "Launch ADS (Main Process)",
"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh",
"windows": {
"runtimeExecutable": "${workspaceFolder}/scripts/sql.bat",
},
"runtimeArgs": [
"--no-cached-data"
],
@@ -128,33 +124,6 @@
"webRoot": "${workspaceFolder}",
"timeout": 45000
},
{
"type": "chrome",
"request": "launch",
"name": "Launch ADS (Web) (TBD)",
"runtimeExecutable": "yarn",
"runtimeArgs": [
"web"
],
},
{
"type": "chrome",
"request": "launch",
"name": "Launch ADS (Web, Chrome) (TBD)",
"url": "http://localhost:8080",
"preLaunchTask": "Run web"
},
{
"type": "node",
"request": "launch",
"name": "Git Unit Tests",
"program": "${workspaceFolder}/extensions/git/node_modules/mocha/bin/_mocha",
"stopOnEntry": false,
"cwd": "${workspaceFolder}/extensions/git",
"outFiles": [
"${workspaceFolder}/extensions/git/out/**/*.js"
]
},
{
"name": "Launch Built-in Extension",
"type": "extensionHost",
@@ -193,10 +162,7 @@
"cwd": "${workspaceFolder}",
"outFiles": [
"${workspaceFolder}/out/**/*.js"
],
"env": {
"MOCHA_COLORS": "true"
}
]
},
{
"type": "chrome",
@@ -214,22 +180,6 @@
"webRoot": "${workspaceFolder}",
"timeout": 45000
},
{
"type": "chrome",
"request": "launch",
"name": "Run Extension Integration Tests",
"windows": {
"runtimeExecutable": "${workspaceFolder}/scripts/sql-test-integration.bat"
},
"osx": {
"runtimeExecutable": "${workspaceFolder}/scripts/sql-test-integration.sh"
},
"linux": {
"runtimeExecutable": "${workspaceFolder}/scripts/sql-test-integration.sh"
},
"webRoot": "${workspaceFolder}",
"timeout": 45000
},
],
"compounds": [
{
@@ -246,13 +196,6 @@
"Run Extension Unit Tests"
]
},
{
"name": "Debug Extension Integration Tests",
"configurations": [
"Attach to Extension Host",
"Run Extension Integration Tests"
]
},
{
"name": "Debug azuredatastudio Main and Renderer",
"configurations": [
@@ -261,25 +204,18 @@
]
},
{
"name": "Debug Renderer and search processes",
"name": "Search and Renderer processes",
"configurations": [
"Launch azuredatastudio",
"Attach to Search Process"
]
},
{
"name": "Debug Renderer and Extension Host processes",
"name": "Renderer and Extension Host processes",
"configurations": [
"Launch azuredatastudio",
"Attach to Extension Host"
]
},
{
"name": "Attach Renderer and Extension Host",
"configurations": [
"Attach to azuredatastudio",
"Attach to Extension Host"
]
}
]
}

.vscode/settings.json

@@ -1,5 +1,6 @@
{
"editor.insertSpaces": false,
"files.eol": "\n",
"files.trimTrailingWhitespace": true,
"files.exclude": {
".git": true,
@@ -39,6 +40,7 @@
],
"typescript.tsdk": "node_modules/typescript/lib",
"npm.exclude": "**/extensions/**",
"git.ignoreLimitWarning": true,
"emmet.excludeLanguages": [],
"typescript.preferences.importModuleSpecifier": "non-relative",
"typescript.preferences.quoteStyle": "single",
@@ -56,10 +58,5 @@
"url": "./.vscode/cglicenses.schema.json"
}
],
"git.ignoreLimitWarning": true,
"remote.extensionKind": {
"msjsdiag.debugger-for-chrome": "workspace"
},
"gulp.autoDetect": "off",
"files.insertFinalNewline": true
}
"git.ignoreLimitWarning": true
}

.vscode/tasks.json

@@ -5,10 +5,7 @@
"type": "npm",
"script": "watch",
"label": "Build VS Code",
"group": {
"kind": "build",
"isDefault": true
},
"group": "build",
"isBackground": true,
"presentation": {
"reveal": "never"
@@ -45,20 +42,6 @@
"applyTo": "allDocuments"
}
},
{
"type": "npm",
"script": "strict-null-check-watch",
"label": "TS - Strict Null Checks",
"isBackground": true,
"presentation": {
"reveal": "never"
},
"problemMatcher": {
"base": "$tsc-watch",
"owner": "typescript-strict-null-checks",
"applyTo": "allDocuments"
}
},
{
"type": "gulp",
"task": "tslint",
@@ -90,33 +73,14 @@
"problemMatcher": []
},
{
"type": "npm",
"script": "electron",
"type": "gulp",
"task": "electron",
"label": "Download electron"
},
{
"type": "gulp",
"task": "hygiene",
"problemMatcher": []
},
{
"type": "shell",
"command": "yarn web -- --no-launch",
"label": "Run web",
"isBackground": true,
// This section to make error go away when launching the debug config
"problemMatcher": {
"pattern": {
"regexp": ""
},
"background": {
"beginsPattern": ".*node .*",
"endsPattern": "Web UI available at .*"
}
},
"presentation": {
"reveal": "never"
}
},
}
]
}
}


@@ -1,3 +1,3 @@
disturl "https://atom.io/download/electron"
target "6.0.12"
target "3.1.8"
runtime "electron"


@@ -1,75 +1,5 @@
# Change Log
## Version 1.12.2
* Release date: October 11, 2019
* Release status: General Availability
* Hotfix release (1.12.2): `Disable automatically starting the EH in inspect mode` https://github.com/microsoft/azuredatastudio/commit/c9bef82ace6c67190d0e83820011a2bbd1f793c1
## Version 1.12.1
* Release date: October 7, 2019
* Release status: General Availability
* Hotfix release: `Notebooks: Ensure quotes and backslashes are escaped properly in text editor model` https://github.com/microsoft/azuredatastudio/pull/7540
## Version 1.12.0
* Release date: October 2, 2019
* Release status: General Availability
## What's new in this version
* Announcing the Query History panel
* Improved Query Results Grid copy selection support
* TempDB page added to Server Reports extension
* PowerShell extension update
* Resolved [bugs and issues](https://github.com/microsoft/azuredatastudio/milestone/42?closed=1).
## Version 1.11.0
* Release date: September 10, 2019
* Release status: General Availability
## What's new in this version
* Resolved [bugs and issues](https://github.com/microsoft/azuredatastudio/milestone/41?closed=1).
## Version 1.10.0
* Release date: August 14, 2019
* Release status: General Availability
## What's new in this version
* [SandDance](https://github.com/microsoft/SandDance) integration — A new way to interact with data. Download the extension [here](https://docs.microsoft.com/sql/azure-data-studio/sanddance-extension)
* Notebook improvements
* Better loading performance
* Ability to right click SQL results grid to save your results as CSV, JSON, etc.
* Buttons to add code or text cells in-line
* [Other fixes and improvements](https://github.com/microsoft/azuredatastudio/issues?q=is%3Aissue+label%3A%22Area%3A+Notebooks%22+milestone%3A%22August+2019+Release%22+is%3Aclosed)
* SQL Server Dacpac extension can support Azure Active Directory authentication
* Updated SQL Server 2019 extension
* Visual Studio Code May Release Merge 1.37 - this includes changes from [1.36](https://code.visualstudio.com/updates/v1_36) and [1.37](https://code.visualstudio.com/updates/v1_37)
* Resolved [bugs and issues](https://github.com/microsoft/azuredatastudio/milestone/39?closed=1).
## Version 1.9.0
* Release date: July 11, 2019
* Release status: General Availability
## What's new in this version
* Release of [SentryOne Plan Explorer Extension](https://www.sentryone.com/products/sentryone-plan-explorer-extension-azure-data-studio)
* **Schema Compare**
* Schema Compare File Support (.SCMP)
* Cancel support
* [Other fixes and improvements](https://github.com/Microsoft/azuredatastudio/issues?q=is%3Aissue+milestone%3A%22July+2019+Release%22+is%3Aclosed+label%3A%22Area%3A+Schema+Compare%22)
* **Notebooks**
* Plotly Support
* Open Notebook from Browser
* Python Package Management
* Performance & Markdown Enhancements
* Improved Keyboard Shortcuts
* [Other fixes and improvements](https://github.com/Microsoft/azuredatastudio/issues?q=is%3Aissue+milestone%3A%22July+2019+Release%22+is%3Aclosed+label%3A%22Area%3A+Notebooks%22)
* **SQL Server Profiler**
* Filtering by Database Name
* Copy & Paste Support
* Save/Load Filter
* SQL Server 2019 Support
* New Language Packs Available
* Visual Studio Code May Release Merge 1.35 - the latest improvements can be found [here](https://code.visualstudio.com/updates/v1_35)
* Resolved [bugs and issues](https://github.com/microsoft/azuredatastudio/milestone/35?closed=1).
## Version 1.8.0
* Release date: June 6, 2019
* Release status: General Availability
@@ -225,7 +155,7 @@ We would like to thank all our users who raised issues, and in particular the fo
## What's new in this version
* Announcing the SQL Server 2019 Preview extension.
* Support for SQL Server 2019 preview features including Big Data Cluster support.
* Support for SQL Server 2019 preview features including big data cluster support.
* Azure Data Studio Notebooks
* The Azure Resource Explorer viewlet lets you browse data-related endpoints for your Azure accounts and create connections to them in Object Explorer. In this release, Azure SQL Databases and servers are supported.
* SQL Server Polybase Create External Table Wizard

CODE_OF_CONDUCT.md

@@ -0,0 +1 @@
This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments.


@@ -1,8 +1,7 @@
# Azure Data Studio
[![Join the chat at https://gitter.im/Microsoft/sqlopsstudio](https://badges.gitter.im/Microsoft/sqlopsstudio.svg)](https://gitter.im/Microsoft/sqlopsstudio?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
[![Build Status](https://dev.azure.com/azuredatastudio/azuredatastudio/_apis/build/status/Azure%20Data%20Studio%20CI?branchName=master)](https://dev.azure.com/azuredatastudio/azuredatastudio/_build/latest?definitionId=4&branchName=master)
[![Twitter Follow](https://img.shields.io/twitter/follow/azuredatastudio?style=social)](https://twitter.com/azuredatastudio)
[![Build Status](https://dev.azure.com/ms/azuredatastudio/_apis/build/status/Microsoft.azuredatastudio)](https://dev.azure.com/ms/azuredatastudio/_build/latest?definitionId=4)
Azure Data Studio is a data management tool that enables you to work with SQL Server, Azure SQL DB and SQL DW from Windows, macOS and Linux.
@@ -10,13 +9,13 @@ Azure Data Studio is a data management tool that enables you to work with SQL Se
Platform | Link
-- | --
Windows User Installer | https://go.microsoft.com/fwlink/?linkid=2105135
Windows System Installer | https://go.microsoft.com/fwlink/?linkid=2105134
Windows ZIP | https://go.microsoft.com/fwlink/?linkid=2104938
macOS ZIP | https://go.microsoft.com/fwlink/?linkid=2105133
Linux TAR.GZ | https://go.microsoft.com/fwlink/?linkid=2105132
Linux RPM | https://go.microsoft.com/fwlink/?linkid=2104937
Linux DEB | https://go.microsoft.com/fwlink/?linkid=2105131
Windows User Installer | https://go.microsoft.com/fwlink/?linkid=2094100
Windows System Installer | https://go.microsoft.com/fwlink/?linkid=2094200
Windows ZIP | https://go.microsoft.com/fwlink/?linkid=2094201
macOS ZIP | https://go.microsoft.com/fwlink/?linkid=2094202
Linux TAR.GZ | https://go.microsoft.com/fwlink/?linkid=2094101
Linux RPM | https://go.microsoft.com/fwlink/?linkid=2094102
Linux DEB | https://go.microsoft.com/fwlink/?linkid=2094203
Go to our [download page](https://aka.ms/azuredatastudio) for more specific instructions.
@@ -69,9 +68,6 @@ The [Microsoft Enterprise and Developer Privacy Statement](https://privacy.micro
## Contributions and "Thank You"
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
* dzsquared for `fix(snippets): ads parenthesis to sqlcreateindex snippet #7020`
* devmattrick for `Update row count as updates are received #6642`
* mottykohn for `In Message panel onclick scroll to line #6417`
* Stevoni for `Corrected Keyboard Shortcut Execution Issue #5480`
* yamatoya for `fix the format #4899`
* GeoffYoung for `Fix sqlDropColumn description #4422`


@@ -36,7 +36,6 @@ expressly granted herein, whether by implication, estoppel or otherwise.
jquery-ui: https://github.com/jquery/jquery-ui
jquery.event.drag: https://github.com/devongovett/jquery.event.drag
jschardet: https://github.com/aadsm/jschardet
jupyter-powershell: https://github.com/vors/jupyter-powershell
JupyterLab: https://github.com/jupyterlab/jupyterlab
make-error: https://github.com/JsCommunity/make-error
minimist: https://github.com/substack/minimist
@@ -1176,35 +1175,7 @@ That's all there is to it!
=========================================
END OF jschardet NOTICES AND INFORMATION
%% jupyter-powershell NOTICES AND INFORMATION BEGIN HERE
=========================================
The MIT License (MIT)
Copyright (c) 2016 Sergei Vorobev
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
=========================================
END OF jupyter-powershell NOTICES AND INFORMATION
%% JupyterLab NOTICES AND INFORMATION BEGIN HERE
=========================================
Copyright (c) 2015 Project Jupyter Contributors
All rights reserved.


@@ -0,0 +1,66 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: '10.15.1'
displayName: 'Install Node.js'
- script: |
npm i -g yarn
displayName: 'preinstall'
- script: |
export CXX="g++-4.9" CC="gcc-4.9" DISPLAY=:10
sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
sudo chmod +x /etc/init.d/xvfb
sudo update-rc.d xvfb defaults
sudo service xvfb start
# sh -e /etc/init.d/xvfb start
# sleep 3
displayName: 'Linux preinstall'
condition: eq(variables['Agent.OS'], 'Linux')
- script: |
yarn
displayName: 'Install'
- script: |
yarn gulp electron-x64
displayName: Download Electron
- script: |
yarn gulp hygiene
displayName: Run Hygiene Checks
- script: |
yarn tslint
displayName: 'Run TSLint'
- script: |
yarn strict-null-check
displayName: 'Run Strict Null Check'
- script: |
yarn compile
displayName: 'Compile'
- script: |
DISPLAY=:10 ./scripts/test.sh --reporter mocha-junit-reporter
displayName: 'Tests'
condition: and(succeeded(), eq(variables['Agent.OS'], 'Linux'))
- script: |
DISPLAY=:10 ./scripts/test.sh --reporter mocha-junit-reporter --coverage
displayName: 'Tests'
condition: and(succeeded(), ne(variables['Agent.OS'], 'Linux'))
- task: PublishTestResults@2
inputs:
testResultsFiles: '**/test-results.xml'
condition: succeededOrFailed()
- task: PublishCodeCoverageResults@1
inputs:
codeCoverageTool: 'cobertura'
summaryFileLocation: $(System.DefaultWorkingDirectory)/.build/coverage/cobertura-coverage.xml
reportDirectory: $(System.DefaultWorkingDirectory)/.build/coverage/lcov-reports
condition: ne(variables['Agent.OS'], 'Linux')


@@ -0,0 +1,44 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: '10.15.1'
displayName: 'Install Node.js'
- script: |
yarn
displayName: 'Yarn Install'
- script: |
yarn gulp electron-x64
displayName: 'Electron'
- script: |
yarn gulp hygiene
displayName: Run Hygiene Checks
- script: |
yarn tslint
displayName: 'Run TSLint'
- script: |
yarn strict-null-check
displayName: 'Run Strict Null Check'
- script: |
yarn compile
displayName: 'Compile'
- script: |
.\scripts\test.bat --reporter mocha-junit-reporter --coverage
displayName: 'Test'
- task: PublishTestResults@2
inputs:
testResultsFiles: 'test-results.xml'
condition: succeededOrFailed()
- task: PublishCodeCoverageResults@1
inputs:
codeCoverageTool: 'cobertura'
summaryFileLocation: $(System.DefaultWorkingDirectory)\.build\coverage\cobertura-coverage.xml
reportDirectory: $(System.DefaultWorkingDirectory)\.build\coverage\lcov-report


@@ -1,18 +1,29 @@
trigger:
- master
- releases/*
jobs:
- job: Windows
pool:
vmImage: VS2017-Win2016
steps:
- template: build/azure-pipelines/win32/continuous-build-win32.yml
- job: Linux
# All tasks on Windows
- job: build_all_windows
displayName: Build all tasks (Windows)
pool:
vmImage: 'Ubuntu-16.04'
vmImage: vs2017-win2016
steps:
- template: build/azure-pipelines/linux/continuous-build-linux.yml
- template: azure-pipelines-windows.yml
- job: macOS
# All tasks on Linux
- job: build_all_linux
displayName: Build all tasks (Linux)
pool:
vmImage: macOS 10.13
vmImage: 'Ubuntu 16.04'
steps:
- template: build/azure-pipelines/darwin/continuous-build-darwin.yml
- template: azure-pipelines-linux-mac.yml
# All tasks on macOS
- job: build_all_darwin
displayName: Build all tasks (macOS)
pool:
vmImage: macos-10.13
steps:
- template: azure-pipelines-linux-mac.yml


@@ -1 +0,0 @@
2019-08-30T20:24:23.714Z


@@ -1,8 +1,5 @@
# cleanup rules for native node modules, .gitignore style
nan/**
*/node_modules/nan/**
fsevents/binding.gyp
fsevents/fsevents.cc
fsevents/build/**
@@ -67,6 +64,14 @@ windows-process-tree/build/**
windows-process-tree/src/**
!windows-process-tree/**/*.node
gc-signals/binding.gyp
gc-signals/build/**
gc-signals/src/**
gc-signals/deps/**
!gc-signals/build/Release/*.node
!gc-signals/src/index.js
keytar/binding.gyp
keytar/build/**
keytar/src/**
@@ -78,57 +83,44 @@ node-pty/binding.gyp
node-pty/build/**
node-pty/src/**
node-pty/tools/**
node-pty/deps/**
!node-pty/build/Release/*.exe
!node-pty/build/Release/*.dll
!node-pty/build/Release/*.node
chart.js/node_modules/**
emmet/node_modules/**
pty.js/build/**
!pty.js/build/Release/**
# START SQL Modules
@angular/**/src/**
@angular/**/testing/**
angular2-grid/components/**
angular2-grid/directives/**
angular2-grid/interfaces/**
angular2-grid/modules/**
angular2-slickgrid/.vscode/**
angular2-slickgrid/components/**
angular2-slickgrid/examples/**
jquery-ui/external/**
jquery-ui/demos/**
core-js/**/**
slickgrid/node_modules/**
slickgrid/examples/**
# END SQL Modules
vscode-nsfw/binding.gyp
vscode-nsfw/build/**
vscode-nsfw/src/**
vscode-nsfw/openpa/**
vscode-nsfw/includes/**
!vscode-nsfw/build/Release/*.node
!vscode-nsfw/**/*.a
nsfw/binding.gyp
nsfw/build/**
nsfw/src/**
nsfw/openpa/**
nsfw/includes/**
!nsfw/build/Release/*.node
!nsfw/**/*.a
vsda/build/**
vsda/ci/**
vsda/src/**
vsda/.gitignore
vsda/binding.gyp
vsda/README.md
vsda/targets
vsda/build/**
vsda/*.bat
vsda/*.sh
vsda/*.cpp
vsda/*.h
!vsda/build/Release/vsda.node
vscode-windows-ca-certs/**/*
!vscode-windows-ca-certs/package.json
!vscode-windows-ca-certs/**/*.node
node-addon-api/**/*
node-addon-api/**/*


@@ -1,19 +0,0 @@
#!/usr/bin/env bash
set -e
cd $BUILD_STAGINGDIRECTORY
mkdir extraction
cd extraction
git clone --depth 1 https://github.com/Microsoft/vscode-extension-telemetry.git
git clone --depth 1 https://github.com/Microsoft/vscode-chrome-debug-core.git
git clone --depth 1 https://github.com/Microsoft/vscode-node-debug2.git
git clone --depth 1 https://github.com/Microsoft/vscode-node-debug.git
git clone --depth 1 https://github.com/Microsoft/vscode-html-languageservice.git
git clone --depth 1 https://github.com/Microsoft/vscode-json-languageservice.git
$BUILD_SOURCESDIRECTORY/build/node_modules/.bin/vscode-telemetry-extractor --sourceDir $BUILD_SOURCESDIRECTORY --excludedDir $BUILD_SOURCESDIRECTORY/extensions --outputDir . --applyEndpoints
$BUILD_SOURCESDIRECTORY/build/node_modules/.bin/vscode-telemetry-extractor --config $BUILD_SOURCESDIRECTORY/build/azure-pipelines/common/telemetry-config.json -o .
mkdir -p $BUILD_SOURCESDIRECTORY/.build/telemetry
mv declarations-resolved.json $BUILD_SOURCESDIRECTORY/.build/telemetry/telemetry-core.json
mv config-resolved.json $BUILD_SOURCESDIRECTORY/.build/telemetry/telemetry-extensions.json
cd ..
rm -rf extraction


@@ -0,0 +1,20 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as cp from 'child_process';
import * as path from 'path';
function yarnInstall(packageName: string): void {
cp.execSync(`yarn add --no-lockfile ${packageName}`);
cp.execSync(`yarn add --no-lockfile ${packageName}`, { cwd: path.join( process.cwd(), 'remote') });
}
const product = require('../../../product.json');
const dependencies = product.dependencies || {} as { [name: string]: string; };
Object.keys(dependencies).forEach(name => {
const url = dependencies[name];
yarnInstall(url);
});


@@ -1,9 +0,0 @@
#!/usr/bin/env bash
set -e
REPO="$(pwd)"
# Publish webview contents
PACKAGEJSON="$REPO/package.json"
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")
node build/azure-pipelines/common/publish-webview.js "$REPO/src/vs/workbench/contrib/webview/browser/pre/"


@@ -1,87 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as azure from 'azure-storage';
import * as mime from 'mime';
import * as minimist from 'minimist';
import { basename, join } from 'path';
const fileNames = [
'fake.html',
'host.js',
'index.html',
'main.js',
'service-worker.js'
];
async function assertContainer(blobService: azure.BlobService, container: string): Promise<void> {
await new Promise((c, e) => blobService.createContainerIfNotExists(container, { publicAccessLevel: 'blob' }, err => err ? e(err) : c()));
}
async function doesBlobExist(blobService: azure.BlobService, container: string, blobName: string): Promise<boolean | undefined> {
const existsResult = await new Promise<azure.BlobService.BlobResult>((c, e) => blobService.doesBlobExist(container, blobName, (err, r) => err ? e(err) : c(r)));
return existsResult.exists;
}
async function uploadBlob(blobService: azure.BlobService, container: string, blobName: string, file: string): Promise<void> {
const blobOptions: azure.BlobService.CreateBlockBlobRequestOptions = {
contentSettings: {
contentType: mime.lookup(file),
cacheControl: 'max-age=31536000, public'
}
};
await new Promise((c, e) => blobService.createBlockBlobFromLocalFile(container, blobName, file, blobOptions, err => err ? e(err) : c()));
}
async function publish(commit: string, files: readonly string[]): Promise<void> {
console.log('Publishing...');
console.log('Commit:', commit);
const storageAccount = process.env['AZURE_WEBVIEW_STORAGE_ACCOUNT']!;
const blobService = azure.createBlobService(storageAccount, process.env['AZURE_WEBVIEW_STORAGE_ACCESS_KEY']!)
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
await assertContainer(blobService, commit);
for (const file of files) {
const blobName = basename(file);
const blobExists = await doesBlobExist(blobService, commit, blobName);
if (blobExists) {
console.log(`Blob ${commit}, ${blobName} already exists, not publishing again.`);
continue;
}
console.log('Uploading blob to Azure storage...');
await uploadBlob(blobService, commit, blobName, file);
}
console.log('Blobs successfully uploaded.');
}
function main(): void {
const commit = process.env['BUILD_SOURCEVERSION'];
if (!commit) {
console.warn('Skipping publish due to missing BUILD_SOURCEVERSION');
return;
}
const opts = minimist(process.argv.slice(2));
const [directory] = opts._;
const files = fileNames.map(fileName => join(directory, fileName));
publish(commit, files).catch(err => {
console.error(err);
process.exit(1);
});
}
if (process.argv.length < 3) {
console.error('Usage: node publish.js <directory>');
process.exit(-1);
}
main();


@@ -152,6 +152,13 @@ async function publish(commit: string, quality: string, platform: string, type:
const queuedBy = process.env['BUILD_QUEUEDBY']!;
const sourceBranch = process.env['BUILD_SOURCEBRANCH']!;
const isReleased = (
// Insiders: nightly build from master
(quality === 'insider' && /^master$|^refs\/heads\/master$/.test(sourceBranch) && /Project Collection Service Accounts|Microsoft.VisualStudio.Services.TFS/.test(queuedBy)) ||
// Exploration: any build from electron-4.0.x branch
(quality === 'exploration' && /^electron-4.0.x$|^refs\/heads\/electron-4.0.x$/.test(sourceBranch))
);
console.log('Publishing...');
console.log('Quality:', quality);
@@ -161,6 +168,7 @@ async function publish(commit: string, quality: string, platform: string, type:
console.log('Version:', version);
console.log('Commit:', commit);
console.log('Is Update:', isUpdate);
console.log('Is Released:', isReleased);
console.log('File:', file);
const stat = await new Promise<fs.Stats>((c, e) => fs.stat(file, (err, stat) => err ? e(err) : c(stat)));
@@ -215,15 +223,11 @@ async function publish(commit: string, quality: string, platform: string, type:
console.log('Asset:', JSON.stringify(asset, null, ' '));
// {{SQL CARBON EDIT}}
// Insiders: nightly build from master
const isReleased = (quality === 'insider' && /^master$|^refs\/heads\/master$/.test(sourceBranch) && /Project Collection Service Accounts|Microsoft.VisualStudio.Services.TFS/.test(queuedBy));
const release = {
id: commit,
timestamp: (new Date()).getTime(),
version,
isReleased: isReleased,
isReleased: config.frozen ? false : isReleased,
sourceBranch,
queuedBy,
assets: [] as Array<Asset>,
@@ -242,6 +246,11 @@ async function publish(commit: string, quality: string, platform: string, type:
}
function main(): void {
if (process.env['VSCODE_BUILD_SKIP_PUBLISH']) {
console.warn('Skipping publish due to VSCODE_BUILD_SKIP_PUBLISH');
return;
}
const commit = process.env['BUILD_SOURCEVERSION'];
if (!commit) {


@@ -1,109 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import { DocumentClient } from 'documentdb';
interface Config {
id: string;
frozen: boolean;
}
function createDefaultConfig(quality: string): Config {
return {
id: quality,
frozen: false
};
}
function getConfig(quality: string): Promise<Config> {
const client = new DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT']!, { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const collection = 'dbs/builds/colls/config';
const query = {
query: `SELECT TOP 1 * FROM c WHERE c.id = @quality`,
parameters: [
{ name: '@quality', value: quality }
]
};
return new Promise<Config>((c, e) => {
client.queryDocuments(collection, query).toArray((err, results) => {
if (err && err.code !== 409) { return e(err); }
c(!results || results.length === 0 ? createDefaultConfig(quality) : results[0] as any as Config);
});
});
}
function doRelease(commit: string, quality: string): Promise<void> {
const client = new DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT']!, { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const collection = 'dbs/builds/colls/' + quality;
const query = {
query: 'SELECT TOP 1 * FROM c WHERE c.id = @id',
parameters: [{ name: '@id', value: commit }]
};
let updateTries = 0;
function update(): Promise<void> {
updateTries++;
return new Promise<void>((c, e) => {
client.queryDocuments(collection, query).toArray((err, results) => {
if (err) { return e(err); }
if (results.length !== 1) { return e(new Error('No documents')); }
const release = results[0];
release.isReleased = true;
client.replaceDocument(release._self, release, err => {
if (err && err.code === 409 && updateTries < 5) { return c(update()); }
if (err) { return e(err); }
console.log('Build successfully updated.');
c();
});
});
});
}
return update();
}
async function release(commit: string, quality: string): Promise<void> {
const config = await getConfig(quality);
console.log('Quality config:', config);
if (config.frozen) {
console.log(`Skipping release because quality ${quality} is frozen.`);
return;
}
await doRelease(commit, quality);
}
function env(name: string): string {
const result = process.env[name];
if (!result) {
throw new Error(`Skipping release due to missing env: ${name}`);
}
return result;
}
async function main(): Promise<void> {
const commit = env('BUILD_SOURCEVERSION');
const quality = env('VSCODE_QUALITY');
await release(commit, quality);
}
main().catch(err => {
console.error(err);
process.exit(1);
});


@@ -36,6 +36,7 @@ export interface IVersionAccessor extends IApplicationAccessor {
enum Platform {
WIN_32 = 'win32-ia32',
WIN_64 = 'win32-x64',
LINUX_32 = 'linux-ia32',
LINUX_64 = 'linux-x64',
MAC_OS = 'darwin-x64'
}
@@ -146,10 +147,6 @@ async function ensureVersionAndSymbols(options: IOptions) {
// Check version does not exist
console.log(`HockeyApp: checking for existing version ${options.versions.code} (${options.platform})`);
const versions = await getVersions({ accessToken: options.access.hockeyAppToken, appId: options.access.hockeyAppId });
if (!Array.isArray(versions.app_versions)) {
throw new Error(`Unexpected response: ${JSON.stringify(versions)}`);
}
if (versions.app_versions.some(v => v.version === options.versions.code)) {
console.log(`HockeyApp: Returning without uploading symbols because version ${options.versions.code} (${options.platform}) was already found`);
return;
@@ -188,17 +185,13 @@ const hockeyAppToken = process.argv[3];
const is64 = process.argv[4] === 'x64';
const hockeyAppId = process.argv[5];
if (process.argv.length !== 6) {
throw new Error(`HockeyApp: Unexpected number of arguments. Got ${process.argv}`);
}
let platform: Platform;
if (process.platform === 'darwin') {
platform = Platform.MAC_OS;
} else if (process.platform === 'win32') {
platform = is64 ? Platform.WIN_64 : Platform.WIN_32;
} else {
platform = Platform.LINUX_64;
platform = is64 ? Platform.LINUX_64 : Platform.LINUX_32;
}
// Create version and upload symbols in HockeyApp
@@ -219,9 +212,7 @@ if (repository && codeVersion && electronVersion && (product.quality === 'stable
}).then(() => {
console.log('HockeyApp: done');
}).catch(error => {
console.error(`HockeyApp: error ${error} (AppID: ${hockeyAppId})`);
return process.exit(1);
console.error(`HockeyApp: error (${error})`);
});
} else {
console.log(`HockeyApp: skipping due to unexpected context (repository: ${repository}, codeVersion: ${codeVersion}, electronVersion: ${electronVersion}, quality: ${product.quality})`);


@@ -153,6 +153,11 @@ async function sync(commit: string, quality: string): Promise<void> {
}
function main(): void {
if (process.env['VSCODE_BUILD_SKIP_PUBLISH']) {
error('Skipping publish due to VSCODE_BUILD_SKIP_PUBLISH');
return;
}
const commit = process.env['BUILD_SOURCEVERSION'];
if (!commit) {


@@ -1,72 +0,0 @@
[
{
"eventPrefix": "typescript-language-features/",
"sourceDirs": [
"../../s/extensions/typescript-language-features"
],
"excludedDirs": [],
"applyEndpoints": true
},
{
"eventPrefix": "git/",
"sourceDirs": [
"../../s/extensions/git"
],
"excludedDirs": [],
"applyEndpoints": true
},
{
"eventPrefix": "extension-telemetry/",
"sourceDirs": [
"vscode-extension-telemetry"
],
"excludedDirs": [],
"applyEndpoints": true
},
{
"eventPrefix": "vscode-markdown/",
"sourceDirs": [
"../../s/extensions/markdown-language-features"
],
"excludedDirs": [],
"applyEndpoints": true
},
{
"eventPrefix": "html-language-features/",
"sourceDirs": [
"../../s/extensions/html-language-features",
"vscode-html-languageservice"
],
"excludedDirs": [],
"applyEndpoints": true
},
{
"eventPrefix": "json-language-features/",
"sourceDirs": [
"../../s/extensions/json-language-features",
"vscode-json-languageservice"
],
"excludedDirs": [],
"applyEndpoints": true
},
{
"eventPrefix": "ms-vscode.node2/",
"sourceDirs": [
"vscode-chrome-debug-core",
"vscode-node-debug2"
],
"excludedDirs": [],
"applyEndpoints": true,
"patchDebugEvents": true
},
{
"eventPrefix": "ms-vscode.node/",
"sourceDirs": [
"vscode-chrome-debug-core",
"vscode-node-debug"
],
"excludedDirs": [],
"applyEndpoints": true,
"patchDebugEvents": true
}
]


@@ -0,0 +1,5 @@
#!/usr/bin/env bash
set -e
yarn gulp vscode-darwin-min
yarn gulp vscode-reh-darwin-min
yarn gulp upload-vscode-sourcemaps


@@ -2,66 +2,49 @@ steps:
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: '$(build-cache)' # {{SQL CARBON EDIT}} update build cache
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 # {{SQL CARBON EDIT}} update version
inputs:
versionSpec: "1.x"
versionSpec: "1.10.1"
# - task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
# inputs:
# keyfile: '**/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
# targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
# vstsFeed: '$(ArtifactFeed)'
# condition: eq(variables['System.PullRequest.PullRequestId'], '')
- script: |
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
yarn
displayName: Install Dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
env:
GITHUB_TOKEN: $(GITHUB_TOKEN) # {{SQL CARBON EDIT}} add github token
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: '$(build-cache)' # {{SQL CARBON EDIT}} update build cache
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
# condition: or(ne(variables['System.PullRequest.PullRequestId'], ''), ne(variables['CacheRestored'], 'true'))
# - task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
# inputs:
# keyfile: '**/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
# targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
# vstsFeed: '$(ArtifactFeed)'
# condition: and(succeeded(), eq(variables['System.PullRequest.PullRequestId'], ''), ne(variables['CacheRestored'], 'true'))
- script: |
yarn electron x64
yarn gulp electron-x64
displayName: Download Electron
env:
GITHUB_TOKEN: $(GITHUB_TOKEN) # {{SQL CARBON EDIT}} add github token
- script: |
yarn gulp hygiene --skip-tslint
yarn gulp hygiene
displayName: Run Hygiene Checks
- script: |
yarn gulp tslint
displayName: Run TSLint Checks
- script: | # {{SQL CARBON EDIT}} add step
yarn strict-null-check
displayName: Run Strict Null Check.
- script: | # {{SQL CARBON EDIT}} add step
yarn tslint
displayName: Run TSLint (gci)
# - script: | {{SQL CARBON EDIT}} remove step
# yarn monaco-compile-check
# displayName: Run Monaco Editor Checks
yarn monaco-compile-check
displayName: Run Monaco Editor Checks
- script: |
yarn compile
displayName: Compile Sources
# - script: | {{SQL CARBON EDIT}} remove step
# yarn download-builtin-extensions
# displayName: Download Built-in Extensions
- script: |
yarn download-builtin-extensions
displayName: Download Built-in Extensions
- script: |
./scripts/test.sh --tfs "Unit Tests"
displayName: Run Unit Tests
# - script: | {{SQL CARBON EDIT}} remove step
# ./scripts/test-integration.sh --tfs "Integration Tests"
# displayName: Run Integration Tests
- script: |
./scripts/test-integration.sh --tfs "Integration Tests"
displayName: Run Integration Tests
- task: PublishTestResults@2
displayName: Publish Tests Results
inputs:
testResultsFiles: '*-results.xml'
searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
condition: succeededOrFailed()
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0 # {{SQL CARBON EDIT}} add task
displayName: 'Component Detection'
inputs:
alertWarningLevel: High
failOnAlert: true


@@ -1,31 +1,11 @@
steps:
- script: |
mkdir -p .build
echo -n $BUILD_SOURCEVERSION > .build/commit
echo -n $VSCODE_QUALITY > .build/quality
displayName: Prepare cache flag
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .build/commit, .build/quality'
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
vstsFeed: 'npm-vscode'
platformIndependent: true
alias: 'Compilation'
- script: |
set -e
exit 1
displayName: Check RestoreCache
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
versionSpec: "1.10.1"
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
@@ -37,6 +17,8 @@ steps:
set -e
cat << EOF > ~/.netrc
machine monacotools.visualstudio.com
password $(devops-pat)
machine github.com
login vscode
password $(github-distro-mixin-password)
@@ -44,53 +26,23 @@ steps:
git config user.email "vscode@microsoft.com"
git config user.name "VSCode"
displayName: Prepare tooling
- script: |
set -e
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
git fetch distro
git merge $(node -p "require('./package.json').distro")
displayName: Merge distro
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: 'npm-vscode'
- script: |
set -e
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
displayName: Install dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: 'npm-vscode'
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- script: |
set -e
yarn postinstall
displayName: Run postinstall scripts
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
- script: |
set -e
node build/azure-pipelines/mixin
displayName: Mix in quality
yarn
yarn gulp mixin
yarn gulp hygiene
yarn monaco-compile-check
node build/azure-pipelines/common/installDistro.js
node build/lib/builtInExtensions.js
displayName: Prepare build
- script: |
set -e
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn gulp vscode-darwin-min-ci
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn gulp vscode-reh-darwin-min-ci
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn gulp vscode-reh-web-darwin-min-ci
AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \
./build/azure-pipelines/darwin/build.sh
displayName: Build
- script: |
@@ -99,41 +51,11 @@ steps:
# APP_NAME="`ls $(agent.builddirectory)/VSCode-darwin | head -n 1`"
# yarn smoketest -- --build "$(agent.builddirectory)/VSCode-darwin/$APP_NAME"
displayName: Run unit tests
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
# Figure out the full absolute path of the product we just built
# including the remote server and configure the integration tests
# to run with these builds instead of running out of sources.
set -e
APP_ROOT=$(agent.builddirectory)/VSCode-darwin
APP_NAME="`ls $APP_ROOT | head -n 1`"
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME/Contents/MacOS/Electron" \
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-darwin" \
./scripts/test-integration.sh --build --tfs "Integration Tests"
displayName: Run integration tests
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
# Web Smoke Tests disabled due to https://github.com/microsoft/vscode/issues/80308
# - script: |
# set -e
# cd test/smoke
# yarn compile
# cd -
# yarn smoketest --web --headless
# continueOnError: true
# displayName: Run web smoke tests
# condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
cd test/smoke
yarn compile
cd -
yarn smoketest --web --headless
continueOnError: true
displayName: Run smoke tests
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e


@@ -30,7 +30,7 @@ node build/azure-pipelines/common/publish.js \
../vscode-server-darwin.zip
# publish hockeyapp symbols
node build/azure-pipelines/common/symbols.js "$VSCODE_MIXIN_PASSWORD" "$VSCODE_HOCKEYAPP_TOKEN" x64 "$VSCODE_HOCKEYAPP_ID_MACOS"
node build/azure-pipelines/common/symbols.js "$VSCODE_MIXIN_PASSWORD" "$VSCODE_HOCKEYAPP_TOKEN" "$VSCODE_ARCH" "$VSCODE_HOCKEYAPP_ID_MACOS"
# upload configuration
yarn gulp upload-vscode-configuration


@@ -30,13 +30,7 @@ steps:
git remote add distro "https://github.com/$VSCODE_MIXIN_REPO.git"
git fetch distro
# Push master branch into oss/master
git push distro origin/master:refs/heads/oss/master
# Push every release branch into oss/release
git for-each-ref --format="%(refname:short)" refs/remotes/origin/release/* | sed 's/^origin\/\(.*\)$/\0:refs\/heads\/oss\/\1/' | xargs git push distro
git push distro origin/master:refs/heads/master
git merge $(node -p "require('./package.json').distro")
displayName: Sync & Merge Distro
displayName: Sync & Merge Distro


@@ -1,36 +0,0 @@
pool:
vmImage: 'Ubuntu-16.04'
trigger: none
pr: none
steps:
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
inputs:
azureSubscription: 'vscode-builds-subscription'
KeyVaultName: vscode
- script: |
set -e
cat << EOF > ~/.netrc
machine github.com
login vscode
password $(github-distro-mixin-password)
EOF
git config user.email "vscode@microsoft.com"
git config user.name "VSCode"
git checkout origin/electron-6.0.x
git merge origin/master
# Push master branch into exploration branch
git push origin HEAD:electron-6.0.x
displayName: Sync & Merge Exploration


@@ -1,39 +0,0 @@
trigger:
branches:
include: ['master']
pr: none
jobs:
- job: ExplorationMerge
pool:
vmImage: Ubuntu-16.04
steps:
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
- script: |
set -e
cat << EOF > ~/.netrc
machine mssqltools.visualstudio.com
login azuredatastudio
password $(DEVOPS_PASSWORD)
EOF
git config user.email "andresse@microsoft.com"
git config user.name "AzureDataStudio"
git remote add explore "$ADS_EXPLORE_REPO"
git fetch explore
git checkout -b merge-branch explore/master
git merge origin/master
git push explore HEAD:master
displayName: Sync & Merge Explore
env:
ADS_EXPLORE_REPO: $(ADS_EXPLORE_REPO)
DEVOPS_PASSWORD: $(DEVOPS_PASSWORD)


@@ -0,0 +1,7 @@
#!/usr/bin/env bash
set -e
yarn gulp "vscode-linux-$VSCODE_ARCH-min"
if [[ "$VSCODE_ARCH" != "ia32" ]]; then
yarn gulp vscode-reh-linux-$VSCODE_ARCH-min
fi


@@ -2,7 +2,7 @@ steps:
- script: |
set -e
sudo apt-get update
sudo apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0 libkrb5-dev #{{SQL CARBON EDIT}} add kerberos dep
sudo apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0
sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
sudo chmod +x /etc/init.d/xvfb
sudo update-rc.d xvfb defaults
@@ -10,66 +10,46 @@ steps:
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: '$(build-cache)' # {{SQL CARBON EDIT}} update build cache
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 # {{SQL CARBON EDIT}} update version
inputs:
versionSpec: "1.x"
versionSpec: "1.10.1"
# - task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
# inputs:
# keyfile: '**/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
# targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
# vstsFeed: '$(ArtifactFeed)'
# condition: eq(variables['System.PullRequest.PullRequestId'], '')
- script: |
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
yarn
displayName: Install Dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
env:
GITHUB_TOKEN: $(GITHUB_TOKEN) # {{SQL CARBON EDIT}} add github token
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: '$(build-cache)' # {{SQL CARBON EDIT}} update build cache
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
# condition: or(ne(variables['System.PullRequest.PullRequestId'], ''), ne(variables['CacheRestored'], 'true'))
# - task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
# inputs:
# keyfile: '**/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
# targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
# vstsFeed: '$(ArtifactFeed)'
# condition: and(succeeded(), eq(variables['System.PullRequest.PullRequestId'], ''), ne(variables['CacheRestored'], 'true'))
- script: |
yarn electron x64
yarn gulp electron-x64
displayName: Download Electron
env:
GITHUB_TOKEN: $(GITHUB_TOKEN) # {{SQL CARBON EDIT}} add github token
- script: |
yarn gulp hygiene --skip-tslint
yarn gulp hygiene
displayName: Run Hygiene Checks
- script: |
yarn gulp tslint
displayName: Run TSLint Checks
- script: | # {{SQL CARBON EDIT}} add gci checks
yarn tslint
displayName: Run TSLint (gci)
- script: | # {{SQL CARBON EDIT}} add strict null check
yarn strict-null-check
displayName: Run Strict Null Check
# - script: | {{SQL CARBON EDIT}} remove monaco editor checks
# yarn monaco-compile-check
# displayName: Run Monaco Editor Checks
yarn monaco-compile-check
displayName: Run Monaco Editor Checks
- script: |
yarn compile
displayName: Compile Sources
# - script: | {{SQL CARBON EDIT}} remove step
# yarn download-builtin-extensions
# displayName: Download Built-in Extensions
- script: |
yarn download-builtin-extensions
displayName: Download Built-in Extensions
- script: |
DISPLAY=:10 ./scripts/test.sh --tfs "Unit Tests"
displayName: Run Unit Tests
# - script: | {{SQL CARBON EDIT}} remove step
# DISPLAY=:10 ./scripts/test-integration.sh --tfs "Integration Tests"
# displayName: Run Integration Tests
- task: PublishTestResults@2
displayName: Publish Tests Results
inputs:
testResultsFiles: '*-results.xml'
searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
condition: succeededOrFailed()
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0 # {{SQL CARBON EDIT}} add task
displayName: 'Component Detection'
inputs:
alertWarningLevel: High
failOnAlert: true

View File

@@ -1,3 +0,0 @@
#!/usr/bin/env bash
set -e
echo 'noop'

View File

@@ -1,3 +0,0 @@
#!/usr/bin/env bash
set -e
echo 'noop'

View File

@@ -1,3 +0,0 @@
#!/usr/bin/env bash
set -e
echo 'noop'

View File

@@ -1,3 +0,0 @@
#!/usr/bin/env bash
set -e
echo 'noop'

View File

@@ -1,3 +0,0 @@
#!/usr/bin/env bash
set -e
echo 'noop'

View File

@@ -1,3 +0,0 @@
#!/usr/bin/env bash
set -e
echo 'noop'

View File

@@ -1,3 +0,0 @@
#!/usr/bin/env bash
set -e
echo 'noop'

View File

@@ -1,3 +0,0 @@
#!/usr/bin/env bash
set -e
echo 'noop'

View File

@@ -1,3 +0,0 @@
#!/usr/bin/env bash
set -e
echo 'noop'

View File

@@ -1,116 +0,0 @@
steps:
- script: |
mkdir -p .build
echo -n $BUILD_SOURCEVERSION > .build/commit
echo -n $VSCODE_QUALITY > .build/quality
displayName: Prepare cache flag
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .build/commit, .build/quality'
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
vstsFeed: 'npm-vscode'
platformIndependent: true
alias: 'Compilation'
- script: |
set -e
exit 1
displayName: Check RestoreCache
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
inputs:
azureSubscription: 'vscode-builds-subscription'
KeyVaultName: vscode
- task: Docker@1
displayName: 'Pull image'
inputs:
azureSubscriptionEndpoint: 'vscode-builds-subscription'
azureContainerRegistry: vscodehub.azurecr.io
command: 'Run an image'
imageName: 'vscode-linux-build-agent:$(VSCODE_ARCH)'
containerCommand: uname
- script: |
set -e
cat << EOF > ~/.netrc
machine github.com
login vscode
password $(github-distro-mixin-password)
EOF
git config user.email "vscode@microsoft.com"
git config user.name "VSCode"
displayName: Prepare tooling
- script: |
set -e
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
git fetch distro
git merge $(node -p "require('./package.json').distro")
displayName: Merge distro
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: 'npm-vscode'
- script: |
set -e
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
displayName: Install dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: 'npm-vscode'
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- script: |
set -e
yarn postinstall
displayName: Run postinstall scripts
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
- script: |
set -e
node build/azure-pipelines/mixin
displayName: Mix in quality
- script: |
set -e
CHILD_CONCURRENCY=1 ./build/azure-pipelines/linux/multiarch/$(VSCODE_ARCH)/prebuild.sh
displayName: Prebuild
- script: |
set -e
./build/azure-pipelines/linux/multiarch/$(VSCODE_ARCH)/build.sh
displayName: Build
- script: |
set -e
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
VSCODE_HOCKEYAPP_TOKEN="$(vscode-hockeyapp-token)" \
./build/azure-pipelines/linux/multiarch/$(VSCODE_ARCH)/publish.sh
displayName: Publish
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: 'Component Detection'
continueOnError: true

View File

@@ -1,31 +1,11 @@
steps:
- script: |
mkdir -p .build
echo -n $BUILD_SOURCEVERSION > .build/commit
echo -n $VSCODE_QUALITY > .build/quality
displayName: Prepare cache flag
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .build/commit, .build/quality'
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
vstsFeed: 'npm-vscode'
platformIndependent: true
alias: 'Compilation'
- script: |
set -e
exit 1
displayName: Check RestoreCache
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
versionSpec: "1.10.1"
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
@@ -35,7 +15,14 @@ steps:
- script: |
set -e
export npm_config_arch="$(VSCODE_ARCH)"
if [[ "$(VSCODE_ARCH)" == "ia32" ]]; then
export PKG_CONFIG_PATH="/usr/lib/i386-linux-gnu/pkgconfig"
fi
cat << EOF > ~/.netrc
machine monacotools.visualstudio.com
password $(devops-pat)
machine github.com
login vscode
password $(github-distro-mixin-password)
@@ -43,80 +30,34 @@ steps:
git config user.email "vscode@microsoft.com"
git config user.name "VSCode"
displayName: Prepare tooling
- script: |
set -e
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
git fetch distro
git merge $(node -p "require('./package.json').distro")
displayName: Merge distro
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: 'npm-vscode'
- script: |
set -e
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
displayName: Install dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: 'npm-vscode'
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- script: |
set -e
yarn postinstall
displayName: Run postinstall scripts
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
- script: |
set -e
node build/azure-pipelines/mixin
displayName: Mix in quality
CHILD_CONCURRENCY=1 yarn
yarn gulp mixin
yarn gulp hygiene
yarn monaco-compile-check
node build/azure-pipelines/common/installDistro.js
node build/lib/builtInExtensions.js
displayName: Prepare build
- script: |
set -e
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn gulp vscode-linux-x64-min-ci
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn gulp vscode-reh-linux-x64-min-ci
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn gulp vscode-reh-web-linux-x64-min-ci
./build/azure-pipelines/linux/build.sh
displayName: Build
- script: |
set -e
yarn gulp "electron-$(VSCODE_ARCH)"
# xvfb seems to be crashing often, let's make sure it's always up
service xvfb start
displayName: Start xvfb
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
DISPLAY=:10 ./scripts/test.sh --build --tfs "Unit Tests"
# yarn smoketest -- --build "$(agent.builddirectory)/VSCode-linux-$(VSCODE_ARCH)"
displayName: Run unit tests
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
# Figure out the full absolute path of the product we just built
# including the remote server and configure the integration tests
# to run with these builds instead of running out of sources.
set -e
APP_ROOT=$(agent.builddirectory)/VSCode-linux-x64
APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-linux-x64" \
DISPLAY=:10 ./scripts/test-integration.sh --build --tfs "Integration Tests"
# yarn smoketest -- --build "$(agent.builddirectory)/VSCode-linux-x64"
displayName: Run integration tests
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
@@ -127,12 +68,12 @@ steps:
./build/azure-pipelines/linux/publish.sh
displayName: Publish
- task: PublishPipelineArtifact@0
displayName: 'Publish Pipeline Artifact'
inputs:
artifactName: snap-x64
targetPath: .build/linux/snap-tarball
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: 'Component Detection'
continueOnError: true
- task: PublishPipelineArtifact@0
displayName: 'Publish Pipeline Artifact'
inputs:
artifactName: snap-$(VSCODE_ARCH)
targetPath: .build/linux/snap-tarball

View File

@@ -4,7 +4,9 @@ REPO="$(pwd)"
ROOT="$REPO/.."
# Publish tarball
PLATFORM_LINUX="linux-x64"
PLATFORM_LINUX="linux-$VSCODE_ARCH"
[[ "$VSCODE_ARCH" == "ia32" ]] && DEB_ARCH="i386" || DEB_ARCH="amd64"
[[ "$VSCODE_ARCH" == "ia32" ]] && RPM_ARCH="i386" || RPM_ARCH="x86_64"
BUILDNAME="VSCode-$PLATFORM_LINUX"
BUILD="$ROOT/$BUILDNAME"
BUILD_VERSION="$(date +%s)"
@@ -19,42 +21,44 @@ rm -rf $ROOT/code-*.tar.*
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "$PLATFORM_LINUX" archive-unsigned "$TARBALL_FILENAME" "$VERSION" true "$TARBALL_PATH"
# Publish Remote Extension Host
LEGACY_SERVER_BUILD_NAME="vscode-reh-$PLATFORM_LINUX"
SERVER_BUILD_NAME="vscode-server-$PLATFORM_LINUX"
SERVER_TARBALL_FILENAME="vscode-server-$PLATFORM_LINUX.tar.gz"
SERVER_TARBALL_PATH="$ROOT/$SERVER_TARBALL_FILENAME"
if [[ "$VSCODE_ARCH" != "ia32" ]]; then
LEGACY_SERVER_BUILD_NAME="vscode-reh-$PLATFORM_LINUX"
SERVER_BUILD_NAME="vscode-server-$PLATFORM_LINUX"
SERVER_TARBALL_FILENAME="vscode-server-$PLATFORM_LINUX.tar.gz"
SERVER_TARBALL_PATH="$ROOT/$SERVER_TARBALL_FILENAME"
rm -rf $ROOT/vscode-server-*.tar.*
(cd $ROOT && mv $LEGACY_SERVER_BUILD_NAME $SERVER_BUILD_NAME && tar --owner=0 --group=0 -czf $SERVER_TARBALL_PATH $SERVER_BUILD_NAME)
rm -rf $ROOT/vscode-server-*.tar.*
(cd $ROOT && mv $LEGACY_SERVER_BUILD_NAME $SERVER_BUILD_NAME && tar -czf $SERVER_TARBALL_PATH $SERVER_BUILD_NAME)
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "server-$PLATFORM_LINUX" archive-unsigned "$SERVER_TARBALL_FILENAME" "$VERSION" true "$SERVER_TARBALL_PATH"
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "server-$PLATFORM_LINUX" archive-unsigned "$SERVER_TARBALL_FILENAME" "$VERSION" true "$SERVER_TARBALL_PATH"
fi
# Publish hockeyapp symbols
node build/azure-pipelines/common/symbols.js "$VSCODE_MIXIN_PASSWORD" "$VSCODE_HOCKEYAPP_TOKEN" "x64" "$VSCODE_HOCKEYAPP_ID_LINUX64"
node build/azure-pipelines/common/symbols.js "$VSCODE_MIXIN_PASSWORD" "$VSCODE_HOCKEYAPP_TOKEN" "$VSCODE_ARCH" "$VSCODE_HOCKEYAPP_ID_LINUX64"
# Publish DEB
yarn gulp "vscode-linux-x64-build-deb"
PLATFORM_DEB="linux-deb-x64"
DEB_ARCH="amd64"
yarn gulp "vscode-linux-$VSCODE_ARCH-build-deb"
PLATFORM_DEB="linux-deb-$VSCODE_ARCH"
[[ "$VSCODE_ARCH" == "ia32" ]] && DEB_ARCH="i386" || DEB_ARCH="amd64"
DEB_FILENAME="$(ls $REPO/.build/linux/deb/$DEB_ARCH/deb/)"
DEB_PATH="$REPO/.build/linux/deb/$DEB_ARCH/deb/$DEB_FILENAME"
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "$PLATFORM_DEB" package "$DEB_FILENAME" "$VERSION" true "$DEB_PATH"
# Publish RPM
yarn gulp "vscode-linux-x64-build-rpm"
PLATFORM_RPM="linux-rpm-x64"
RPM_ARCH="x86_64"
yarn gulp "vscode-linux-$VSCODE_ARCH-build-rpm"
PLATFORM_RPM="linux-rpm-$VSCODE_ARCH"
[[ "$VSCODE_ARCH" == "ia32" ]] && RPM_ARCH="i386" || RPM_ARCH="x86_64"
RPM_FILENAME="$(ls $REPO/.build/linux/rpm/$RPM_ARCH/ | grep .rpm)"
RPM_PATH="$REPO/.build/linux/rpm/$RPM_ARCH/$RPM_FILENAME"
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "$PLATFORM_RPM" package "$RPM_FILENAME" "$VERSION" true "$RPM_PATH"
# Publish Snap
yarn gulp "vscode-linux-x64-prepare-snap"
yarn gulp "vscode-linux-$VSCODE_ARCH-prepare-snap"
# Pack snap tarball artifact, in order to preserve file perms
mkdir -p $REPO/.build/linux/snap-tarball
SNAP_TARBALL_PATH="$REPO/.build/linux/snap-tarball/snap-x64.tar.gz"
SNAP_TARBALL_PATH="$REPO/.build/linux/snap-tarball/snap-$VSCODE_ARCH.tar.gz"
rm -rf $SNAP_TARBALL_PATH
(cd .build/linux && tar -czf $SNAP_TARBALL_PATH snap)

View File

@@ -5,7 +5,7 @@ steps:
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
versionSpec: "1.10.1"
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
@@ -16,7 +16,7 @@ steps:
- task: DownloadPipelineArtifact@0
displayName: 'Download Pipeline Artifact'
inputs:
artifactName: snap-x64
artifactName: snap-$(VSCODE_ARCH)
targetPath: .build/linux/snap-tarball
- script: |
@@ -31,13 +31,14 @@ steps:
# Define variables
REPO="$(pwd)"
SNAP_ROOT="$REPO/.build/linux/snap/x64"
ARCH="$(VSCODE_ARCH)"
SNAP_ROOT="$REPO/.build/linux/snap/$ARCH"
# Install build dependencies
(cd build && yarn)
# Unpack snap tarball artifact, in order to preserve file perms
SNAP_TARBALL_PATH="$REPO/.build/linux/snap-tarball/snap-x64.tar.gz"
SNAP_TARBALL_PATH="$REPO/.build/linux/snap-tarball/snap-$ARCH.tar.gz"
(cd .build/linux && tar -xzf $SNAP_TARBALL_PATH)
# Create snap package
@@ -46,9 +47,9 @@ steps:
PACKAGEJSON="$(ls $SNAP_ROOT/code*/usr/share/code*/resources/app/package.json)"
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")
SNAP_PATH="$SNAP_ROOT/$SNAP_FILENAME"
(cd $SNAP_ROOT/code-* && sudo --preserve-env snapcraft snap --output "$SNAP_PATH")
(cd $SNAP_ROOT/code-* && sudo snapcraft snap --output "$SNAP_PATH")
# Publish snap package
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "linux-snap-x64" package "$SNAP_FILENAME" "$VERSION" true "$SNAP_PATH"
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "linux-snap-$ARCH" package "$SNAP_FILENAME" "$VERSION" true "$SNAP_PATH"

View File

@@ -1,41 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
const json = require('gulp-json-editor');
const buffer = require('gulp-buffer');
const filter = require('gulp-filter');
const es = require('event-stream');
const vfs = require('vinyl-fs');
const fancyLog = require('fancy-log');
const ansiColors = require('ansi-colors');
function main() {
const quality = process.env['VSCODE_QUALITY'];
if (!quality) {
console.log('Missing VSCODE_QUALITY, skipping mixin');
return;
}
const productJsonFilter = filter('product.json', { restore: true });
fancyLog(ansiColors.blue('[mixin]'), `Mixing in sources:`);
return vfs
.src(`quality/${quality}/**`, { base: `quality/${quality}` })
.pipe(filter(f => !f.isDirectory()))
.pipe(productJsonFilter)
.pipe(buffer())
.pipe(json(o => Object.assign({}, require('../product.json'), o)))
.pipe(productJsonFilter.restore)
.pipe(es.mapSync(function (f) {
fancyLog(ansiColors.blue('[mixin]'), f.relative, ansiColors.green('✔︎'));
return f;
}))
.pipe(vfs.dest('.'));
}
main();

View File

@@ -1,150 +1,81 @@
resources:
containers:
- container: vscode-x64
image: vscodehub.azurecr.io/vscode-linux-build-agent:x64
endpoint: VSCodeHub
image: vscodehub.azurecr.io/vscode-linux-build-agent:x64
- container: vscode-ia32
endpoint: VSCodeHub
image: vscodehub.azurecr.io/vscode-linux-build-agent:ia32
- container: snapcraft
image: snapcore/snapcraft:stable
image: snapcore/snapcraft
jobs:
- job: Compile
pool:
vmImage: 'Ubuntu-16.04'
container: vscode-x64
steps:
- template: product-compile.yml
- job: Windows
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_WIN32'], 'true'))
condition: eq(variables['VSCODE_BUILD_WIN32'], 'true')
pool:
vmImage: VS2017-Win2016
variables:
VSCODE_ARCH: x64
dependsOn:
- Compile
steps:
- template: win32/product-build-win32.yml
- job: Windows32
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_WIN32_32BIT'], 'true'))
condition: eq(variables['VSCODE_BUILD_WIN32_32BIT'], 'true')
pool:
vmImage: VS2017-Win2016
variables:
VSCODE_ARCH: ia32
dependsOn:
- Compile
steps:
- template: win32/product-build-win32.yml
- job: Linux
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_LINUX'], 'true'))
condition: eq(variables['VSCODE_BUILD_LINUX'], 'true')
pool:
vmImage: 'Ubuntu-16.04'
variables:
VSCODE_ARCH: x64
container: vscode-x64
dependsOn:
- Compile
steps:
- template: linux/product-build-linux.yml
- job: LinuxSnap
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_LINUX'], 'true'))
condition: eq(variables['VSCODE_BUILD_LINUX'], 'true')
pool:
vmImage: 'Ubuntu-16.04'
variables:
VSCODE_ARCH: x64
container: snapcraft
dependsOn: Linux
steps:
- template: linux/snap-build-linux.yml
- job: LinuxArmhf
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_LINUX_ARMHF'], 'true'))
- job: Linux32
condition: eq(variables['VSCODE_BUILD_LINUX_32BIT'], 'true')
pool:
vmImage: 'Ubuntu-16.04'
variables:
VSCODE_ARCH: armhf
dependsOn:
- Compile
VSCODE_ARCH: ia32
container: vscode-ia32
steps:
- template: linux/product-build-linux-multiarch.yml
- job: LinuxArm64
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_LINUX_ARM64'], 'true'), ne(variables['VSCODE_QUALITY'], 'stable'))
pool:
vmImage: 'Ubuntu-16.04'
variables:
VSCODE_ARCH: arm64
dependsOn:
- Compile
steps:
- template: linux/product-build-linux-multiarch.yml
- job: LinuxAlpine
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_LINUX_ALPINE'], 'true'))
pool:
vmImage: 'Ubuntu-16.04'
variables:
VSCODE_ARCH: alpine
dependsOn:
- Compile
steps:
- template: linux/product-build-linux-multiarch.yml
- job: LinuxWeb
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_WEB'], 'true'))
pool:
vmImage: 'Ubuntu-16.04'
variables:
VSCODE_ARCH: x64
dependsOn:
- Compile
steps:
- template: web/product-build-web.yml
- template: linux/product-build-linux.yml
- job: macOS
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_MACOS'], 'true'))
condition: eq(variables['VSCODE_BUILD_MACOS'], 'true')
pool:
vmImage: macOS 10.13
dependsOn:
- Compile
steps:
- template: darwin/product-build-darwin.yml
- job: Release
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), or(eq(variables['VSCODE_RELEASE'], 'true'), and(or(eq(variables['VSCODE_QUALITY'], 'insider'), eq(variables['VSCODE_QUALITY'], 'exploration')), eq(variables['Build.Reason'], 'Schedule'))))
pool:
vmImage: 'Ubuntu-16.04'
dependsOn:
- Windows
- Windows32
- Linux
- LinuxSnap
- LinuxArmhf
- LinuxAlpine
- macOS
steps:
- template: release.yml
- job: Mooncake
pool:
vmImage: 'Ubuntu-16.04'
condition: and(succeededOrFailed(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'))
condition: true
dependsOn:
- Windows
- Windows32
- Linux
- LinuxSnap
- LinuxArmhf
- LinuxAlpine
- LinuxWeb
- Linux32
- macOS
steps:
- template: sync-mooncake.yml
trigger: none
pr: none
schedules:
- cron: "0 5 * * Mon-Fri"
displayName: Mon-Fri at 7:00
branches:
include:
- master
- template: sync-mooncake.yml

View File

@@ -1,133 +0,0 @@
steps:
- script: |
mkdir -p .build
echo -n $BUILD_SOURCEVERSION > .build/commit
echo -n $VSCODE_QUALITY > .build/quality
displayName: Prepare cache flag
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .build/commit, .build/quality'
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
vstsFeed: 'npm-vscode'
platformIndependent: true
alias: 'Compilation'
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
inputs:
azureSubscription: 'vscode-builds-subscription'
KeyVaultName: vscode
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- script: |
set -e
cat << EOF > ~/.netrc
machine github.com
login vscode
password $(github-distro-mixin-password)
EOF
git config user.email "vscode@microsoft.com"
git config user.name "VSCode"
displayName: Prepare tooling
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- script: |
set -e
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
git fetch distro
git merge $(node -p "require('./package.json').distro")
displayName: Merge distro
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: 'npm-vscode'
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- script: |
set -e
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
displayName: Install dependencies
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: 'npm-vscode'
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))
- script: |
set -e
yarn postinstall
displayName: Run postinstall scripts
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), eq(variables['CacheRestored'], 'true'))
# Mixin must run before optimize, because the CSS loader will
# inline small SVGs
- script: |
set -e
node build/azure-pipelines/mixin
displayName: Mix in quality
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- script: |
set -e
yarn gulp hygiene --skip-tslint
yarn gulp tslint
yarn monaco-compile-check
displayName: Run hygiene, tslint and monaco compile checks
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
./build/azure-pipelines/common/extract-telemetry.sh
displayName: Extract Telemetry
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- script: |
set -e
AZURE_WEBVIEW_STORAGE_ACCESS_KEY="$(vscode-webview-storage-key)" \
./build/azure-pipelines/common/publish-webview.sh
displayName: Publish Webview
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- script: |
set -e
yarn gulp compile-build
yarn gulp compile-extensions-build
yarn gulp minify-vscode
yarn gulp minify-vscode-reh
yarn gulp minify-vscode-reh-web
displayName: Compile
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- script: |
set -e
AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \
node build/azure-pipelines/upload-sourcemaps
displayName: Upload sourcemaps
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: 'build/.cachesalt, .build/commit, .build/quality'
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
vstsFeed: 'npm-vscode'
platformIndependent: true
alias: 'Compilation'
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))

View File

@@ -0,0 +1,36 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const cp = require("child_process");
let tag = '';
try {
tag = cp
.execSync('git describe --tags `git rev-list --tags --max-count=1`')
.toString()
.trim();
if (!isValidTag(tag)) {
throw Error(`Invalid tag ${tag}`);
}
}
catch (err) {
console.error(err);
console.error('Failed to update types');
process.exit(1);
}
function isValidTag(t) {
if (t.split('.').length !== 3) {
return false;
}
const [major, minor, bug] = t.split('.');
// Only release for tags like 1.34.0
if (bug !== '0') {
return false;
}
if (isNaN(parseInt(major, 10)) || isNaN(parseInt(minor, 10))) {
return false;
}
return true;
}
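A minimal sketch of how the tag filter above behaves (hypothetical inputs, assuming the isValidTag logic shown here):
isValidTag('1.36.0'); // true  - a three-part version ending in .0
isValidTag('1.36.1'); // false - only x.y.0 release tags are accepted
isValidTag('release-1.36'); // false - not a numeric major.minor.patch tag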

View File

@@ -13,23 +13,7 @@ steps:
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
- bash: |
TAG_VERSION=$(git describe --tags `git rev-list --tags --max-count=1`)
CHANNEL="G1C14HJ2F"
if [ "$TAG_VERSION" == "1.999.0" ]; then
MESSAGE="<!here>. Someone pushed 1.999.0 tag. Please delete it ASAP from remote and local."
curl -X POST -H "Authorization: Bearer $(SLACK_TOKEN)" \
-H 'Content-type: application/json; charset=utf-8' \
--data '{"channel":"'"$CHANNEL"'", "link_names": true, "text":"'"$MESSAGE"'"}' \
https://slack.com/api/chat.postMessage
exit 1
fi
displayName: Check 1.999.0 tag
versionSpec: "1.10.1"
- bash: |
# Install build dependencies

View File

@@ -0,0 +1,62 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const fs = require("fs");
const cp = require("child_process");
const path = require("path");
let tag = '';
try {
tag = cp
.execSync('git describe --tags `git rev-list --tags --max-count=1`')
.toString()
.trim();
const dtsUri = `https://raw.githubusercontent.com/microsoft/vscode/${tag}/src/vs/vscode.d.ts`;
const outPath = path.resolve(process.cwd(), 'DefinitelyTyped/types/vscode/index.d.ts');
cp.execSync(`curl ${dtsUri} --output ${outPath}`);
updateDTSFile(outPath, tag);
console.log(`Done updating vscode.d.ts at ${outPath}`);
}
catch (err) {
console.error(err);
console.error('Failed to update types');
process.exit(1);
}
function updateDTSFile(outPath, tag) {
const oldContent = fs.readFileSync(outPath, 'utf-8');
const newContent = getNewFileContent(oldContent, tag);
fs.writeFileSync(outPath, newContent);
}
function getNewFileContent(content, tag) {
const oldheader = [
`/*---------------------------------------------------------------------------------------------`,
` * Copyright (c) Microsoft Corporation. All rights reserved.`,
` * Licensed under the Source EULA. See License.txt in the project root for license information.`,
` *--------------------------------------------------------------------------------------------*/`
].join('\n');
return getNewFileHeader(tag) + content.slice(oldheader.length);
}
function getNewFileHeader(tag) {
const [major, minor] = tag.split('.');
const shorttag = `${major}.${minor}`;
const header = [
`// Type definitions for Visual Studio Code ${shorttag}`,
`// Project: https://github.com/microsoft/vscode`,
`// Definitions by: Visual Studio Code Team, Microsoft <https://github.com/Microsoft>`,
`// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped`,
``,
`/*---------------------------------------------------------------------------------------------`,
` * Copyright (c) Microsoft Corporation. All rights reserved.`,
` * Licensed under the Source EULA.`,
` * See https://github.com/Microsoft/vscode/blob/master/LICENSE.txt for license information.`,
` *--------------------------------------------------------------------------------------------*/`,
``,
`/**`,
` * Type Definition for Visual Studio Code ${shorttag} Extension API`,
` * See https://code.visualstudio.com/api for more information`,
` */`
].join('\n');
return header;
}
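For illustration, a tag of '1.36.0' passed through getNewFileHeader above yields a shorttag of '1.36', so the regenerated vscode.d.ts would begin with a header along these lines (hypothetical output, assuming the template shown here):
// Type definitions for Visual Studio Code 1.36
// Project: https://github.com/microsoft/vscode
// Definitions by: Visual Studio Code Team, Microsoft <https://github.com/Microsoft>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped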

View File

@@ -1,22 +0,0 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "10.x"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
inputs:
azureSubscription: 'vscode-builds-subscription'
KeyVaultName: vscode
- script: |
set -e
(cd build ; yarn)
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
node build/azure-pipelines/common/release.js

View File

@@ -5,7 +5,7 @@ steps:
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
versionSpec: "1.10.1"
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'

View File

@@ -1,57 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
const path = require('path');
const es = require('event-stream');
const azure = require('gulp-azure-storage');
const vfs = require('vinyl-fs');
const util = require('../lib/util');
const root = path.dirname(path.dirname(__dirname));
const commit = util.getVersion(root);
// optionally allow to pass in explicit base/maps to upload
const [, , base, maps] = process.argv;
const fetch = function (base, maps = `${base}/**/*.map`) {
return vfs.src(maps, { base })
.pipe(es.mapSync(f => {
f.path = `${f.base}/core/${f.relative}`;
return f;
}));
};
function main() {
const sources = [];
// vscode client maps (default)
if (!base) {
const vs = fetch('out-vscode-min'); // client source-maps only
sources.push(vs);
const extensionsOut = vfs.src(['.build/extensions/**/*.js.map', '!**/node_modules/**'], { base: '.build' });
sources.push(extensionsOut);
}
// specific client base/maps
else {
sources.push(fetch(base, maps));
}
return es.merge(...sources)
.pipe(es.through(function (data) {
console.log('Uploading Sourcemap', data.relative); // debug
this.emit('data', data);
}))
.pipe(azure.upload({
account: process.env.AZURE_STORAGE_ACCOUNT,
key: process.env.AZURE_STORAGE_ACCESS_KEY,
container: 'sourcemaps',
prefix: commit + '/'
}));
}
main();

View File

@@ -1,106 +0,0 @@
steps:
- script: |
mkdir -p .build
echo -n $BUILD_SOURCEVERSION > .build/commit
echo -n $VSCODE_QUALITY > .build/quality
displayName: Prepare cache flag
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .build/commit, .build/quality'
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
vstsFeed: 'npm-vscode'
platformIndependent: true
alias: 'Compilation'
- script: |
set -e
exit 1
displayName: Check RestoreCache
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
inputs:
azureSubscription: 'vscode-builds-subscription'
KeyVaultName: vscode
- script: |
set -e
cat << EOF > ~/.netrc
machine github.com
login vscode
password $(github-distro-mixin-password)
EOF
git config user.email "vscode@microsoft.com"
git config user.name "VSCode"
displayName: Prepare tooling
- script: |
set -e
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
git fetch distro
git merge $(node -p "require('./package.json').distro")
displayName: Merge distro
# - task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
# inputs:
# keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
# targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
# vstsFeed: 'npm-vscode'
- script: |
set -e
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
displayName: Install dependencies
# condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
# - task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
# inputs:
# keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
# targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
# vstsFeed: 'npm-vscode'
# condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
# - script: |
# set -e
# yarn postinstall
# displayName: Run postinstall scripts
# condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
- script: |
set -e
node build/azure-pipelines/mixin
displayName: Mix in quality
- script: |
set -e
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn gulp vscode-web-min-ci
displayName: Build
# upload only the workbench.web.api.js source maps because
# we just compiled these bits in the previous step and the
# general task to upload source maps has already been run
- script: |
set -e
AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \
node build/azure-pipelines/upload-sourcemaps out-vscode-web-min out-vscode-web-min/vs/workbench/workbench.web.api.js.map
displayName: Upload sourcemaps (Web)
- script: |
set -e
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
./build/azure-pipelines/web/publish.sh
displayName: Publish

View File

@@ -1,18 +0,0 @@
#!/usr/bin/env bash
set -e
REPO="$(pwd)"
ROOT="$REPO/.."
# Publish Web Client
WEB_BUILD_NAME="vscode-web"
WEB_TARBALL_FILENAME="vscode-web.tar.gz"
WEB_TARBALL_PATH="$ROOT/$WEB_TARBALL_FILENAME"
BUILD="$ROOT/$WEB_BUILD_NAME"
PACKAGEJSON="$BUILD/package.json"
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")
rm -rf $ROOT/vscode-web.tar.*
(cd $ROOT && tar --owner=0 --group=0 -czf $WEB_TARBALL_PATH $WEB_BUILD_NAME)
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "web-standalone" archive-unsigned "$WEB_TARBALL_FILENAME" "$VERSION" true "$WEB_TARBALL_PATH"

View File

@@ -0,0 +1,5 @@
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn gulp "vscode-win32-$env:VSCODE_ARCH-min" }
exec { yarn gulp "vscode-reh-win32-$env:VSCODE_ARCH-min" }
exec { yarn gulp "vscode-win32-$env:VSCODE_ARCH-inno-updater" }

View File

@@ -2,70 +2,53 @@ steps:
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 # {{SQL CARBON EDIT}} update version
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
versionSpec: "1.10.1"
- task: UsePythonVersion@0
inputs:
versionSpec: '2.x'
addToPath: true
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: '$(build-cache)' # {{SQL CARBON EDIT}} update build cache
# - task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
# inputs:
# keyfile: '**/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
# targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
# vstsFeed: '$(ArtifactFeed)'
# condition: eq(variables['System.PullRequest.PullRequestId'], '')
- powershell: |
yarn --frozen-lockfile
env:
CHILD_CONCURRENCY: "1"
GITHUB_TOKEN: $(GITHUB_TOKEN) # {{SQL CARBON EDIT}} add github token
yarn
displayName: Install Dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: '$(build-cache)' # {{SQL CARBON EDIT}} update build cache
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
# condition: or(ne(variables['System.PullRequest.PullRequestId'], ''), ne(variables['CacheRestored'], 'true'))
# - task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
# inputs:
# keyfile: '**/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
# targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
# vstsFeed: '$(ArtifactFeed)'
# condition: and(succeeded(), eq(variables['System.PullRequest.PullRequestId'], ''), ne(variables['CacheRestored'], 'true'))
- powershell: |
yarn electron
env:
GITHUB_TOKEN: $(GITHUB_TOKEN) # {{SQL CARBON EDIT}} add github token
- script: |
yarn gulp hygiene --skip-tslint
yarn gulp electron
displayName: Download Electron
- powershell: |
yarn gulp hygiene
displayName: Run Hygiene Checks
- script: |
yarn gulp tslint
displayName: Run TSLint Checks
- script: | # {{SQL CARBON EDIT}} add step
yarn tslint
displayName: Run TSLint (gci)
- script: | # {{SQL CARBON EDIT}} add step
yarn strict-null-check
displayName: Run Strict Null Check
# - powershell: | {{SQL CARBON EDIT}} remove step
# yarn monaco-compile-check
# displayName: Run Monaco Editor Checks
- powershell: |
yarn monaco-compile-check
displayName: Run Monaco Editor Checks
- powershell: |
yarn compile
displayName: Compile Sources
# - powershell: | {{SQL CARBON EDIT}} remove step
# yarn download-builtin-extensions
# displayName: Download Built-in Extensions
- powershell: |
yarn download-builtin-extensions
displayName: Download Built-in Extensions
- powershell: |
.\scripts\test.bat --tfs "Unit Tests"
displayName: Run Unit Tests
# - powershell: | {{SQL CARBON EDIT}} remove step
# .\scripts\test-integration.bat --tfs "Integration Tests"
# displayName: Run Integration Tests
- powershell: |
.\scripts\test-integration.bat --tfs "Integration Tests"
displayName: Run Integration Tests
- task: PublishTestResults@2
displayName: Publish Tests Results
inputs:
testResultsFiles: '*-results.xml'
searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
condition: succeededOrFailed()
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0 # {{SQL CARBON EDIT}} add task
displayName: 'Component Detection'
inputs:
alertWarningLevel: High
failOnAlert: true

View File

@@ -1,31 +1,11 @@
steps:
- powershell: |
mkdir .build -ea 0
"$env:BUILD_SOURCEVERSION" | Out-File -Encoding ascii -NoNewLine .build\commit
"$env:VSCODE_QUALITY" | Out-File -Encoding ascii -NoNewLine .build\quality
displayName: Prepare cache flag
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .build/commit, .build/quality'
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
vstsFeed: 'npm-vscode'
platformIndependent: true
alias: 'Compilation'
- powershell: |
$ErrorActionPreference = "Stop"
exit 1
displayName: Check RestoreCache
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
versionSpec: "1.10.1"
- task: UsePythonVersion@0
inputs:
@@ -41,89 +21,45 @@ steps:
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
"machine github.com`nlogin vscode`npassword $(github-distro-mixin-password)" | Out-File "$env:USERPROFILE\_netrc" -Encoding ASCII
"machine monacotools.visualstudio.com`npassword $(devops-pat)`nmachine github.com`nlogin vscode`npassword $(github-distro-mixin-password)" | Out-File "$env:USERPROFILE\_netrc" -Encoding ASCII
$env:npm_config_arch="$(VSCODE_ARCH)"
$env:CHILD_CONCURRENCY="1"
exec { git config user.email "vscode@microsoft.com" }
exec { git config user.name "VSCode" }
mkdir .build -ea 0
"$(VSCODE_ARCH)" | Out-File -Encoding ascii -NoNewLine .build\arch
displayName: Prepare tooling
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git" }
exec { git fetch distro }
exec { git merge $(node -p "require('./package.json').distro") }
displayName: Merge distro
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .build/arch, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: 'npm-vscode'
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$env:npm_config_arch="$(VSCODE_ARCH)"
$env:CHILD_CONCURRENCY="1"
exec { yarn --frozen-lockfile }
displayName: Install dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: 'build/.cachesalt, .build/arch, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: 'npm-vscode'
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn postinstall }
displayName: Run postinstall scripts
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { node build/azure-pipelines/mixin }
displayName: Mix in quality
exec { yarn }
exec { yarn gulp mixin }
exec { yarn gulp hygiene }
exec { yarn monaco-compile-check }
exec { node build/azure-pipelines/common/installDistro.js }
exec { node build/lib/builtInExtensions.js }
displayName: Prepare build
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$env:VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)"
exec { yarn gulp "vscode-win32-$env:VSCODE_ARCH-min-ci" }
exec { yarn gulp "vscode-reh-win32-$env:VSCODE_ARCH-min-ci" }
exec { yarn gulp "vscode-reh-web-win32-$env:VSCODE_ARCH-min-ci" }
exec { yarn gulp "vscode-win32-$env:VSCODE_ARCH-code-helper" }
exec { yarn gulp "vscode-win32-$env:VSCODE_ARCH-inno-updater" }
.\build\azure-pipelines\win32\build.ps1
displayName: Build
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn electron $(VSCODE_ARCH) }
exec { yarn gulp "electron-$(VSCODE_ARCH)" }
exec { .\scripts\test.bat --build --tfs "Unit Tests" }
# yarn smoketest -- --build "$(agent.builddirectory)\VSCode-win32-$(VSCODE_ARCH)"
displayName: Run unit tests
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- powershell: |
# Figure out the full absolute path of the product we just built
# including the remote server and configure the integration tests
# to run with these builds instead of running out of sources.
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$AppRoot = "$(agent.builddirectory)\VSCode-win32-$(VSCODE_ARCH)"
$AppProductJson = Get-Content -Raw -Path "$AppRoot\resources\app\product.json" | ConvertFrom-Json
$AppNameShort = $AppProductJson.nameShort
exec { $env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe"; $env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\vscode-reh-win32-$(VSCODE_ARCH)"; .\scripts\test-integration.bat --build --tfs "Integration Tests" }
exec { yarn gulp "electron-$(VSCODE_ARCH)" }
exec { .\scripts\test-integration.bat --build --tfs "Integration Tests" }
displayName: Run integration tests
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
inputs:
@@ -205,7 +141,6 @@ steps:
$env:AZURE_STORAGE_ACCESS_KEY_2 = "$(vscode-storage-key)"
$env:AZURE_DOCUMENTDB_MASTERKEY = "$(builds-docdb-key-readwrite)"
$env:VSCODE_HOCKEYAPP_TOKEN = "$(vscode-hockeyapp-token)"
$env:VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)"
.\build\azure-pipelines\win32\publish.ps1
displayName: Publish

View File

@@ -1,7 +0,0 @@
[
{
"name": "Microsoft.sqlservernotebook",
"version": "0.3.2",
"repo": "https://github.com/Microsoft/azuredatastudio"
}
]

View File

@@ -1,7 +1,2 @@
[
{
"name": "Microsoft.sqlservernotebook",
"version": "0.3.2",
"repo": "https://github.com/Microsoft/azuredatastudio"
}
]

View File

@@ -10,7 +10,7 @@ const path = require('path');
let window = null;
app.once('ready', () => {
window = new BrowserWindow({ width: 800, height: 600, webPreferences: { nodeIntegration: true, webviewTag: true } });
window = new BrowserWindow({ width: 800, height: 600 });
window.setMenuBarVisibility(false);
window.loadURL(url.format({ pathname: path.join(__dirname, 'index.html'), protocol: 'file:', slashes: true }));
// window.webContents.openDevTools();

View File

@@ -0,0 +1,91 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const https = require("https");
const fs = require("fs");
const path = require("path");
const cp = require("child_process");
function ensureDir(filepath) {
if (!fs.existsSync(filepath)) {
ensureDir(path.dirname(filepath));
fs.mkdirSync(filepath);
}
}
function download(options, destination) {
ensureDir(path.dirname(destination));
return new Promise((c, e) => {
const fd = fs.openSync(destination, 'w');
const req = https.get(options, (res) => {
res.on('data', (chunk) => {
fs.writeSync(fd, chunk);
});
res.on('end', () => {
fs.closeSync(fd);
c();
});
});
req.on('error', (reqErr) => {
console.error(`request to ${options.host}${options.path} failed.`);
console.error(reqErr);
e(reqErr);
});
});
}
const MARKER_ARGUMENT = `_download_fork_`;
function base64encode(str) {
return Buffer.from(str, 'utf8').toString('base64');
}
function base64decode(str) {
return Buffer.from(str, 'base64').toString('utf8');
}
function downloadInExternalProcess(options) {
const url = `https://${options.requestOptions.host}${options.requestOptions.path}`;
console.log(`Downloading ${url}...`);
return new Promise((c, e) => {
const child = cp.fork(__filename, [MARKER_ARGUMENT, base64encode(JSON.stringify(options))], {
stdio: ['pipe', 'pipe', 'pipe', 'ipc']
});
let stderr = [];
child.stderr.on('data', (chunk) => {
stderr.push(typeof chunk === 'string' ? Buffer.from(chunk) : chunk);
});
child.on('exit', (code) => {
if (code === 0) {
// normal termination
console.log(`Finished downloading ${url}.`);
c();
}
else {
// abnormal termination
console.error(Buffer.concat(stderr).toString());
e(new Error(`Download of ${url} failed.`));
}
});
});
}
exports.downloadInExternalProcess = downloadInExternalProcess;
function _downloadInExternalProcess() {
let options;
try {
options = JSON.parse(base64decode(process.argv[3]));
}
catch (err) {
console.error(`Cannot read arguments`);
console.error(err);
process.exit(-1);
return;
}
download(options.requestOptions, options.destinationPath).then(() => {
process.exit(0);
}, (err) => {
console.error(err);
process.exit(-2);
});
}
if (process.argv.length >= 4 && process.argv[2] === MARKER_ARGUMENT) {
// running as forked download script
_downloadInExternalProcess();
}

build/download/download.ts Normal file
View File

@@ -0,0 +1,111 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as https from 'https';
import * as fs from 'fs';
import * as path from 'path';
import * as cp from 'child_process';
function ensureDir(filepath: string) {
if (!fs.existsSync(filepath)) {
ensureDir(path.dirname(filepath));
fs.mkdirSync(filepath);
}
}
function download(options: https.RequestOptions, destination: string): Promise<void> {
ensureDir(path.dirname(destination));
return new Promise<void>((c, e) => {
const fd = fs.openSync(destination, 'w');
const req = https.get(options, (res) => {
res.on('data', (chunk) => {
fs.writeSync(fd, chunk);
});
res.on('end', () => {
fs.closeSync(fd);
c();
});
});
req.on('error', (reqErr) => {
console.error(`request to ${options.host}${options.path} failed.`);
console.error(reqErr);
e(reqErr);
});
});
}
const MARKER_ARGUMENT = `_download_fork_`;
function base64encode(str: string): string {
return Buffer.from(str, 'utf8').toString('base64');
}
function base64decode(str: string): string {
return Buffer.from(str, 'base64').toString('utf8');
}
export interface IDownloadRequestOptions {
host: string;
path: string;
}
export interface IDownloadOptions {
requestOptions: IDownloadRequestOptions;
destinationPath: string;
}
export function downloadInExternalProcess(options: IDownloadOptions): Promise<void> {
const url = `https://${options.requestOptions.host}${options.requestOptions.path}`;
console.log(`Downloading ${url}...`);
return new Promise<void>((c, e) => {
const child = cp.fork(
__filename,
[MARKER_ARGUMENT, base64encode(JSON.stringify(options))],
{
stdio: ['pipe', 'pipe', 'pipe', 'ipc']
}
);
let stderr: Buffer[] = [];
child.stderr.on('data', (chunk) => {
stderr.push(typeof chunk === 'string' ? Buffer.from(chunk) : chunk);
});
child.on('exit', (code) => {
if (code === 0) {
// normal termination
console.log(`Finished downloading ${url}.`);
c();
} else {
// abnormal termination
console.error(Buffer.concat(stderr).toString());
e(new Error(`Download of ${url} failed.`));
}
});
});
}
function _downloadInExternalProcess() {
let options: IDownloadOptions;
try {
options = JSON.parse(base64decode(process.argv[3]));
} catch (err) {
console.error(`Cannot read arguments`);
console.error(err);
process.exit(-1);
return;
}
download(options.requestOptions, options.destinationPath).then(() => {
process.exit(0);
}, (err) => {
console.error(err);
process.exit(-2);
});
}
if (process.argv.length >= 4 && process.argv[2] === MARKER_ARGUMENT) {
// running as forked download script
_downloadInExternalProcess();
}
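A minimal usage sketch for the helper above (the host, path and destination are hypothetical, not taken from this change):
import { downloadInExternalProcess } from './download/download';

// Fork this script as a child process and stream the response to disk.
downloadInExternalProcess({
	requestOptions: { host: 'example.com', path: '/artifacts/electron-v4.2.5-win32-x64.zip' },
	destinationPath: '.build/downloads/electron-v4.2.5-win32-x64.zip'
}).then(
	() => console.log('download complete'),
	err => { console.error(err); process.exit(1); }
);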

View File

@@ -5,12 +5,14 @@
'use strict';
const gulp = require('gulp');
const util = require('./lib/util');
const task = require('./lib/task');
const compilation = require('./lib/compilation');
const { compileExtensionsBuildTask } = require('./gulpfile.extensions');
// Full compile, including nls and inline sources in sourcemaps, for build
const compileBuildTask = task.define('compile-build', task.series(util.rimraf('out-build'), compilation.compileTask('src', 'out-build', true)));
gulp.task(compileBuildTask);
const compileClientBuildTask = task.define('compile-client-build', task.series(util.rimraf('out-build'), compilation.compileTask('src', 'out-build', true)));
// All Build
const compileBuildTask = task.define('compile-build', task.parallel(compileClientBuildTask, compileExtensionsBuildTask));
exports.compileBuildTask = compileBuildTask;

View File

@@ -41,7 +41,12 @@ var editorEntryPoints = [
];
var editorResources = [
'out-editor-build/vs/base/browser/ui/codiconLabel/**/*.ttf'
'out-build/vs/{base,editor}/**/*.{svg,png}',
'!out-build/vs/base/browser/ui/splitview/**/*',
'!out-build/vs/base/browser/ui/toolbar/**/*',
'!out-build/vs/base/browser/ui/octiconLabel/**/*',
'!out-build/vs/workbench/**',
'!**/test/**'
];
var BUNDLED_FILE_HEADER = [
@@ -84,6 +89,9 @@ const extractEditorSrcTask = task.define('extract-editor-src', () => {
`lib.dom.d.ts`,
`lib.webworker.importscripts.d.ts`
],
redirects: {
'vs/base/browser/ui/octiconLabel/octiconLabel': 'vs/base/browser/ui/octiconLabel/octiconLabel.mock',
},
shakeLevel: 2, // 0-Files, 1-InnerFile, 2-ClassMembers
importIgnorePattern: /(^vs\/css!)|(promise-polyfill\/polyfill)/,
destRoot: path.join(root, 'out-editor-src')

View File

@@ -21,15 +21,9 @@ const nlsDev = require('vscode-nls-dev');
const root = path.dirname(__dirname);
const commit = util.getVersion(root);
const plumber = require('gulp-plumber');
const ext = require('./lib/extensions');
const _ = require('underscore');
const extensionsPath = path.join(path.dirname(__dirname), 'extensions');
// {{SQL CARBON EDIT}}
const sqlLocalizedExtensions = [
'dacpac',
'schema-compare'
];
// {{SQL CARBON EDIT}}
const compilations = glob.sync('**/tsconfig.json', {
cwd: extensionsPath,
@@ -42,38 +36,38 @@ const tasks = compilations.map(function (tsconfigFile) {
const absolutePath = path.join(extensionsPath, tsconfigFile);
const relativeDirname = path.dirname(tsconfigFile);
const overrideOptions = {};
overrideOptions.sourceMap = true;
const tsconfig = require(absolutePath);
const tsOptions = _.assign({}, tsconfig.extends ? require(path.join(extensionsPath, relativeDirname, tsconfig.extends)).compilerOptions : {}, tsconfig.compilerOptions);
tsOptions.verbose = false;
tsOptions.sourceMap = true;
const name = relativeDirname.replace(/\//g, '-');
const root = path.join('extensions', relativeDirname);
const srcBase = path.join(root, 'src');
const src = path.join(srcBase, '**');
const srcOpts = { cwd: path.dirname(__dirname), base: srcBase };
const out = path.join(root, 'out');
const baseUrl = getBaseUrl(out);
let headerId, headerOut;
let index = relativeDirname.indexOf('/');
if (index < 0) {
headerId = 'microsoft.' + relativeDirname; // {{SQL CARBON EDIT}}
headerId = 'vscode.' + relativeDirname;
headerOut = 'out';
} else {
headerId = 'microsoft.' + relativeDirname.substr(0, index); // {{SQL CARBON EDIT}}
headerId = 'vscode.' + relativeDirname.substr(0, index);
headerOut = relativeDirname.substr(index + 1) + '/out';
}
function createPipeline(build, emitError) {
const reporter = createReporter();
overrideOptions.inlineSources = Boolean(build);
overrideOptions.base = path.dirname(absolutePath);
tsOptions.inlineSources = !!build;
tsOptions.base = path.dirname(absolutePath);
const compilation = tsb.create(absolutePath, overrideOptions, false, err => reporter(err.toString()));
const compilation = tsb.create(tsOptions, null, null, err => reporter(err.toString()));
const pipeline = function () {
return function () {
const input = es.through();
const tsFilter = filter(['**/*.ts', '!**/lib/lib*.d.ts', '!**/node_modules/**'], { restore: true });
const output = input
@@ -103,19 +97,15 @@ const tasks = compilations.map(function (tsconfigFile) {
return es.duplex(input, output);
};
// add src-stream for project files
pipeline.tsProjectSrc = () => {
return compilation.src(srcOpts);
};
return pipeline;
}
const srcOpts = { cwd: path.dirname(__dirname), base: srcBase };
const cleanTask = task.define(`clean-extension-${name}`, util.rimraf(out));
const compileTask = task.define(`compile-extension:${name}`, task.series(cleanTask, () => {
const pipeline = createPipeline(sqlLocalizedExtensions.includes(name), true); // {{SQL CARBON EDIT}}
const input = pipeline.tsProjectSrc();
const pipeline = createPipeline(false, true);
const input = gulp.src(src, srcOpts);
return input
.pipe(pipeline())
@@ -124,8 +114,8 @@ const tasks = compilations.map(function (tsconfigFile) {
const watchTask = task.define(`watch-extension:${name}`, task.series(cleanTask, () => {
const pipeline = createPipeline(false);
const input = pipeline.tsProjectSrc();
const watchInput = watcher(src, { ...srcOpts, ...{ readDelay: 200 } });
const input = gulp.src(src, srcOpts);
const watchInput = watcher(src, srcOpts);
return watchInput
.pipe(util.incremental(pipeline, input))
@@ -134,7 +124,7 @@ const tasks = compilations.map(function (tsconfigFile) {
const compileBuildTask = task.define(`compile-build-extension-${name}`, task.series(cleanTask, () => {
const pipeline = createPipeline(true, true);
const input = pipeline.tsProjectSrc();
const input = gulp.src(src, srcOpts);
return input
.pipe(pipeline())
@@ -145,7 +135,11 @@ const tasks = compilations.map(function (tsconfigFile) {
gulp.task(compileTask);
gulp.task(watchTask);
return { compileTask, watchTask, compileBuildTask };
return {
compileTask: compileTask,
watchTask: watchTask,
compileBuildTask: compileBuildTask
};
});
const compileExtensionsTask = task.define('compile-extensions', task.parallel(...tasks.map(t => t.compileTask)));
@@ -156,17 +150,5 @@ const watchExtensionsTask = task.define('watch-extensions', task.parallel(...tas
gulp.task(watchExtensionsTask);
exports.watchExtensionsTask = watchExtensionsTask;
const compileExtensionsBuildLegacyTask = task.define('compile-extensions-build-legacy', task.parallel(...tasks.map(t => t.compileBuildTask)));
gulp.task(compileExtensionsBuildLegacyTask);
// Azure Pipelines
const cleanExtensionsBuildTask = task.define('clean-extensions-build', util.rimraf('.build/extensions'));
const compileExtensionsBuildTask = task.define('compile-extensions-build', task.series(
cleanExtensionsBuildTask,
task.define('bundle-extensions-build', () => ext.packageLocalExtensionsStream().pipe(gulp.dest('.build'))),
task.define('bundle-marketplace-extensions-build', () => ext.packageMarketplaceExtensionsStream().pipe(gulp.dest('.build')))
));
gulp.task(compileExtensionsBuildTask);
const compileExtensionsBuildTask = task.define('compile-extensions-build', task.parallel(...tasks.map(t => t.compileBuildTask)));
exports.compileExtensionsBuildTask = compileExtensionsBuildTask;

View File

@@ -17,7 +17,6 @@ const vfs = require('vinyl-fs');
const path = require('path');
const fs = require('fs');
const pall = require('p-all');
const task = require('./lib/task');
/**
* Hygiene works by creating cascading subsets of all our files and
@@ -51,15 +50,12 @@ const indentationFilter = [
'!src/vs/css.js',
'!src/vs/css.build.js',
'!src/vs/loader.js',
'!src/vs/base/common/insane/insane.js',
'!src/vs/base/common/marked/marked.js',
'!src/vs/base/node/terminateProcess.sh',
'!src/vs/base/node/cpuUsage.sh',
'!test/assert.js',
'!build/testSetup.js',
// except specific folders
'!test/automation/out/**',
'!test/smoke/out/**',
'!extensions/vscode-api-tests/testWorkspace/**',
'!extensions/vscode-api-tests/testWorkspace2/**',
@@ -68,12 +64,11 @@ const indentationFilter = [
// except multiple specific files
'!**/package.json',
'!**/package-lock.json', // {{SQL CARBON EDIT}}
'!**/yarn.lock',
'!**/yarn-error.log',
// except multiple specific folders
'!**/codicon/**',
'!**/octicons/**',
'!**/fixtures/**',
'!**/lib/**',
'!extensions/**/out/**',
@@ -97,14 +92,12 @@ const indentationFilter = [
'!**/*.dockerfile',
'!extensions/markdown-language-features/media/*.js',
// {{SQL CARBON EDIT}}
'!**/*.{xlf,docx,sql,vsix,bacpac,ipynb}',
'!**/*.{xlf,docx,sql,vsix,bacpac}',
'!extensions/mssql/sqltoolsservice/**',
'!extensions/import/flatfileimportservice/**',
'!extensions/admin-tool-ext-win/ssmsmin/**',
'!extensions/resource-deployment/notebooks/**',
'!extensions/mssql/notebooks/**',
'!extensions/big-data-cluster/src/bigDataCluster/controller/apiGenerated.ts',
'!extensions/big-data-cluster/src/bigDataCluster/controller/clusterApiGenerated2.ts'
'!extensions/mssql/notebooks/**'
];
const copyrightFilter = [
@@ -134,37 +127,34 @@ const copyrightFilter = [
'!extensions/markdown-language-features/media/highlight.css',
'!extensions/html-language-features/server/src/modes/typescript/*',
'!extensions/*/server/bin/*',
'!src/vs/editor/test/node/classification/typescript-test.ts',
'!scripts/code-web.js',
// {{SQL CARBON EDIT}}
'!extensions/notebook/src/intellisense/text.ts',
'!extensions/mssql/src/hdfs/webhdfs.ts',
'!src/sql/workbench/parts/notebook/browser/outputs/tableRenderers.ts',
'!src/sql/workbench/parts/notebook/common/models/url.ts',
'!src/sql/workbench/parts/notebook/browser/models/renderMimeInterfaces.ts',
'!src/sql/workbench/parts/notebook/browser/models/outputProcessor.ts',
'!src/sql/workbench/parts/notebook/browser/models/mimemodel.ts',
'!src/sql/workbench/parts/notebook/browser/cellViews/media/*.css',
'!extensions/mssql/src/objectExplorerNodeProvider/webhdfs.ts',
'!src/sql/workbench/parts/notebook/outputs/tableRenderers.ts',
'!src/sql/workbench/parts/notebook/outputs/common/url.ts',
'!src/sql/workbench/parts/notebook/outputs/common/renderMimeInterfaces.ts',
'!src/sql/workbench/parts/notebook/outputs/common/outputProcessor.ts',
'!src/sql/workbench/parts/notebook/outputs/common/mimemodel.ts',
'!src/sql/workbench/parts/notebook/cellViews/media/*.css',
'!src/sql/base/browser/ui/table/plugins/rowSelectionModel.plugin.ts',
'!src/sql/base/browser/ui/table/plugins/rowDetailView.ts',
'!src/sql/base/browser/ui/table/plugins/headerFilter.plugin.ts',
'!src/sql/base/browser/ui/table/plugins/checkboxSelectColumn.plugin.ts',
'!src/sql/base/browser/ui/table/plugins/cellSelectionModel.plugin.ts',
'!src/sql/base/browser/ui/table/plugins/autoSizeColumns.plugin.ts',
'!src/sql/workbench/parts/notebook/browser/outputs/sanitizer.ts',
'!src/sql/workbench/parts/notebook/browser/outputs/renderers.ts',
'!src/sql/workbench/parts/notebook/browser/outputs/registry.ts',
'!src/sql/workbench/parts/notebook/browser/outputs/factories.ts',
'!src/sql/workbench/parts/notebook/common/models/nbformat.ts',
'!src/sql/workbench/parts/notebook/outputs/sanitizer.ts',
'!src/sql/workbench/parts/notebook/outputs/renderers.ts',
'!src/sql/workbench/parts/notebook/outputs/registry.ts',
'!src/sql/workbench/parts/notebook/outputs/factories.ts',
'!src/sql/workbench/parts/notebook/models/nbformat.ts',
'!extensions/markdown-language-features/media/tomorrow.css',
'!src/sql/workbench/browser/modelComponents/media/highlight.css',
'!src/sql/workbench/parts/notebook/electron-browser/cellViews/media/highlight.css',
'!src/sql/workbench/electron-browser/modelComponents/media/highlight.css',
'!src/sql/parts/modelComponents/highlight.css',
'!extensions/mssql/sqltoolsservice/**',
'!extensions/import/flatfileimportservice/**',
'!extensions/notebook/src/prompts/**',
'!extensions/mssql/src/prompts/**',
'!extensions/notebook/resources/jupyter_config/**',
'!extensions/query-history/images/**',
'!**/*.gif',
'!**/*.xlf',
'!**/*.dacpac',
@@ -179,82 +169,30 @@ const eslintFilter = [
'!src/vs/nls.js',
'!src/vs/css.build.js',
'!src/vs/nls.build.js',
'!src/**/insane.js',
'!src/**/marked.js',
'!**/test/**'
];
const tslintBaseFilter = [
const tslintFilter = [
'src/**/*.ts',
'test/**/*.ts',
'extensions/**/*.ts',
'!**/fixtures/**',
'!**/typings/**',
'!**/node_modules/**',
'!extensions/typescript-basics/test/colorize-fixtures/**',
'!extensions/typescript/test/colorize-fixtures/**',
'!extensions/vscode-api-tests/testWorkspace/**',
'!extensions/vscode-api-tests/testWorkspace2/**',
'!extensions/**/*.test.ts',
'!extensions/html-language-features/server/lib/jquery.d.ts',
'!extensions/big-data-cluster/src/bigDataCluster/controller/apiGenerated.ts', // {{SQL CARBON EDIT}},
'!extensions/big-data-cluster/src/bigDataCluster/controller/tokenApiGenerated.ts' // {{SQL CARBON EDIT}},
'!extensions/html-language-features/server/lib/jquery.d.ts'
];
// {{SQL CARBON EDIT}}
const sqlFilter = [
'src/sql/**',
'extensions/**',
// Ignore VS Code extensions
'!extensions/bat/**',
'!extensions/configuration-editing/**',
'!extensions/docker/**',
'!extensions/extension-editing/**',
'!extensions/git/**',
'!extensions/git-ui/**',
'!extensions/image-preview/**',
'!extensions/insights-default/**',
'!extensions/json/**',
'!extensions/json-language-features/**',
'!extensions/markdown-basics/**',
'!extensions/markdown-language-features/**',
'!extensions/merge-conflict/**',
'!extensions/powershell/**',
'!extensions/python/**',
'!extensions/r/**',
'!extensions/theme-*/**',
'!extensions/vscode-*/**',
'!extensions/xml/**',
'!extensions/xml-language-features/**',
'!extensions/yarml/**',
const useStrictFilter = [
'src/**'
];
const tslintCoreFilter = [
'src/**/*.ts',
'test/**/*.ts',
'!extensions/**/*.ts',
'!test/automation/**',
'!test/smoke/**',
...tslintBaseFilter
];
const tslintExtensionsFilter = [
'extensions/**/*.ts',
'!src/**/*.ts',
'!test/**/*.ts',
'test/automation/**/*.ts',
...tslintBaseFilter
];
const tslintHygieneFilter = [
'src/**/*.ts',
'test/**/*.ts',
'extensions/**/*.ts',
...tslintBaseFilter
];
const fileLengthFilter = filter([
'**',
'!extensions/import/*.docx',
'!extensions/admin-tool-ext-win/license/**'
], {restore: true});
// {{SQL CARBON EDIT}}
const copyrightHeaderLines = [
'/*---------------------------------------------------------------------------------------------',
' * Copyright (c) Microsoft Corporation. All rights reserved.',
@@ -271,63 +209,17 @@ gulp.task('eslint', () => {
});
gulp.task('tslint', () => {
return es.merge([
const options = { emitError: true };
// Core: include type information (required by certain rules like no-nodejs-globals)
vfs.src(all, { base: '.', follow: true, allowEmpty: true })
.pipe(filter(tslintCoreFilter))
.pipe(gulptslint.default({ rulesDirectory: 'build/lib/tslint', program: tslint.Linter.createProgram('src/tsconfig.json') }))
.pipe(gulptslint.default.report({ emitError: true })),
// Extensions: do not include type information
vfs.src(all, { base: '.', follow: true, allowEmpty: true })
.pipe(filter(tslintExtensionsFilter))
.pipe(gulptslint.default({ rulesDirectory: 'build/lib/tslint' }))
.pipe(gulptslint.default.report({ emitError: true }))
]).pipe(es.through());
return vfs.src(all, { base: '.', follow: true, allowEmpty: true })
.pipe(filter(tslintFilter))
.pipe(gulptslint.default({ rulesDirectory: 'build/lib/tslint' }))
.pipe(gulptslint.default.report(options));
});
function checkPackageJSON(actualPath) {
const actual = require(path.join(__dirname, '..', actualPath));
const rootPackageJSON = require('../package.json');
for (let depName in actual.dependencies) {
const depVersion = actual.dependencies[depName];
const rootDepVersion = rootPackageJSON.dependencies[depName];
if (!rootDepVersion) {
// missing in root is allowed
continue;
}
if (depVersion !== rootDepVersion) {
this.emit('error', `The dependency ${depName} in '${actualPath}' (${depVersion}) is different than in the root package.json (${rootDepVersion})`);
}
}
}
const checkPackageJSONTask = task.define('check-package-json', () => {
return gulp.src('package.json')
.pipe(es.through(function() {
checkPackageJSON.call(this, 'remote/package.json');
checkPackageJSON.call(this, 'remote/web/package.json');
}));
});
gulp.task(checkPackageJSONTask);
function hygiene(some) {
let errorCount = 0;
const productJson = es.through(function (file) {
// const product = JSON.parse(file.contents.toString('utf8'));
// if (product.extensionsGallery) { // {{SQL CARBON EDIT}} @todo we need to research what the point of this is
// console.error('product.json: Contains "extensionsGallery"');
// errorCount++;
// }
this.emit('data', file);
});
const indentation = es.through(function (file) {
const lines = file.contents.toString('utf8').split(/\r\n|\r|\n/);
file.__lines = lines;
@@ -363,6 +255,23 @@ function hygiene(some) {
this.emit('data', file);
});
// {{SQL CARBON EDIT}}
// Check for unnecessary 'use strict' lines. These are automatically added by the alwaysStrict compiler option so don't need to be added manually
const useStrict = es.through(function (file) {
const lines = file.__lines;
// Only take the first 10 lines to reduce false positives; the compiler will throw an error if it's not the first non-comment line in a file
// (10 is used to account for copyright and extraneous newlines)
lines.slice(0, 10).forEach((line, i) => {
if (/\s*'use\s*strict\s*'/.test(line)) {
console.error(file.relative + '(' + (i + 1) + ',1): Unnecessary \'use strict\' - this is already added by the compiler');
errorCount++;
}
});
this.emit('data', file);
});
// {{SQL CARBON EDIT}} END
const formatting = es.map(function (file, cb) {
tsfmt.processString(file.path, file.contents.toString('utf8'), {
verify: false,
@@ -393,23 +302,6 @@ function hygiene(some) {
});
});
const filelength = es.through(function (file) {
const fileName = path.basename(file.relative);
const fileDir = path.dirname(file.relative);
// Check that the filename is < 50 characters (basename gets the filename with extension).
if (fileName.length > 50) {
console.error(`File name '${fileName}' under ${fileDir} is too long. Rename file to have less than 50 characters.`);
errorCount++;
}
if (file.relative.length > 150) {
console.error(`File path ${file.relative} exceeds acceptable file-length. Rename the path to have less than 150 characters.`);
errorCount++;
}
this.emit('data', file);
});
const tslintConfiguration = tslint.Configuration.findConfiguration('tslint.json', '.');
const tslintOptions = { fix: false, formatter: 'json' };
const tsLinter = new tslint.Linter(tslintOptions);
@@ -428,41 +320,20 @@ function hygiene(some) {
input = some;
}
// {{SQL CARBON EDIT}} Linting for SQL
const tslintSqlConfiguration = tslint.Configuration.findConfiguration('tslint-sql.json', '.');
const tslintSqlOptions = { fix: false, formatter: 'json' };
const sqlTsLinter = new tslint.Linter(tslintSqlOptions);
const sqlTsl = es.through(function (file) { //TODO restore
const contents = file.contents.toString('utf8');
sqlTsLinter.lint(file.relative, contents, tslintSqlConfiguration.results);
});
const productJsonFilter = filter('product.json', { restore: true });
const result = input
.pipe(fileLengthFilter)
.pipe(filelength)
.pipe(fileLengthFilter.restore)
.pipe(filter(f => !f.stat.isDirectory()))
.pipe(productJsonFilter)
.pipe(process.env['BUILD_SOURCEVERSION'] ? es.through() : productJson)
.pipe(productJsonFilter.restore)
.pipe(filter(indentationFilter))
.pipe(indentation)
.pipe(filter(copyrightFilter))
.pipe(copyrights);
let typescript = result
.pipe(filter(tslintHygieneFilter))
.pipe(formatting);
if (!process.argv.some(arg => arg === '--skip-tslint')) {
typescript = typescript.pipe(tsl);
typescript = typescript
.pipe(filter(sqlFilter)) // {{SQL CARBON EDIT}}
.pipe(sqlTsl);
}
const typescript = result
.pipe(filter(tslintFilter))
.pipe(formatting)
.pipe(tsl)
// {{SQL CARBON EDIT}}
.pipe(filter(useStrictFilter))
.pipe(useStrict);
const javascript = result
.pipe(filter(eslintFilter))
@@ -494,19 +365,6 @@ function hygiene(some) {
errorCount += tslintResult.failures.length;
}
const sqlTslintResult = sqlTsLinter.getResult();
if (sqlTslintResult.failures.length > 0) {
for (const failure of sqlTslintResult.failures) {
const name = failure.getFileName();
const position = failure.getStartPosition();
const line = position.getLineAndCharacter().line;
const character = position.getLineAndCharacter().character;
console.error(`${name}:${line + 1}:${character + 1}:${failure.getFailure()}`);
}
errorCount += sqlTslintResult.failures.length;
}
if (errorCount > 0) {
this.emit('error', 'Hygiene failed with ' + errorCount + ' errors. Check \'build/gulpfile.hygiene.js\'.');
} else {
@@ -548,7 +406,7 @@ function createGitIndexVinyls(paths) {
.then(r => r.filter(p => !!p));
}
gulp.task('hygiene', task.series(checkPackageJSONTask, () => hygiene()));
gulp.task('hygiene', () => hygiene());
// this allows us to run hygiene as a git pre-commit hook
if (require.main === module) {

View File

@@ -7,139 +7,10 @@
const gulp = require('gulp');
const path = require('path');
const es = require('event-stream');
const util = require('./lib/util');
const task = require('./lib/task');
const vfs = require('vinyl-fs');
const flatmap = require('gulp-flatmap');
const gunzip = require('gulp-gunzip');
const untar = require('gulp-untar');
const File = require('vinyl');
const fs = require('fs');
const remote = require('gulp-remote-retry-src');
const rename = require('gulp-rename');
const filter = require('gulp-filter');
const cp = require('child_process');
const REPO_ROOT = path.dirname(__dirname);
const BUILD_TARGETS = [
{ platform: 'win32', arch: 'ia32', pkgTarget: 'node8-win-x86' },
{ platform: 'win32', arch: 'x64', pkgTarget: 'node8-win-x64' },
{ platform: 'darwin', arch: null, pkgTarget: 'node8-macos-x64' },
{ platform: 'linux', arch: 'ia32', pkgTarget: 'node8-linux-x86' },
{ platform: 'linux', arch: 'x64', pkgTarget: 'node8-linux-x64' },
{ platform: 'linux', arch: 'armhf', pkgTarget: 'node8-linux-armv7' },
{ platform: 'linux', arch: 'arm64', pkgTarget: 'node8-linux-arm64' },
{ platform: 'linux', arch: 'alpine', pkgTarget: 'node8-linux-alpine' },
];
const noop = () => { return Promise.resolve(); };
gulp.task('vscode-reh-win32-ia32-min', noop);
gulp.task('vscode-reh-win32-x64-min', noop);
gulp.task('vscode-reh-darwin-min', noop);
gulp.task('vscode-reh-linux-x64-min', noop);
gulp.task('vscode-reh-linux-armhf-min', noop);
gulp.task('vscode-reh-linux-arm64-min', noop);
gulp.task('vscode-reh-linux-alpine-min', noop);
gulp.task('vscode-reh-web-win32-ia32-min', noop);
gulp.task('vscode-reh-web-win32-x64-min', noop);
gulp.task('vscode-reh-web-darwin-min', noop);
gulp.task('vscode-reh-web-linux-x64-min', noop);
gulp.task('vscode-reh-web-linux-alpine-min', noop);
function getNodeVersion() {
const yarnrc = fs.readFileSync(path.join(REPO_ROOT, 'remote', '.yarnrc'), 'utf8');
const target = /^target "(.*)"$/m.exec(yarnrc)[1];
return target;
}
const nodeVersion = getNodeVersion();
BUILD_TARGETS.forEach(({ platform, arch }) => {
if (platform === 'darwin') {
arch = 'x64';
}
gulp.task(task.define(`node-${platform}-${arch}`, () => {
const nodePath = path.join('.build', 'node', `v${nodeVersion}`, `${platform}-${arch}`);
if (!fs.existsSync(nodePath)) {
util.rimraf(nodePath);
return nodejs(platform, arch)
.pipe(vfs.dest(nodePath));
}
return Promise.resolve(null);
}));
});
const defaultNodeTask = gulp.task(`node-${process.platform}-${process.arch}`);
if (defaultNodeTask) {
gulp.task(task.define('node', defaultNodeTask));
}
function nodejs(platform, arch) {
if (arch === 'ia32') {
arch = 'x86';
}
if (platform === 'win32') {
return remote(`/dist/v${nodeVersion}/win-${arch}/node.exe`, { base: 'https://nodejs.org' })
.pipe(rename('node.exe'));
}
if (arch === 'alpine') {
const contents = cp.execSync(`docker run --rm node:${nodeVersion}-alpine /bin/sh -c 'cat \`which node\`'`, { maxBuffer: 100 * 1024 * 1024, encoding: 'buffer' });
return es.readArray([new File({ path: 'node', contents, stat: { mode: parseInt('755', 8) } })]);
}
if (platform === 'darwin') {
arch = 'x64';
}
if (arch === 'armhf') {
arch = 'armv7l';
}
return remote(`/dist/v${nodeVersion}/node-v${nodeVersion}-${platform}-${arch}.tar.gz`, { base: 'https://nodejs.org' })
.pipe(flatmap(stream => stream.pipe(gunzip()).pipe(untar())))
.pipe(filter('**/node'))
.pipe(util.setExecutableBit('**'))
.pipe(rename('node'));
}
function mixinServer(watch) {
const packageJSONPath = path.join(path.dirname(__dirname), 'package.json');
function exec(cmdLine) {
console.log(cmdLine);
cp.execSync(cmdLine, { stdio: "inherit" });
}
function checkout() {
const packageJSON = JSON.parse(fs.readFileSync(packageJSONPath).toString());
exec('git fetch distro');
exec(`git checkout ${packageJSON['distro']} -- src/vs/server resources/server`);
exec('git reset HEAD src/vs/server resources/server');
}
checkout();
if (watch) {
console.log('Enter watch mode (observing package.json)');
const watcher = fs.watch(packageJSONPath);
watcher.addListener('change', () => {
try {
checkout();
} catch (e) {
console.log(e);
}
});
}
return Promise.resolve();
}
gulp.task(task.define('mixin-server', () => mixinServer(false)));
gulp.task(task.define('mixin-server-watch', () => mixinServer(true)));
gulp.task('vscode-reh-linux-arm-min', noop);
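The getNodeVersion() function earlier in this file derives the remote Node.js version by matching a `target "<version>"` line in remote/.yarnrc. A small illustration of that extraction (the sample file contents and version number below are made up for the example, not taken from this repo):

// Illustration only: sampleYarnrc is hypothetical; it mimics the line format the regex expects.
const sampleYarnrc = 'disturl "https://nodejs.org/dist"\ntarget "10.11.0"';
const nodeVersion = /^target "(.*)"$/m.exec(sampleYarnrc)[1];
console.log(nodeVersion); // -> "10.11.0"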

View File

@@ -4,15 +4,12 @@
*--------------------------------------------------------------------------------------------*/
'use strict';
const gulp = require('gulp');
const util = require('./lib/util');
const tsfmt = require('typescript-formatter');
const es = require('event-stream');
const filter = require('gulp-filter');
const del = require('del');
const serviceDownloader = require('service-downloader').ServiceDownloadProvider;
const platformInfo = require('service-downloader/out/platform').PlatformInformation;
const path = require('path');
gulp.task('clean-mssql-extension', util.rimraf('extensions/mssql/node_modules'));
gulp.task('clean-credentials-extension', util.rimraf('extensions/credentials/node_modules'));
@@ -21,117 +18,66 @@ gulp.task('fmt', () => formatStagedFiles());
const formatFiles = (some) => {
const formatting = es.map(function (file, cb) {
tsfmt.processString(file.path, file.contents.toString('utf8'), {
replace: true,
tsfmt: true,
tslint: true,
tsconfig: true
// verbose: true
}).then(result => {
console.info('ran formatting on file ' + file.path + ' result: ' + result.message);
if (result.error) {
console.error(result.message);
}
cb(null, file);
tsfmt.processString(file.path, file.contents.toString('utf8'), {
replace: true,
tsfmt: true,
tslint: true,
tsconfig: true
// verbose: true
}).then(result => {
console.info('ran formatting on file ' + file.path + ' result: ' + result.message);
if (result.error) {
console.error(result.message);
}
cb(null, file);
}, err => {
cb(err);
}, err => {
cb(err);
});
});
});
return gulp.src(some, {
base: '.'
})
.pipe(filter(f => !f.stat.isDirectory()))
.pipe(formatting);
return gulp.src(some, { base: '.' })
.pipe(filter(f => !f.stat.isDirectory()))
.pipe(formatting);
};
const formatStagedFiles = () => {
const cp = require('child_process');
cp.exec('git diff --name-only', {
maxBuffer: 2000 * 1024
}, (err, out) => {
if (err) {
console.error();
console.error(err);
process.exit(1);
}
cp.exec('git diff --name-only', { maxBuffer: 2000 * 1024 }, (err, out) => {
if (err) {
console.error();
console.error(err);
process.exit(1);
}
const some = out
.split(/\r?\n/)
.filter(l => !!l)
.filter(l => l.match(/.*.ts$/i));
const some = out
.split(/\r?\n/)
.filter(l => !!l)
.filter(l => l.match(/.*.ts$/i));
formatFiles(some).on('error', err => {
console.error();
console.error(err);
process.exit(1);
formatFiles(some).on('error', err => {
console.error();
console.error(err);
process.exit(1);
});
});
});
cp.exec('git diff --cached --name-only', {
maxBuffer: 2000 * 1024
}, (err, out) => {
if (err) {
console.error();
console.error(err);
process.exit(1);
}
cp.exec('git diff --cached --name-only', { maxBuffer: 2000 * 1024 }, (err, out) => {
if (err) {
console.error();
console.error(err);
process.exit(1);
}
const some = out
.split(/\r?\n/)
.filter(l => !!l)
.filter(l => l.match(/.*.ts$/i));
const some = out
.split(/\r?\n/)
.filter(l => !!l)
.filter(l => l.match(/.*.ts$/i));
formatFiles(some).on('error', err => {
console.error();
console.error(err);
process.exit(1);
formatFiles(some).on('error', err => {
console.error();
console.error(err);
process.exit(1);
});
});
});
};
function installService() {
let config = require('../extensions/mssql/config.json');
return platformInfo.getCurrent().then(p => {
let runtime = p.runtimeId;
// fix path since it won't be correct
config.installDirectory = path.join(__dirname, '../extensions/mssql/src', config.installDirectory);
var installer = new serviceDownloader(config);
let serviceInstallFolder = installer.getInstallDirectory(runtime);
console.log('Cleaning up the install folder: ' + serviceInstallFolder);
return del(serviceInstallFolder + '/*').then(() => {
console.log('Installing the service. Install folder: ' + serviceInstallFolder);
return installer.installService(runtime);
}, delError => {
console.log('failed to delete the install folder error: ' + delError);
});
});
}
gulp.task('install-sqltoolsservice', () => {
return installService();
});
function installSsmsMin() {
const config = require('../extensions/admin-tool-ext-win/config.json');
return platformInfo.getCurrent().then(p => {
const runtime = p.runtimeId;
// fix path since it won't be correct
config.installDirectory = path.join(__dirname, '..', 'extensions', 'admin-tool-ext-win', config.installDirectory);
var installer = new serviceDownloader(config);
const serviceInstallFolder = installer.getInstallDirectory(runtime);
const serviceCleanupFolder = path.join(serviceInstallFolder, '..');
console.log('Cleaning up the install folder: ' + serviceCleanupFolder);
return del(serviceCleanupFolder + '/*').then(() => {
console.log('Installing the service. Install folder: ' + serviceInstallFolder);
return installer.installService(runtime);
}, delError => {
console.log('failed to delete the install folder error: ' + delError);
});
});
}
gulp.task('install-ssmsmin', () => {
return installSsmsMin();
});
};

View File

@@ -21,6 +21,7 @@ const json = require('gulp-json-editor');
const _ = require('underscore');
const util = require('./lib/util');
const task = require('./lib/task');
const ext = require('./lib/extensions');
const buildfile = require('../src/buildfile');
const common = require('./lib/optimize');
const root = path.dirname(__dirname);
@@ -29,14 +30,19 @@ const packageJson = require('../package.json');
const product = require('../product.json');
const crypto = require('crypto');
const i18n = require('./lib/i18n');
const ext = require('./lib/extensions'); // {{SQL CARBON EDIT}}
// {{SQL CARBON EDIT}}
const serviceDownloader = require('service-downloader').ServiceDownloadProvider;
const platformInfo = require('service-downloader/out/platform').PlatformInformation;
// {{SQL CARBON EDIT}} - End
const deps = require('./dependencies');
const { config } = require('./lib/electron');
const getElectronVersion = require('./lib/electron').getElectronVersion;
const createAsar = require('./lib/asar').createAsar;
const { compileBuildTask } = require('./gulpfile.compile');
const { compileExtensionsBuildTask } = require('./gulpfile.extensions');
const productionDependencies = deps.getProductionDependencies(path.dirname(__dirname));
// @ts-ignore
// {{SQL CARBON EDIT}}
var del = require('del');
const baseModules = Object.keys(process.binding('natives')).filter(n => !/^_|\//.test(n));
// {{SQL CARBON EDIT}}
@@ -46,21 +52,16 @@ const nodeModules = [
'rxjs/Observable',
'rxjs/Subject',
'rxjs/Observer',
'slickgrid/lib/jquery.event.drag-2.3.0',
'slickgrid/lib/jquery-ui-1.9.2',
'slickgrid/slick.core',
'slickgrid/slick.grid',
'slickgrid/slick.editors',
'slickgrid/slick.dataview']
'ng2-charts']
.concat(Object.keys(product.dependencies || {}))
.concat(_.uniq(productionDependencies.map(d => d.name)))
.concat(baseModules);
// Build
const vscodeEntryPoints = _.flatten([
buildfile.entrypoint('vs/workbench/workbench.desktop.main'),
buildfile.entrypoint('vs/workbench/workbench.main'),
buildfile.base,
buildfile.workbenchDesktop,
buildfile.workbench,
buildfile.code
]);
@@ -73,12 +74,12 @@ const vscodeResources = [
'out-build/bootstrap-amd.js',
'out-build/bootstrap-window.js',
'out-build/paths.js',
'out-build/vs/**/*.{svg,png,html}',
'out-build/vs/**/*.{svg,png,cur,html}',
'!out-build/vs/code/browser/**/*.html',
'out-build/vs/base/common/performance.js',
'out-build/vs/base/node/languagePacks.js',
'out-build/vs/base/node/{stdForkStart.js,terminateProcess.sh,cpuUsage.sh,ps.sh}',
'out-build/vs/base/browser/ui/codiconLabel/codicon/**',
'out-build/vs/base/browser/ui/octiconLabel/octicons/**',
'out-build/vs/workbench/browser/media/*-theme.css',
'out-build/vs/workbench/contrib/debug/**/*.json',
'out-build/vs/workbench/contrib/externalTerminal/**/*.scpt',
@@ -86,8 +87,9 @@ const vscodeResources = [
'out-build/vs/workbench/contrib/webview/electron-browser/pre/*.js',
'out-build/vs/**/markdown.css',
'out-build/vs/workbench/contrib/tasks/**/*.json',
'out-build/vs/platform/files/**/*.exe',
'out-build/vs/platform/files/**/*.md',
'out-build/vs/workbench/contrib/welcome/walkThrough/**/*.md',
'out-build/vs/workbench/services/files/**/*.exe',
'out-build/vs/workbench/services/files/**/*.md',
'out-build/vs/code/electron-browser/workbench/**',
'out-build/vs/code/electron-browser/sharedProcess/sharedProcess.js',
'out-build/vs/code/electron-browser/issue/issueReporter.js',
@@ -111,37 +113,117 @@ const vscodeResources = [
'out-build/sql/media/objectTypes/*.svg',
'out-build/sql/media/icons/*.svg',
'out-build/sql/workbench/parts/notebook/media/**/*.svg',
'out-build/sql/setup.js',
'!**/test/**'
];
const BUNDLED_FILE_HEADER = [
'/*!--------------------------------------------------------',
' * Copyright (C) Microsoft Corporation. All rights reserved.',
' *--------------------------------------------------------*/'
].join('\n');
const optimizeVSCodeTask = task.define('optimize-vscode', task.series(
util.rimraf('out-vscode'),
task.parallel(
util.rimraf('out-vscode'),
compileBuildTask
),
common.optimizeTask({
src: 'out-build',
entryPoints: vscodeEntryPoints,
resources: vscodeResources,
loaderConfig: common.loaderConfig(nodeModules),
header: BUNDLED_FILE_HEADER,
out: 'out-vscode',
inlineAmdImages: true,
bundleInfo: undefined
})
));
gulp.task(optimizeVSCodeTask);
const sourceMappingURLBase = `https://ticino.blob.core.windows.net/sourcemaps/${commit}`;
const minifyVSCodeTask = task.define('minify-vscode', task.series(
const optimizeIndexJSTask = task.define('optimize-index-js', task.series(
optimizeVSCodeTask,
util.rimraf('out-vscode-min'),
() => {
const fullpath = path.join(process.cwd(), 'out-vscode/bootstrap-window.js');
const contents = fs.readFileSync(fullpath).toString();
const newContents = contents.replace('[/*BUILD->INSERT_NODE_MODULES*/]', JSON.stringify(nodeModules));
fs.writeFileSync(fullpath, newContents);
},
}
));
const sourceMappingURLBase = `https://ticino.blob.core.windows.net/sourcemaps/${commit}`;
const minifyVSCodeTask = task.define('minify-vscode', task.series(
task.parallel(
util.rimraf('out-vscode-min'),
optimizeIndexJSTask
),
common.minifyTask('out-vscode', `${sourceMappingURLBase}/core`)
));
gulp.task(minifyVSCodeTask);
// Package
// @ts-ignore JSON checking: darwinCredits is optional
const darwinCreditsTemplate = product.darwinCredits && _.template(fs.readFileSync(path.join(root, product.darwinCredits), 'utf8'));
function darwinBundleDocumentType(extensions, icon) {
return {
name: product.nameLong + ' document',
role: 'Editor',
ostypes: ["TEXT", "utxt", "TUTX", "****"],
extensions: extensions,
iconFile: icon
};
}
const config = {
version: getElectronVersion(),
productAppName: product.nameLong,
companyName: 'Microsoft Corporation',
copyright: 'Copyright (C) 2019 Microsoft. All rights reserved',
darwinIcon: 'resources/darwin/code.icns',
darwinBundleIdentifier: product.darwinBundleIdentifier,
darwinApplicationCategoryType: 'public.app-category.developer-tools',
darwinHelpBookFolder: 'VS Code HelpBook',
darwinHelpBookName: 'VS Code HelpBook',
darwinBundleDocumentTypes: [
// {{SQL CARBON EDIT}} - Remove most document types and replace with ours
darwinBundleDocumentType(["csv", "json", "sqlplan", "sql", "xml"], 'resources/darwin/code_file.icns'),
],
darwinBundleURLTypes: [{
role: 'Viewer',
name: product.nameLong,
urlSchemes: [product.urlProtocol]
}],
darwinForceDarkModeSupport: true,
darwinCredits: darwinCreditsTemplate ? Buffer.from(darwinCreditsTemplate({ commit: commit, date: new Date().toISOString() })) : undefined,
linuxExecutableName: product.applicationName,
winIcon: 'resources/win32/code.ico',
token: process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'] || undefined,
// @ts-ignore JSON checking: electronRepository is optional
repo: product.electronRepository || undefined
};
function getElectron(arch) {
return () => {
const electronOpts = _.extend({}, config, {
platform: process.platform,
arch,
ffmpegChromium: true,
keepDefaultApp: true
});
return gulp.src('package.json')
.pipe(json({ name: product.nameShort }))
.pipe(electron(electronOpts))
.pipe(filter(['**', '!**/app/package.json']))
.pipe(vfs.dest('.build/electron'));
};
}
gulp.task(task.define('electron', task.series(util.rimraf('.build/electron'), getElectron(process.arch))));
gulp.task(task.define('electron-ia32', task.series(util.rimraf('.build/electron'), getElectron('ia32'))));
gulp.task(task.define('electron-x64', task.series(util.rimraf('.build/electron'), getElectron('x64'))));
gulp.task(task.define('electron-arm', task.series(util.rimraf('.build/electron'), getElectron('armv7l'))));
gulp.task(task.define('electron-arm64', task.series(util.rimraf('.build/electron'), getElectron('arm64'))));
/**
* Compute checksums for some files.
@@ -187,25 +269,28 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op
const out = sourceFolderName;
const checksums = computeChecksums(out, [
'vs/workbench/workbench.desktop.main.js',
'vs/workbench/workbench.desktop.main.css',
'vs/workbench/workbench.main.js',
'vs/workbench/workbench.main.css',
'vs/code/electron-browser/workbench/workbench.html',
'vs/code/electron-browser/workbench/workbench.js'
]);
const src = gulp.src(out + '/**', { base: '.' })
.pipe(rename(function (path) { path.dirname = path.dirname.replace(new RegExp('^' + out), 'out'); }))
.pipe(util.setExecutableBit(['**/*.sh']));
.pipe(util.setExecutableBit(['**/*.sh']))
.pipe(filter(['**', '!**/*.js.map']));
const root = path.resolve(path.join(__dirname, '..'));
// {{SQL CARBON EDIT}}
ext.packageBuiltInExtensions();
const extensions = gulp.src(['.build/extensions/**', '!.build/extensions/node_modules/**'], { base: '.build', dot: true }); // {{SQL CARBON EDIT}} - don't package the node_modules directory
const sources = es.merge(src, extensions)
.pipe(filter(['**', '!**/*.js.map'], { dot: true }));
const sources = es.merge(src, ext.packageExtensionsStream({
sourceMappingURLBase: sourceMappingURLBase
}));
let version = packageJson.version;
// @ts-ignore JSON checking: quality is optional
const quality = product.quality;
if (quality && quality !== 'stable') {
@@ -242,24 +327,41 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op
const dataApi = gulp.src('src/sql/azdata.d.ts').pipe(rename('out/sql/azdata.d.ts'));
const sqlopsAPI = gulp.src('src/sql/sqlops.d.ts').pipe(rename('out/sql/sqlops.d.ts'));
const telemetry = gulp.src('.build/telemetry/**', { base: '.build/telemetry', dot: true });
const depsSrc = [
..._.flatten(productionDependencies.map(d => path.relative(root, d.path)).map(d => [`${d}/**`, `!${d}/**/{test,tests}/**`])),
// @ts-ignore JSON checking: dependencies is optional
..._.flatten(Object.keys(product.dependencies || {}).map(d => [`node_modules/${d}/**`, `!node_modules/${d}/**/{test,tests}/**`]))
];
const root = path.resolve(path.join(__dirname, '..'));
const dependenciesSrc = _.flatten(productionDependencies.map(d => path.relative(root, d.path)).map(d => [`${d}/**`, `!${d}/**/{test,tests}/**`]));
const deps = gulp.src(dependenciesSrc, { base: '.', dot: true })
const deps = gulp.src(depsSrc, { base: '.', dot: true })
.pipe(filter(['**', '!**/package-lock.json']))
.pipe(util.cleanNodeModules(path.join(__dirname, '.nativeignore')))
.pipe(createAsar(path.join(process.cwd(), 'node_modules'), ['**/*.node', '**/vscode-ripgrep/bin/*', '**/node-pty/build/Release/*'], 'app/node_modules.asar'));
// {{SQL CARBON EDIT}}
let copiedModules = gulp.src([
'node_modules/jquery/**/*.*',
'node_modules/reflect-metadata/**/*.*',
'node_modules/slickgrid/**/*.*',
'node_modules/underscore/**/*.*',
'node_modules/zone.js/**/*.*',
'node_modules/chart.js/**/*.*',
'node_modules/chartjs-color/**/*.*',
'node_modules/chartjs-color-string/**/*.*',
'node_modules/color-convert/**/*.*',
'node_modules/color-name/**/*.*',
'node_modules/moment/**/*.*'
], { base: '.', dot: true });
let all = es.merge(
packageJsonStream,
packageJsonStream,
productJsonStream,
license,
api,
// {{SQL CARBON EDIT}}
copiedModules,
dataApi,
sqlopsAPI, // {{SQL CARBON EDIT}}
telemetry,
sqlopsAPI,
sources,
deps
);
@@ -283,17 +385,9 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op
.pipe(util.skipDirectories())
.pipe(util.fixWin32DirectoryPermissions())
.pipe(electron(_.extend({}, config, { platform, arch, ffmpegChromium: true })))
.pipe(filter(['**', '!LICENSE', '!LICENSES.chromium.html', '!version'], { dot: true }));
.pipe(filter(['**', '!LICENSE', '!LICENSES.chromium.html', '!version']));
if (platform === 'linux') {
result = es.merge(result, gulp.src('resources/completions/bash/code', { base: '.' })
.pipe(replace('@@APPNAME@@', product.applicationName))
.pipe(rename(function (f) { f.basename = product.applicationName; })));
result = es.merge(result, gulp.src('resources/completions/zsh/_code', { base: '.' })
.pipe(replace('@@APPNAME@@', product.applicationName))
.pipe(rename(function (f) { f.basename = '_' + product.applicationName; })));
}
// result = es.merge(result, gulp.src('resources/completions/**', { base: '.' }));
if (platform === 'win32') {
result = es.merge(result, gulp.src('resources/win32/bin/code.js', { base: 'resources/win32', allowEmpty: true }));
@@ -308,7 +402,6 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op
.pipe(replace('@@VERSION@@', version))
.pipe(replace('@@COMMIT@@', commit))
.pipe(replace('@@APPNAME@@', product.applicationName))
.pipe(replace('@@DATAFOLDER@@', product.dataFolderName))
.pipe(replace('@@QUALITY@@', quality))
.pipe(rename(function (f) { f.basename = product.applicationName; f.extname = ''; })));
@@ -355,17 +448,12 @@ BUILD_TARGETS.forEach(buildTarget => {
const sourceFolderName = `out-vscode${dashed(minified)}`;
const destinationFolderName = `azuredatastudio${dashed(platform)}${dashed(arch)}`;
const vscodeTaskCI = task.define(`vscode${dashed(platform)}${dashed(arch)}${dashed(minified)}-ci`, task.series(
util.rimraf(path.join(buildRoot, destinationFolderName)),
packageTask(platform, arch, sourceFolderName, destinationFolderName, opts)
));
gulp.task(vscodeTaskCI);
const vscodeTask = task.define(`vscode${dashed(platform)}${dashed(arch)}${dashed(minified)}`, task.series(
compileBuildTask,
compileExtensionsBuildTask,
minified ? minifyVSCodeTask : optimizeVSCodeTask,
vscodeTaskCI
task.parallel(
minified ? minifyVSCodeTask : optimizeVSCodeTask,
util.rimraf(path.join(buildRoot, destinationFolderName))
),
packageTask(platform, arch, sourceFolderName, destinationFolderName, opts)
));
gulp.task(vscodeTask);
});
@@ -395,12 +483,10 @@ const apiToken = process.env.TRANSIFEX_API_TOKEN;
gulp.task(task.define(
'vscode-translations-push',
task.series(
compileBuildTask,
compileExtensionsBuildTask,
optimizeVSCodeTask,
function () {
const pathToMetadata = './out-vscode/nls.metadata.json';
const pathToExtensions = '.build/extensions/*';
const pathToExtensions = './extensions/*';
const pathToSetup = 'build/win32/**/{Default.isl,messages.en.isl}';
return es.merge(
@@ -416,12 +502,10 @@ gulp.task(task.define(
gulp.task(task.define(
'vscode-translations-export',
task.series(
compileBuildTask,
compileExtensionsBuildTask,
optimizeVSCodeTask,
function () {
const pathToMetadata = './out-vscode/nls.metadata.json';
const pathToExtensions = '.build/extensions/*';
const pathToExtensions = './extensions/*';
const pathToSetup = 'build/win32/**/{Default.isl,messages.en.isl}';
return es.merge(
@@ -454,6 +538,32 @@ gulp.task('vscode-translations-import', function () {
// {{SQL CARBON EDIT}} - End
});
// Sourcemaps
gulp.task('upload-vscode-sourcemaps', () => {
const vs = gulp.src('out-vscode-min/**/*.map', { base: 'out-vscode-min' })
.pipe(es.mapSync(f => {
f.path = `${f.base}/core/${f.relative}`;
return f;
}));
const extensionsOut = gulp.src('extensions/**/out/**/*.map', { base: '.' });
const extensionsDist = gulp.src('extensions/**/dist/**/*.map', { base: '.' });
return es.merge(vs, extensionsOut, extensionsDist)
.pipe(es.through(function (data) {
// debug
console.log('Uploading Sourcemap', data.relative);
this.emit('data', data);
}))
.pipe(azure.upload({
account: process.env.AZURE_STORAGE_ACCOUNT,
key: process.env.AZURE_STORAGE_ACCESS_KEY,
container: 'sourcemaps',
prefix: commit + '/'
}));
});
// This task is only run for the MacOS build
const generateVSCodeConfigurationTask = task.define('generate-vscode-configuration', () => {
return new Promise((resolve, reject) => {
@@ -546,3 +656,51 @@ function getSettingsSearchBuildId(packageJson) {
throw new Error('Could not determine build number: ' + e.toString());
}
}
// {{SQL CARBON EDIT}}
// Install service locally before building carbon
function installService() {
let config = require('../extensions/mssql/src/config.json');
return platformInfo.getCurrent().then(p => {
let runtime = p.runtimeId;
// fix path since it won't be correct
config.installDirectory = path.join(__dirname, '../extensions/mssql/src', config.installDirectory);
var installer = new serviceDownloader(config);
let serviceInstallFolder = installer.getInstallDirectory(runtime);
console.log('Cleaning up the install folder: ' + serviceInstallFolder);
return del(serviceInstallFolder + '/*').then(() => {
console.log('Installing the service. Install folder: ' + serviceInstallFolder);
return installer.installService(runtime);
}, delError => {
console.log('failed to delete the install folder error: ' + delError);
});
});
}
gulp.task('install-sqltoolsservice', () => {
return installService();
});
function installSsmsMin() {
const config = require('../extensions/admin-tool-ext-win/src/config.json');
return platformInfo.getCurrent().then(p => {
const runtime = p.runtimeId;
// fix path since it won't be correct
config.installDirectory = path.join(__dirname, '..', 'extensions', 'admin-tool-ext-win', config.installDirectory);
var installer = new serviceDownloader(config);
const serviceInstallFolder = installer.getInstallDirectory(runtime);
const serviceCleanupFolder = path.join(serviceInstallFolder, '..');
console.log('Cleaning up the install folder: ' + serviceCleanupFolder);
return del(serviceCleanupFolder + '/*').then(() => {
console.log('Installing the service. Install folder: ' + serviceInstallFolder);
return installer.installService(runtime);
}, delError => {
console.log('failed to delete the install folder error: ' + delError);
});
});
}
gulp.task('install-ssmsmin', () => {
return installSsmsMin();
});

View File

@@ -23,7 +23,7 @@ const commit = util.getVersion(root);
const linuxPackageRevision = Math.floor(new Date().getTime() / 1000);
function getDebPackageArch(arch) {
return { x64: 'amd64', arm: 'armhf', arm64: "arm64" }[arch];
return { x64: 'amd64', ia32: 'i386', arm: 'armhf', arm64: "arm64" }[arch];
}
function prepareDebPackage(arch) {
@@ -43,8 +43,7 @@ function prepareDebPackage(arch) {
.pipe(replace('@@NAME_LONG@@', product.nameLong))
.pipe(replace('@@NAME_SHORT@@', product.nameShort))
.pipe(replace('@@NAME@@', product.applicationName))
.pipe(replace('@@EXEC@@', `/usr/share/${product.applicationName}/${product.applicationName}`))
.pipe(replace('@@ICON@@', `/usr/share/pixmaps/${product.linuxIconName}.png`))
.pipe(replace('@@ICON@@', product.linuxIconName))
.pipe(replace('@@URLPROTOCOL@@', product.urlProtocol));
const appdata = gulp.src('resources/linux/code.appdata.xml', { base: '.' })
@@ -56,13 +55,11 @@ function prepareDebPackage(arch) {
const icon = gulp.src('resources/linux/code.png', { base: '.' })
.pipe(rename('usr/share/pixmaps/' + product.linuxIconName + '.png'));
const bash_completion = gulp.src('resources/completions/bash/code')
.pipe(replace('@@APPNAME@@', product.applicationName))
.pipe(rename('usr/share/bash-completion/completions/' + product.applicationName));
// const bash_completion = gulp.src('resources/completions/bash/code')
// .pipe(rename('usr/share/bash-completion/completions/code'));
const zsh_completion = gulp.src('resources/completions/zsh/_code')
.pipe(replace('@@APPNAME@@', product.applicationName))
.pipe(rename('usr/share/zsh/vendor-completions/_' + product.applicationName));
// const zsh_completion = gulp.src('resources/completions/zsh/_code')
// .pipe(rename('usr/share/zsh/vendor-completions/_code'));
const code = gulp.src(binaryDir + '/**/*', { base: binaryDir })
.pipe(rename(function (p) { p.dirname = 'usr/share/' + product.applicationName + '/' + p.dirname; }));
@@ -98,7 +95,7 @@ function prepareDebPackage(arch) {
.pipe(replace('@@UPDATEURL@@', product.updateUrl || '@@UPDATEURL@@'))
.pipe(rename('DEBIAN/postinst'));
const all = es.merge(control, postinst, postrm, prerm, desktops, appdata, icon, bash_completion, zsh_completion, code);
const all = es.merge(control, postinst, postrm, prerm, desktops, appdata, icon, /* bash_completion, zsh_completion, */ code);
return all.pipe(vfs.dest(destination));
};
@@ -118,7 +115,7 @@ function getRpmBuildPath(rpmArch) {
}
function getRpmPackageArch(arch) {
return { x64: 'x86_64', arm: 'armhf', arm64: "arm64" }[arch];
return { x64: 'x86_64', ia32: 'i386', arm: 'armhf', arm64: "arm64" }[arch];
}
function prepareRpmPackage(arch) {
@@ -137,7 +134,6 @@ function prepareRpmPackage(arch) {
.pipe(replace('@@NAME_LONG@@', product.nameLong))
.pipe(replace('@@NAME_SHORT@@', product.nameShort))
.pipe(replace('@@NAME@@', product.applicationName))
.pipe(replace('@@EXEC@@', `/usr/share/${product.applicationName}/${product.applicationName}`))
.pipe(replace('@@ICON@@', product.linuxIconName))
.pipe(replace('@@URLPROTOCOL@@', product.urlProtocol));
@@ -150,13 +146,11 @@ function prepareRpmPackage(arch) {
const icon = gulp.src('resources/linux/code.png', { base: '.' })
.pipe(rename('BUILD/usr/share/pixmaps/' + product.linuxIconName + '.png'));
const bash_completion = gulp.src('resources/completions/bash/code')
.pipe(replace('@@APPNAME@@', product.applicationName))
.pipe(rename('BUILD/usr/share/bash-completion/completions/' + product.applicationName));
// const bash_completion = gulp.src('resources/completions/bash/code')
// .pipe(rename('BUILD/usr/share/bash-completion/completions/code'));
const zsh_completion = gulp.src('resources/completions/zsh/_code')
.pipe(replace('@@APPNAME@@', product.applicationName))
.pipe(rename('BUILD/usr/share/zsh/site-functions/_' + product.applicationName));
// const zsh_completion = gulp.src('resources/completions/zsh/_code')
// .pipe(rename('BUILD/usr/share/zsh/site-functions/_code'));
const code = gulp.src(binaryDir + '/**/*', { base: binaryDir })
.pipe(rename(function (p) { p.dirname = 'BUILD/usr/share/' + product.applicationName + '/' + p.dirname; }));
@@ -179,7 +173,7 @@ function prepareRpmPackage(arch) {
const specIcon = gulp.src('resources/linux/rpm/code.xpm', { base: '.' })
.pipe(rename('SOURCES/' + product.applicationName + '.xpm'));
const all = es.merge(code, desktops, appdata, icon, bash_completion, zsh_completion, spec, specIcon);
const all = es.merge(code, desktops, appdata, icon, /* bash_completion, zsh_completion, */ spec, specIcon);
return all.pipe(vfs.dest(getRpmBuildPath(rpmArch)));
};
@@ -208,25 +202,21 @@ function prepareSnapPackage(arch) {
const destination = getSnapBuildPath(arch);
return function () {
// A desktop file that is placed in snap/gui will be placed into meta/gui verbatim.
const desktop = gulp.src('resources/linux/code.desktop', { base: '.' })
.pipe(rename(`snap/gui/${product.applicationName}.desktop`));
.pipe(rename(`usr/share/applications/${product.applicationName}.desktop`));
// A desktop file that is placed in snap/gui will be placed into meta/gui verbatim.
const desktopUrlHandler = gulp.src('resources/linux/code-url-handler.desktop', { base: '.' })
.pipe(rename(`snap/gui/${product.applicationName}-url-handler.desktop`));
.pipe(rename(`usr/share/applications/${product.applicationName}-url-handler.desktop`));
const desktops = es.merge(desktop, desktopUrlHandler)
.pipe(replace('@@NAME_LONG@@', product.nameLong))
.pipe(replace('@@NAME_SHORT@@', product.nameShort))
.pipe(replace('@@NAME@@', product.applicationName))
.pipe(replace('@@EXEC@@', `${product.applicationName} --force-user-env`))
.pipe(replace('@@ICON@@', `\${SNAP}/meta/gui/${product.linuxIconName}.png`))
.pipe(replace('@@ICON@@', `/usr/share/pixmaps/${product.linuxIconName}.png`))
.pipe(replace('@@URLPROTOCOL@@', product.urlProtocol));
// An icon that is placed in snap/gui will be placed into meta/gui verbatim.
const icon = gulp.src('resources/linux/code.png', { base: '.' })
.pipe(rename(`snap/gui/${product.linuxIconName}.png`));
.pipe(rename(`usr/share/pixmaps/${product.linuxIconName}.png`));
const code = gulp.src(binaryDir + '/**/*', { base: binaryDir })
.pipe(rename(function (p) { p.dirname = `usr/share/${product.applicationName}/${p.dirname}`; }));
@@ -247,11 +237,11 @@ function prepareSnapPackage(arch) {
function buildSnapPackage(arch) {
const snapBuildPath = getSnapBuildPath(arch);
// Default target for snapcraft runs: pull, build, stage and prime, and finally assembles the snap.
return shell.task(`cd ${snapBuildPath} && snapcraft`);
return shell.task(`cd ${snapBuildPath} && snapcraft build`);
}
const BUILD_TARGETS = [
{ arch: 'ia32' },
{ arch: 'x64' },
{ arch: 'arm' },
{ arch: 'arm64' },

View File

@@ -1,16 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
const gulp = require('gulp');
const noop = () => { return Promise.resolve(); };
gulp.task('minify-vscode-web', noop);
gulp.task('vscode-web', noop);
gulp.task('vscode-web-min', noop);
gulp.task('vscode-web-ci', noop);
gulp.task('vscode-web-min-ci', noop);

View File

@@ -26,8 +26,8 @@ const zipDir = arch => path.join(repoPath, '.build', `win32-${arch}`, 'archive')
const zipPath = arch => path.join(zipDir(arch), `VSCode-win32-${arch}.zip`);
const setupDir = (arch, target) => path.join(repoPath, '.build', `win32-${arch}`, `${target}-setup`);
const issPath = path.join(__dirname, 'win32', 'code.iss');
const innoSetupPath = path.join(path.dirname(path.dirname(require.resolve('innosetup'))), 'bin', 'ISCC.exe');
const signPS1 = path.join(repoPath, 'build', 'azure-pipelines', 'win32', 'sign.ps1');
const innoSetupPath = path.join(path.dirname(path.dirname(require.resolve('innosetup-compiler'))), 'bin', 'ISCC.exe');
// const signPS1 = path.join(repoPath, 'build', 'azure-pipelines', 'win32', 'sign.ps1');
function packageInnoSetup(iss, options, cb) {
options = options || {};
@@ -49,8 +49,9 @@ function packageInnoSetup(iss, options, cb) {
const defs = keys.map(key => `/d${key}=${definitions[key]}`);
const args = [
iss,
...defs,
`/sesrp=powershell.exe -ExecutionPolicy bypass ${signPS1} $f`
...defs
//,
//`/sesrp=powershell.exe -ExecutionPolicy bypass ${signPS1} $f`
];
cp.spawn(innoSetupPath, args, { stdio: ['ignore', 'inherit', 'inherit'] })
@@ -136,17 +137,12 @@ function copyInnoUpdater(arch) {
};
}
function updateIcon(executablePath) {
function patchInnoUpdater(arch) {
return cb => {
const icon = path.join(repoPath, 'resources', 'win32', 'code.ico');
rcedit(executablePath, { icon }, cb);
rcedit(path.join(buildPath(arch), 'tools', 'inno_updater.exe'), { icon }, cb);
};
}
gulp.task(task.define('vscode-win32-ia32-inno-updater', task.series(copyInnoUpdater('ia32'), updateIcon(path.join(buildPath('ia32'), 'tools', 'inno_updater.exe')))));
gulp.task(task.define('vscode-win32-x64-inno-updater', task.series(copyInnoUpdater('x64'), updateIcon(path.join(buildPath('x64'), 'tools', 'inno_updater.exe')))));
// CodeHelper.exe icon
gulp.task(task.define('vscode-win32-ia32-code-helper', task.series(updateIcon(path.join(buildPath('ia32'), 'resources', 'app', 'out', 'vs', 'platform', 'files', 'node', 'watcher', 'win32', 'CodeHelper.exe')))));
gulp.task(task.define('vscode-win32-x64-code-helper', task.series(updateIcon(path.join(buildPath('x64'), 'resources', 'app', 'out', 'vs', 'platform', 'files', 'node', 'watcher', 'win32', 'CodeHelper.exe')))));
gulp.task(task.define('vscode-win32-ia32-inno-updater', task.series(copyInnoUpdater('ia32'), patchInnoUpdater('ia32'))));
gulp.task(task.define('vscode-win32-x64-inno-updater', task.series(copyInnoUpdater('x64'), patchInnoUpdater('x64'))));

View File

@@ -18,10 +18,7 @@ const fancyLog = require('fancy-log');
const ansiColors = require('ansi-colors');
const root = path.dirname(path.dirname(__dirname));
// {{SQL CARBON EDIT}}
const quality = process.env['VSCODE_QUALITY'];
const builtInExtensions = quality && quality === 'stable' ? require('../builtInExtensions.json') : require('../builtInExtensions-insiders.json');
// {{SQL CARBON EDIT}} - END
const builtInExtensions = require('../builtInExtensions.json');
const controlFilePath = path.join(os.homedir(), '.vscode-oss-dev', 'extensions', 'control.json');
function getExtensionPath(extension) {

View File

@@ -11,6 +11,7 @@ const bom = require("gulp-bom");
const sourcemaps = require("gulp-sourcemaps");
const tsb = require("gulp-tsb");
const path = require("path");
const _ = require("underscore");
const monacodts = require("../monaco/api");
const nls = require("./nls");
const reporter_1 = require("./reporter");
@@ -21,7 +22,14 @@ const watch = require('./watch');
const reporter = reporter_1.createReporter();
function getTypeScriptCompilerOptions(src) {
const rootDir = path.join(__dirname, `../../${src}`);
let options = {};
const tsconfig = require(`../../${src}/tsconfig.json`);
let options;
if (tsconfig.extends) {
options = Object.assign({}, require(path.join(rootDir, tsconfig.extends)).compilerOptions, tsconfig.compilerOptions);
}
else {
options = tsconfig.compilerOptions;
}
options.verbose = false;
options.sourceMap = true;
if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry
@@ -30,14 +38,15 @@ function getTypeScriptCompilerOptions(src) {
options.rootDir = rootDir;
options.baseUrl = rootDir;
options.sourceRoot = util.toFileUri(rootDir);
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 0 : 1;
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 'CRLF' : 'LF';
return options;
}
function createCompile(src, build, emitError) {
const projectPath = path.join(__dirname, '../../', src, 'tsconfig.json');
const overrideOptions = Object.assign(Object.assign({}, getTypeScriptCompilerOptions(src)), { inlineSources: Boolean(build) });
const compilation = tsb.create(projectPath, overrideOptions, false, err => reporter(err));
function pipeline(token) {
const opts = _.clone(getTypeScriptCompilerOptions(src));
opts.inlineSources = !!build;
opts.noFilesystemLookup = true;
const ts = tsb.create(opts, true, undefined, err => reporter(err.toString()));
return function (token) {
const utf8Filter = util.filter(data => /(\/|\\)test(\/|\\).*utf8/.test(data.path));
const tsFilter = util.filter(data => /\.ts$/.test(data.path));
const noDeclarationsFilter = util.filter(data => !(/\.d\.ts$/.test(data.path)));
@@ -48,28 +57,30 @@ function createCompile(src, build, emitError) {
.pipe(utf8Filter.restore)
.pipe(tsFilter)
.pipe(util.loadSourcemaps())
.pipe(compilation(token))
.pipe(ts(token))
.pipe(noDeclarationsFilter)
.pipe(build ? nls() : es.through())
.pipe(noDeclarationsFilter.restore)
.pipe(sourcemaps.write('.', {
addComment: false,
includeContent: !!build,
sourceRoot: overrideOptions.sourceRoot
sourceRoot: opts.sourceRoot
}))
.pipe(tsFilter.restore)
.pipe(reporter.end(!!emitError));
return es.duplex(input, output);
}
pipeline.tsProjectSrc = () => {
return compilation.src({ base: src });
};
return pipeline;
}
const typesDts = [
'node_modules/typescript/lib/*.d.ts',
'node_modules/@types/**/*.d.ts',
'!node_modules/@types/webpack/**/*',
'!node_modules/@types/uglify-js/**/*',
];
function compileTask(src, out, build) {
return function () {
const compile = createCompile(src, build, true);
const srcPipe = gulp.src(`${src}/**`, { base: `${src}` });
const srcPipe = es.merge(gulp.src(`${src}/**`, { base: `${src}` }), gulp.src(typesDts));
let generator = new MonacoGenerator(false);
if (src === 'src') {
generator.execute();
@@ -84,8 +95,8 @@ exports.compileTask = compileTask;
function watchTask(out, build) {
return function () {
const compile = createCompile('src', build);
const src = gulp.src('src/**', { base: 'src' });
const watchSrc = watch('src/**', { base: 'src', readDelay: 200 });
const src = es.merge(gulp.src('src/**', { base: 'src' }), gulp.src(typesDts));
const watchSrc = watch('src/**', { base: 'src' });
let generator = new MonacoGenerator(true);
generator.execute();
return watchSrc
@@ -101,6 +112,7 @@ class MonacoGenerator {
this._executeSoonTimer = null;
this._isWatch = isWatch;
this.stream = es.through();
this._watchers = [];
this._watchedFiles = {};
let onWillReadFile = (moduleId, filePath) => {
if (!this._isWatch) {
@@ -110,10 +122,26 @@ class MonacoGenerator {
return;
}
this._watchedFiles[filePath] = true;
fs.watchFile(filePath, () => {
const watcher = fs.watch(filePath);
watcher.addListener('change', () => {
this._declarationResolver.invalidateCache(moduleId);
this._executeSoon();
});
watcher.addListener('error', (err) => {
console.error(`Encountered error while watching ${filePath}.`);
console.log(err);
delete this._watchedFiles[filePath];
for (let i = 0; i < this._watchers.length; i++) {
if (this._watchers[i] === watcher) {
this._watchers.splice(i, 1);
break;
}
}
watcher.close();
this._declarationResolver.invalidateCache(moduleId);
this._executeSoon();
});
this._watchers.push(watcher);
};
this._fsProvider = new class extends monacodts.FSProvider {
readFileSync(moduleId, filePath) {
@@ -123,9 +151,11 @@ class MonacoGenerator {
};
this._declarationResolver = new monacodts.DeclarationResolver(this._fsProvider);
if (this._isWatch) {
fs.watchFile(monacodts.RECIPE_PATH, () => {
const recipeWatcher = fs.watch(monacodts.RECIPE_PATH);
recipeWatcher.addListener('change', () => {
this._executeSoon();
});
this._watchers.push(recipeWatcher);
}
}
_executeSoon() {
@@ -138,6 +168,9 @@ class MonacoGenerator {
this.execute();
}, 20);
}
dispose() {
this._watchers.forEach(watcher => watcher.close());
}
_run() {
let r = monacodts.run3(this._declarationResolver);
if (!r && !this._isWatch) {

View File

@@ -12,21 +12,27 @@ import * as bom from 'gulp-bom';
import * as sourcemaps from 'gulp-sourcemaps';
import * as tsb from 'gulp-tsb';
import * as path from 'path';
import * as _ from 'underscore';
import * as monacodts from '../monaco/api';
import * as nls from './nls';
import { createReporter } from './reporter';
import * as util from './util';
import * as fancyLog from 'fancy-log';
import * as ansiColors from 'ansi-colors';
import ts = require('typescript');
const watch = require('./watch');
const reporter = createReporter();
function getTypeScriptCompilerOptions(src: string): ts.CompilerOptions {
function getTypeScriptCompilerOptions(src: string) {
const rootDir = path.join(__dirname, `../../${src}`);
let options: ts.CompilerOptions = {};
const tsconfig = require(`../../${src}/tsconfig.json`);
let options: { [key: string]: any };
if (tsconfig.extends) {
options = Object.assign({}, require(path.join(rootDir, tsconfig.extends)).compilerOptions, tsconfig.compilerOptions);
} else {
options = tsconfig.compilerOptions;
}
options.verbose = false;
options.sourceMap = true;
if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry
@@ -35,17 +41,18 @@ function getTypeScriptCompilerOptions(src: string): ts.CompilerOptions {
options.rootDir = rootDir;
options.baseUrl = rootDir;
options.sourceRoot = util.toFileUri(rootDir);
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 0 : 1;
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 'CRLF' : 'LF';
return options;
}
function createCompile(src: string, build: boolean, emitError?: boolean) {
const projectPath = path.join(__dirname, '../../', src, 'tsconfig.json');
const overrideOptions = { ...getTypeScriptCompilerOptions(src), inlineSources: Boolean(build) };
function createCompile(src: string, build: boolean, emitError?: boolean): (token?: util.ICancellationToken) => NodeJS.ReadWriteStream {
const opts = _.clone(getTypeScriptCompilerOptions(src));
opts.inlineSources = !!build;
opts.noFilesystemLookup = true;
const compilation = tsb.create(projectPath, overrideOptions, false, err => reporter(err));
const ts = tsb.create(opts, true, undefined, err => reporter(err.toString()));
function pipeline(token?: util.ICancellationToken) {
return function (token?: util.ICancellationToken) {
const utf8Filter = util.filter(data => /(\/|\\)test(\/|\\).*utf8/.test(data.path));
const tsFilter = util.filter(data => /\.ts$/.test(data.path));
@@ -58,31 +65,39 @@ function createCompile(src: string, build: boolean, emitError?: boolean) {
.pipe(utf8Filter.restore)
.pipe(tsFilter)
.pipe(util.loadSourcemaps())
.pipe(compilation(token))
.pipe(ts(token))
.pipe(noDeclarationsFilter)
.pipe(build ? nls() : es.through())
.pipe(noDeclarationsFilter.restore)
.pipe(sourcemaps.write('.', {
addComment: false,
includeContent: !!build,
sourceRoot: overrideOptions.sourceRoot
sourceRoot: opts.sourceRoot
}))
.pipe(tsFilter.restore)
.pipe(reporter.end(!!emitError));
return es.duplex(input, output);
}
pipeline.tsProjectSrc = () => {
return compilation.src({ base: src });
};
return pipeline;
}
const typesDts = [
'node_modules/typescript/lib/*.d.ts',
'node_modules/@types/**/*.d.ts',
'!node_modules/@types/webpack/**/*',
'!node_modules/@types/uglify-js/**/*',
];
export function compileTask(src: string, out: string, build: boolean): () => NodeJS.ReadWriteStream {
return function () {
const compile = createCompile(src, build, true);
const srcPipe = gulp.src(`${src}/**`, { base: `${src}` });
const srcPipe = es.merge(
gulp.src(`${src}/**`, { base: `${src}` }),
gulp.src(typesDts),
);
let generator = new MonacoGenerator(false);
if (src === 'src') {
generator.execute();
@@ -100,8 +115,11 @@ export function watchTask(out: string, build: boolean): () => NodeJS.ReadWriteSt
return function () {
const compile = createCompile('src', build);
const src = gulp.src('src/**', { base: 'src' });
const watchSrc = watch('src/**', { base: 'src', readDelay: 200 });
const src = es.merge(
gulp.src('src/**', { base: 'src' }),
gulp.src(typesDts),
);
const watchSrc = watch('src/**', { base: 'src' });
let generator = new MonacoGenerator(true);
generator.execute();
@@ -119,6 +137,7 @@ class MonacoGenerator {
private readonly _isWatch: boolean;
public readonly stream: NodeJS.ReadWriteStream;
private readonly _watchers: fs.FSWatcher[];
private readonly _watchedFiles: { [filePath: string]: boolean; };
private readonly _fsProvider: monacodts.FSProvider;
private readonly _declarationResolver: monacodts.DeclarationResolver;
@@ -126,6 +145,7 @@ class MonacoGenerator {
constructor(isWatch: boolean) {
this._isWatch = isWatch;
this.stream = es.through();
this._watchers = [];
this._watchedFiles = {};
let onWillReadFile = (moduleId: string, filePath: string) => {
if (!this._isWatch) {
@@ -136,10 +156,26 @@ class MonacoGenerator {
}
this._watchedFiles[filePath] = true;
fs.watchFile(filePath, () => {
const watcher = fs.watch(filePath);
watcher.addListener('change', () => {
this._declarationResolver.invalidateCache(moduleId);
this._executeSoon();
});
watcher.addListener('error', (err) => {
console.error(`Encountered error while watching ${filePath}.`);
console.log(err);
delete this._watchedFiles[filePath];
for (let i = 0; i < this._watchers.length; i++) {
if (this._watchers[i] === watcher) {
this._watchers.splice(i, 1);
break;
}
}
watcher.close();
this._declarationResolver.invalidateCache(moduleId);
this._executeSoon();
});
this._watchers.push(watcher);
};
this._fsProvider = new class extends monacodts.FSProvider {
public readFileSync(moduleId: string, filePath: string): Buffer {
@@ -150,9 +186,11 @@ class MonacoGenerator {
this._declarationResolver = new monacodts.DeclarationResolver(this._fsProvider);
if (this._isWatch) {
fs.watchFile(monacodts.RECIPE_PATH, () => {
const recipeWatcher = fs.watch(monacodts.RECIPE_PATH);
recipeWatcher.addListener('change', () => {
this._executeSoon();
});
this._watchers.push(recipeWatcher);
}
}
@@ -168,6 +206,10 @@ class MonacoGenerator {
}, 20);
}
public dispose(): void {
this._watchers.forEach(watcher => watcher.close());
}
private _run(): monacodts.IMonacoDeclarationResult | null {
let r = monacodts.run3(this._declarationResolver);
if (!r && !this._isWatch) {

View File

@@ -2,88 +2,29 @@
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const fs = require("fs");
const path = require("path");
const vfs = require("vinyl-fs");
const filter = require("gulp-filter");
const json = require("gulp-json-editor");
const _ = require("underscore");
const util = require("./util");
const electron = require('gulp-atom-electron');
const fs = require('fs');
const path = require('path');
const root = path.dirname(path.dirname(__dirname));
const product = JSON.parse(fs.readFileSync(path.join(root, 'product.json'), 'utf8'));
const commit = util.getVersion(root);
function getElectronVersion() {
const yarnrc = fs.readFileSync(path.join(root, '.yarnrc'), 'utf8');
const target = /^target "(.*)"$/m.exec(yarnrc)[1];
return target;
}
exports.getElectronVersion = getElectronVersion;
const darwinCreditsTemplate = product.darwinCredits && _.template(fs.readFileSync(path.join(root, product.darwinCredits), 'utf8'));
function darwinBundleDocumentType(extensions, icon) {
return {
name: product.nameLong + ' document',
role: 'Editor',
ostypes: ["TEXT", "utxt", "TUTX", "****"],
extensions: extensions,
iconFile: icon
};
}
exports.config = {
version: getElectronVersion(),
productAppName: product.nameLong,
companyName: 'Microsoft Corporation',
copyright: 'Copyright (C) 2019 Microsoft. All rights reserved',
darwinIcon: 'resources/darwin/code.icns',
darwinBundleIdentifier: product.darwinBundleIdentifier,
darwinApplicationCategoryType: 'public.app-category.developer-tools',
darwinHelpBookFolder: 'VS Code HelpBook',
darwinHelpBookName: 'VS Code HelpBook',
darwinBundleDocumentTypes: [
darwinBundleDocumentType(["csv", "json", "sqlplan", "sql", "xml"], 'resources/darwin/code_file.icns'),
],
darwinBundleURLTypes: [{
role: 'Viewer',
name: product.nameLong,
urlSchemes: [product.urlProtocol]
}],
darwinForceDarkModeSupport: true,
darwinCredits: darwinCreditsTemplate ? Buffer.from(darwinCreditsTemplate({ commit: commit, date: new Date().toISOString() })) : undefined,
linuxExecutableName: product.applicationName,
winIcon: 'resources/win32/code.ico',
token: process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'] || undefined,
repo: product.electronRepository || undefined
};
function getElectron(arch) {
return () => {
const electronOpts = _.extend({}, exports.config, {
platform: process.platform,
arch,
ffmpegChromium: true,
keepDefaultApp: true
});
return vfs.src('package.json')
.pipe(json({ name: product.nameShort }))
.pipe(electron(electronOpts))
.pipe(filter(['**', '!**/app/package.json']))
.pipe(vfs.dest('.build/electron'));
};
}
async function main(arch = process.arch) {
const version = getElectronVersion();
const electronPath = path.join(root, '.build', 'electron');
const versionFile = path.join(electronPath, 'version');
const isUpToDate = fs.existsSync(versionFile) && fs.readFileSync(versionFile, 'utf8') === `${version}`;
if (!isUpToDate) {
await util.rimraf(electronPath)();
await util.streamToPromise(getElectron(arch)());
}
const yarnrc = fs.readFileSync(path.join(root, '.yarnrc'), 'utf8');
// @ts-ignore
const target = /^target "(.*)"$/m.exec(yarnrc)[1];
return target;
}
module.exports.getElectronVersion = getElectronVersion;
// returns 0 if the right version of electron is in .build/electron
// @ts-ignore
if (require.main === module) {
main(process.argv[2]).catch(err => {
console.error(err);
process.exit(1);
});
const version = getElectronVersion();
const versionFile = path.join(root, '.build', 'electron', 'version');
const isUpToDate = fs.existsSync(versionFile) && fs.readFileSync(versionFile, 'utf8') === `v${version}`;
process.exit(isUpToDate ? 0 : 1);
}

View File

@@ -1,100 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import * as fs from 'fs';
import * as path from 'path';
import * as vfs from 'vinyl-fs';
import * as filter from 'gulp-filter';
import * as json from 'gulp-json-editor';
import * as _ from 'underscore';
import * as util from './util';
const electron = require('gulp-atom-electron');
const root = path.dirname(path.dirname(__dirname));
const product = JSON.parse(fs.readFileSync(path.join(root, 'product.json'), 'utf8'));
const commit = util.getVersion(root);
export function getElectronVersion(): string {
const yarnrc = fs.readFileSync(path.join(root, '.yarnrc'), 'utf8');
const target = /^target "(.*)"$/m.exec(yarnrc)![1];
return target;
}
const darwinCreditsTemplate = product.darwinCredits && _.template(fs.readFileSync(path.join(root, product.darwinCredits), 'utf8'));
function darwinBundleDocumentType(extensions: string[], icon: string) {
return {
name: product.nameLong + ' document',
role: 'Editor',
ostypes: ["TEXT", "utxt", "TUTX", "****"],
extensions: extensions,
iconFile: icon
};
}
export const config = {
version: getElectronVersion(),
productAppName: product.nameLong,
companyName: 'Microsoft Corporation',
copyright: 'Copyright (C) 2019 Microsoft. All rights reserved',
darwinIcon: 'resources/darwin/code.icns',
darwinBundleIdentifier: product.darwinBundleIdentifier,
darwinApplicationCategoryType: 'public.app-category.developer-tools',
darwinHelpBookFolder: 'VS Code HelpBook',
darwinHelpBookName: 'VS Code HelpBook',
darwinBundleDocumentTypes: [
darwinBundleDocumentType(["csv", "json", "sqlplan", "sql", "xml"], 'resources/darwin/code_file.icns'),
],
darwinBundleURLTypes: [{
role: 'Viewer',
name: product.nameLong,
urlSchemes: [product.urlProtocol]
}],
darwinForceDarkModeSupport: true,
darwinCredits: darwinCreditsTemplate ? Buffer.from(darwinCreditsTemplate({ commit: commit, date: new Date().toISOString() })) : undefined,
linuxExecutableName: product.applicationName,
winIcon: 'resources/win32/code.ico',
token: process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'] || undefined,
repo: product.electronRepository || undefined
};
function getElectron(arch: string): () => NodeJS.ReadWriteStream {
return () => {
const electronOpts = _.extend({}, config, {
platform: process.platform,
arch,
ffmpegChromium: true,
keepDefaultApp: true
});
return vfs.src('package.json')
.pipe(json({ name: product.nameShort }))
.pipe(electron(electronOpts))
.pipe(filter(['**', '!**/app/package.json']))
.pipe(vfs.dest('.build/electron'));
};
}
async function main(arch = process.arch): Promise<void> {
const version = getElectronVersion();
const electronPath = path.join(root, '.build', 'electron');
const versionFile = path.join(electronPath, 'version');
const isUpToDate = fs.existsSync(versionFile) && fs.readFileSync(versionFile, 'utf8') === `${version}`;
if (!isUpToDate) {
await util.rimraf(electronPath)();
await util.streamToPromise(getElectron(arch)());
}
}
if (require.main === module) {
main(process.argv[2]).catch(err => {
console.error(err);
process.exit(1);
});
}

View File

@@ -13,7 +13,7 @@ const File = require("vinyl");
const vsce = require("vsce");
const stats_1 = require("./stats");
const util2 = require("./util");
const remote = require("gulp-remote-retry-src");
const remote = require("gulp-remote-src");
const vzip = require('gulp-vinyl-zip');
const filter = require("gulp-filter");
const rename = require("gulp-rename");
@@ -23,217 +23,13 @@ const buffer = require('gulp-buffer');
const json = require("gulp-json-editor");
const webpack = require('webpack');
const webpackGulp = require('webpack-stream');
const util = require('./util');
const root = path.dirname(path.dirname(__dirname));
const commit = util.getVersion(root);
const sourceMappingURLBase = `https://ticino.blob.core.windows.net/sourcemaps/${commit}`;
function fromLocal(extensionPath) {
const webpackFilename = path.join(extensionPath, 'extension.webpack.config.js');
const input = fs.existsSync(webpackFilename)
? fromLocalWebpack(extensionPath)
: fromLocalNormal(extensionPath);
const tmLanguageJsonFilter = filter('**/*.tmLanguage.json', { restore: true });
return input
.pipe(tmLanguageJsonFilter)
.pipe(buffer())
.pipe(es.mapSync((f) => {
f.contents = Buffer.from(JSON.stringify(JSON.parse(f.contents.toString('utf8'))));
return f;
}))
.pipe(tmLanguageJsonFilter.restore);
}
function fromLocalWebpack(extensionPath) {
const result = es.through();
const packagedDependencies = [];
const packageJsonConfig = require(path.join(extensionPath, 'package.json'));
if (packageJsonConfig.dependencies) {
const webpackRootConfig = require(path.join(extensionPath, 'extension.webpack.config.js'));
for (const key in webpackRootConfig.externals) {
if (key in packageJsonConfig.dependencies) {
packagedDependencies.push(key);
}
}
}
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn, packagedDependencies }).then(fileNames => {
const files = fileNames
.map(fileName => path.join(extensionPath, fileName))
.map(filePath => new File({
path: filePath,
stat: fs.statSync(filePath),
base: extensionPath,
contents: fs.createReadStream(filePath)
}));
const filesStream = es.readArray(files);
// check for webpack configuration files, then invoke webpack
// and merge its output with the files stream. also rewrite the package.json
// file to a new entry point
const webpackConfigLocations = glob.sync(path.join(extensionPath, '/**/extension.webpack.config.js'), { ignore: ['**/node_modules'] });
const packageJsonFilter = filter(f => {
if (path.basename(f.path) === 'package.json') {
// only modify package.json's next to the webpack file.
// to be safe, use existsSync instead of path comparison.
return fs.existsSync(path.join(path.dirname(f.path), 'extension.webpack.config.js'));
}
return false;
}, { restore: true });
const patchFilesStream = filesStream
.pipe(packageJsonFilter)
.pipe(buffer())
.pipe(json((data) => {
if (data.main) {
// hardcoded entry point directory!
data.main = data.main.replace('/out/', /dist/);
}
return data;
}))
.pipe(packageJsonFilter.restore);
const webpackStreams = webpackConfigLocations.map(webpackConfigPath => {
const webpackDone = (err, stats) => {
fancyLog(`Bundled extension: ${ansiColors.yellow(path.join(path.basename(extensionPath), path.relative(extensionPath, webpackConfigPath)))}...`);
if (err) {
result.emit('error', err);
}
const { compilation } = stats;
if (compilation.errors.length > 0) {
result.emit('error', compilation.errors.join('\n'));
}
if (compilation.warnings.length > 0) {
result.emit('error', compilation.warnings.join('\n'));
}
};
const webpackConfig = Object.assign(Object.assign({}, require(webpackConfigPath)), { mode: 'production' });
const relativeOutputPath = path.relative(extensionPath, webpackConfig.output.path);
return webpackGulp(webpackConfig, webpack, webpackDone)
.pipe(es.through(function (data) {
data.stat = data.stat || {};
data.base = extensionPath;
this.emit('data', data);
}))
.pipe(es.through(function (data) {
// source map handling:
// * rewrite sourceMappingURL
// * save to disk so that upload-task picks this up
const contents = data.contents.toString('utf8');
data.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, function (_m, g1) {
return `\n//# sourceMappingURL=${sourceMappingURLBase}/extensions/${path.basename(extensionPath)}/${relativeOutputPath}/${g1}`;
}), 'utf8');
this.emit('data', data);
}));
});
es.merge(...webpackStreams, patchFilesStream)
// .pipe(es.through(function (data) {
// // debug
// console.log('out', data.path, data.contents.length);
// this.emit('data', data);
// }))
.pipe(result);
}).catch(err => {
console.error(extensionPath);
console.error(packagedDependencies);
result.emit('error', err);
});
return result.pipe(stats_1.createStatsStream(path.basename(extensionPath)));
}
function fromLocalNormal(extensionPath) {
const result = es.through();
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn })
.then(fileNames => {
const files = fileNames
.map(fileName => path.join(extensionPath, fileName))
.map(filePath => new File({
path: filePath,
stat: fs.statSync(filePath),
base: extensionPath,
contents: fs.createReadStream(filePath)
}));
es.readArray(files).pipe(result);
})
.catch(err => result.emit('error', err));
return result.pipe(stats_1.createStatsStream(path.basename(extensionPath)));
}
const baseHeaders = {
'X-Market-Client-Id': 'VSCode Build',
'User-Agent': 'VSCode Build',
'X-Market-User-Id': '291C1CD0-051A-4123-9B4B-30D60EF52EE2',
};
function fromMarketplace(extensionName, version, metadata) {
// {{SQL CARBON EDIT}}
const [, name] = extensionName.split('.');
const url = `https://sqlopsextensions.blob.core.windows.net/extensions/${name}/${name}-${version}.vsix`;
fancyLog('Downloading extension:', ansiColors.yellow(`${extensionName}@${version}`), '...');
const options = {
base: url,
requestOptions: {
gzip: true,
headers: baseHeaders
}
};
const packageJsonFilter = filter('package.json', { restore: true });
return remote('', options)
.pipe(vzip.src())
.pipe(filter('extension/**'))
.pipe(rename(p => p.dirname = p.dirname.replace(/^extension\/?/, '')))
.pipe(packageJsonFilter)
.pipe(buffer())
.pipe(json({ __metadata: metadata }))
.pipe(packageJsonFilter.restore);
}
exports.fromMarketplace = fromMarketplace;
const excludedExtensions = [
'vscode-api-tests',
'vscode-colorize-tests',
'vscode-test-resolver',
'ms-vscode.node-debug',
'ms-vscode.node-debug2',
// {{SQL CARBON EDIT}}
'integration-tests'
];
const root = path.resolve(path.join(__dirname, '..', '..'));
// {{SQL CARBON EDIT}}
const sqlBuiltInExtensions = [
// Add SQL built-in extensions here.
// these extensions will be excluded from the SQLOps package and will have separate vsix packages
'admin-tool-ext-win',
'agent',
'import',
'profiler',
'admin-pack',
'dacpac',
'schema-compare',
'cms',
'query-history',
'liveshare'
];
const builtInExtensions = process.env['VSCODE_QUALITY'] === 'stable' ? require('../builtInExtensions.json') : require('../builtInExtensions-insiders.json');
// {{SQL CARBON EDIT}} - End
function packageLocalExtensionsStream() {
const localExtensionDescriptions = glob.sync('extensions/*/package.json')
.map(manifestPath => {
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
})
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) === -1); // {{SQL CARBON EDIT}} add additional filter
const nodeModules = gulp.src('extensions/node_modules/**', { base: '.' });
const localExtensions = localExtensionDescriptions.map(extension => {
return fromLocal(extension.path)
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
});
return es.merge(nodeModules, ...localExtensions)
.pipe(util2.setExecutableBit(['**/*.sh']));
}
exports.packageLocalExtensionsStream = packageLocalExtensionsStream;
function packageMarketplaceExtensionsStream() {
const extensions = builtInExtensions.map(extension => {
return fromMarketplace(extension.name, extension.version, extension.metadata)
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
});
return es.merge(extensions)
.pipe(util2.setExecutableBit(['**/*.sh']));
}
exports.packageMarketplaceExtensionsStream = packageMarketplaceExtensionsStream;
const _ = require("underscore");
const vfs = require("vinyl-fs");
const deps = require('../dependencies');
const extensionsRoot = path.join(root, 'extensions');
const extensionsProductionDependencies = deps.getProductionDependencies(extensionsRoot);
function packageBuiltInExtensions() {
const sqlBuiltInLocalExtensionDescriptions = glob.sync('extensions/*/package.json')
.map(manifestPath => {
@@ -296,3 +92,242 @@ function packageExtensionTask(extensionName, platform, arch) {
}
exports.packageExtensionTask = packageExtensionTask;
// {{SQL CARBON EDIT}} - End
function fromLocal(extensionPath, sourceMappingURLBase) {
const webpackFilename = path.join(extensionPath, 'extension.webpack.config.js');
if (fs.existsSync(webpackFilename)) {
return fromLocalWebpack(extensionPath, sourceMappingURLBase);
}
else {
return fromLocalNormal(extensionPath);
}
}
function fromLocalWebpack(extensionPath, sourceMappingURLBase) {
const result = es.through();
const packagedDependencies = [];
const packageJsonConfig = require(path.join(extensionPath, 'package.json'));
if (packageJsonConfig.dependencies) {
const webpackRootConfig = require(path.join(extensionPath, 'extension.webpack.config.js'));
for (const key in webpackRootConfig.externals) {
if (key in packageJsonConfig.dependencies) {
packagedDependencies.push(key);
}
}
}
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn, packagedDependencies }).then(fileNames => {
const files = fileNames
.map(fileName => path.join(extensionPath, fileName))
.map(filePath => new File({
path: filePath,
stat: fs.statSync(filePath),
base: extensionPath,
contents: fs.createReadStream(filePath)
}));
const filesStream = es.readArray(files);
// check for webpack configuration files, then invoke webpack
// and merge its output with the files stream. also rewrite the package.json
// file to a new entry point
const webpackConfigLocations = glob.sync(path.join(extensionPath, '/**/extension.webpack.config.js'), { ignore: ['**/node_modules'] });
const packageJsonFilter = filter(f => {
if (path.basename(f.path) === 'package.json') {
// only modify package.json's next to the webpack file.
// to be safe, use existsSync instead of path comparison.
return fs.existsSync(path.join(path.dirname(f.path), 'extension.webpack.config.js'));
}
return false;
}, { restore: true });
const patchFilesStream = filesStream
.pipe(packageJsonFilter)
.pipe(buffer())
.pipe(json((data) => {
if (data.main) {
// hardcoded entry point directory!
data.main = data.main.replace('/out/', /dist/);
}
return data;
}))
.pipe(packageJsonFilter.restore);
const webpackStreams = webpackConfigLocations.map(webpackConfigPath => () => {
const webpackDone = (err, stats) => {
fancyLog(`Bundled extension: ${ansiColors.yellow(path.join(path.basename(extensionPath), path.relative(extensionPath, webpackConfigPath)))}...`);
if (err) {
result.emit('error', err);
}
const { compilation } = stats;
if (compilation.errors.length > 0) {
result.emit('error', compilation.errors.join('\n'));
}
if (compilation.warnings.length > 0) {
result.emit('error', compilation.warnings.join('\n'));
}
};
const webpackConfig = Object.assign({}, require(webpackConfigPath), { mode: 'production' });
const relativeOutputPath = path.relative(extensionPath, webpackConfig.output.path);
return webpackGulp(webpackConfig, webpack, webpackDone)
.pipe(es.through(function (data) {
data.stat = data.stat || {};
data.base = extensionPath;
this.emit('data', data);
}))
.pipe(es.through(function (data) {
// source map handling:
// * rewrite sourceMappingURL
// * save to disk so that upload-task picks this up
if (sourceMappingURLBase) {
const contents = data.contents.toString('utf8');
data.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, function (_m, g1) {
return `\n//# sourceMappingURL=${sourceMappingURLBase}/extensions/${path.basename(extensionPath)}/${relativeOutputPath}/${g1}`;
}), 'utf8');
if (/\.js\.map$/.test(data.path)) {
if (!fs.existsSync(path.dirname(data.path))) {
fs.mkdirSync(path.dirname(data.path));
}
fs.writeFileSync(data.path, data.contents);
}
}
this.emit('data', data);
}));
});
es.merge(sequence(webpackStreams), patchFilesStream)
// .pipe(es.through(function (data) {
// // debug
// console.log('out', data.path, data.contents.length);
// this.emit('data', data);
// }))
.pipe(result);
}).catch(err => {
console.error(extensionPath);
console.error(packagedDependencies);
result.emit('error', err);
});
return result.pipe(stats_1.createStatsStream(path.basename(extensionPath)));
}
function fromLocalNormal(extensionPath) {
const result = es.through();
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn })
.then(fileNames => {
const files = fileNames
.map(fileName => path.join(extensionPath, fileName))
.map(filePath => new File({
path: filePath,
stat: fs.statSync(filePath),
base: extensionPath,
contents: fs.createReadStream(filePath)
}));
es.readArray(files).pipe(result);
})
.catch(err => result.emit('error', err));
return result.pipe(stats_1.createStatsStream(path.basename(extensionPath)));
}
const baseHeaders = {
'X-Market-Client-Id': 'VSCode Build',
'User-Agent': 'VSCode Build',
'X-Market-User-Id': '291C1CD0-051A-4123-9B4B-30D60EF52EE2',
};
function fromMarketplace(extensionName, version, metadata) {
const [publisher, name] = extensionName.split('.');
const url = `https://marketplace.visualstudio.com/_apis/public/gallery/publishers/${publisher}/vsextensions/${name}/${version}/vspackage`;
fancyLog('Downloading extension:', ansiColors.yellow(`${extensionName}@${version}`), '...');
const options = {
base: url,
requestOptions: {
gzip: true,
headers: baseHeaders
}
};
const packageJsonFilter = filter('package.json', { restore: true });
return remote('', options)
.pipe(vzip.src())
.pipe(filter('extension/**'))
.pipe(rename(p => p.dirname = p.dirname.replace(/^extension\/?/, '')))
.pipe(packageJsonFilter)
.pipe(buffer())
.pipe(json({ __metadata: metadata }))
.pipe(packageJsonFilter.restore);
}
exports.fromMarketplace = fromMarketplace;
const excludedExtensions = [
'vscode-api-tests',
'vscode-colorize-tests',
'vscode-test-resolver',
'ms-vscode.node-debug',
'ms-vscode.node-debug2',
// {{SQL CARBON EDIT}}
'integration-tests'
];
// {{SQL CARBON EDIT}}
const sqlBuiltInExtensions = [
// Add SQL built-in extensions here.
// these extensions will be excluded from the SQLOps package and will have separate vsix packages
'admin-tool-ext-win',
'agent',
'import',
'profiler',
'admin-pack',
'big-data-cluster',
'dacpac',
'schema-compare',
'resource-deployment',
'cms'
];
const builtInExtensions = require('../builtInExtensions.json');
/**
* We're doing way too much stuff at once, with webpack et al. So much stuff
* that while downloading extensions from the marketplace, node js doesn't get enough
* stack frames to complete the download in under 2 minutes, at which point the
* marketplace server cuts off the http request. So, we sequentialize the extension tasks.
*/
function sequence(streamProviders) {
const result = es.through();
function pop() {
if (streamProviders.length === 0) {
result.emit('end');
}
else {
const fn = streamProviders.shift();
fn()
.on('end', function () { setTimeout(pop, 0); })
.pipe(result, { end: false });
}
}
pop();
return result;
}
function packageExtensionsStream(optsIn) {
const opts = optsIn || {};
const localExtensionDescriptions = glob.sync('extensions/*/package.json')
.map(manifestPath => {
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
})
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
.filter(({ name }) => opts.desiredExtensions ? opts.desiredExtensions.indexOf(name) >= 0 : true)
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
// {{SQL CARBON EDIT}}
.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) === -1);
const localExtensions = () => sequence([...localExtensionDescriptions.map(extension => () => {
return fromLocal(extension.path, opts.sourceMappingURLBase)
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
})]);
// {{SQL CARBON EDIT}}
const extensionDepsSrc = [
..._.flatten(extensionsProductionDependencies.map((d) => path.relative(root, d.path)).map((d) => [`${d}/**`, `!${d}/**/{test,tests}/**`])),
];
const localExtensionDependencies = () => gulp.src(extensionDepsSrc, { base: '.', dot: true })
.pipe(filter(['**', '!**/package-lock.json']));
// Original code commented out here
// const localExtensionDependencies = () => gulp.src('extensions/node_modules/**', { base: '.' });
// const marketplaceExtensions = () => es.merge(
// ...builtInExtensions
// .filter(({ name }) => opts.desiredExtensions ? opts.desiredExtensions.indexOf(name) >= 0 : true)
// .map(extension => {
// return fromMarketplace(extension.name, extension.version, extension.metadata)
// .pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
// })
// );
return sequence([localExtensions, localExtensionDependencies,])
.pipe(util2.setExecutableBit(['**/*.sh']))
.pipe(filter(['**', '!**/*.js.map']));
// {{SQL CARBON EDIT}} - End
}
exports.packageExtensionsStream = packageExtensionsStream;

View File

@@ -13,7 +13,7 @@ import * as File from 'vinyl';
import * as vsce from 'vsce';
import { createStatsStream } from './stats';
import * as util2 from './util';
import remote = require('gulp-remote-retry-src');
import remote = require('gulp-remote-src');
const vzip = require('gulp-vinyl-zip');
import filter = require('gulp-filter');
import rename = require('gulp-rename');
@@ -23,269 +23,15 @@ const buffer = require('gulp-buffer');
import json = require('gulp-json-editor');
const webpack = require('webpack');
const webpackGulp = require('webpack-stream');
const util = require('./util');
const root = path.dirname(path.dirname(__dirname));
const commit = util.getVersion(root);
const sourceMappingURLBase = `https://ticino.blob.core.windows.net/sourcemaps/${commit}`;
function fromLocal(extensionPath: string): Stream {
const webpackFilename = path.join(extensionPath, 'extension.webpack.config.js');
const input = fs.existsSync(webpackFilename)
? fromLocalWebpack(extensionPath)
: fromLocalNormal(extensionPath);
const tmLanguageJsonFilter = filter('**/*.tmLanguage.json', { restore: true });
return input
.pipe(tmLanguageJsonFilter)
.pipe(buffer())
.pipe(es.mapSync((f: File) => {
f.contents = Buffer.from(JSON.stringify(JSON.parse(f.contents.toString('utf8'))));
return f;
}))
.pipe(tmLanguageJsonFilter.restore);
}
function fromLocalWebpack(extensionPath: string): Stream {
const result = es.through();
const packagedDependencies: string[] = [];
const packageJsonConfig = require(path.join(extensionPath, 'package.json'));
if (packageJsonConfig.dependencies) {
const webpackRootConfig = require(path.join(extensionPath, 'extension.webpack.config.js'));
for (const key in webpackRootConfig.externals) {
if (key in packageJsonConfig.dependencies) {
packagedDependencies.push(key);
}
}
}
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn, packagedDependencies }).then(fileNames => {
const files = fileNames
.map(fileName => path.join(extensionPath, fileName))
.map(filePath => new File({
path: filePath,
stat: fs.statSync(filePath),
base: extensionPath,
contents: fs.createReadStream(filePath) as any
}));
const filesStream = es.readArray(files);
// check for webpack configuration files, then invoke webpack
// and merge its output with the files stream. also rewrite the package.json
// file to a new entry point
const webpackConfigLocations = (<string[]>glob.sync(
path.join(extensionPath, '/**/extension.webpack.config.js'),
{ ignore: ['**/node_modules'] }
));
const packageJsonFilter = filter(f => {
if (path.basename(f.path) === 'package.json') {
// only modify package.json's next to the webpack file.
// to be safe, use existsSync instead of path comparison.
return fs.existsSync(path.join(path.dirname(f.path), 'extension.webpack.config.js'));
}
return false;
}, { restore: true });
const patchFilesStream = filesStream
.pipe(packageJsonFilter)
.pipe(buffer())
.pipe(json((data: any) => {
if (data.main) {
// hardcoded entry point directory!
data.main = data.main.replace('/out/', /dist/);
}
return data;
}))
.pipe(packageJsonFilter.restore);
const webpackStreams = webpackConfigLocations.map(webpackConfigPath => {
const webpackDone = (err: any, stats: any) => {
fancyLog(`Bundled extension: ${ansiColors.yellow(path.join(path.basename(extensionPath), path.relative(extensionPath, webpackConfigPath)))}...`);
if (err) {
result.emit('error', err);
}
const { compilation } = stats;
if (compilation.errors.length > 0) {
result.emit('error', compilation.errors.join('\n'));
}
if (compilation.warnings.length > 0) {
result.emit('error', compilation.warnings.join('\n'));
}
};
const webpackConfig = {
...require(webpackConfigPath),
...{ mode: 'production' }
};
const relativeOutputPath = path.relative(extensionPath, webpackConfig.output.path);
return webpackGulp(webpackConfig, webpack, webpackDone)
.pipe(es.through(function (data) {
data.stat = data.stat || {};
data.base = extensionPath;
this.emit('data', data);
}))
.pipe(es.through(function (data: File) {
// source map handling:
// * rewrite sourceMappingURL
// * save to disk so that upload-task picks this up
const contents = (<Buffer>data.contents).toString('utf8');
data.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, function (_m, g1) {
return `\n//# sourceMappingURL=${sourceMappingURLBase}/extensions/${path.basename(extensionPath)}/${relativeOutputPath}/${g1}`;
}), 'utf8');
this.emit('data', data);
}));
});
es.merge(...webpackStreams, patchFilesStream)
// .pipe(es.through(function (data) {
// // debug
// console.log('out', data.path, data.contents.length);
// this.emit('data', data);
// }))
.pipe(result);
}).catch(err => {
console.error(extensionPath);
console.error(packagedDependencies);
result.emit('error', err);
});
return result.pipe(createStatsStream(path.basename(extensionPath)));
}
function fromLocalNormal(extensionPath: string): Stream {
const result = es.through();
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn })
.then(fileNames => {
const files = fileNames
.map(fileName => path.join(extensionPath, fileName))
.map(filePath => new File({
path: filePath,
stat: fs.statSync(filePath),
base: extensionPath,
contents: fs.createReadStream(filePath) as any
}));
es.readArray(files).pipe(result);
})
.catch(err => result.emit('error', err));
return result.pipe(createStatsStream(path.basename(extensionPath)));
}
const baseHeaders = {
'X-Market-Client-Id': 'VSCode Build',
'User-Agent': 'VSCode Build',
'X-Market-User-Id': '291C1CD0-051A-4123-9B4B-30D60EF52EE2',
};
export function fromMarketplace(extensionName: string, version: string, metadata: any): Stream {
// {{SQL CARBON EDIT}}
const [, name] = extensionName.split('.');
const url = `https://sqlopsextensions.blob.core.windows.net/extensions/${name}/${name}-${version}.vsix`;
fancyLog('Downloading extension:', ansiColors.yellow(`${extensionName}@${version}`), '...');
const options = {
base: url,
requestOptions: {
gzip: true,
headers: baseHeaders
}
};
const packageJsonFilter = filter('package.json', { restore: true });
return remote('', options)
.pipe(vzip.src())
.pipe(filter('extension/**'))
.pipe(rename(p => p.dirname = p.dirname!.replace(/^extension\/?/, '')))
.pipe(packageJsonFilter)
.pipe(buffer())
.pipe(json({ __metadata: metadata }))
.pipe(packageJsonFilter.restore);
}
const excludedExtensions = [
'vscode-api-tests',
'vscode-colorize-tests',
'vscode-test-resolver',
'ms-vscode.node-debug',
'ms-vscode.node-debug2',
// {{SQL CARBON EDIT}}
'integration-tests'
];
// {{SQL CARBON EDIT}}
const sqlBuiltInExtensions = [
// Add SQL built-in extensions here.
// these extensions will be excluded from the SQLOps package and will have separate vsix packages
'admin-tool-ext-win',
'agent',
'import',
'profiler',
'admin-pack',
'dacpac',
'schema-compare',
'cms',
'query-history',
'liveshare'
];
interface IBuiltInExtension {
name: string;
version: string;
repo: string;
metadata: any;
}
const builtInExtensions: IBuiltInExtension[] = process.env['VSCODE_QUALITY'] === 'stable' ? require('../builtInExtensions.json') : require('../builtInExtensions-insiders.json');
// {{SQL CARBON EDIT}} - End
export function packageLocalExtensionsStream(): NodeJS.ReadWriteStream {
const localExtensionDescriptions = (<string[]>glob.sync('extensions/*/package.json'))
.map(manifestPath => {
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
})
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) === -1); // {{SQL CARBON EDIT}} add additional filter
const nodeModules = gulp.src('extensions/node_modules/**', { base: '.' });
const localExtensions = localExtensionDescriptions.map(extension => {
return fromLocal(extension.path)
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
});
return es.merge(nodeModules, ...localExtensions)
.pipe(util2.setExecutableBit(['**/*.sh']));
}
export function packageMarketplaceExtensionsStream(): NodeJS.ReadWriteStream {
const extensions = builtInExtensions.map(extension => {
return fromMarketplace(extension.name, extension.version, extension.metadata)
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
});
return es.merge(extensions)
.pipe(util2.setExecutableBit(['**/*.sh']));
}
const root = path.resolve(path.join(__dirname, '..', '..'));
// {{SQL CARBON EDIT}}
import * as _ from 'underscore';
import * as vfs from 'vinyl-fs';
const deps = require('../dependencies');
const extensionsRoot = path.join(root, 'extensions');
const extensionsProductionDependencies = deps.getProductionDependencies(extensionsRoot);
export function packageBuiltInExtensions() {
const sqlBuiltInLocalExtensionDescriptions = glob.sync('extensions/*/package.json')
@@ -351,3 +97,301 @@ export function packageExtensionTask(extensionName: string, platform: string, ar
};
}
// {{SQL CARBON EDIT}} - End
function fromLocal(extensionPath: string, sourceMappingURLBase?: string): Stream {
const webpackFilename = path.join(extensionPath, 'extension.webpack.config.js');
if (fs.existsSync(webpackFilename)) {
return fromLocalWebpack(extensionPath, sourceMappingURLBase);
} else {
return fromLocalNormal(extensionPath);
}
}
function fromLocalWebpack(extensionPath: string, sourceMappingURLBase: string | undefined): Stream {
const result = es.through();
const packagedDependencies: string[] = [];
const packageJsonConfig = require(path.join(extensionPath, 'package.json'));
if (packageJsonConfig.dependencies) {
const webpackRootConfig = require(path.join(extensionPath, 'extension.webpack.config.js'));
for (const key in webpackRootConfig.externals) {
if (key in packageJsonConfig.dependencies) {
packagedDependencies.push(key);
}
}
}
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn, packagedDependencies }).then(fileNames => {
const files = fileNames
.map(fileName => path.join(extensionPath, fileName))
.map(filePath => new File({
path: filePath,
stat: fs.statSync(filePath),
base: extensionPath,
contents: fs.createReadStream(filePath) as any
}));
const filesStream = es.readArray(files);
// check for webpack configuration files, then invoke webpack
// and merge its output with the files stream. also rewrite the package.json
// file to a new entry point
const webpackConfigLocations = (<string[]>glob.sync(
path.join(extensionPath, '/**/extension.webpack.config.js'),
{ ignore: ['**/node_modules'] }
));
const packageJsonFilter = filter(f => {
if (path.basename(f.path) === 'package.json') {
// only modify package.json's next to the webpack file.
// to be safe, use existsSync instead of path comparison.
return fs.existsSync(path.join(path.dirname(f.path), 'extension.webpack.config.js'));
}
return false;
}, { restore: true });
const patchFilesStream = filesStream
.pipe(packageJsonFilter)
.pipe(buffer())
.pipe(json((data: any) => {
if (data.main) {
// hardcoded entry point directory!
data.main = data.main.replace('/out/', /dist/);
}
return data;
}))
.pipe(packageJsonFilter.restore);
const webpackStreams = webpackConfigLocations.map(webpackConfigPath => () => {
const webpackDone = (err: any, stats: any) => {
fancyLog(`Bundled extension: ${ansiColors.yellow(path.join(path.basename(extensionPath), path.relative(extensionPath, webpackConfigPath)))}...`);
if (err) {
result.emit('error', err);
}
const { compilation } = stats;
if (compilation.errors.length > 0) {
result.emit('error', compilation.errors.join('\n'));
}
if (compilation.warnings.length > 0) {
result.emit('error', compilation.warnings.join('\n'));
}
};
const webpackConfig = {
...require(webpackConfigPath),
...{ mode: 'production' }
};
const relativeOutputPath = path.relative(extensionPath, webpackConfig.output.path);
return webpackGulp(webpackConfig, webpack, webpackDone)
.pipe(es.through(function (data) {
data.stat = data.stat || {};
data.base = extensionPath;
this.emit('data', data);
}))
.pipe(es.through(function (data: File) {
// source map handling:
// * rewrite sourceMappingURL
// * save to disk so that upload-task picks this up
if (sourceMappingURLBase) {
const contents = (<Buffer>data.contents).toString('utf8');
data.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, function (_m, g1) {
return `\n//# sourceMappingURL=${sourceMappingURLBase}/extensions/${path.basename(extensionPath)}/${relativeOutputPath}/${g1}`;
}), 'utf8');
if (/\.js\.map$/.test(data.path)) {
if (!fs.existsSync(path.dirname(data.path))) {
fs.mkdirSync(path.dirname(data.path));
}
fs.writeFileSync(data.path, data.contents);
}
}
this.emit('data', data);
}));
});
es.merge(sequence(webpackStreams), patchFilesStream)
// .pipe(es.through(function (data) {
// // debug
// console.log('out', data.path, data.contents.length);
// this.emit('data', data);
// }))
.pipe(result);
}).catch(err => {
console.error(extensionPath);
console.error(packagedDependencies);
result.emit('error', err);
});
return result.pipe(createStatsStream(path.basename(extensionPath)));
}
function fromLocalNormal(extensionPath: string): Stream {
const result = es.through();
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn })
.then(fileNames => {
const files = fileNames
.map(fileName => path.join(extensionPath, fileName))
.map(filePath => new File({
path: filePath,
stat: fs.statSync(filePath),
base: extensionPath,
contents: fs.createReadStream(filePath) as any
}));
es.readArray(files).pipe(result);
})
.catch(err => result.emit('error', err));
return result.pipe(createStatsStream(path.basename(extensionPath)));
}
const baseHeaders = {
'X-Market-Client-Id': 'VSCode Build',
'User-Agent': 'VSCode Build',
'X-Market-User-Id': '291C1CD0-051A-4123-9B4B-30D60EF52EE2',
};
export function fromMarketplace(extensionName: string, version: string, metadata: any): Stream {
const [publisher, name] = extensionName.split('.');
const url = `https://marketplace.visualstudio.com/_apis/public/gallery/publishers/${publisher}/vsextensions/${name}/${version}/vspackage`;
fancyLog('Downloading extension:', ansiColors.yellow(`${extensionName}@${version}`), '...');
const options = {
base: url,
requestOptions: {
gzip: true,
headers: baseHeaders
}
};
const packageJsonFilter = filter('package.json', { restore: true });
return remote('', options)
.pipe(vzip.src())
.pipe(filter('extension/**'))
.pipe(rename(p => p.dirname = p.dirname!.replace(/^extension\/?/, '')))
.pipe(packageJsonFilter)
.pipe(buffer())
.pipe(json({ __metadata: metadata }))
.pipe(packageJsonFilter.restore);
}
interface IPackageExtensionsOptions {
/**
* Set to undefined to package all of them.
*/
desiredExtensions?: string[];
sourceMappingURLBase?: string;
}
const excludedExtensions = [
'vscode-api-tests',
'vscode-colorize-tests',
'vscode-test-resolver',
'ms-vscode.node-debug',
'ms-vscode.node-debug2',
// {{SQL CARBON EDIT}}
'integration-tests'
];
// {{SQL CARBON EDIT}}
const sqlBuiltInExtensions = [
// Add SQL built-in extensions here.
// these extensions will be excluded from the SQLOps package and will have separate vsix packages
'admin-tool-ext-win',
'agent',
'import',
'profiler',
'admin-pack',
'big-data-cluster',
'dacpac',
'schema-compare',
'resource-deployment',
'cms'
];
// {{SQL CARBON EDIT}} - End
interface IBuiltInExtension {
name: string;
version: string;
repo: string;
metadata: any;
}
const builtInExtensions: IBuiltInExtension[] = require('../builtInExtensions.json');
/**
* We're doing way too much stuff at once, with webpack et al. So much stuff
* that while downloading extensions from the marketplace, node js doesn't get enough
* stack frames to complete the download in under 2 minutes, at which point the
* marketplace server cuts off the http request. So, we sequentialize the extension tasks.
*/
function sequence(streamProviders: { (): Stream }[]): Stream {
const result = es.through();
function pop() {
if (streamProviders.length === 0) {
result.emit('end');
} else {
const fn = streamProviders.shift()!;
fn()
.on('end', function () { setTimeout(pop, 0); })
.pipe(result, { end: false });
}
}
pop();
return result;
}
export function packageExtensionsStream(optsIn?: IPackageExtensionsOptions): NodeJS.ReadWriteStream {
const opts = optsIn || {};
const localExtensionDescriptions = (<string[]>glob.sync('extensions/*/package.json'))
.map(manifestPath => {
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
})
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
.filter(({ name }) => opts.desiredExtensions ? opts.desiredExtensions.indexOf(name) >= 0 : true)
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
// {{SQL CARBON EDIT}}
.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) === -1);
const localExtensions = () => sequence([...localExtensionDescriptions.map(extension => () => {
return fromLocal(extension.path, opts.sourceMappingURLBase)
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
})]);
// {{SQL CARBON EDIT}}
const extensionDepsSrc = [
..._.flatten(extensionsProductionDependencies.map((d: any) => path.relative(root, d.path)).map((d: any) => [`${d}/**`, `!${d}/**/{test,tests}/**`])),
];
const localExtensionDependencies = () => gulp.src(extensionDepsSrc, { base: '.', dot: true })
.pipe(filter(['**', '!**/package-lock.json']));
// Original code commented out here
// const localExtensionDependencies = () => gulp.src('extensions/node_modules/**', { base: '.' });
// const marketplaceExtensions = () => es.merge(
// ...builtInExtensions
// .filter(({ name }) => opts.desiredExtensions ? opts.desiredExtensions.indexOf(name) >= 0 : true)
// .map(extension => {
// return fromMarketplace(extension.name, extension.version, extension.metadata)
// .pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
// })
// );
return sequence([localExtensions, localExtensionDependencies, /*marketplaceExtensions*/])
.pipe(util2.setExecutableBit(['**/*.sh']))
.pipe(filter(['**', '!**/*.js.map']));
// {{SQL CARBON EDIT}} - End
}

View File

@@ -176,7 +176,6 @@ class XLF {
this.buffer.push(line.toString());
}
}
exports.XLF = XLF;
XLF.parsePseudo = function (xlfString) {
return new Promise((resolve) => {
let parser = new xml2js.Parser();
@@ -249,6 +248,7 @@ XLF.parse = function (xlfString) {
});
});
};
exports.XLF = XLF;
class Limiter {
constructor(maxDegreeOfParalellism) {
this.maxDegreeOfParalellism = maxDegreeOfParalellism;
@@ -586,7 +586,7 @@ function createXlfFilesForExtensions() {
}
return _xlf;
}
gulp.src([`.build/extensions/${extensionName}/package.nls.json`, `.build/extensions/${extensionName}/**/nls.metadata.json`], { allowEmpty: true }).pipe(event_stream_1.through(function (file) {
gulp.src([`./extensions/${extensionName}/package.nls.json`, `./extensions/${extensionName}/**/nls.metadata.json`], { allowEmpty: true }).pipe(event_stream_1.through(function (file) {
if (file.isBuffer()) {
const buffer = file.contents;
const basename = path.basename(file.path);
@@ -609,7 +609,7 @@ function createXlfFilesForExtensions() {
}
else if (basename === 'nls.metadata.json') {
const json = JSON.parse(buffer.toString('utf8'));
const relPath = path.relative(`.build/extensions/${extensionName}`, path.dirname(file.path));
const relPath = path.relative(`./extensions/${extensionName}`, path.dirname(file.path));
for (let file in json) {
const fileContent = json[file];
getXlf().addFile(`extensions/${extensionName}/${relPath}/${file}`, fileContent.keys, fileContent.messages);
@@ -912,8 +912,8 @@ function pullCoreAndExtensionsXlfFiles(apiHostname, username, password, language
_coreAndExtensionResources.push(...json.workbench);
// extensions
let extensionsToLocalize = Object.create(null);
glob.sync('.build/extensions/**/*.nls.json').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
glob.sync('.build/extensions/*/node_modules/vscode-nls').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
glob.sync('./extensions/**/*.nls.json').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
glob.sync('./extensions/*/node_modules/vscode-nls').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
Object.keys(extensionsToLocalize).forEach(extension => {
_coreAndExtensionResources.push({ name: extension, project: extensionsProject });
});
@@ -1086,7 +1086,7 @@ function prepareI18nPackFiles(externalExtensions, resultingTranslationPaths, pse
resultingTranslationPaths.push({ id: 'vscode', resourceName: 'main.i18n.json' });
this.queue(translatedMainFile);
for (let extension in extensionsPacks) {
const translatedExtFile = createI18nFile(`extensions/${extension}`, extensionsPacks[extension]);
const translatedExtFile = createI18nFile(`./extensions/${extension}`, extensionsPacks[extension]);
this.queue(translatedExtFile);
const externalExtensionId = externalExtensions[extension];
if (externalExtensionId) {

View File

@@ -38,6 +38,10 @@
"name": "vs/workbench/contrib/codeEditor",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/codeinset",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/callHierarchy",
"project": "vscode-workbench"
@@ -106,10 +110,6 @@
"name": "vs/workbench/contrib/quickopen",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/userData",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/remote",
"project": "vscode-workbench"
@@ -174,10 +174,6 @@
"name": "vs/workbench/contrib/webview",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/customEditor",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/welcome",
"project": "vscode-workbench"
@@ -186,10 +182,6 @@
"name": "vs/workbench/contrib/outline",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/userDataSync",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/actions",
"project": "vscode-workbench"
@@ -242,10 +234,6 @@
"name": "vs/workbench/services/keybinding",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/lifecycle",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/mode",
"project": "vscode-workbench"
@@ -271,7 +259,7 @@
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/workspaces",
"name": "vs/workbench/services/workspace",
"project": "vscode-workbench"
},
{
@@ -285,14 +273,6 @@
{
"name": "vs/workbench/services/preferences",
"project": "vscode-preferences"
},
{
"name": "vs/workbench/services/notification",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/userData",
"project": "vscode-workbench"
}
]
}


@@ -25,7 +25,7 @@ function log(message: any, ...rest: any[]): void {
export interface Language {
id: string; // language id, e.g. zh-tw, de
translationId?: string; // language id used in translation tools, e.g. zh-hant, de (optional, if not set, the id is used)
translationId?: string; // language id used in translation tools, e.g zh-hant, de (optional, if not set, the id is used)
folderName?: string; // language specific folder name, e.g. cht, deu (optional, if not set, the id is used)
}
@@ -709,7 +709,7 @@ export function createXlfFilesForExtensions(): ThroughStream {
}
return _xlf;
}
gulp.src([`.build/extensions/${extensionName}/package.nls.json`, `.build/extensions/${extensionName}/**/nls.metadata.json`], { allowEmpty: true }).pipe(through(function (file: File) {
gulp.src([`./extensions/${extensionName}/package.nls.json`, `./extensions/${extensionName}/**/nls.metadata.json`], { allowEmpty: true }).pipe(through(function (file: File) {
if (file.isBuffer()) {
const buffer: Buffer = file.contents as Buffer;
const basename = path.basename(file.path);
@@ -729,7 +729,7 @@ export function createXlfFilesForExtensions(): ThroughStream {
getXlf().addFile(`extensions/${extensionName}/package`, keys, messages);
} else if (basename === 'nls.metadata.json') {
const json: BundledExtensionFormat = JSON.parse(buffer.toString('utf8'));
const relPath = path.relative(`.build/extensions/${extensionName}`, path.dirname(file.path));
const relPath = path.relative(`./extensions/${extensionName}`, path.dirname(file.path));
for (let file in json) {
const fileContent = json[file];
getXlf().addFile(`extensions/${extensionName}/${relPath}/${file}`, fileContent.keys, fileContent.messages);
@@ -1053,8 +1053,8 @@ export function pullCoreAndExtensionsXlfFiles(apiHostname: string, username: str
// extensions
let extensionsToLocalize = Object.create(null);
glob.sync('.build/extensions/**/*.nls.json').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
glob.sync('.build/extensions/*/node_modules/vscode-nls').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
glob.sync('./extensions/**/*.nls.json').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
glob.sync('./extensions/*/node_modules/vscode-nls').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
Object.keys(extensionsToLocalize).forEach(extension => {
_coreAndExtensionResources.push({ name: extension, project: extensionsProject });
@@ -1253,7 +1253,7 @@ export function prepareI18nPackFiles(externalExtensions: Map<string>, resultingT
this.queue(translatedMainFile);
for (let extension in extensionsPacks) {
const translatedExtFile = createI18nFile(`extensions/${extension}`, extensionsPacks[extension]);
const translatedExtFile = createI18nFile(`./extensions/${extension}`, extensionsPacks[extension]);
this.queue(translatedExtFile);
const externalExtensionId = externalExtensions[extension];


@@ -1,15 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const path = require("path");
const fs = require("fs");
const root = path.dirname(path.dirname(__dirname));
const yarnrcPath = path.join(root, 'remote', '.yarnrc');
const yarnrc = fs.readFileSync(yarnrcPath, 'utf8');
const version = /^target\s+"([^"]+)"$/m.exec(yarnrc)[1];
const node = process.platform === 'win32' ? 'node.exe' : 'node';
const nodePath = path.join(root, '.build', 'node', `v${version}`, `${process.platform}-${process.arch}`, node);
console.log(nodePath);


@@ -1,16 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as path from 'path';
import * as fs from 'fs';
const root = path.dirname(path.dirname(__dirname));
const yarnrcPath = path.join(root, 'remote', '.yarnrc');
const yarnrc = fs.readFileSync(yarnrcPath, 'utf8');
const version = /^target\s+"([^"]+)"$/m.exec(yarnrc)![1];
const node = process.platform === 'win32' ? 'node.exe' : 'node';
const nodePath = path.join(root, '.build', 'node', `v${version}`, `${process.platform}-${process.arch}`, node);
console.log(nodePath);


@@ -5,7 +5,6 @@
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const es = require("event-stream");
const fs = require("fs");
const gulp = require("gulp");
const concat = require("gulp-concat");
const minifyCSS = require("gulp-cssnano");
@@ -18,7 +17,7 @@ const fancyLog = require("fancy-log");
const ansiColors = require("ansi-colors");
const path = require("path");
const pump = require("pump");
const terser = require("terser");
const uglifyes = require("uglify-es");
const VinylFile = require("vinyl");
const bundle = require("./bundle");
const i18n_1 = require("./i18n");
@@ -61,7 +60,7 @@ function loader(src, bundledFileHeader, bundleLoader) {
isFirst = false;
this.emit('data', new VinylFile({
path: 'fake',
base: '',
base: undefined,
contents: Buffer.from(bundledFileHeader)
}));
this.emit('data', data);
@@ -97,7 +96,7 @@ function toConcatStream(src, bundledFileHeader, sources, dest) {
}
const treatedSources = sources.map(function (source) {
const root = source.path ? REPO_ROOT_PATH.replace(/\\/g, '/') : '';
const base = source.path ? root + `/${src}` : '';
const base = source.path ? root + `/${src}` : undefined;
return new VinylFile({
path: source.path ? root + '/' + source.path.replace(/\\/g, '/') : 'fake',
base: base,
@@ -114,17 +113,12 @@ function toBundleStream(src, bundledFileHeader, bundles) {
return toConcatStream(src, bundledFileHeader, bundle.sources, bundle.dest);
}));
}
const DEFAULT_FILE_HEADER = [
'/*!--------------------------------------------------------',
' * Copyright (C) Microsoft Corporation. All rights reserved.',
' *--------------------------------------------------------*/'
].join('\n');
function optimizeTask(opts) {
const src = opts.src;
const entryPoints = opts.entryPoints;
const resources = opts.resources;
const loaderConfig = opts.loaderConfig;
const bundledFileHeader = opts.header || DEFAULT_FILE_HEADER;
const bundledFileHeader = opts.header;
const bundleLoader = (typeof opts.bundleLoader === 'undefined' ? true : opts.bundleLoader);
const out = opts.out;
return function () {
@@ -135,14 +129,6 @@ function optimizeTask(opts) {
if (err || !result) {
return bundlesStream.emit('error', JSON.stringify(err));
}
if (opts.inlineAmdImages) {
try {
result = inlineAmdImages(src, result);
}
catch (err) {
return bundlesStream.emit('error', JSON.stringify(err));
}
}
toBundleStream(src, bundledFileHeader, result.files).pipe(bundlesStream);
// Remove css inlined resources
const filteredResources = resources.slice();
@@ -178,39 +164,6 @@ function optimizeTask(opts) {
};
}
exports.optimizeTask = optimizeTask;
function inlineAmdImages(src, result) {
for (const outputFile of result.files) {
for (const sourceFile of outputFile.sources) {
if (sourceFile.path && /\.js$/.test(sourceFile.path)) {
sourceFile.contents = sourceFile.contents.replace(/\([^.]+\.registerAndGetAmdImageURL\(([^)]+)\)\)/g, (_, m0) => {
let imagePath = m0;
// remove `` or ''
if ((imagePath.charAt(0) === '`' && imagePath.charAt(imagePath.length - 1) === '`')
|| (imagePath.charAt(0) === '\'' && imagePath.charAt(imagePath.length - 1) === '\'')) {
imagePath = imagePath.substr(1, imagePath.length - 2);
}
if (!/\.(png|svg)$/.test(imagePath)) {
console.log(`original: ${_}`);
return _;
}
const repoLocation = path.join(src, imagePath);
const absoluteLocation = path.join(REPO_ROOT_PATH, repoLocation);
if (!fs.existsSync(absoluteLocation)) {
const message = `Invalid amd image url in file ${sourceFile.path}: ${imagePath}`;
console.log(message);
throw new Error(message);
}
const fileContents = fs.readFileSync(absoluteLocation);
const mime = /\.svg$/.test(imagePath) ? 'image/svg+xml' : 'image/png';
// Mark the file as inlined so we don't ship it by itself
result.cssInlinedResources.push(repoLocation);
return `("data:${mime};base64,${fileContents.toString('base64')}")`;
});
}
}
}
return result;
}
/**
* Wrap around uglify and allow the preserveComments function
* to have a file "context" to include our copyright only once per file.
@@ -241,7 +194,7 @@ function uglifyWithCopyrights() {
return false;
};
};
const minify = composer(terser);
const minify = composer(uglifyes);
const input = es.through();
const output = input
.pipe(flatmap((stream, f) => {


@@ -6,7 +6,6 @@
'use strict';
import * as es from 'event-stream';
import * as fs from 'fs';
import * as gulp from 'gulp';
import * as concat from 'gulp-concat';
import * as minifyCSS from 'gulp-cssnano';
@@ -20,7 +19,7 @@ import * as ansiColors from 'ansi-colors';
import * as path from 'path';
import * as pump from 'pump';
import * as sm from 'source-map';
import * as terser from 'terser';
import * as uglifyes from 'uglify-es';
import * as VinylFile from 'vinyl';
import * as bundle from './bundle';
import { Language, processNlsFiles } from './i18n';
@@ -75,7 +74,7 @@ function loader(src: string, bundledFileHeader: string, bundleLoader: boolean):
isFirst = false;
this.emit('data', new VinylFile({
path: 'fake',
base: '',
base: undefined,
contents: Buffer.from(bundledFileHeader)
}));
this.emit('data', data);
@@ -115,7 +114,7 @@ function toConcatStream(src: string, bundledFileHeader: string, sources: bundle.
const treatedSources = sources.map(function (source) {
const root = source.path ? REPO_ROOT_PATH.replace(/\\/g, '/') : '';
const base = source.path ? root + `/${src}` : '';
const base = source.path ? root + `/${src}` : undefined;
return new VinylFile({
path: source.path ? root + '/' + source.path.replace(/\\/g, '/') : 'fake',
@@ -157,15 +156,11 @@ export interface IOptimizeTaskOpts {
/**
* (basically the Copyright treatment)
*/
header?: string;
header: string;
/**
* (emit bundleInfo.json file)
*/
bundleInfo: boolean;
/**
* replace calls to `registerAndGetAmdImageURL` with data uris
*/
inlineAmdImages: boolean;
/**
* (out folder name)
*/
@@ -176,18 +171,12 @@ export interface IOptimizeTaskOpts {
languages?: Language[];
}
const DEFAULT_FILE_HEADER = [
'/*!--------------------------------------------------------',
' * Copyright (C) Microsoft Corporation. All rights reserved.',
' *--------------------------------------------------------*/'
].join('\n');
export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStream {
const src = opts.src;
const entryPoints = opts.entryPoints;
const resources = opts.resources;
const loaderConfig = opts.loaderConfig;
const bundledFileHeader = opts.header || DEFAULT_FILE_HEADER;
const bundledFileHeader = opts.header;
const bundleLoader = (typeof opts.bundleLoader === 'undefined' ? true : opts.bundleLoader);
const out = opts.out;
@@ -199,14 +188,6 @@ export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStr
bundle.bundle(entryPoints, loaderConfig, function (err, result) {
if (err || !result) { return bundlesStream.emit('error', JSON.stringify(err)); }
if (opts.inlineAmdImages) {
try {
result = inlineAmdImages(src, result);
} catch (err) {
return bundlesStream.emit('error', JSON.stringify(err));
}
}
toBundleStream(src, bundledFileHeader, result.files).pipe(bundlesStream);
// Remove css inlined resources
@@ -251,42 +232,6 @@ export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStr
};
}
function inlineAmdImages(src: string, result: bundle.IBundleResult): bundle.IBundleResult {
for (const outputFile of result.files) {
for (const sourceFile of outputFile.sources) {
if (sourceFile.path && /\.js$/.test(sourceFile.path)) {
sourceFile.contents = sourceFile.contents.replace(/\([^.]+\.registerAndGetAmdImageURL\(([^)]+)\)\)/g, (_, m0) => {
let imagePath = m0;
// remove `` or ''
if ((imagePath.charAt(0) === '`' && imagePath.charAt(imagePath.length - 1) === '`')
|| (imagePath.charAt(0) === '\'' && imagePath.charAt(imagePath.length - 1) === '\'')) {
imagePath = imagePath.substr(1, imagePath.length - 2);
}
if (!/\.(png|svg)$/.test(imagePath)) {
console.log(`original: ${_}`);
return _;
}
const repoLocation = path.join(src, imagePath);
const absoluteLocation = path.join(REPO_ROOT_PATH, repoLocation);
if (!fs.existsSync(absoluteLocation)) {
const message = `Invalid amd image url in file ${sourceFile.path}: ${imagePath}`;
console.log(message);
throw new Error(message);
}
const fileContents = fs.readFileSync(absoluteLocation);
const mime = /\.svg$/.test(imagePath) ? 'image/svg+xml' : 'image/png';
// Mark the file as inlined so we don't ship it by itself
result.cssInlinedResources.push(repoLocation);
return `("data:${mime};base64,${fileContents.toString('base64')}")`;
});
}
}
}
return result;
}
declare class FileWithCopyright extends VinylFile {
public __hasOurCopyright: boolean;
}
@@ -324,7 +269,7 @@ function uglifyWithCopyrights(): NodeJS.ReadWriteStream {
};
};
const minify = (composer as any)(terser);
const minify = (composer as any)(uglifyes);
const input = es.through();
const output = input
.pipe(flatmap((stream, f) => {


@@ -1,103 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const fs = require("fs");
const rollup = require("rollup");
const path = require("path");
// getting around stupid import rules
const nodeResolve = require('rollup-plugin-node-resolve');
const commonjs = require('rollup-plugin-commonjs');
async function rollupModule(options) {
const moduleName = options.moduleName;
try {
const inputFile = options.inputFile;
const outputDirectory = options.outputDirectory;
await fs.promises.mkdir(outputDirectory, {
recursive: true
});
const outputFileName = options.outputFileName;
const outputMapName = `${outputFileName}.map`;
const external = options.external || [];
const outputFilePath = path.resolve(outputDirectory, outputFileName);
const outputMapPath = path.resolve(outputDirectory, outputMapName);
const bundle = await rollup.rollup({
input: inputFile,
plugins: [
nodeResolve(),
commonjs(),
],
external,
});
const generatedBundle = await bundle.generate({
name: moduleName,
format: 'umd',
sourcemap: true
});
const result = generatedBundle.output[0];
result.code = result.code + '\n//# sourceMappingURL=' + path.basename(outputMapName);
await fs.promises.writeFile(outputFilePath, result.code);
await fs.promises.writeFile(outputMapPath, result.map);
return {
name: moduleName,
result: true
};
}
catch (ex) {
return {
name: moduleName,
result: false,
exception: ex
};
}
}
function rollupAngularSlickgrid(root) {
return new Promise(async (resolve, reject) => {
const result = await rollupModule({
moduleName: 'angular2-slickgrid',
inputFile: path.resolve(root, 'node_modules', 'angular2-slickgrid', 'out', 'index.js'),
outputDirectory: path.resolve(root, 'node_modules', 'angular2-slickgrid', 'out', 'bundles'),
outputFileName: 'angular2-slickgrid.umd.js'
});
if (!result.result) {
return reject(`angular2-slickgrid failed to bundle - ${result.exception}`);
}
resolve();
});
}
exports.rollupAngularSlickgrid = rollupAngularSlickgrid;
function rollupAngular(root) {
return new Promise(async (resolve, reject) => {
const modules = ['core', 'animations', 'common', 'compiler', 'forms', 'platform-browser', 'platform-browser-dynamic', 'router'];
const tasks = modules.map((module) => {
return rollupModule({
moduleName: `ng.${module}`,
inputFile: path.resolve(root, 'node_modules', '@angular', module, '@angular', `${module}.es5.js`),
outputDirectory: path.resolve(root, 'node_modules', '@angular', module, 'bundles'),
outputFileName: `${module}.umd.js`,
external: modules.map(mn => `@angular/${mn}`)
});
});
// array of booleans
const x = await Promise.all(tasks);
const result = x.reduce((prev, current) => {
if (!current.result) {
prev.fails.push(current.name);
prev.exceptions.push(current.exception);
prev.result = false;
}
return prev;
}, {
fails: [],
exceptions: [],
result: true,
});
if (!result.result) {
return reject(`failures: ${result.fails} - exceptions: ${JSON.stringify(result.exceptions)}`);
}
resolve();
});
}
exports.rollupAngular = rollupAngular;


@@ -1,125 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as fs from 'fs';
import * as rollup from 'rollup';
import * as path from 'path';
// getting around stupid import rules
const nodeResolve = require('rollup-plugin-node-resolve');
const commonjs = require('rollup-plugin-commonjs');
export interface IRollupOptions {
moduleName: string;
inputFile: string;
outputDirectory: string;
outputFileName: string;
external?: string[];
}
async function rollupModule(options: IRollupOptions) {
const moduleName = options.moduleName;
try {
const inputFile = options.inputFile;
const outputDirectory = options.outputDirectory;
await fs.promises.mkdir(outputDirectory, {
recursive: true
});
const outputFileName = options.outputFileName;
const outputMapName = `${outputFileName}.map`;
const external = options.external || [];
const outputFilePath = path.resolve(outputDirectory, outputFileName);
const outputMapPath = path.resolve(outputDirectory, outputMapName);
const bundle = await rollup.rollup({
input: inputFile,
plugins: [
nodeResolve(),
commonjs(),
],
external,
});
const generatedBundle = await bundle.generate({
name: moduleName,
format: 'umd',
sourcemap: true
});
const result = generatedBundle.output[0];
result.code = result.code + '\n//# sourceMappingURL=' + path.basename(outputMapName);
await fs.promises.writeFile(outputFilePath, result.code);
await fs.promises.writeFile(outputMapPath, result.map);
return {
name: moduleName,
result: true
};
} catch (ex) {
return {
name: moduleName,
result: false,
exception: ex
};
}
}
export function rollupAngularSlickgrid(root: string): Promise<void> {
return new Promise(async (resolve, reject) => {
const result = await rollupModule({
moduleName: 'angular2-slickgrid',
inputFile: path.resolve(root, 'node_modules', 'angular2-slickgrid', 'out', 'index.js'),
outputDirectory: path.resolve(root, 'node_modules', 'angular2-slickgrid', 'out', 'bundles'),
outputFileName: 'angular2-slickgrid.umd.js'
});
if (!result.result) {
return reject(`angular2-slickgrid failed to bundle - ${result.exception}`);
}
resolve();
});
}
export function rollupAngular(root: string): Promise<void> {
return new Promise(async (resolve, reject) => {
const modules = ['core', 'animations', 'common', 'compiler', 'forms', 'platform-browser', 'platform-browser-dynamic', 'router'];
const tasks = modules.map((module) => {
return rollupModule({
moduleName: `ng.${module}`,
inputFile: path.resolve(root, 'node_modules', '@angular', module, '@angular', `${module}.es5.js`),
outputDirectory: path.resolve(root, 'node_modules', '@angular', module, 'bundles'),
outputFileName: `${module}.umd.js`,
external: modules.map(mn => `@angular/${mn}`)
});
});
// array of booleans
const x = await Promise.all(tasks);
const result = x.reduce<{ fails: string[]; exceptions: string[]; result: boolean }>((prev, current) => {
if (!current.result) {
prev.fails.push(current.name);
prev.exceptions.push(current.exception);
prev.result = false;
}
return prev;
}, {
fails: [],
exceptions: [],
result: true,
});
if (!result.result) {
return reject(`failures: ${result.fails} - exceptions: ${JSON.stringify(result.exceptions)}`);
}
resolve();
});
}


@@ -31,7 +31,6 @@ function extractEditor(options) {
let compilerOptions;
if (tsConfig.extends) {
compilerOptions = Object.assign({}, require(path.join(options.sourcesRoot, tsConfig.extends)).compilerOptions, tsConfig.compilerOptions);
delete tsConfig.extends;
}
else {
compilerOptions = tsConfig.compilerOptions;
@@ -41,9 +40,9 @@ function extractEditor(options) {
compilerOptions.noUnusedLocals = false;
compilerOptions.preserveConstEnums = false;
compilerOptions.declaration = false;
compilerOptions.noImplicitAny = false;
compilerOptions.moduleResolution = ts.ModuleResolutionKind.Classic;
options.compilerOptions = compilerOptions;
console.log(`Running with shakeLevel ${tss.toStringShakeLevel(options.shakeLevel)}`);
let result = tss.shake(options);
for (let fileName in result) {
if (result.hasOwnProperty(fileName)) {
@@ -92,6 +91,8 @@ function extractEditor(options) {
}
delete tsConfig.compilerOptions.moduleResolution;
writeOutputFile('tsconfig.json', JSON.stringify(tsConfig, null, '\t'));
const tsConfigBase = JSON.parse(fs.readFileSync(path.join(options.sourcesRoot, 'tsconfig.base.json')).toString());
writeOutputFile('tsconfig.base.json', JSON.stringify(tsConfigBase, null, '\t'));
[
'vs/css.build.js',
'vs/css.d.ts',
@@ -130,7 +131,7 @@ function createESMSourcesAndResources2(options) {
write(getDestAbsoluteFilePath(file), JSON.stringify(tsConfig, null, '\t'));
continue;
}
if (/\.d\.ts$/.test(file) || /\.css$/.test(file) || /\.js$/.test(file) || /\.ttf$/.test(file)) {
if (/\.d\.ts$/.test(file) || /\.css$/.test(file) || /\.js$/.test(file)) {
// Transport the files directly
write(getDestAbsoluteFilePath(file), fs.readFileSync(path.join(SRC_FOLDER, file)));
continue;
@@ -250,37 +251,35 @@ function transportCSS(module, enqueue, write) {
const filename = path.join(SRC_DIR, module);
const fileContents = fs.readFileSync(filename).toString();
const inlineResources = 'base64'; // see https://github.com/Microsoft/monaco-editor/issues/148
const newContents = _rewriteOrInlineUrls(fileContents, inlineResources === 'base64');
const inlineResourcesLimit = 300000; //3000; // see https://github.com/Microsoft/monaco-editor/issues/336
const newContents = _rewriteOrInlineUrls(fileContents, inlineResources === 'base64', inlineResourcesLimit);
write(module, newContents);
return true;
function _rewriteOrInlineUrls(contents, forceBase64) {
function _rewriteOrInlineUrls(contents, forceBase64, inlineByteLimit) {
return _replaceURL(contents, (url) => {
const fontMatch = url.match(/^(.*).ttf\?(.*)$/);
if (fontMatch) {
const relativeFontPath = `${fontMatch[1]}.ttf`; // trim the query parameter
const fontPath = path.join(path.dirname(module), relativeFontPath);
enqueue(fontPath);
return relativeFontPath;
}
const imagePath = path.join(path.dirname(module), url);
const fileContents = fs.readFileSync(path.join(SRC_DIR, imagePath));
const MIME = /\.svg$/.test(url) ? 'image/svg+xml' : 'image/png';
let DATA = ';base64,' + fileContents.toString('base64');
if (!forceBase64 && /\.svg$/.test(url)) {
// .svg => url encode as explained at https://codepen.io/tigt/post/optimizing-svgs-in-data-uris
let newText = fileContents.toString()
.replace(/"/g, '\'')
.replace(/</g, '%3C')
.replace(/>/g, '%3E')
.replace(/&/g, '%26')
.replace(/#/g, '%23')
.replace(/\s+/g, ' ');
let encodedData = ',' + newText;
if (encodedData.length < DATA.length) {
DATA = encodedData;
let imagePath = path.join(path.dirname(module), url);
let fileContents = fs.readFileSync(path.join(SRC_DIR, imagePath));
if (fileContents.length < inlineByteLimit) {
const MIME = /\.svg$/.test(url) ? 'image/svg+xml' : 'image/png';
let DATA = ';base64,' + fileContents.toString('base64');
if (!forceBase64 && /\.svg$/.test(url)) {
// .svg => url encode as explained at https://codepen.io/tigt/post/optimizing-svgs-in-data-uris
let newText = fileContents.toString()
.replace(/"/g, '\'')
.replace(/</g, '%3C')
.replace(/>/g, '%3E')
.replace(/&/g, '%26')
.replace(/#/g, '%23')
.replace(/\s+/g, ' ');
let encodedData = ',' + newText;
if (encodedData.length < DATA.length) {
DATA = encodedData;
}
}
return '"data:' + MIME + DATA + '"';
}
return '"data:' + MIME + DATA + '"';
enqueue(imagePath);
return url;
});
}
function _replaceURL(contents, replacer) {


@@ -35,7 +35,6 @@ export function extractEditor(options: tss.ITreeShakingOptions & { destRoot: str
let compilerOptions: { [key: string]: any };
if (tsConfig.extends) {
compilerOptions = Object.assign({}, require(path.join(options.sourcesRoot, tsConfig.extends)).compilerOptions, tsConfig.compilerOptions);
delete tsConfig.extends;
} else {
compilerOptions = tsConfig.compilerOptions;
}
@@ -45,13 +44,12 @@ export function extractEditor(options: tss.ITreeShakingOptions & { destRoot: str
compilerOptions.noUnusedLocals = false;
compilerOptions.preserveConstEnums = false;
compilerOptions.declaration = false;
compilerOptions.noImplicitAny = false;
compilerOptions.moduleResolution = ts.ModuleResolutionKind.Classic;
options.compilerOptions = compilerOptions;
console.log(`Running with shakeLevel ${tss.toStringShakeLevel(options.shakeLevel)}`);
let result = tss.shake(options);
for (let fileName in result) {
if (result.hasOwnProperty(fileName)) {
@@ -102,6 +100,8 @@ export function extractEditor(options: tss.ITreeShakingOptions & { destRoot: str
delete tsConfig.compilerOptions.moduleResolution;
writeOutputFile('tsconfig.json', JSON.stringify(tsConfig, null, '\t'));
const tsConfigBase = JSON.parse(fs.readFileSync(path.join(options.sourcesRoot, 'tsconfig.base.json')).toString());
writeOutputFile('tsconfig.base.json', JSON.stringify(tsConfigBase, null, '\t'));
[
'vs/css.build.js',
@@ -154,7 +154,7 @@ export function createESMSourcesAndResources2(options: IOptions2): void {
continue;
}
if (/\.d\.ts$/.test(file) || /\.css$/.test(file) || /\.js$/.test(file) || /\.ttf$/.test(file)) {
if (/\.d\.ts$/.test(file) || /\.css$/.test(file) || /\.js$/.test(file)) {
// Transport the files directly
write(getDestAbsoluteFilePath(file), fs.readFileSync(path.join(SRC_FOLDER, file)));
continue;
@@ -290,41 +290,40 @@ function transportCSS(module: string, enqueue: (module: string) => void, write:
const filename = path.join(SRC_DIR, module);
const fileContents = fs.readFileSync(filename).toString();
const inlineResources = 'base64'; // see https://github.com/Microsoft/monaco-editor/issues/148
const inlineResourcesLimit = 300000;//3000; // see https://github.com/Microsoft/monaco-editor/issues/336
const newContents = _rewriteOrInlineUrls(fileContents, inlineResources === 'base64');
const newContents = _rewriteOrInlineUrls(fileContents, inlineResources === 'base64', inlineResourcesLimit);
write(module, newContents);
return true;
function _rewriteOrInlineUrls(contents: string, forceBase64: boolean): string {
function _rewriteOrInlineUrls(contents: string, forceBase64: boolean, inlineByteLimit: number): string {
return _replaceURL(contents, (url) => {
const fontMatch = url.match(/^(.*).ttf\?(.*)$/);
if (fontMatch) {
const relativeFontPath = `${fontMatch[1]}.ttf`; // trim the query parameter
const fontPath = path.join(path.dirname(module), relativeFontPath);
enqueue(fontPath);
return relativeFontPath;
}
let imagePath = path.join(path.dirname(module), url);
let fileContents = fs.readFileSync(path.join(SRC_DIR, imagePath));
const imagePath = path.join(path.dirname(module), url);
const fileContents = fs.readFileSync(path.join(SRC_DIR, imagePath));
const MIME = /\.svg$/.test(url) ? 'image/svg+xml' : 'image/png';
let DATA = ';base64,' + fileContents.toString('base64');
if (fileContents.length < inlineByteLimit) {
const MIME = /\.svg$/.test(url) ? 'image/svg+xml' : 'image/png';
let DATA = ';base64,' + fileContents.toString('base64');
if (!forceBase64 && /\.svg$/.test(url)) {
// .svg => url encode as explained at https://codepen.io/tigt/post/optimizing-svgs-in-data-uris
let newText = fileContents.toString()
.replace(/"/g, '\'')
.replace(/</g, '%3C')
.replace(/>/g, '%3E')
.replace(/&/g, '%26')
.replace(/#/g, '%23')
.replace(/\s+/g, ' ');
let encodedData = ',' + newText;
if (encodedData.length < DATA.length) {
DATA = encodedData;
if (!forceBase64 && /\.svg$/.test(url)) {
// .svg => url encode as explained at https://codepen.io/tigt/post/optimizing-svgs-in-data-uris
let newText = fileContents.toString()
.replace(/"/g, '\'')
.replace(/</g, '%3C')
.replace(/>/g, '%3E')
.replace(/&/g, '%26')
.replace(/#/g, '%23')
.replace(/\s+/g, ' ');
let encodedData = ',' + newText;
if (encodedData.length < DATA.length) {
DATA = encodedData;
}
}
return '"data:' + MIME + DATA + '"';
}
return '"data:' + MIME + DATA + '"';
enqueue(imagePath);
return url;
});
}

Some files were not shown because too many files have changed in this diff.