Compare commits

..

8 Commits

Author SHA1 Message Date
Karl Burtram
5fdb967ffc Enable visualizer recommendaton prompt in stable build (#6737) 2019-08-13 18:03:59 -07:00
Charles Gagnon
4a911b60f7 Bump required azdata engine version for schema compare (#6717) 2019-08-13 15:42:51 -07:00
Cory Rivera
3cc849a999 Correctly set focus on new Notebook cells after clicking on inline buttons. (#6708) 2019-08-13 15:41:41 -07:00
Alan Ren
1a5469b7a1 revert the icon (#6733) 2019-08-13 15:41:15 -07:00
Anthony Dresser
c042aacd17 add sanity check when adding tables to the results (#6714) 2019-08-13 15:40:53 -07:00
Charles Gagnon
9bd007a555 Fix resultSerializer to correctly check returned file picker path (#6713) 2019-08-13 15:40:20 -07:00
Kim Santiago
75b9ab643d fix default connection not always being correct (#6711) 2019-08-13 15:35:28 -07:00
Lucy Zhang
c4d3a90266 Fix Books Unit Tests (#6702)
* wait for all toc.yml to be found

* add event to signal all toc.yml files read

* add workspae folder parameter back

* remove toc filter
2019-08-13 15:34:48 -07:00
3462 changed files with 156811 additions and 186281 deletions

View File

@@ -1,34 +1,49 @@
{ {
perform: true, perform: false,
alwaysRequireAssignee: false, alwaysRequireAssignee: false,
labelsRequiringAssignee: [], labelsRequiringAssignee: [],
autoAssignees: { autoAssignees: {
Area - Acquisition: [], accessibility: [],
Area - Azure: [], acquisition: [],
Area - Backup\Restore: [], agent: [],
Area - Charting\Insights: [], azure: [],
Area - Connection: [], backup: [],
Area - DacFX: [], bcdr: [],
Area - Dashboard: [], 'chart viewer': [],
Area - Data Explorer: [], connection: [],
Area - Edit Data: [], dacfx: [],
Area - Extensibility: [], dashboard: [],
Area - External Table: [], 'data explorer': [],
Area - Fundamentals: [], documentation: [],
Area - Language Service: [], 'edit data': [],
Area - Localization: [], export: [],
Area - Notebooks: [], extensibility: [],
Area - Performance: [], extensionManager: [],
Area - Query Editor: [ anthonydresser ], globalization: [],
Area - Query Plan: [], grid: [],
Area - Reliability: [], import: [],
Area - Resource Deployment: [], insights: [],
Area - Schema Compare: [], intellisense: [],
Area - Shell: [], localization: [],
Area - SQL Agent: [], 'managed instance': [],
Area - SQL Import: [], notebooks: [],
Area - SQL Profiler: [], 'object explorer': [],
Area - SQL 2019: [], performance: [],
Area - SSMS Integration: [] profiler: [],
'query editor': [],
'query execution': [],
reliability: [],
restore: [],
scripting: [],
'server group': [],
settings: [],
setup: [],
shell: [],
showplan: [],
snippet: [],
sql2019Preview: [],
sqldw: [],
supportability: [],
ux: []
} }
} }

View File

@@ -1,9 +0,0 @@
<!-- Thank you for submitting a Pull Request. Please:
* Read our Pull Request guidelines:
https://github.com/Microsoft/azuredatastudio/wiki/How-to-Contribute#pull-requests.
* Associate an issue with the Pull Request.
* Ensure that the code is up-to-date with the `master` branch.
* Include a description of the proposed changes and how to test them.
-->
This PR fixes #

View File

@@ -1,118 +0,0 @@
name: CI
on:
push:
branches:
- master
- release/*
pull_request:
branches:
- master
- release/*
jobs:
linux:
runs-on: ubuntu-latest
env:
CHILD_CONCURRENCY: "1"
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@v1
# TODO: rename azure-pipelines/linux/xvfb.init to github-actions
- run: |
sudo apt-get update
sudo apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0 libkrb5-dev # {{SQL CARBON EDIT}} add kerberos dep
sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
sudo chmod +x /etc/init.d/xvfb
sudo update-rc.d xvfb defaults
sudo service xvfb start
name: Setup Build Environment
- uses: actions/setup-node@v1
with:
node-version: 10
# TODO: cache node modules
- run: yarn --frozen-lockfile
name: Install Dependencies
- run: yarn electron x64
name: Download Electron
- run: yarn gulp hygiene --skip-tslint
name: Run Hygiene Checks
- run: yarn gulp tslint
name: Run TSLint Checks
- run: yarn strict-null-check # {{SQL CARBON EDIT}} add step
name: Run Strict Null Check
# - run: yarn monaco-compile-check {{SQL CARBON EDIT}} remove step
# name: Run Monaco Editor Checks
- run: yarn compile
name: Compile Sources
# - run: yarn download-builtin-extensions {{SQL CARBON EDIT}} remove step
# name: Download Built-in Extensions
- run: DISPLAY=:10 ./scripts/test.sh --tfs "Unit Tests"
name: Run Unit Tests
# - run: DISPLAY=:10 ./scripts/test-integration.sh --tfs "Integration Tests" {{SQL CARBON EDIT}} remove step
# name: Run Integration Tests
windows:
runs-on: windows-2016
env:
CHILD_CONCURRENCY: "1"
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@v1
- uses: actions/setup-node@v1
with:
node-version: 10
- uses: actions/setup-python@v1
with:
python-version: '2.x'
- run: yarn --frozen-lockfile
name: Install Dependencies
- run: yarn electron
name: Download Electron
- run: yarn gulp hygiene --skip-tslint
name: Run Hygiene Checks
- run: yarn gulp tslint
name: Run TSLint Checks
- run: yarn strict-null-check # {{SQL CARBON EDIT}} add step
name: Run Strict Null Check
# - run: yarn monaco-compile-check {{SQL CARBON EDIT}} remove step
# name: Run Monaco Editor Checks
- run: yarn compile
name: Compile Sources
# - run: yarn download-builtin-extensions {{SQL CARBON EDIT}} remove step
# name: Download Built-in Extensions
- run: .\scripts\test.bat --tfs "Unit Tests"
name: Run Unit Tests
# - run: .\scripts\test-integration.bat --tfs "Integration Tests" {{SQL CARBON EDIT}} remove step
# name: Run Integration Tests
darwin:
runs-on: macos-latest
env:
CHILD_CONCURRENCY: "1"
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@v1
- uses: actions/setup-node@v1
with:
node-version: 10
- run: yarn --frozen-lockfile
name: Install Dependencies
- run: yarn electron x64
name: Download Electron
- run: yarn gulp hygiene --skip-tslint
name: Run Hygiene Checks
- run: yarn gulp tslint
name: Run TSLint Checks
- run: yarn strict-null-check # {{SQL CARBON EDIT}} add step
name: Run Strict Null Check
# - run: yarn monaco-compile-check {{SQL CARBON EDIT}} remove step
# name: Run Monaco Editor Checks
- run: yarn compile
name: Compile Sources
# - run: yarn download-builtin-extensions {{SQL CARBON EDIT}} remove step
# name: Download Built-in Extensions
- run: ./scripts/test.sh --tfs "Unit Tests"
name: Run Unit Tests
# - run: ./scripts/test-integration.sh --tfs "Integration Tests" {{SQL CARBON EDIT}} remove step
# name: Run Integration Tests

View File

@@ -1,13 +0,0 @@
name: TSLint Enforcement
on: [pull_request]
jobs:
job:
runs-on: ubuntu-latest
timeout-minutes: 5
steps:
- uses: actions/checkout@v1
- name: TSLint
uses: aaomidi/gh-action-tslint@master
with:
token: ${{ secrets.GITHUB_TOKEN }}
tslint_config: 'tslint-sql.json'

1
.gitignore vendored
View File

@@ -30,4 +30,3 @@ coverage/
test_data/ test_data/
test-results/ test-results/
yarn-error.log yarn-error.log
*.vsix

View File

@@ -1,33 +0,0 @@
/**
* @name No floating promises
* @kind problem
* @problem.severity error
* @id js/experimental/floating-promise
*/
import javascript
private predicate isEscapingPromise(PromiseDefinition promise) {
exists (DataFlow::Node escape | promise.flowsTo(escape) |
escape = any(DataFlow::InvokeNode invk).getAnArgument()
or
escape = any(DataFlow::FunctionNode fun).getAReturn()
or
escape = any(ThrowStmt t).getExpr().flow()
or
escape = any(GlobalVariable v).getAnAssignedExpr().flow()
or
escape = any(DataFlow::PropWrite write).getRhs()
or
exists(WithStmt with, Assignment assign |
with.mayAffect(assign.getLhs()) and
assign.getRhs().flow() = escape
)
)
}
from PromiseDefinition promise
where
not exists(promise.getAMethodCall(any(string m | m = "then" or m = "catch" or m = "finally"))) and
not exists (AwaitExpr e | promise.flowsTo(e.getOperand().flow())) and
not isEscapingPromise(promise)
select promise, "This promise appears to be a floating promise"

View File

@@ -1,6 +0,0 @@
{
"useTabs": true,
"printWidth": 120,
"semi": true,
"singleQuote": true
}

View File

@@ -1,61 +1,23 @@
{ {
"type": "array", "type": "array",
"items": { "items": {
"oneOf": [ "type": "object",
{ "required": [
"type": "object", "name",
"required": [ "licenseDetail"
"name", ],
"prependLicenseText" "properties": {
], "name": {
"properties": { "type": "string",
"name": { "description": "The name of the dependency"
"type": "string",
"description": "The name of the dependency"
},
"fullLicenseText": {
"type": "array",
"description": "The complete license text of the dependency",
"items": {
"type": "string"
}
},
"prependLicenseText": {
"type": "array",
"description": "A piece of text to prepend to the auto-detected license text of the dependency",
"items": {
"type": "string"
}
}
}
}, },
{ "licenseDetail": {
"type": "object", "type": "array",
"required": [ "description": "The complete license text of the dependency",
"name", "items": {
"fullLicenseText" "type": "string"
],
"properties": {
"name": {
"type": "string",
"description": "The name of the dependency"
},
"fullLicenseText": {
"type": "array",
"description": "The complete license text of the dependency",
"items": {
"type": "string"
}
},
"prependLicenseText": {
"type": "array",
"description": "A piece of text to prepend to the auto-detected license text of the dependency",
"items": {
"type": "string"
}
}
} }
} }
] }
} }
} }

75
.vscode/launch.json vendored
View File

@@ -16,7 +16,6 @@
"request": "attach", "request": "attach",
"name": "Attach to Extension Host", "name": "Attach to Extension Host",
"port": 5870, "port": 5870,
"timeout": 30000,
"restart": true, "restart": true,
"outFiles": [ "outFiles": [
"${workspaceFolder}/out/**/*.js" "${workspaceFolder}/out/**/*.js"
@@ -68,15 +67,15 @@
"name": "Launch azuredatastudio", "name": "Launch azuredatastudio",
"windows": { "windows": {
"runtimeExecutable": "${workspaceFolder}/scripts/sql.bat", "runtimeExecutable": "${workspaceFolder}/scripts/sql.bat",
"timeout": 45000 "timeout": 20000
}, },
"osx": { "osx": {
"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh", "runtimeExecutable": "${workspaceFolder}/scripts/sql.sh",
"timeout": 45000 "timeout": 20000
}, },
"linux": { "linux": {
"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh", "runtimeExecutable": "${workspaceFolder}/scripts/sql.sh",
"timeout": 45000 "timeout": 20000
}, },
"env": { "env": {
"VSCODE_EXTHOST_WILL_SEND_SOCKET": null "VSCODE_EXTHOST_WILL_SEND_SOCKET": null
@@ -128,33 +127,6 @@
"webRoot": "${workspaceFolder}", "webRoot": "${workspaceFolder}",
"timeout": 45000 "timeout": 45000
}, },
{
"type": "chrome",
"request": "launch",
"name": "Launch ADS (Web) (TBD)",
"runtimeExecutable": "yarn",
"runtimeArgs": [
"web"
],
},
{
"type": "chrome",
"request": "launch",
"name": "Launch ADS (Web, Chrome) (TBD)",
"url": "http://localhost:8080",
"preLaunchTask": "Run web"
},
{
"type": "node",
"request": "launch",
"name": "Git Unit Tests",
"program": "${workspaceFolder}/extensions/git/node_modules/mocha/bin/_mocha",
"stopOnEntry": false,
"cwd": "${workspaceFolder}/extensions/git",
"outFiles": [
"${workspaceFolder}/extensions/git/out/**/*.js"
]
},
{ {
"name": "Launch Built-in Extension", "name": "Launch Built-in Extension",
"type": "extensionHost", "type": "extensionHost",
@@ -193,10 +165,7 @@
"cwd": "${workspaceFolder}", "cwd": "${workspaceFolder}",
"outFiles": [ "outFiles": [
"${workspaceFolder}/out/**/*.js" "${workspaceFolder}/out/**/*.js"
], ]
"env": {
"MOCHA_COLORS": "true"
}
}, },
{ {
"type": "chrome", "type": "chrome",
@@ -214,22 +183,6 @@
"webRoot": "${workspaceFolder}", "webRoot": "${workspaceFolder}",
"timeout": 45000 "timeout": 45000
}, },
{
"type": "chrome",
"request": "launch",
"name": "Run Extension Integration Tests",
"windows": {
"runtimeExecutable": "${workspaceFolder}/scripts/sql-test-integration.bat"
},
"osx": {
"runtimeExecutable": "${workspaceFolder}/scripts/sql-test-integration.sh"
},
"linux": {
"runtimeExecutable": "${workspaceFolder}/scripts/sql-test-integration.sh"
},
"webRoot": "${workspaceFolder}",
"timeout": 45000
},
], ],
"compounds": [ "compounds": [
{ {
@@ -246,13 +199,6 @@
"Run Extension Unit Tests" "Run Extension Unit Tests"
] ]
}, },
{
"name": "Debug Extension Integration Tests",
"configurations": [
"Attach to Extension Host",
"Run Extension Integration Tests"
]
},
{ {
"name": "Debug azuredatastudio Main and Renderer", "name": "Debug azuredatastudio Main and Renderer",
"configurations": [ "configurations": [
@@ -261,25 +207,18 @@
] ]
}, },
{ {
"name": "Debug Renderer and search processes", "name": "Search and Renderer processes",
"configurations": [ "configurations": [
"Launch azuredatastudio", "Launch azuredatastudio",
"Attach to Search Process" "Attach to Search Process"
] ]
}, },
{ {
"name": "Debug Renderer and Extension Host processes", "name": "Renderer and Extension Host processes",
"configurations": [ "configurations": [
"Launch azuredatastudio", "Launch azuredatastudio",
"Attach to Extension Host" "Attach to Extension Host"
] ]
},
{
"name": "Attach Renderer and Extension Host",
"configurations": [
"Attach to azuredatastudio",
"Attach to Extension Host"
]
} }
] ]
} }

View File

@@ -60,6 +60,5 @@
"remote.extensionKind": { "remote.extensionKind": {
"msjsdiag.debugger-for-chrome": "workspace" "msjsdiag.debugger-for-chrome": "workspace"
}, },
"gulp.autoDetect": "off",
"files.insertFinalNewline": true "files.insertFinalNewline": true
} }

46
.vscode/tasks.json vendored
View File

@@ -5,10 +5,7 @@
"type": "npm", "type": "npm",
"script": "watch", "script": "watch",
"label": "Build VS Code", "label": "Build VS Code",
"group": { "group": "build",
"kind": "build",
"isDefault": true
},
"isBackground": true, "isBackground": true,
"presentation": { "presentation": {
"reveal": "never" "reveal": "never"
@@ -45,20 +42,6 @@
"applyTo": "allDocuments" "applyTo": "allDocuments"
} }
}, },
{
"type": "npm",
"script": "strict-null-check-watch",
"label": "TS - Strict Null Checks",
"isBackground": true,
"presentation": {
"reveal": "never"
},
"problemMatcher": {
"base": "$tsc-watch",
"owner": "typescript-strict-null-checks",
"applyTo": "allDocuments"
}
},
{ {
"type": "gulp", "type": "gulp",
"task": "tslint", "task": "tslint",
@@ -90,33 +73,14 @@
"problemMatcher": [] "problemMatcher": []
}, },
{ {
"type": "npm", "type": "gulp",
"script": "electron", "task": "electron",
"label": "Download electron" "label": "Download electron"
}, },
{ {
"type": "gulp", "type": "gulp",
"task": "hygiene", "task": "hygiene",
"problemMatcher": [] "problemMatcher": []
}, }
{
"type": "shell",
"command": "yarn web -- --no-launch",
"label": "Run web",
"isBackground": true,
// This section to make error go away when launching the debug config
"problemMatcher": {
"pattern": {
"regexp": ""
},
"background": {
"beginsPattern": ".*node .*",
"endsPattern": "Web UI available at .*"
}
},
"presentation": {
"reveal": "never"
}
},
] ]
} }

View File

@@ -1,3 +1,3 @@
disturl "https://atom.io/download/electron" disturl "https://atom.io/download/electron"
target "6.0.12" target "4.2.7"
runtime "electron" runtime "electron"

View File

@@ -1,49 +1,5 @@
# Change Log # Change Log
## Version 1.12.2
* Release date: October 11, 2019
* Release status: General Availability
* Hotfix release (1.12.2): `Disable automatically starting the EH in inspect mode` https://github.com/microsoft/azuredatastudio/commit/c9bef82ace6c67190d0e83820011a2bbd1f793c1
## Version 1.12.1
* Release date: October 7, 2019
* Release status: General Availability
* Hotfix release: `Notebooks: Ensure quotes and backslashes are escaped properly in text editor model` https://github.com/microsoft/azuredatastudio/pull/7540
## Version 1.12.0
* Release date: October 2, 2019
* Release status: General Availability
## What's new in this version
* Announcing the Query History panel
* Improved Query Results Grid copy selection support
* TempDB page added to Server Reports extension
* PowerShell extension update
* Resolved [bugs and issues](https://github.com/microsoft/azuredatastudio/milestone/42?closed=1).
## Version 1.11.0
* Release date: September 10, 2019
* Release status: General Availability
## What's new in this version
* Resolved [bugs and issues](https://github.com/microsoft/azuredatastudio/milestone/41?closed=1).
## Version 1.10.0
* Release date: August 14, 2019
* Release status: General Availability
## What's new in this version
* [SandDance](https://github.com/microsoft/SandDance) integration — A new way to interact with data. Download the extension [here](https://docs.microsoft.com/sql/azure-data-studio/sanddance-extension)
* Notebook improvements
* Better loading performance
* Ability to right click SQL results grid to save your results as CSV, JSON, etc.
* Buttons to add code or text cells in-line
* [Other fixes and improvements](https://github.com/microsoft/azuredatastudio/issues?q=is%3Aissue+label%3A%22Area%3A+Notebooks%22+milestone%3A%22August+2019+Release%22+is%3Aclosed)
* SQL Server Dacpac extension can support Azure Active Directory authentication
* Updated SQL Server 2019 extension
* Visual Studio Code May Release Merge 1.37 - this includes changes from [1.36](https://code.visualstudio.com/updates/v1_37) and [1.37](https://code.visualstudio.com/updates/v1_37)
* Resolved [bugs and issues](https://github.com/microsoft/azuredatastudio/milestone/39?closed=1).
## Version 1.9.0 ## Version 1.9.0
* Release date: July 11, 2019 * Release date: July 11, 2019
* Release status: General Availability * Release status: General Availability
@@ -225,7 +181,7 @@ We would like to thank all our users who raised issues, and in particular the fo
## What's new in this version ## What's new in this version
* Announcing the SQL Server 2019 Preview extension. * Announcing the SQL Server 2019 Preview extension.
* Support for SQL Server 2019 preview features including Big Data Cluster support. * Support for SQL Server 2019 preview features including big data cluster support.
* Azure Data Studio Notebooks * Azure Data Studio Notebooks
* The Azure Resource Explorer viewlets you browse data-related endpoints for your Azure accounts and create connections to them in Object Explorer. In this release Azure SQL Databases and servers are supported. * The Azure Resource Explorer viewlets you browse data-related endpoints for your Azure accounts and create connections to them in Object Explorer. In this release Azure SQL Databases and servers are supported.
* SQL Server Polybase Create External Table Wizard * SQL Server Polybase Create External Table Wizard

View File

@@ -1,8 +1,7 @@
# Azure Data Studio # Azure Data Studio
[![Join the chat at https://gitter.im/Microsoft/sqlopsstudio](https://badges.gitter.im/Microsoft/sqlopsstudio.svg)](https://gitter.im/Microsoft/sqlopsstudio?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) [![Join the chat at https://gitter.im/Microsoft/sqlopsstudio](https://badges.gitter.im/Microsoft/sqlopsstudio.svg)](https://gitter.im/Microsoft/sqlopsstudio?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
[![Build Status](https://dev.azure.com/azuredatastudio/azuredatastudio/_apis/build/status/Azure%20Data%20Studio%20CI?branchName=master)](https://dev.azure.com/azuredatastudio/azuredatastudio/_build/latest?definitionId=4&branchName=master) [![Build Status](https://dev.azure.com/ms/azuredatastudio/_apis/build/status/Microsoft.azuredatastudio)](https://dev.azure.com/ms/azuredatastudio/_build/latest?definitionId=4)
[![Twitter Follow](https://img.shields.io/twitter/follow/azuredatastudio?style=social)](https://twitter.com/azuredatastudio)
Azure Data Studio is a data management tool that enables you to work with SQL Server, Azure SQL DB and SQL DW from Windows, macOS and Linux. Azure Data Studio is a data management tool that enables you to work with SQL Server, Azure SQL DB and SQL DW from Windows, macOS and Linux.
@@ -10,13 +9,13 @@ Azure Data Studio is a data management tool that enables you to work with SQL Se
Platform | Link Platform | Link
-- | -- -- | --
Windows User Installer | https://go.microsoft.com/fwlink/?linkid=2105135 Windows User Installer | https://go.microsoft.com/fwlink/?linkid=2098449
Windows System Installer | https://go.microsoft.com/fwlink/?linkid=2105134 Windows System Installer | https://go.microsoft.com/fwlink/?linkid=2098450
Windows ZIP | https://go.microsoft.com/fwlink/?linkid=2104938 Windows ZIP | https://go.microsoft.com/fwlink/?linkid=2098500
macOS ZIP | https://go.microsoft.com/fwlink/?linkid=2105133 macOS ZIP | https://go.microsoft.com/fwlink/?linkid=2098501
Linux TAR.GZ | https://go.microsoft.com/fwlink/?linkid=2105132 Linux TAR.GZ | https://go.microsoft.com/fwlink/?linkid=2098197
Linux RPM | https://go.microsoft.com/fwlink/?linkid=2104937 Linux RPM | https://go.microsoft.com/fwlink/?linkid=2098280
Linux DEB | https://go.microsoft.com/fwlink/?linkid=2105131 Linux DEB | https://go.microsoft.com/fwlink/?linkid=2098279
Go to our [download page](https://aka.ms/azuredatastudio) for more specific instructions. Go to our [download page](https://aka.ms/azuredatastudio) for more specific instructions.
@@ -69,9 +68,6 @@ The [Microsoft Enterprise and Developer Privacy Statement](https://privacy.micro
## Contributions and "Thank You" ## Contributions and "Thank You"
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes: We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
* dzsquared for `fix(snippets): ads parenthesis to sqlcreateindex snippet #7020`
* devmattrick for `Update row count as updates are received #6642`
* mottykohn for `In Message panel onclick scroll to line #6417`
* Stevoni for `Corrected Keyboard Shortcut Execution Issue #5480` * Stevoni for `Corrected Keyboard Shortcut Execution Issue #5480`
* yamatoya for `fix the format #4899` * yamatoya for `fix the format #4899`
* GeoffYoung for `Fix sqlDropColumn description #4422` * GeoffYoung for `Fix sqlDropColumn description #4422`

View File

@@ -36,7 +36,6 @@ expressly granted herein, whether by implication, estoppel or otherwise.
jquery-ui: https://github.com/jquery/jquery-ui jquery-ui: https://github.com/jquery/jquery-ui
jquery.event.drag: https://github.com/devongovett/jquery.event.drag jquery.event.drag: https://github.com/devongovett/jquery.event.drag
jschardet: https://github.com/aadsm/jschardet jschardet: https://github.com/aadsm/jschardet
jupyter-powershell: https://github.com/vors/jupyter-powershell
JupyterLab: https://github.com/jupyterlab/jupyterlab JupyterLab: https://github.com/jupyterlab/jupyterlab
make-error: https://github.com/JsCommunity/make-error make-error: https://github.com/JsCommunity/make-error
minimist: https://github.com/substack/minimist minimist: https://github.com/substack/minimist
@@ -1176,35 +1175,7 @@ That's all there is to it!
========================================= =========================================
END OF jschardet NOTICES AND INFORMATION END OF jschardet NOTICES AND INFORMATION
%% jupyter-powershell NOTICES AND INFORMATION BEGIN HERE
=========================================
The MIT License (MIT)
Copyright (c) 2016 Sergei Vorobev
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
=========================================
END OF jupyter-powershell NOTICES AND INFORMATION
%% JupyterLab NOTICES AND INFORMATION BEGIN HERE %% JupyterLab NOTICES AND INFORMATION BEGIN HERE
=========================================
Copyright (c) 2015 Project Jupyter Contributors Copyright (c) 2015 Project Jupyter Contributors
All rights reserved. All rights reserved.

View File

@@ -0,0 +1,66 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: '10.15.1'
displayName: 'Install Node.js'
- script: |
npm i -g yarn
displayName: 'preinstall'
- script: |
export CXX="g++-4.9" CC="gcc-4.9" DISPLAY=:10
sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
sudo chmod +x /etc/init.d/xvfb
sudo update-rc.d xvfb defaults
sudo service xvfb start
# sh -e /etc/init.d/xvfb start
# sleep 3
displayName: 'Linux preinstall'
condition: eq(variables['Agent.OS'], 'Linux')
- script: |
yarn
displayName: 'Install'
- script: |
yarn gulp electron-x64
displayName: Download Electron
- script: |
yarn gulp hygiene
displayName: Run Hygiene Checks
- script: |
yarn tslint
displayName: 'Run TSLint'
# - script: |
# yarn strict-null-check
# displayName: 'Run Strict Null Check'
- script: |
yarn compile
displayName: 'Compile'
- script: |
DISPLAY=:10 ./scripts/test.sh --reporter mocha-junit-reporter
displayName: 'Tests'
condition: and(succeeded(), eq(variables['Agent.OS'], 'Linux'))
- script: |
DISPLAY=:10 ./scripts/test.sh --reporter mocha-junit-reporter --coverage
displayName: 'Tests'
condition: and(succeeded(), ne(variables['Agent.OS'], 'Linux'))
- task: PublishTestResults@2
inputs:
testResultsFiles: '**/test-results.xml'
condition: succeededOrFailed()
- task: PublishCodeCoverageResults@1
inputs:
codeCoverageTool: 'cobertura'
summaryFileLocation: $(System.DefaultWorkingDirectory)/.build/coverage/cobertura-coverage.xml
reportDirectory: $(System.DefaultWorkingDirectory)/.build/coverage/lcov-reports
condition: ne(variables['Agent.OS'], 'Linux')

View File

@@ -0,0 +1,44 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: '10.15.1'
displayName: 'Install Node.js'
- script: |
yarn
displayName: 'Yarn Install'
- script: |
yarn gulp electron-x64
displayName: 'Electron'
- script: |
yarn gulp hygiene
displayName: Run Hygiene Checks
- script: |
yarn tslint
displayName: 'Run TSLint'
# - script: |
# yarn strict-null-check
# displayName: 'Run Strict Null Check'
- script: |
yarn compile
displayName: 'Compile'
- script: |
.\scripts\test.bat --reporter mocha-junit-reporter --coverage
displayName: 'Test'
- task: PublishTestResults@2
inputs:
testResultsFiles: 'test-results.xml'
condition: succeededOrFailed()
- task: PublishCodeCoverageResults@1
inputs:
codeCoverageTool: 'cobertura'
summaryFileLocation: $(System.DefaultWorkingDirectory)\.build\coverage\cobertura-coverage.xml
reportDirectory: $(System.DefaultWorkingDirectory)\.build\coverage\lcov-report

View File

@@ -1,18 +1,29 @@
trigger:
- master
- releases/*
jobs: jobs:
- job: Windows
pool:
vmImage: VS2017-Win2016
steps:
- template: build/azure-pipelines/win32/continuous-build-win32.yml
- job: Linux # All tasks on Windows
- job: build_all_windows
displayName: Build all tasks (Windows)
pool: pool:
vmImage: 'Ubuntu-16.04' vmImage: vs2017-win2016
steps: steps:
- template: build/azure-pipelines/linux/continuous-build-linux.yml - template: azure-pipelines-windows.yml
- job: macOS # All tasks on Linux
- job: build_all_linux
displayName: Build all tasks (Linux)
pool: pool:
vmImage: macOS 10.13 vmImage: 'Ubuntu 16.04'
steps: steps:
- template: build/azure-pipelines/darwin/continuous-build-darwin.yml - template: azure-pipelines-linux-mac.yml
# All tasks on macOS
- job: build_all_darwin
displayName: Build all tasks (macOS)
pool:
vmImage: macos-10.13
steps:
- template: azure-pipelines-linux-mac.yml

View File

@@ -1 +1 @@
2019-08-30T20:24:23.714Z 2019-07-11T05:47:05.444Z

View File

@@ -1,9 +0,0 @@
#!/usr/bin/env bash
set -e
REPO="$(pwd)"
# Publish webview contents
PACKAGEJSON="$REPO/package.json"
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")
node build/azure-pipelines/common/publish-webview.js "$REPO/src/vs/workbench/contrib/webview/browser/pre/"

View File

@@ -1,87 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as azure from 'azure-storage';
import * as mime from 'mime';
import * as minimist from 'minimist';
import { basename, join } from 'path';
const fileNames = [
'fake.html',
'host.js',
'index.html',
'main.js',
'service-worker.js'
];
async function assertContainer(blobService: azure.BlobService, container: string): Promise<void> {
await new Promise((c, e) => blobService.createContainerIfNotExists(container, { publicAccessLevel: 'blob' }, err => err ? e(err) : c()));
}
async function doesBlobExist(blobService: azure.BlobService, container: string, blobName: string): Promise<boolean | undefined> {
const existsResult = await new Promise<azure.BlobService.BlobResult>((c, e) => blobService.doesBlobExist(container, blobName, (err, r) => err ? e(err) : c(r)));
return existsResult.exists;
}
async function uploadBlob(blobService: azure.BlobService, container: string, blobName: string, file: string): Promise<void> {
const blobOptions: azure.BlobService.CreateBlockBlobRequestOptions = {
contentSettings: {
contentType: mime.lookup(file),
cacheControl: 'max-age=31536000, public'
}
};
await new Promise((c, e) => blobService.createBlockBlobFromLocalFile(container, blobName, file, blobOptions, err => err ? e(err) : c()));
}
async function publish(commit: string, files: readonly string[]): Promise<void> {
console.log('Publishing...');
console.log('Commit:', commit);
const storageAccount = process.env['AZURE_WEBVIEW_STORAGE_ACCOUNT']!;
const blobService = azure.createBlobService(storageAccount, process.env['AZURE_WEBVIEW_STORAGE_ACCESS_KEY']!)
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
await assertContainer(blobService, commit);
for (const file of files) {
const blobName = basename(file);
const blobExists = await doesBlobExist(blobService, commit, blobName);
if (blobExists) {
console.log(`Blob ${commit}, ${blobName} already exists, not publishing again.`);
continue;
}
console.log('Uploading blob to Azure storage...');
await uploadBlob(blobService, commit, blobName, file);
}
console.log('Blobs successfully uploaded.');
}
function main(): void {
const commit = process.env['BUILD_SOURCEVERSION'];
if (!commit) {
console.warn('Skipping publish due to missing BUILD_SOURCEVERSION');
return;
}
const opts = minimist(process.argv.slice(2));
const [directory] = opts._;
const files = fileNames.map(fileName => join(directory, fileName));
publish(commit, files).catch(err => {
console.error(err);
process.exit(1);
});
}
if (process.argv.length < 3) {
console.error('Usage: node publish.js <directory>');
process.exit(-1);
}
main();

View File

@@ -4,64 +4,46 @@ steps:
versionSpec: "10.15.1" versionSpec: "10.15.1"
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1 - task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs: inputs:
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock' keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules' targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: '$(build-cache)' # {{SQL CARBON EDIT}} update build cache vstsFeed: '$(ArtifactFeed)'
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 # {{SQL CARBON EDIT}} update version - task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs: inputs:
versionSpec: "1.x" versionSpec: "1.10.1"
- script: | - script: |
CHILD_CONCURRENCY=1 yarn --frozen-lockfile yarn --frozen-lockfile
displayName: Install Dependencies displayName: Install Dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true')) condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
env:
GITHUB_TOKEN: $(GITHUB_TOKEN) # {{SQL CARBON EDIT}} add github token
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1 - task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs: inputs:
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock' keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules' targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: '$(build-cache)' # {{SQL CARBON EDIT}} update build cache vstsFeed: '$(ArtifactFeed)'
condition: and(succeeded(), ne(variables['CacheRestored'], 'true')) condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- script: | - script: |
yarn electron x64 yarn gulp electron-x64
displayName: Download Electron displayName: Download Electron
env:
GITHUB_TOKEN: $(GITHUB_TOKEN) # {{SQL CARBON EDIT}} add github token
- script: | - script: |
yarn gulp hygiene --skip-tslint yarn gulp hygiene
displayName: Run Hygiene Checks displayName: Run Hygiene Checks
- script: | - script: |
yarn gulp tslint yarn monaco-compile-check
displayName: Run TSLint Checks displayName: Run Monaco Editor Checks
- script: | # {{SQL CARBON EDIT}} add step
yarn strict-null-check
displayName: Run Strict Null Check.
- script: | # {{SQL CARBON EDIT}} add step
yarn tslint
displayName: Run TSLint (gci)
# - script: | {{SQL CARBON EDIT}} remove step
# yarn monaco-compile-check
# displayName: Run Monaco Editor Checks
- script: | - script: |
yarn compile yarn compile
displayName: Compile Sources displayName: Compile Sources
# - script: | {{SQL CARBON EDIT}} remove step - script: |
# yarn download-builtin-extensions yarn download-builtin-extensions
# displayName: Download Built-in Extensions displayName: Download Built-in Extensions
- script: | - script: |
./scripts/test.sh --tfs "Unit Tests" ./scripts/test.sh --tfs "Unit Tests"
displayName: Run Unit Tests displayName: Run Unit Tests
# - script: | {{SQL CARBON EDIT}} remove step - script: |
# ./scripts/test-integration.sh --tfs "Integration Tests" ./scripts/test-integration.sh --tfs "Integration Tests"
# displayName: Run Integration Tests displayName: Run Integration Tests
- task: PublishTestResults@2 - task: PublishTestResults@2
displayName: Publish Tests Results displayName: Publish Tests Results
inputs: inputs:
testResultsFiles: '*-results.xml' testResultsFiles: '*-results.xml'
searchFolder: '$(Build.ArtifactStagingDirectory)/test-results' searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
condition: succeededOrFailed() condition: succeededOrFailed()
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0 # {{SQL CARBON EDIT}} add task
displayName: 'Component Detection'
inputs:
alertWarningLevel: High
failOnAlert: true

View File

@@ -2,12 +2,11 @@ steps:
- script: | - script: |
mkdir -p .build mkdir -p .build
echo -n $BUILD_SOURCEVERSION > .build/commit echo -n $BUILD_SOURCEVERSION > .build/commit
echo -n $VSCODE_QUALITY > .build/quality
displayName: Prepare cache flag displayName: Prepare cache flag
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1 - task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs: inputs:
keyfile: 'build/.cachesalt, .build/commit, .build/quality' keyfile: 'build/.cachesalt, .build/commit'
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min' targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
vstsFeed: 'npm-vscode' vstsFeed: 'npm-vscode'
platformIndependent: true platformIndependent: true
@@ -25,7 +24,7 @@ steps:
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2 - task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs: inputs:
versionSpec: "1.x" versionSpec: "1.10.1"
- task: AzureKeyVault@1 - task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets' displayName: 'Azure Key Vault: Get Secrets'
@@ -102,39 +101,11 @@ steps:
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false')) condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: | - script: |
# Figure out the full absolute path of the product we just built
# including the remote server and configure the integration tests
# to run with these builds instead of running out of sources.
set -e set -e
APP_ROOT=$(agent.builddirectory)/VSCode-darwin
APP_NAME="`ls $APP_ROOT | head -n 1`"
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME/Contents/MacOS/Electron" \
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-darwin" \
./scripts/test-integration.sh --build --tfs "Integration Tests" ./scripts/test-integration.sh --build --tfs "Integration Tests"
displayName: Run integration tests displayName: Run integration tests
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false')) condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
# Web Smoke Tests disabled due to https://github.com/microsoft/vscode/issues/80308
# - script: |
# set -e
# cd test/smoke
# yarn compile
# cd -
# yarn smoketest --web --headless
# continueOnError: true
# displayName: Run web smoke tests
# condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
cd test/smoke
yarn compile
cd -
yarn smoketest --web --headless
continueOnError: true
displayName: Run smoke tests
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: | - script: |
set -e set -e
pushd ../VSCode-darwin && zip -r -X -y ../VSCode-darwin.zip * && popd pushd ../VSCode-darwin && zip -r -X -y ../VSCode-darwin.zip * && popd

View File

@@ -1,36 +0,0 @@
pool:
vmImage: 'Ubuntu-16.04'
trigger: none
pr: none
steps:
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
inputs:
azureSubscription: 'vscode-builds-subscription'
KeyVaultName: vscode
- script: |
set -e
cat << EOF > ~/.netrc
machine github.com
login vscode
password $(github-distro-mixin-password)
EOF
git config user.email "vscode@microsoft.com"
git config user.name "VSCode"
git checkout origin/electron-6.0.x
git merge origin/master
# Push master branch into exploration branch
git push origin HEAD:electron-6.0.x
displayName: Sync & Merge Exploration

View File

@@ -1,39 +0,0 @@
trigger:
branches:
include: ['master']
pr: none
jobs:
- job: ExplorationMerge
pool:
vmImage: Ubuntu-16.04
steps:
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
- script: |
set -e
cat << EOF > ~/.netrc
machine mssqltools.visualstudio.com
login azuredatastudio
password $(DEVOPS_PASSWORD)
EOF
git config user.email "andresse@microsoft.com"
git config user.name "AzureDataStudio"
git remote add explore "$ADS_EXPLORE_REPO"
git fetch explore
git checkout -b merge-branch explore/master
git merge origin/master
git push explore HEAD:master
displayName: Sync & Merge Explore
env:
ADS_EXPLORE_REPO: $(ADS_EXPLORE_REPO)
DEVOPS_PASSWORD: $(DEVOPS_PASSWORD)

View File

@@ -2,7 +2,7 @@ steps:
- script: | - script: |
set -e set -e
sudo apt-get update sudo apt-get update
sudo apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0 libkrb5-dev #{{SQL CARBON EDIT}} add kerberos dep sudo apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0
sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
sudo chmod +x /etc/init.d/xvfb sudo chmod +x /etc/init.d/xvfb
sudo update-rc.d xvfb defaults sudo update-rc.d xvfb defaults
@@ -12,64 +12,46 @@ steps:
versionSpec: "10.15.1" versionSpec: "10.15.1"
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1 - task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs: inputs:
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock' keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules' targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: '$(build-cache)' # {{SQL CARBON EDIT}} update build cache vstsFeed: '$(ArtifactFeed)'
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 # {{SQL CARBON EDIT}} update version - task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs: inputs:
versionSpec: "1.x" versionSpec: "1.10.1"
- script: | - script: |
CHILD_CONCURRENCY=1 yarn --frozen-lockfile yarn --frozen-lockfile
displayName: Install Dependencies displayName: Install Dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true')) condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
env:
GITHUB_TOKEN: $(GITHUB_TOKEN) # {{SQL CARBON EDIT}} add github token
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1 - task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs: inputs:
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock' keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules' targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: '$(build-cache)' # {{SQL CARBON EDIT}} update build cache vstsFeed: '$(ArtifactFeed)'
condition: and(succeeded(), ne(variables['CacheRestored'], 'true')) condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- script: | - script: |
yarn electron x64 yarn gulp electron-x64
displayName: Download Electron displayName: Download Electron
env:
GITHUB_TOKEN: $(GITHUB_TOKEN) # {{SQL CARBON EDIT}} add github token
- script: | - script: |
yarn gulp hygiene --skip-tslint yarn gulp hygiene
displayName: Run Hygiene Checks displayName: Run Hygiene Checks
- script: | - script: |
yarn gulp tslint yarn monaco-compile-check
displayName: Run TSLint Checks displayName: Run Monaco Editor Checks
- script: | # {{SQL CARBON EDIT}} add gci checks
yarn tslint
displayName: Run TSLint (gci)
- script: | # {{SQL CARBON EDIT}} add strict null check
yarn strict-null-check
displayName: Run Strict Null Check
# - script: | {{SQL CARBON EDIT}} remove monaco editor checks
# yarn monaco-compile-check
# displayName: Run Monaco Editor Checks
- script: | - script: |
yarn compile yarn compile
displayName: Compile Sources displayName: Compile Sources
# - script: | {{SQL CARBON EDIT}} remove step - script: |
# yarn download-builtin-extensions yarn download-builtin-extensions
# displayName: Download Built-in Extensions displayName: Download Built-in Extensions
- script: | - script: |
DISPLAY=:10 ./scripts/test.sh --tfs "Unit Tests" DISPLAY=:10 ./scripts/test.sh --tfs "Unit Tests"
displayName: Run Unit Tests displayName: Run Unit Tests
# - script: | {{SQL CARBON EDIT}} remove step - script: |
# DISPLAY=:10 ./scripts/test-integration.sh --tfs "Integration Tests" DISPLAY=:10 ./scripts/test-integration.sh --tfs "Integration Tests"
# displayName: Run Integration Tests displayName: Run Integration Tests
- task: PublishTestResults@2 - task: PublishTestResults@2
displayName: Publish Tests Results displayName: Publish Tests Results
inputs: inputs:
testResultsFiles: '*-results.xml' testResultsFiles: '*-results.xml'
searchFolder: '$(Build.ArtifactStagingDirectory)/test-results' searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
condition: succeededOrFailed() condition: succeededOrFailed()
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0 # {{SQL CARBON EDIT}} add task
displayName: 'Component Detection'
inputs:
alertWarningLevel: High
failOnAlert: true

View File

@@ -1,3 +0,0 @@
#!/usr/bin/env bash
set -e
echo 'noop'

View File

@@ -1,3 +0,0 @@
#!/usr/bin/env bash
set -e
echo 'noop'

View File

@@ -1,3 +0,0 @@
#!/usr/bin/env bash
set -e
echo 'noop'

View File

@@ -2,12 +2,11 @@ steps:
- script: | - script: |
mkdir -p .build mkdir -p .build
echo -n $BUILD_SOURCEVERSION > .build/commit echo -n $BUILD_SOURCEVERSION > .build/commit
echo -n $VSCODE_QUALITY > .build/quality
displayName: Prepare cache flag displayName: Prepare cache flag
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1 - task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs: inputs:
keyfile: 'build/.cachesalt, .build/commit, .build/quality' keyfile: 'build/.cachesalt, .build/commit'
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min' targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
vstsFeed: 'npm-vscode' vstsFeed: 'npm-vscode'
platformIndependent: true platformIndependent: true
@@ -25,7 +24,7 @@ steps:
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2 - task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs: inputs:
versionSpec: "1.x" versionSpec: "1.10.1"
- task: AzureKeyVault@1 - task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets' displayName: 'Azure Key Vault: Get Secrets'
@@ -113,4 +112,4 @@ steps:
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0 - task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: 'Component Detection' displayName: 'Component Detection'
continueOnError: true continueOnError: true

View File

@@ -2,12 +2,11 @@ steps:
- script: | - script: |
mkdir -p .build mkdir -p .build
echo -n $BUILD_SOURCEVERSION > .build/commit echo -n $BUILD_SOURCEVERSION > .build/commit
echo -n $VSCODE_QUALITY > .build/quality
displayName: Prepare cache flag displayName: Prepare cache flag
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1 - task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs: inputs:
keyfile: 'build/.cachesalt, .build/commit, .build/quality' keyfile: 'build/.cachesalt, .build/commit'
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min' targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
vstsFeed: 'npm-vscode' vstsFeed: 'npm-vscode'
platformIndependent: true platformIndependent: true
@@ -25,7 +24,7 @@ steps:
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2 - task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs: inputs:
versionSpec: "1.x" versionSpec: "1.10.1"
- task: AzureKeyVault@1 - task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets' displayName: 'Azure Key Vault: Get Secrets'
@@ -105,14 +104,7 @@ steps:
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false')) condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: | - script: |
# Figure out the full absolute path of the product we just built
# including the remote server and configure the integration tests
# to run with these builds instead of running out of sources.
set -e set -e
APP_ROOT=$(agent.builddirectory)/VSCode-linux-x64
APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-linux-x64" \
DISPLAY=:10 ./scripts/test-integration.sh --build --tfs "Integration Tests" DISPLAY=:10 ./scripts/test-integration.sh --build --tfs "Integration Tests"
# yarn smoketest -- --build "$(agent.builddirectory)/VSCode-linux-x64" # yarn smoketest -- --build "$(agent.builddirectory)/VSCode-linux-x64"
displayName: Run integration tests displayName: Run integration tests

View File

@@ -5,7 +5,7 @@ steps:
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2 - task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs: inputs:
versionSpec: "1.x" versionSpec: "1.10.1"
- task: AzureKeyVault@1 - task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets' displayName: 'Azure Key Vault: Get Secrets'
@@ -51,4 +51,4 @@ steps:
# Publish snap package # Publish snap package
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \ AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \ AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "linux-snap-x64" package "$SNAP_FILENAME" "$VERSION" true "$SNAP_PATH" node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "linux-snap-x64" package "$SNAP_FILENAME" "$VERSION" true "$SNAP_PATH"

View File

@@ -56,7 +56,7 @@ jobs:
- template: linux/snap-build-linux.yml - template: linux/snap-build-linux.yml
- job: LinuxArmhf - job: LinuxArmhf
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_LINUX_ARMHF'], 'true')) condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_LINUX_ARMHF'], 'true'), ne(variables['VSCODE_QUALITY'], 'stable'))
pool: pool:
vmImage: 'Ubuntu-16.04' vmImage: 'Ubuntu-16.04'
variables: variables:
@@ -66,19 +66,8 @@ jobs:
steps: steps:
- template: linux/product-build-linux-multiarch.yml - template: linux/product-build-linux-multiarch.yml
- job: LinuxArm64
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_LINUX_ARM64'], 'true'), ne(variables['VSCODE_QUALITY'], 'stable'))
pool:
vmImage: 'Ubuntu-16.04'
variables:
VSCODE_ARCH: arm64
dependsOn:
- Compile
steps:
- template: linux/product-build-linux-multiarch.yml
- job: LinuxAlpine - job: LinuxAlpine
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_LINUX_ALPINE'], 'true')) condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_LINUX_ALPINE'], 'true'), ne(variables['VSCODE_QUALITY'], 'stable'))
pool: pool:
vmImage: 'Ubuntu-16.04' vmImage: 'Ubuntu-16.04'
variables: variables:
@@ -139,9 +128,6 @@ jobs:
steps: steps:
- template: sync-mooncake.yml - template: sync-mooncake.yml
trigger: none
pr: none
schedules: schedules:
- cron: "0 5 * * Mon-Fri" - cron: "0 5 * * Mon-Fri"
displayName: Mon-Fri at 7:00 displayName: Mon-Fri at 7:00

View File

@@ -2,12 +2,11 @@ steps:
- script: | - script: |
mkdir -p .build mkdir -p .build
echo -n $BUILD_SOURCEVERSION > .build/commit echo -n $BUILD_SOURCEVERSION > .build/commit
echo -n $VSCODE_QUALITY > .build/quality
displayName: Prepare cache flag displayName: Prepare cache flag
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1 - task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs: inputs:
keyfile: 'build/.cachesalt, .build/commit, .build/quality' keyfile: 'build/.cachesalt, .build/commit'
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min' targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
vstsFeed: 'npm-vscode' vstsFeed: 'npm-vscode'
platformIndependent: true platformIndependent: true
@@ -20,7 +19,7 @@ steps:
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2 - task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs: inputs:
versionSpec: "1.x" versionSpec: "1.10.1"
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true')) condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- task: AzureKeyVault@1 - task: AzureKeyVault@1
@@ -87,10 +86,9 @@ steps:
- script: | - script: |
set -e set -e
yarn gulp hygiene --skip-tslint yarn gulp hygiene
yarn gulp tslint
yarn monaco-compile-check yarn monaco-compile-check
displayName: Run hygiene, tslint and monaco compile checks displayName: Run hygiene checks
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), eq(variables['VSCODE_STEP_ON_IT'], 'false')) condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: | - script: |
@@ -99,13 +97,6 @@ steps:
displayName: Extract Telemetry displayName: Extract Telemetry
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true')) condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- script: |
set -e
AZURE_WEBVIEW_STORAGE_ACCESS_KEY="$(vscode-webview-storage-key)" \
./build/azure-pipelines/common/publish-webview.sh
displayName: Publish Webview
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- script: | - script: |
set -e set -e
yarn gulp compile-build yarn gulp compile-build
@@ -125,9 +116,9 @@ steps:
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1 - task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs: inputs:
keyfile: 'build/.cachesalt, .build/commit, .build/quality' keyfile: 'build/.cachesalt, .build/commit'
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min' targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
vstsFeed: 'npm-vscode' vstsFeed: 'npm-vscode'
platformIndependent: true platformIndependent: true
alias: 'Compilation' alias: 'Compilation'
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true')) condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))

View File

@@ -0,0 +1,36 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const cp = require("child_process");
// Most recent tag in the repository; populated below, empty until then.
let tag = '';
try {
    // Resolve the newest tag by commit date and validate that it looks like
    // a releasable x.y.0 version before proceeding.
    tag = cp
        .execSync('git describe --tags `git rev-list --tags --max-count=1`')
        .toString()
        .trim();
    if (!isValidTag(tag)) {
        throw Error(`Invalid tag ${tag}`);
    }
}
catch (err) {
    // Any failure (git error or bad tag) aborts the pipeline step.
    console.error(err);
    console.error('Failed to update types');
    process.exit(1);
}
/**
 * Checks whether `t` is a releasable version tag of the form
 * `<major>.<minor>.0` (e.g. `1.34.0`) with numeric major/minor components.
 *
 * @param {string} t tag name to validate
 * @returns {boolean} true only for x.y.0 tags with numeric x and y
 */
function isValidTag(t) {
    const parts = t.split('.');
    if (parts.length !== 3) {
        return false;
    }
    const [major, minor, bug] = parts;
    // Only release for tags like 1.34.0
    if (bug !== '0') {
        return false;
    }
    // BUG FIX: the original compared `parseInt(...) === NaN`, which is always
    // false because NaN is never equal to itself, so non-numeric components
    // slipped through. Number.isNaN performs the intended check.
    if (Number.isNaN(parseInt(major, 10)) || Number.isNaN(parseInt(minor, 10))) {
        return false;
    }
    return true;
}

View File

@@ -13,23 +13,7 @@ steps:
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2 - task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs: inputs:
versionSpec: "1.x" versionSpec: "1.10.1"
- bash: |
TAG_VERSION=$(git describe --tags `git rev-list --tags --max-count=1`)
CHANNEL="G1C14HJ2F"
if [ "$TAG_VERSION" == "1.999.0" ]; then
MESSAGE="<!here>. Someone pushed 1.999.0 tag. Please delete it ASAP from remote and local."
curl -X POST -H "Authorization: Bearer $(SLACK_TOKEN)" \
-H 'Content-type: application/json; charset=utf-8' \
--data '{"channel":"'"$CHANNEL"'", "link_names": true, "text":"'"$MESSAGE"'"}' \
https://slack.com/api/chat.postMessage
exit 1
fi
displayName: Check 1.999.0 tag
- bash: | - bash: |
# Install build dependencies # Install build dependencies

View File

@@ -0,0 +1,62 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const fs = require("fs");
const cp = require("child_process");
const path = require("path");
// Most recent tag in the repository; populated below, empty until then.
let tag = '';
try {
    // Resolve the newest tag, download the matching vscode.d.ts from GitHub
    // via curl, and rewrite its header for DefinitelyTyped.
    tag = cp
        .execSync('git describe --tags `git rev-list --tags --max-count=1`')
        .toString()
        .trim();
    const dtsUri = `https://raw.githubusercontent.com/microsoft/vscode/${tag}/src/vs/vscode.d.ts`;
    // NOTE(review): assumes a DefinitelyTyped checkout exists under the
    // current working directory — confirm against the pipeline definition.
    const outPath = path.resolve(process.cwd(), 'DefinitelyTyped/types/vscode/index.d.ts');
    cp.execSync(`curl ${dtsUri} --output ${outPath}`);
    updateDTSFile(outPath, tag);
    console.log(`Done updating vscode.d.ts at ${outPath}`);
}
catch (err) {
    // Any failure (git, curl, or file rewrite) aborts the pipeline step.
    console.error(err);
    console.error('Failed to update types');
    process.exit(1);
}
/**
 * Rewrites the .d.ts file at `outPath` in place, replacing its license
 * header with the DefinitelyTyped-style header for release `tag`.
 *
 * @param {string} outPath path of the index.d.ts file to rewrite
 * @param {string} tag release tag, e.g. "1.34.0"
 */
function updateDTSFile(outPath, tag) {
    const oldContent = fs.readFileSync(outPath, 'utf-8');
    const newContent = getNewFileContent(oldContent, tag);
    fs.writeFileSync(outPath, newContent);
}
/**
 * Produces the new file content by swapping the original 4-line license
 * header for the DefinitelyTyped header built by getNewFileHeader.
 *
 * NOTE(review): this slices `content` by the expected header's length
 * without verifying the file actually starts with that exact text (same
 * wording and LF line endings) — if the upstream header changes, the slice
 * silently cuts the wrong span. Confirm the assumption still holds.
 *
 * @param {string} content original file content
 * @param {string} tag release tag, e.g. "1.34.0"
 * @returns {string} content with the header replaced
 */
function getNewFileContent(content, tag) {
    const oldheader = [
        `/*---------------------------------------------------------------------------------------------`,
        ` * Copyright (c) Microsoft Corporation. All rights reserved.`,
        ` * Licensed under the Source EULA. See License.txt in the project root for license information.`,
        ` *--------------------------------------------------------------------------------------------*/`
    ].join('\n');
    return getNewFileHeader(tag) + content.slice(oldheader.length);
}
/**
 * Builds the DefinitelyTyped-style header for the given release tag.
 * Only the major and minor components are surfaced ("1.34.0" -> "1.34").
 *
 * @param {string} tag release tag, e.g. "1.34.0"
 * @returns {string} the replacement header text (no trailing newline)
 */
function getNewFileHeader(tag) {
    const [major, minor] = tag.split('.');
    const shorttag = `${major}.${minor}`;
    const lines = [
        `// Type definitions for Visual Studio Code ${shorttag}`,
        `// Project: https://github.com/microsoft/vscode`,
        `// Definitions by: Visual Studio Code Team, Microsoft <https://github.com/Microsoft>`,
        `// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped`,
        ``,
        `/*---------------------------------------------------------------------------------------------`,
        ` * Copyright (c) Microsoft Corporation. All rights reserved.`,
        ` * Licensed under the Source EULA.`,
        ` * See https://github.com/Microsoft/vscode/blob/master/LICENSE.txt for license information.`,
        ` *--------------------------------------------------------------------------------------------*/`,
        ``,
        `/**`,
        ` * Type Definition for Visual Studio Code ${shorttag} Extension API`,
        ` * See https://code.visualstudio.com/api for more information`,
        ` */`
    ];
    return lines.join('\n');
}

View File

@@ -5,7 +5,7 @@ steps:
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2 - task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs: inputs:
versionSpec: "1.x" versionSpec: "1.10.1"
- task: AzureKeyVault@1 - task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets' displayName: 'Azure Key Vault: Get Secrets'

View File

@@ -13,37 +13,19 @@ const util = require('../lib/util');
const root = path.dirname(path.dirname(__dirname)); const root = path.dirname(path.dirname(__dirname));
const commit = util.getVersion(root); const commit = util.getVersion(root);
// optionally allow to pass in explicit base/maps to upload function main() {
const [, , base, maps] = process.argv; const vs = vfs.src('out-vscode-min/**/*.map', { base: 'out-vscode-min' }) // client source-maps only
const fetch = function (base, maps = `${base}/**/*.map`) {
return vfs.src(maps, { base })
.pipe(es.mapSync(f => { .pipe(es.mapSync(f => {
f.path = `${f.base}/core/${f.relative}`; f.path = `${f.base}/core/${f.relative}`;
return f; return f;
})); }));
};
function main() { const extensionsOut = vfs.src(['.build/extensions/**/*.js.map', '!**/node_modules/**'], { base: '.build' });
const sources = [];
// vscode client maps (default) return es.merge(vs, extensionsOut)
if (!base) {
const vs = fetch('out-vscode-min'); // client source-maps only
sources.push(vs);
const extensionsOut = vfs.src(['.build/extensions/**/*.js.map', '!**/node_modules/**'], { base: '.build' });
sources.push(extensionsOut);
}
// specific client base/maps
else {
sources.push(fetch(base, maps));
}
return es.merge(...sources)
.pipe(es.through(function (data) { .pipe(es.through(function (data) {
console.log('Uploading Sourcemap', data.relative); // debug // debug
console.log('Uploading Sourcemap', data.relative);
this.emit('data', data); this.emit('data', data);
})) }))
.pipe(azure.upload({ .pipe(azure.upload({
@@ -54,4 +36,4 @@ function main() {
})); }));
} }
main(); main();

View File

@@ -2,12 +2,11 @@ steps:
- script: | - script: |
mkdir -p .build mkdir -p .build
echo -n $BUILD_SOURCEVERSION > .build/commit echo -n $BUILD_SOURCEVERSION > .build/commit
echo -n $VSCODE_QUALITY > .build/quality
displayName: Prepare cache flag displayName: Prepare cache flag
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1 - task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs: inputs:
keyfile: 'build/.cachesalt, .build/commit, .build/quality' keyfile: 'build/.cachesalt, .build/commit'
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min' targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
vstsFeed: 'npm-vscode' vstsFeed: 'npm-vscode'
platformIndependent: true platformIndependent: true
@@ -25,7 +24,7 @@ steps:
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2 - task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs: inputs:
versionSpec: "1.x" versionSpec: "1.10.1"
- task: AzureKeyVault@1 - task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets' displayName: 'Azure Key Vault: Get Secrets'
@@ -88,15 +87,6 @@ steps:
yarn gulp vscode-web-min-ci yarn gulp vscode-web-min-ci
displayName: Build displayName: Build
# upload only the workbench.web.api.js source maps because
# we just compiled these bits in the previous step and the
# general task to upload source maps has already been run
- script: |
set -e
AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \
node build/azure-pipelines/upload-sourcemaps out-vscode-web-min out-vscode-web-min/vs/workbench/workbench.web.api.js.map
displayName: Upload sourcemaps (Web)
- script: | - script: |
set -e set -e
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \ AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \

View File

@@ -2,70 +2,52 @@ steps:
- task: NodeTool@0 - task: NodeTool@0
inputs: inputs:
versionSpec: "10.15.1" versionSpec: "10.15.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 # {{SQL CARBON EDIT}} update version - task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs: inputs:
versionSpec: "1.x" versionSpec: "1.10.1"
- task: UsePythonVersion@0 - task: UsePythonVersion@0
inputs: inputs:
versionSpec: '2.x' versionSpec: '2.x'
addToPath: true addToPath: true
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1 - task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs: inputs:
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock' keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules' targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: '$(build-cache)' # {{SQL CARBON EDIT}} update build cache vstsFeed: '$(ArtifactFeed)'
- powershell: | - powershell: |
yarn --frozen-lockfile yarn --frozen-lockfile
env:
CHILD_CONCURRENCY: "1"
GITHUB_TOKEN: $(GITHUB_TOKEN) # {{SQL CARBON EDIT}} add github token
displayName: Install Dependencies displayName: Install Dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true')) condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1 - task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs: inputs:
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock' keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules' targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: '$(build-cache)' # {{SQL CARBON EDIT}} update build cache vstsFeed: '$(ArtifactFeed)'
condition: and(succeeded(), ne(variables['CacheRestored'], 'true')) condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- powershell: | - powershell: |
yarn electron yarn gulp electron
env: displayName: Download Electron
GITHUB_TOKEN: $(GITHUB_TOKEN) # {{SQL CARBON EDIT}} add github token - powershell: |
- script: | yarn gulp hygiene
yarn gulp hygiene --skip-tslint
displayName: Run Hygiene Checks displayName: Run Hygiene Checks
- script: | - powershell: |
yarn gulp tslint yarn monaco-compile-check
displayName: Run TSLint Checks displayName: Run Monaco Editor Checks
- script: | # {{SQL CARBON EDIT}} add step
yarn tslint
displayName: Run TSLint (gci)
- script: | # {{SQL CARBON EDIT}} add step
yarn strict-null-check
displayName: Run Strict Null Check
# - powershell: | {{SQL CARBON EDIT}} remove step
# yarn monaco-compile-check
# displayName: Run Monaco Editor Checks
- powershell: | - powershell: |
yarn compile yarn compile
displayName: Compile Sources displayName: Compile Sources
# - powershell: | {{SQL CARBON EDIT}} remove step - powershell: |
# yarn download-builtin-extensions yarn download-builtin-extensions
# displayName: Download Built-in Extensions displayName: Download Built-in Extensions
- powershell: | - powershell: |
.\scripts\test.bat --tfs "Unit Tests" .\scripts\test.bat --tfs "Unit Tests"
displayName: Run Unit Tests displayName: Run Unit Tests
# - powershell: | {{SQL CARBON EDIT}} remove step - powershell: |
# .\scripts\test-integration.bat --tfs "Integration Tests" .\scripts\test-integration.bat --tfs "Integration Tests"
# displayName: Run Integration Tests displayName: Run Integration Tests
- task: PublishTestResults@2 - task: PublishTestResults@2
displayName: Publish Tests Results displayName: Publish Tests Results
inputs: inputs:
testResultsFiles: '*-results.xml' testResultsFiles: '*-results.xml'
searchFolder: '$(Build.ArtifactStagingDirectory)/test-results' searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
condition: succeededOrFailed() condition: succeededOrFailed()
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0 # {{SQL CARBON EDIT}} add task
displayName: 'Component Detection'
inputs:
alertWarningLevel: High
failOnAlert: true

View File

@@ -2,12 +2,11 @@ steps:
- powershell: | - powershell: |
mkdir .build -ea 0 mkdir .build -ea 0
"$env:BUILD_SOURCEVERSION" | Out-File -Encoding ascii -NoNewLine .build\commit "$env:BUILD_SOURCEVERSION" | Out-File -Encoding ascii -NoNewLine .build\commit
"$env:VSCODE_QUALITY" | Out-File -Encoding ascii -NoNewLine .build\quality
displayName: Prepare cache flag displayName: Prepare cache flag
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1 - task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs: inputs:
keyfile: 'build/.cachesalt, .build/commit, .build/quality' keyfile: 'build/.cachesalt, .build/commit'
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min' targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
vstsFeed: 'npm-vscode' vstsFeed: 'npm-vscode'
platformIndependent: true platformIndependent: true
@@ -25,7 +24,7 @@ steps:
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2 - task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs: inputs:
versionSpec: "1.x" versionSpec: "1.10.1"
- task: UsePythonVersion@0 - task: UsePythonVersion@0
inputs: inputs:
@@ -107,21 +106,16 @@ steps:
- powershell: | - powershell: |
. build/azure-pipelines/win32/exec.ps1 . build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop" $ErrorActionPreference = "Stop"
exec { yarn electron $(VSCODE_ARCH) } exec { yarn gulp "electron-$(VSCODE_ARCH)" }
exec { .\scripts\test.bat --build --tfs "Unit Tests" } exec { .\scripts\test.bat --build --tfs "Unit Tests" }
displayName: Run unit tests displayName: Run unit tests
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false')) condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- powershell: | - powershell: |
# Figure out the full absolute path of the product we just built
# including the remote server and configure the integration tests
# to run with these builds instead of running out of sources.
. build/azure-pipelines/win32/exec.ps1 . build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop" $ErrorActionPreference = "Stop"
$AppRoot = "$(agent.builddirectory)\VSCode-win32-$(VSCODE_ARCH)" exec { yarn gulp "electron-$(VSCODE_ARCH)" }
$AppProductJson = Get-Content -Raw -Path "$AppRoot\resources\app\product.json" | ConvertFrom-Json exec { .\scripts\test-integration.bat --build --tfs "Integration Tests" }
$AppNameShort = $AppProductJson.nameShort
exec { $env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe"; $env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\vscode-reh-win32-$(VSCODE_ARCH)"; .\scripts\test-integration.bat --build --tfs "Integration Tests" }
displayName: Run integration tests displayName: Run integration tests
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false')) condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

View File

@@ -1,7 +1,7 @@
[ [
{ {
"name": "Microsoft.sqlservernotebook", "name": "Microsoft.sqlservernotebook",
"version": "0.3.2", "version": "0.1.2",
"repo": "https://github.com/Microsoft/azuredatastudio" "repo": "https://github.com/Microsoft/azuredatastudio"
} }
] ]

View File

@@ -1,7 +1,2 @@
[ [
{
"name": "Microsoft.sqlservernotebook",
"version": "0.3.2",
"repo": "https://github.com/Microsoft/azuredatastudio"
}
] ]

View File

@@ -41,7 +41,12 @@ var editorEntryPoints = [
]; ];
var editorResources = [ var editorResources = [
'out-editor-build/vs/base/browser/ui/codiconLabel/**/*.ttf' 'out-build/vs/{base,editor}/**/*.{svg,png}',
'!out-build/vs/base/browser/ui/splitview/**/*',
'!out-build/vs/base/browser/ui/toolbar/**/*',
'!out-build/vs/base/browser/ui/octiconLabel/**/*',
'!out-build/vs/workbench/**',
'!**/test/**'
]; ];
var BUNDLED_FILE_HEADER = [ var BUNDLED_FILE_HEADER = [
@@ -84,6 +89,9 @@ const extractEditorSrcTask = task.define('extract-editor-src', () => {
`lib.dom.d.ts`, `lib.dom.d.ts`,
`lib.webworker.importscripts.d.ts` `lib.webworker.importscripts.d.ts`
], ],
redirects: {
'vs/base/browser/ui/octiconLabel/octiconLabel': 'vs/base/browser/ui/octiconLabel/octiconLabel.mock',
},
shakeLevel: 2, // 0-Files, 1-InnerFile, 2-ClassMembers shakeLevel: 2, // 0-Files, 1-InnerFile, 2-ClassMembers
importIgnorePattern: /(^vs\/css!)|(promise-polyfill\/polyfill)/, importIgnorePattern: /(^vs\/css!)|(promise-polyfill\/polyfill)/,
destRoot: path.join(root, 'out-editor-src') destRoot: path.join(root, 'out-editor-src')

View File

@@ -21,15 +21,10 @@ const nlsDev = require('vscode-nls-dev');
const root = path.dirname(__dirname); const root = path.dirname(__dirname);
const commit = util.getVersion(root); const commit = util.getVersion(root);
const plumber = require('gulp-plumber'); const plumber = require('gulp-plumber');
const _ = require('underscore');
const ext = require('./lib/extensions'); const ext = require('./lib/extensions');
const extensionsPath = path.join(path.dirname(__dirname), 'extensions'); const extensionsPath = path.join(path.dirname(__dirname), 'extensions');
// {{SQL CARBON EDIT}}
const sqlLocalizedExtensions = [
'dacpac',
'schema-compare'
];
// {{SQL CARBON EDIT}}
const compilations = glob.sync('**/tsconfig.json', { const compilations = glob.sync('**/tsconfig.json', {
cwd: extensionsPath, cwd: extensionsPath,
@@ -42,38 +37,38 @@ const tasks = compilations.map(function (tsconfigFile) {
const absolutePath = path.join(extensionsPath, tsconfigFile); const absolutePath = path.join(extensionsPath, tsconfigFile);
const relativeDirname = path.dirname(tsconfigFile); const relativeDirname = path.dirname(tsconfigFile);
const overrideOptions = {}; const tsconfig = require(absolutePath);
overrideOptions.sourceMap = true; const tsOptions = _.assign({}, tsconfig.extends ? require(path.join(extensionsPath, relativeDirname, tsconfig.extends)).compilerOptions : {}, tsconfig.compilerOptions);
tsOptions.verbose = false;
tsOptions.sourceMap = true;
const name = relativeDirname.replace(/\//g, '-'); const name = relativeDirname.replace(/\//g, '-');
const root = path.join('extensions', relativeDirname); const root = path.join('extensions', relativeDirname);
const srcBase = path.join(root, 'src'); const srcBase = path.join(root, 'src');
const src = path.join(srcBase, '**'); const src = path.join(srcBase, '**');
const srcOpts = { cwd: path.dirname(__dirname), base: srcBase };
const out = path.join(root, 'out'); const out = path.join(root, 'out');
const baseUrl = getBaseUrl(out); const baseUrl = getBaseUrl(out);
let headerId, headerOut; let headerId, headerOut;
let index = relativeDirname.indexOf('/'); let index = relativeDirname.indexOf('/');
if (index < 0) { if (index < 0) {
headerId = 'microsoft.' + relativeDirname; // {{SQL CARBON EDIT}} headerId = 'vscode.' + relativeDirname;
headerOut = 'out'; headerOut = 'out';
} else { } else {
headerId = 'microsoft.' + relativeDirname.substr(0, index); // {{SQL CARBON EDIT}} headerId = 'vscode.' + relativeDirname.substr(0, index);
headerOut = relativeDirname.substr(index + 1) + '/out'; headerOut = relativeDirname.substr(index + 1) + '/out';
} }
function createPipeline(build, emitError) { function createPipeline(build, emitError) {
const reporter = createReporter(); const reporter = createReporter();
overrideOptions.inlineSources = Boolean(build); tsOptions.inlineSources = !!build;
overrideOptions.base = path.dirname(absolutePath); tsOptions.base = path.dirname(absolutePath);
const compilation = tsb.create(absolutePath, overrideOptions, false, err => reporter(err.toString())); const compilation = tsb.create(tsOptions, null, null, err => reporter(err.toString()));
const pipeline = function () { return function () {
const input = es.through(); const input = es.through();
const tsFilter = filter(['**/*.ts', '!**/lib/lib*.d.ts', '!**/node_modules/**'], { restore: true }); const tsFilter = filter(['**/*.ts', '!**/lib/lib*.d.ts', '!**/node_modules/**'], { restore: true });
const output = input const output = input
@@ -103,19 +98,15 @@ const tasks = compilations.map(function (tsconfigFile) {
return es.duplex(input, output); return es.duplex(input, output);
}; };
// add src-stream for project files
pipeline.tsProjectSrc = () => {
return compilation.src(srcOpts);
};
return pipeline;
} }
const srcOpts = { cwd: path.dirname(__dirname), base: srcBase };
const cleanTask = task.define(`clean-extension-${name}`, util.rimraf(out)); const cleanTask = task.define(`clean-extension-${name}`, util.rimraf(out));
const compileTask = task.define(`compile-extension:${name}`, task.series(cleanTask, () => { const compileTask = task.define(`compile-extension:${name}`, task.series(cleanTask, () => {
const pipeline = createPipeline(sqlLocalizedExtensions.includes(name), true); // {{SQL CARBON EDIT}} const pipeline = createPipeline(false, true);
const input = pipeline.tsProjectSrc(); const input = gulp.src(src, srcOpts);
return input return input
.pipe(pipeline()) .pipe(pipeline())
@@ -124,8 +115,8 @@ const tasks = compilations.map(function (tsconfigFile) {
const watchTask = task.define(`watch-extension:${name}`, task.series(cleanTask, () => { const watchTask = task.define(`watch-extension:${name}`, task.series(cleanTask, () => {
const pipeline = createPipeline(false); const pipeline = createPipeline(false);
const input = pipeline.tsProjectSrc(); const input = gulp.src(src, srcOpts);
const watchInput = watcher(src, { ...srcOpts, ...{ readDelay: 200 } }); const watchInput = watcher(src, srcOpts);
return watchInput return watchInput
.pipe(util.incremental(pipeline, input)) .pipe(util.incremental(pipeline, input))
@@ -134,7 +125,7 @@ const tasks = compilations.map(function (tsconfigFile) {
const compileBuildTask = task.define(`compile-build-extension-${name}`, task.series(cleanTask, () => { const compileBuildTask = task.define(`compile-build-extension-${name}`, task.series(cleanTask, () => {
const pipeline = createPipeline(true, true); const pipeline = createPipeline(true, true);
const input = pipeline.tsProjectSrc(); const input = gulp.src(src, srcOpts);
return input return input
.pipe(pipeline()) .pipe(pipeline())
@@ -165,8 +156,8 @@ const cleanExtensionsBuildTask = task.define('clean-extensions-build', util.rimr
const compileExtensionsBuildTask = task.define('compile-extensions-build', task.series( const compileExtensionsBuildTask = task.define('compile-extensions-build', task.series(
cleanExtensionsBuildTask, cleanExtensionsBuildTask,
task.define('bundle-extensions-build', () => ext.packageLocalExtensionsStream().pipe(gulp.dest('.build'))), task.define('bundle-extensions-build', () => ext.packageLocalExtensionsStream().pipe(gulp.dest('.build'))),
task.define('bundle-marketplace-extensions-build', () => ext.packageMarketplaceExtensionsStream().pipe(gulp.dest('.build'))) task.define('bundle-marketplace-extensions-build', () => ext.packageMarketplaceExtensionsStream().pipe(gulp.dest('.build'))),
)); ));
gulp.task(compileExtensionsBuildTask); gulp.task(compileExtensionsBuildTask);
exports.compileExtensionsBuildTask = compileExtensionsBuildTask; exports.compileExtensionsBuildTask = compileExtensionsBuildTask;

View File

@@ -17,7 +17,6 @@ const vfs = require('vinyl-fs');
const path = require('path'); const path = require('path');
const fs = require('fs'); const fs = require('fs');
const pall = require('p-all'); const pall = require('p-all');
const task = require('./lib/task');
/** /**
* Hygiene works by creating cascading subsets of all our files and * Hygiene works by creating cascading subsets of all our files and
@@ -51,15 +50,12 @@ const indentationFilter = [
'!src/vs/css.js', '!src/vs/css.js',
'!src/vs/css.build.js', '!src/vs/css.build.js',
'!src/vs/loader.js', '!src/vs/loader.js',
'!src/vs/base/common/insane/insane.js',
'!src/vs/base/common/marked/marked.js', '!src/vs/base/common/marked/marked.js',
'!src/vs/base/node/terminateProcess.sh', '!src/vs/base/node/terminateProcess.sh',
'!src/vs/base/node/cpuUsage.sh', '!src/vs/base/node/cpuUsage.sh',
'!test/assert.js', '!test/assert.js',
'!build/testSetup.js',
// except specific folders // except specific folders
'!test/automation/out/**',
'!test/smoke/out/**', '!test/smoke/out/**',
'!extensions/vscode-api-tests/testWorkspace/**', '!extensions/vscode-api-tests/testWorkspace/**',
'!extensions/vscode-api-tests/testWorkspace2/**', '!extensions/vscode-api-tests/testWorkspace2/**',
@@ -68,12 +64,11 @@ const indentationFilter = [
// except multiple specific files // except multiple specific files
'!**/package.json', '!**/package.json',
'!**/package-lock.json', // {{SQL CARBON EDIT}}
'!**/yarn.lock', '!**/yarn.lock',
'!**/yarn-error.log', '!**/yarn-error.log',
// except multiple specific folders // except multiple specific folders
'!**/codicon/**', '!**/octicons/**',
'!**/fixtures/**', '!**/fixtures/**',
'!**/lib/**', '!**/lib/**',
'!extensions/**/out/**', '!extensions/**/out/**',
@@ -103,8 +98,7 @@ const indentationFilter = [
'!extensions/admin-tool-ext-win/ssmsmin/**', '!extensions/admin-tool-ext-win/ssmsmin/**',
'!extensions/resource-deployment/notebooks/**', '!extensions/resource-deployment/notebooks/**',
'!extensions/mssql/notebooks/**', '!extensions/mssql/notebooks/**',
'!extensions/big-data-cluster/src/bigDataCluster/controller/apiGenerated.ts', '!extensions/big-data-cluster/src/bigDataCluster/controller/apiGenerated.ts'
'!extensions/big-data-cluster/src/bigDataCluster/controller/clusterApiGenerated2.ts'
]; ];
const copyrightFilter = [ const copyrightFilter = [
@@ -135,15 +129,14 @@ const copyrightFilter = [
'!extensions/html-language-features/server/src/modes/typescript/*', '!extensions/html-language-features/server/src/modes/typescript/*',
'!extensions/*/server/bin/*', '!extensions/*/server/bin/*',
'!src/vs/editor/test/node/classification/typescript-test.ts', '!src/vs/editor/test/node/classification/typescript-test.ts',
'!scripts/code-web.js',
// {{SQL CARBON EDIT}} // {{SQL CARBON EDIT}}
'!extensions/notebook/src/intellisense/text.ts', '!extensions/notebook/src/intellisense/text.ts',
'!extensions/mssql/src/hdfs/webhdfs.ts', '!extensions/mssql/src/objectExplorerNodeProvider/webhdfs.ts',
'!src/sql/workbench/parts/notebook/browser/outputs/tableRenderers.ts', '!src/sql/workbench/parts/notebook/browser/outputs/tableRenderers.ts',
'!src/sql/workbench/parts/notebook/common/models/url.ts', '!src/sql/workbench/parts/notebook/common/models/url.ts',
'!src/sql/workbench/parts/notebook/browser/models/renderMimeInterfaces.ts', '!src/sql/workbench/parts/notebook/common/models/renderMimeInterfaces.ts',
'!src/sql/workbench/parts/notebook/browser/models/outputProcessor.ts', '!src/sql/workbench/parts/notebook/common/models/outputProcessor.ts',
'!src/sql/workbench/parts/notebook/browser/models/mimemodel.ts', '!src/sql/workbench/parts/notebook/common/models/mimemodel.ts',
'!src/sql/workbench/parts/notebook/browser/cellViews/media/*.css', '!src/sql/workbench/parts/notebook/browser/cellViews/media/*.css',
'!src/sql/base/browser/ui/table/plugins/rowSelectionModel.plugin.ts', '!src/sql/base/browser/ui/table/plugins/rowSelectionModel.plugin.ts',
'!src/sql/base/browser/ui/table/plugins/rowDetailView.ts', '!src/sql/base/browser/ui/table/plugins/rowDetailView.ts',
@@ -164,7 +157,6 @@ const copyrightFilter = [
'!extensions/notebook/src/prompts/**', '!extensions/notebook/src/prompts/**',
'!extensions/mssql/src/prompts/**', '!extensions/mssql/src/prompts/**',
'!extensions/notebook/resources/jupyter_config/**', '!extensions/notebook/resources/jupyter_config/**',
'!extensions/query-history/images/**',
'!**/*.gif', '!**/*.gif',
'!**/*.xlf', '!**/*.xlf',
'!**/*.dacpac', '!**/*.dacpac',
@@ -179,82 +171,36 @@ const eslintFilter = [
'!src/vs/nls.js', '!src/vs/nls.js',
'!src/vs/css.build.js', '!src/vs/css.build.js',
'!src/vs/nls.build.js', '!src/vs/nls.build.js',
'!src/**/insane.js',
'!src/**/marked.js', '!src/**/marked.js',
'!**/test/**' '!**/test/**'
]; ];
const tslintBaseFilter = [ const tslintFilter = [
'src/**/*.ts',
'test/**/*.ts',
'extensions/**/*.ts',
'!**/fixtures/**', '!**/fixtures/**',
'!**/typings/**', '!**/typings/**',
'!**/node_modules/**', '!**/node_modules/**',
'!extensions/typescript-basics/test/colorize-fixtures/**', '!extensions/typescript/test/colorize-fixtures/**',
'!extensions/vscode-api-tests/testWorkspace/**', '!extensions/vscode-api-tests/testWorkspace/**',
'!extensions/vscode-api-tests/testWorkspace2/**', '!extensions/vscode-api-tests/testWorkspace2/**',
'!extensions/**/*.test.ts', '!extensions/**/*.test.ts',
'!extensions/html-language-features/server/lib/jquery.d.ts', '!extensions/html-language-features/server/lib/jquery.d.ts',
'!extensions/big-data-cluster/src/bigDataCluster/controller/apiGenerated.ts', // {{SQL CARBON EDIT}}, // {{SQL CARBON EDIT}}
'!extensions/big-data-cluster/src/bigDataCluster/controller/tokenApiGenerated.ts' // {{SQL CARBON EDIT}}, '!extensions/big-data-cluster/src/bigDataCluster/controller/apiGenerated.ts'
]; ];
// {{SQL CARBON EDIT}} // {{SQL CARBON EDIT}}
const useStrictFilter = [
'src/**'
];
const sqlFilter = [ const sqlFilter = [
'src/sql/**', 'src/sql/**'
'extensions/**',
// Ignore VS Code extensions
'!extensions/bat/**',
'!extensions/configuration-editing/**',
'!extensions/docker/**',
'!extensions/extension-editing/**',
'!extensions/git/**',
'!extensions/git-ui/**',
'!extensions/image-preview/**',
'!extensions/insights-default/**',
'!extensions/json/**',
'!extensions/json-language-features/**',
'!extensions/markdown-basics/**',
'!extensions/markdown-language-features/**',
'!extensions/merge-conflict/**',
'!extensions/powershell/**',
'!extensions/python/**',
'!extensions/r/**',
'!extensions/theme-*/**',
'!extensions/vscode-*/**',
'!extensions/xml/**',
'!extensions/xml-language-features/**',
'!extensions/yarml/**',
]; ];
const tslintCoreFilter = [ // {{SQL CARBON EDIT}}
'src/**/*.ts',
'test/**/*.ts',
'!extensions/**/*.ts',
'!test/automation/**',
'!test/smoke/**',
...tslintBaseFilter
];
const tslintExtensionsFilter = [
'extensions/**/*.ts',
'!src/**/*.ts',
'!test/**/*.ts',
'test/automation/**/*.ts',
...tslintBaseFilter
];
const tslintHygieneFilter = [
'src/**/*.ts',
'test/**/*.ts',
'extensions/**/*.ts',
...tslintBaseFilter
];
const fileLengthFilter = filter([
'**',
'!extensions/import/*.docx',
'!extensions/admin-tool-ext-win/license/**'
], {restore: true});
const copyrightHeaderLines = [ const copyrightHeaderLines = [
'/*---------------------------------------------------------------------------------------------', '/*---------------------------------------------------------------------------------------------',
' * Copyright (c) Microsoft Corporation. All rights reserved.', ' * Copyright (c) Microsoft Corporation. All rights reserved.',
@@ -271,49 +217,14 @@ gulp.task('eslint', () => {
}); });
gulp.task('tslint', () => { gulp.task('tslint', () => {
return es.merge([ const options = { emitError: true };
// Core: include type information (required by certain rules like no-nodejs-globals) return vfs.src(all, { base: '.', follow: true, allowEmpty: true })
vfs.src(all, { base: '.', follow: true, allowEmpty: true }) .pipe(filter(tslintFilter))
.pipe(filter(tslintCoreFilter)) .pipe(gulptslint.default({ rulesDirectory: 'build/lib/tslint' }))
.pipe(gulptslint.default({ rulesDirectory: 'build/lib/tslint', program: tslint.Linter.createProgram('src/tsconfig.json') })) .pipe(gulptslint.default.report(options));
.pipe(gulptslint.default.report({ emitError: true })),
// Exenstions: do not include type information
vfs.src(all, { base: '.', follow: true, allowEmpty: true })
.pipe(filter(tslintExtensionsFilter))
.pipe(gulptslint.default({ rulesDirectory: 'build/lib/tslint' }))
.pipe(gulptslint.default.report({ emitError: true }))
]).pipe(es.through());
}); });
function checkPackageJSON(actualPath) {
const actual = require(path.join(__dirname, '..', actualPath));
const rootPackageJSON = require('../package.json');
for (let depName in actual.dependencies) {
const depVersion = actual.dependencies[depName];
const rootDepVersion = rootPackageJSON.dependencies[depName];
if (!rootDepVersion) {
// missing in root is allowed
continue;
}
if (depVersion !== rootDepVersion) {
this.emit('error', `The dependency ${depName} in '${actualPath}' (${depVersion}) is different than in the root package.json (${rootDepVersion})`);
}
}
}
const checkPackageJSONTask = task.define('check-package-json', () => {
return gulp.src('package.json')
.pipe(es.through(function() {
checkPackageJSON.call(this, 'remote/package.json');
checkPackageJSON.call(this, 'remote/web/package.json');
}));
});
gulp.task(checkPackageJSONTask);
function hygiene(some) { function hygiene(some) {
let errorCount = 0; let errorCount = 0;
@@ -363,6 +274,23 @@ function hygiene(some) {
this.emit('data', file); this.emit('data', file);
}); });
// {{SQL CARBON EDIT}}
// Check for unnecessary 'use strict' lines. These are automatically added by the alwaysStrict compiler option so don't need to be added manually
const useStrict = es.through(function (file) {
const lines = file.__lines;
// Only take the first 10 lines to reduce false positives- the compiler will throw an error if it's not the first non-comment line in a file
// (10 is used to account for copyright and extraneous newlines)
lines.slice(0, 10).forEach((line, i) => {
if (/\s*'use\s*strict\s*'/.test(line)) {
console.error(file.relative + '(' + (i + 1) + ',1): Unnecessary \'use strict\' - this is already added by the compiler');
errorCount++;
}
});
this.emit('data', file);
});
// {{SQL CARBON EDIT}} END
const formatting = es.map(function (file, cb) { const formatting = es.map(function (file, cb) {
tsfmt.processString(file.path, file.contents.toString('utf8'), { tsfmt.processString(file.path, file.contents.toString('utf8'), {
verify: false, verify: false,
@@ -393,23 +321,6 @@ function hygiene(some) {
}); });
}); });
const filelength = es.through(function (file) {
const fileName = path.basename(file.relative);
const fileDir = path.dirname(file.relative);
//check the filename is < 50 characters (basename gets the filename with extension).
if (fileName.length > 50) {
console.error(`File name '${fileName}' under ${fileDir} is too long. Rename file to have less than 50 characters.`);
errorCount++;
}
if (file.relative.length > 150) {
console.error(`File path ${file.relative} exceeds acceptable file-length. Rename the path to have less than 150 characters.`);
errorCount++;
}
this.emit('data', file);
});
const tslintConfiguration = tslint.Configuration.findConfiguration('tslint.json', '.'); const tslintConfiguration = tslint.Configuration.findConfiguration('tslint.json', '.');
const tslintOptions = { fix: false, formatter: 'json' }; const tslintOptions = { fix: false, formatter: 'json' };
const tsLinter = new tslint.Linter(tslintOptions); const tsLinter = new tslint.Linter(tslintOptions);
@@ -428,22 +339,20 @@ function hygiene(some) {
input = some; input = some;
} }
// {{SQL CARBON EDIT}} Linting for SQL
const tslintSqlConfiguration = tslint.Configuration.findConfiguration('tslint-sql.json', '.'); const tslintSqlConfiguration = tslint.Configuration.findConfiguration('tslint-sql.json', '.');
const tslintSqlOptions = { fix: false, formatter: 'json' }; const tslintSqlOptions = { fix: false, formatter: 'json' };
const sqlTsLinter = new tslint.Linter(tslintSqlOptions); const sqlTsLinter = new tslint.Linter(tslintSqlOptions);
const sqlTsl = es.through(function (file) { //TODO restore const sqlTsl = es.through(function (file) {
const contents = file.contents.toString('utf8'); const contents = file.contents.toString('utf8');
sqlTsLinter.lint(file.relative, contents, tslintSqlConfiguration.results); sqlTsLinter.lint(file.relative, contents, tslintSqlConfiguration.results);
this.emit('data', file);
}); });
const productJsonFilter = filter('product.json', { restore: true }); const productJsonFilter = filter('product.json', { restore: true });
const result = input const result = input
.pipe(fileLengthFilter)
.pipe(filelength)
.pipe(fileLengthFilter.restore)
.pipe(filter(f => !f.stat.isDirectory())) .pipe(filter(f => !f.stat.isDirectory()))
.pipe(productJsonFilter) .pipe(productJsonFilter)
.pipe(process.env['BUILD_SOURCEVERSION'] ? es.through() : productJson) .pipe(process.env['BUILD_SOURCEVERSION'] ? es.through() : productJson)
@@ -453,16 +362,15 @@ function hygiene(some) {
.pipe(filter(copyrightFilter)) .pipe(filter(copyrightFilter))
.pipe(copyrights); .pipe(copyrights);
let typescript = result const typescript = result
.pipe(filter(tslintHygieneFilter)) .pipe(filter(tslintFilter))
.pipe(formatting); .pipe(formatting)
.pipe(tsl)
if (!process.argv.some(arg => arg === '--skip-tslint')) { // {{SQL CARBON EDIT}}
typescript = typescript.pipe(tsl); .pipe(filter(useStrictFilter))
typescript = typescript .pipe(useStrict)
.pipe(filter(sqlFilter)) // {{SQL CARBON EDIT}} .pipe(filter(sqlFilter))
.pipe(sqlTsl); .pipe(sqlTsl);
}
const javascript = result const javascript = result
.pipe(filter(eslintFilter)) .pipe(filter(eslintFilter))
@@ -548,7 +456,7 @@ function createGitIndexVinyls(paths) {
.then(r => r.filter(p => !!p)); .then(r => r.filter(p => !!p));
} }
gulp.task('hygiene', task.series(checkPackageJSONTask, () => hygiene())); gulp.task('hygiene', () => hygiene());
// this allows us to run hygiene as a git pre-commit hook // this allows us to run hygiene as a git pre-commit hook
if (require.main === module) { if (require.main === module) {

View File

@@ -31,7 +31,6 @@ const BUILD_TARGETS = [
{ platform: 'linux', arch: 'ia32', pkgTarget: 'node8-linux-x86' }, { platform: 'linux', arch: 'ia32', pkgTarget: 'node8-linux-x86' },
{ platform: 'linux', arch: 'x64', pkgTarget: 'node8-linux-x64' }, { platform: 'linux', arch: 'x64', pkgTarget: 'node8-linux-x64' },
{ platform: 'linux', arch: 'armhf', pkgTarget: 'node8-linux-armv7' }, { platform: 'linux', arch: 'armhf', pkgTarget: 'node8-linux-armv7' },
{ platform: 'linux', arch: 'arm64', pkgTarget: 'node8-linux-arm64' },
{ platform: 'linux', arch: 'alpine', pkgTarget: 'node8-linux-alpine' }, { platform: 'linux', arch: 'alpine', pkgTarget: 'node8-linux-alpine' },
]; ];
@@ -42,7 +41,6 @@ gulp.task('vscode-reh-win32-x64-min', noop);
gulp.task('vscode-reh-darwin-min', noop); gulp.task('vscode-reh-darwin-min', noop);
gulp.task('vscode-reh-linux-x64-min', noop); gulp.task('vscode-reh-linux-x64-min', noop);
gulp.task('vscode-reh-linux-armhf-min', noop); gulp.task('vscode-reh-linux-armhf-min', noop);
gulp.task('vscode-reh-linux-arm64-min', noop);
gulp.task('vscode-reh-linux-alpine-min', noop); gulp.task('vscode-reh-linux-alpine-min', noop);
gulp.task('vscode-reh-web-win32-ia32-min', noop); gulp.task('vscode-reh-web-win32-ia32-min', noop);

View File

@@ -4,6 +4,7 @@
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
'use strict'; 'use strict';
const gulp = require('gulp'); const gulp = require('gulp');
const util = require('./lib/util'); const util = require('./lib/util');
const tsfmt = require('typescript-formatter'); const tsfmt = require('typescript-formatter');
@@ -21,78 +22,72 @@ gulp.task('fmt', () => formatStagedFiles());
const formatFiles = (some) => { const formatFiles = (some) => {
const formatting = es.map(function (file, cb) { const formatting = es.map(function (file, cb) {
tsfmt.processString(file.path, file.contents.toString('utf8'), { tsfmt.processString(file.path, file.contents.toString('utf8'), {
replace: true, replace: true,
tsfmt: true, tsfmt: true,
tslint: true, tslint: true,
tsconfig: true tsconfig: true
// verbose: true // verbose: true
}).then(result => { }).then(result => {
console.info('ran formatting on file ' + file.path + ' result: ' + result.message); console.info('ran formatting on file ' + file.path + ' result: ' + result.message);
if (result.error) { if (result.error) {
console.error(result.message); console.error(result.message);
} }
cb(null, file); cb(null, file);
}, err => { }, err => {
cb(err); cb(err);
});
}); });
}); return gulp.src(some, { base: '.' })
return gulp.src(some, { .pipe(filter(f => !f.stat.isDirectory()))
base: '.' .pipe(formatting);
})
.pipe(filter(f => !f.stat.isDirectory()))
.pipe(formatting);
}; };
const formatStagedFiles = () => { const formatStagedFiles = () => {
const cp = require('child_process'); const cp = require('child_process');
cp.exec('git diff --name-only', { cp.exec('git diff --name-only', { maxBuffer: 2000 * 1024 }, (err, out) => {
maxBuffer: 2000 * 1024 if (err) {
}, (err, out) => { console.error();
if (err) { console.error(err);
console.error(); process.exit(1);
console.error(err); }
process.exit(1);
}
const some = out const some = out
.split(/\r?\n/) .split(/\r?\n/)
.filter(l => !!l) .filter(l => !!l)
.filter(l => l.match(/.*.ts$/i)); .filter(l => l.match(/.*.ts$/i));
formatFiles(some).on('error', err => { formatFiles(some).on('error', err => {
console.error(); console.error();
console.error(err); console.error(err);
process.exit(1); process.exit(1);
});
}); });
});
cp.exec('git diff --cached --name-only', { cp.exec('git diff --cached --name-only', { maxBuffer: 2000 * 1024 }, (err, out) => {
maxBuffer: 2000 * 1024 if (err) {
}, (err, out) => { console.error();
if (err) { console.error(err);
console.error(); process.exit(1);
console.error(err); }
process.exit(1);
}
const some = out const some = out
.split(/\r?\n/) .split(/\r?\n/)
.filter(l => !!l) .filter(l => !!l)
.filter(l => l.match(/.*.ts$/i)); .filter(l => l.match(/.*.ts$/i));
formatFiles(some).on('error', err => { formatFiles(some).on('error', err => {
console.error(); console.error();
console.error(err); console.error(err);
process.exit(1); process.exit(1);
});
}); });
});
}; };
function installService() { function installService() {
let config = require('../extensions/mssql/config.json'); let config = require('../extensions/mssql/src/config.json');
return platformInfo.getCurrent().then(p => { return platformInfo.getCurrent().then(p => {
let runtime = p.runtimeId; let runtime = p.runtimeId;
// fix path since it won't be correct // fix path since it won't be correct
@@ -114,7 +109,7 @@ gulp.task('install-sqltoolsservice', () => {
}); });
function installSsmsMin() { function installSsmsMin() {
const config = require('../extensions/admin-tool-ext-win/config.json'); const config = require('../extensions/admin-tool-ext-win/src/config.json');
return platformInfo.getCurrent().then(p => { return platformInfo.getCurrent().then(p => {
const runtime = p.runtimeId; const runtime = p.runtimeId;
// fix path since it won't be correct // fix path since it won't be correct

View File

@@ -31,7 +31,7 @@ const crypto = require('crypto');
const i18n = require('./lib/i18n'); const i18n = require('./lib/i18n');
const ext = require('./lib/extensions'); // {{SQL CARBON EDIT}} const ext = require('./lib/extensions'); // {{SQL CARBON EDIT}}
const deps = require('./dependencies'); const deps = require('./dependencies');
const { config } = require('./lib/electron'); const getElectronVersion = require('./lib/electron').getElectronVersion;
const createAsar = require('./lib/asar').createAsar; const createAsar = require('./lib/asar').createAsar;
const { compileBuildTask } = require('./gulpfile.compile'); const { compileBuildTask } = require('./gulpfile.compile');
const { compileExtensionsBuildTask } = require('./gulpfile.extensions'); const { compileExtensionsBuildTask } = require('./gulpfile.extensions');
@@ -58,9 +58,10 @@ const nodeModules = [
// Build // Build
const vscodeEntryPoints = _.flatten([ const vscodeEntryPoints = _.flatten([
buildfile.entrypoint('vs/workbench/workbench.desktop.main'), buildfile.entrypoint('vs/workbench/workbench.main'),
buildfile.base, buildfile.base,
buildfile.workbenchDesktop, buildfile.serviceWorker,
buildfile.workbench,
buildfile.code buildfile.code
]); ]);
@@ -78,7 +79,7 @@ const vscodeResources = [
'out-build/vs/base/common/performance.js', 'out-build/vs/base/common/performance.js',
'out-build/vs/base/node/languagePacks.js', 'out-build/vs/base/node/languagePacks.js',
'out-build/vs/base/node/{stdForkStart.js,terminateProcess.sh,cpuUsage.sh,ps.sh}', 'out-build/vs/base/node/{stdForkStart.js,terminateProcess.sh,cpuUsage.sh,ps.sh}',
'out-build/vs/base/browser/ui/codiconLabel/codicon/**', 'out-build/vs/base/browser/ui/octiconLabel/octicons/**',
'out-build/vs/workbench/browser/media/*-theme.css', 'out-build/vs/workbench/browser/media/*-theme.css',
'out-build/vs/workbench/contrib/debug/**/*.json', 'out-build/vs/workbench/contrib/debug/**/*.json',
'out-build/vs/workbench/contrib/externalTerminal/**/*.scpt', 'out-build/vs/workbench/contrib/externalTerminal/**/*.scpt',
@@ -86,6 +87,7 @@ const vscodeResources = [
'out-build/vs/workbench/contrib/webview/electron-browser/pre/*.js', 'out-build/vs/workbench/contrib/webview/electron-browser/pre/*.js',
'out-build/vs/**/markdown.css', 'out-build/vs/**/markdown.css',
'out-build/vs/workbench/contrib/tasks/**/*.json', 'out-build/vs/workbench/contrib/tasks/**/*.json',
'out-build/vs/workbench/contrib/welcome/walkThrough/**/*.md',
'out-build/vs/platform/files/**/*.exe', 'out-build/vs/platform/files/**/*.exe',
'out-build/vs/platform/files/**/*.md', 'out-build/vs/platform/files/**/*.md',
'out-build/vs/code/electron-browser/workbench/**', 'out-build/vs/code/electron-browser/workbench/**',
@@ -123,7 +125,6 @@ const optimizeVSCodeTask = task.define('optimize-vscode', task.series(
resources: vscodeResources, resources: vscodeResources,
loaderConfig: common.loaderConfig(nodeModules), loaderConfig: common.loaderConfig(nodeModules),
out: 'out-vscode', out: 'out-vscode',
inlineAmdImages: true,
bundleInfo: undefined bundleInfo: undefined
}) })
)); ));
@@ -143,6 +144,73 @@ const minifyVSCodeTask = task.define('minify-vscode', task.series(
)); ));
gulp.task(minifyVSCodeTask); gulp.task(minifyVSCodeTask);
// Package
// @ts-ignore JSON checking: darwinCredits is optional
const darwinCreditsTemplate = product.darwinCredits && _.template(fs.readFileSync(path.join(root, product.darwinCredits), 'utf8'));
function darwinBundleDocumentType(extensions, icon) {
return {
name: product.nameLong + ' document',
role: 'Editor',
ostypes: ["TEXT", "utxt", "TUTX", "****"],
extensions: extensions,
iconFile: icon
};
}
const config = {
version: getElectronVersion(),
productAppName: product.nameLong,
companyName: 'Microsoft Corporation',
copyright: 'Copyright (C) 2019 Microsoft. All rights reserved',
darwinIcon: 'resources/darwin/code.icns',
darwinBundleIdentifier: product.darwinBundleIdentifier,
darwinApplicationCategoryType: 'public.app-category.developer-tools',
darwinHelpBookFolder: 'VS Code HelpBook',
darwinHelpBookName: 'VS Code HelpBook',
darwinBundleDocumentTypes: [
// {{SQL CARBON EDIT}} - Remove most document types and replace with ours
darwinBundleDocumentType(["csv", "json", "sqlplan", "sql", "xml"], 'resources/darwin/code_file.icns'),
],
darwinBundleURLTypes: [{
role: 'Viewer',
name: product.nameLong,
urlSchemes: [product.urlProtocol]
}],
darwinForceDarkModeSupport: true,
darwinCredits: darwinCreditsTemplate ? Buffer.from(darwinCreditsTemplate({ commit: commit, date: new Date().toISOString() })) : undefined,
linuxExecutableName: product.applicationName,
winIcon: 'resources/win32/code.ico',
token: process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'] || undefined,
// @ts-ignore JSON checking: electronRepository is optional
repo: product.electronRepository || undefined
};
function getElectron(arch) {
return () => {
const electronOpts = _.extend({}, config, {
platform: process.platform,
arch,
ffmpegChromium: true,
keepDefaultApp: true
});
return gulp.src('package.json')
.pipe(json({ name: product.nameShort }))
.pipe(electron(electronOpts))
.pipe(filter(['**', '!**/app/package.json']))
.pipe(vfs.dest('.build/electron'));
};
}
gulp.task(task.define('electron', task.series(util.rimraf('.build/electron'), getElectron(process.arch))));
gulp.task(task.define('electron-ia32', task.series(util.rimraf('.build/electron'), getElectron('ia32'))));
gulp.task(task.define('electron-x64', task.series(util.rimraf('.build/electron'), getElectron('x64'))));
gulp.task(task.define('electron-arm', task.series(util.rimraf('.build/electron'), getElectron('armv7l'))));
gulp.task(task.define('electron-arm64', task.series(util.rimraf('.build/electron'), getElectron('arm64'))));
/** /**
* Compute checksums for some files. * Compute checksums for some files.
* *
@@ -187,8 +255,8 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op
const out = sourceFolderName; const out = sourceFolderName;
const checksums = computeChecksums(out, [ const checksums = computeChecksums(out, [
'vs/workbench/workbench.desktop.main.js', 'vs/workbench/workbench.main.js',
'vs/workbench/workbench.desktop.main.css', 'vs/workbench/workbench.main.css',
'vs/code/electron-browser/workbench/workbench.html', 'vs/code/electron-browser/workbench/workbench.html',
'vs/code/electron-browser/workbench/workbench.js' 'vs/code/electron-browser/workbench/workbench.js'
]); ]);
@@ -200,7 +268,7 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op
// {{SQL CARBON EDIT}} // {{SQL CARBON EDIT}}
ext.packageBuiltInExtensions(); ext.packageBuiltInExtensions();
const extensions = gulp.src(['.build/extensions/**', '!.build/extensions/node_modules/**'], { base: '.build', dot: true }); // {{SQL CARBON EDIT}} - don't package the node_modules directory const extensions = gulp.src('.build/extensions/**', { base: '.build', dot: true });
const sources = es.merge(src, extensions) const sources = es.merge(src, extensions)
.pipe(filter(['**', '!**/*.js.map'], { dot: true })); .pipe(filter(['**', '!**/*.js.map'], { dot: true }));
@@ -285,15 +353,7 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op
.pipe(electron(_.extend({}, config, { platform, arch, ffmpegChromium: true }))) .pipe(electron(_.extend({}, config, { platform, arch, ffmpegChromium: true })))
.pipe(filter(['**', '!LICENSE', '!LICENSES.chromium.html', '!version'], { dot: true })); .pipe(filter(['**', '!LICENSE', '!LICENSES.chromium.html', '!version'], { dot: true }));
if (platform === 'linux') { // result = es.merge(result, gulp.src('resources/completions/**', { base: '.' }));
result = es.merge(result, gulp.src('resources/completions/bash/code', { base: '.' })
.pipe(replace('@@APPNAME@@', product.applicationName))
.pipe(rename(function (f) { f.basename = product.applicationName; })));
result = es.merge(result, gulp.src('resources/completions/zsh/_code', { base: '.' })
.pipe(replace('@@APPNAME@@', product.applicationName))
.pipe(rename(function (f) { f.basename = '_' + product.applicationName; })));
}
if (platform === 'win32') { if (platform === 'win32') {
result = es.merge(result, gulp.src('resources/win32/bin/code.js', { base: 'resources/win32', allowEmpty: true })); result = es.merge(result, gulp.src('resources/win32/bin/code.js', { base: 'resources/win32', allowEmpty: true }));
@@ -400,7 +460,7 @@ gulp.task(task.define(
optimizeVSCodeTask, optimizeVSCodeTask,
function () { function () {
const pathToMetadata = './out-vscode/nls.metadata.json'; const pathToMetadata = './out-vscode/nls.metadata.json';
const pathToExtensions = '.build/extensions/*'; const pathToExtensions = './extensions/*';
const pathToSetup = 'build/win32/**/{Default.isl,messages.en.isl}'; const pathToSetup = 'build/win32/**/{Default.isl,messages.en.isl}';
return es.merge( return es.merge(
@@ -421,7 +481,7 @@ gulp.task(task.define(
optimizeVSCodeTask, optimizeVSCodeTask,
function () { function () {
const pathToMetadata = './out-vscode/nls.metadata.json'; const pathToMetadata = './out-vscode/nls.metadata.json';
const pathToExtensions = '.build/extensions/*'; const pathToExtensions = './extensions/*';
const pathToSetup = 'build/win32/**/{Default.isl,messages.en.isl}'; const pathToSetup = 'build/win32/**/{Default.isl,messages.en.isl}';
return es.merge( return es.merge(

View File

@@ -43,8 +43,7 @@ function prepareDebPackage(arch) {
.pipe(replace('@@NAME_LONG@@', product.nameLong)) .pipe(replace('@@NAME_LONG@@', product.nameLong))
.pipe(replace('@@NAME_SHORT@@', product.nameShort)) .pipe(replace('@@NAME_SHORT@@', product.nameShort))
.pipe(replace('@@NAME@@', product.applicationName)) .pipe(replace('@@NAME@@', product.applicationName))
.pipe(replace('@@EXEC@@', `/usr/share/${product.applicationName}/${product.applicationName}`)) .pipe(replace('@@ICON@@', product.linuxIconName))
.pipe(replace('@@ICON@@', `/usr/share/pixmaps/${product.linuxIconName}.png`))
.pipe(replace('@@URLPROTOCOL@@', product.urlProtocol)); .pipe(replace('@@URLPROTOCOL@@', product.urlProtocol));
const appdata = gulp.src('resources/linux/code.appdata.xml', { base: '.' }) const appdata = gulp.src('resources/linux/code.appdata.xml', { base: '.' })
@@ -56,13 +55,11 @@ function prepareDebPackage(arch) {
const icon = gulp.src('resources/linux/code.png', { base: '.' }) const icon = gulp.src('resources/linux/code.png', { base: '.' })
.pipe(rename('usr/share/pixmaps/' + product.linuxIconName + '.png')); .pipe(rename('usr/share/pixmaps/' + product.linuxIconName + '.png'));
const bash_completion = gulp.src('resources/completions/bash/code') // const bash_completion = gulp.src('resources/completions/bash/code')
.pipe(replace('@@APPNAME@@', product.applicationName)) // .pipe(rename('usr/share/bash-completion/completions/code'));
.pipe(rename('usr/share/bash-completion/completions/' + product.applicationName));
const zsh_completion = gulp.src('resources/completions/zsh/_code') // const zsh_completion = gulp.src('resources/completions/zsh/_code')
.pipe(replace('@@APPNAME@@', product.applicationName)) // .pipe(rename('usr/share/zsh/vendor-completions/_code'));
.pipe(rename('usr/share/zsh/vendor-completions/_' + product.applicationName));
const code = gulp.src(binaryDir + '/**/*', { base: binaryDir }) const code = gulp.src(binaryDir + '/**/*', { base: binaryDir })
.pipe(rename(function (p) { p.dirname = 'usr/share/' + product.applicationName + '/' + p.dirname; })); .pipe(rename(function (p) { p.dirname = 'usr/share/' + product.applicationName + '/' + p.dirname; }));
@@ -98,7 +95,7 @@ function prepareDebPackage(arch) {
.pipe(replace('@@UPDATEURL@@', product.updateUrl || '@@UPDATEURL@@')) .pipe(replace('@@UPDATEURL@@', product.updateUrl || '@@UPDATEURL@@'))
.pipe(rename('DEBIAN/postinst')); .pipe(rename('DEBIAN/postinst'));
const all = es.merge(control, postinst, postrm, prerm, desktops, appdata, icon, bash_completion, zsh_completion, code); const all = es.merge(control, postinst, postrm, prerm, desktops, appdata, icon, /* bash_completion, zsh_completion, */ code);
return all.pipe(vfs.dest(destination)); return all.pipe(vfs.dest(destination));
}; };
@@ -137,7 +134,6 @@ function prepareRpmPackage(arch) {
.pipe(replace('@@NAME_LONG@@', product.nameLong)) .pipe(replace('@@NAME_LONG@@', product.nameLong))
.pipe(replace('@@NAME_SHORT@@', product.nameShort)) .pipe(replace('@@NAME_SHORT@@', product.nameShort))
.pipe(replace('@@NAME@@', product.applicationName)) .pipe(replace('@@NAME@@', product.applicationName))
.pipe(replace('@@EXEC@@', `/usr/share/${product.applicationName}/${product.applicationName}`))
.pipe(replace('@@ICON@@', product.linuxIconName)) .pipe(replace('@@ICON@@', product.linuxIconName))
.pipe(replace('@@URLPROTOCOL@@', product.urlProtocol)); .pipe(replace('@@URLPROTOCOL@@', product.urlProtocol));
@@ -150,13 +146,11 @@ function prepareRpmPackage(arch) {
const icon = gulp.src('resources/linux/code.png', { base: '.' }) const icon = gulp.src('resources/linux/code.png', { base: '.' })
.pipe(rename('BUILD/usr/share/pixmaps/' + product.linuxIconName + '.png')); .pipe(rename('BUILD/usr/share/pixmaps/' + product.linuxIconName + '.png'));
const bash_completion = gulp.src('resources/completions/bash/code') // const bash_completion = gulp.src('resources/completions/bash/code')
.pipe(replace('@@APPNAME@@', product.applicationName)) // .pipe(rename('BUILD/usr/share/bash-completion/completions/code'));
.pipe(rename('BUILD/usr/share/bash-completion/completions/' + product.applicationName));
const zsh_completion = gulp.src('resources/completions/zsh/_code') // const zsh_completion = gulp.src('resources/completions/zsh/_code')
.pipe(replace('@@APPNAME@@', product.applicationName)) // .pipe(rename('BUILD/usr/share/zsh/site-functions/_code'));
.pipe(rename('BUILD/usr/share/zsh/site-functions/_' + product.applicationName));
const code = gulp.src(binaryDir + '/**/*', { base: binaryDir }) const code = gulp.src(binaryDir + '/**/*', { base: binaryDir })
.pipe(rename(function (p) { p.dirname = 'BUILD/usr/share/' + product.applicationName + '/' + p.dirname; })); .pipe(rename(function (p) { p.dirname = 'BUILD/usr/share/' + product.applicationName + '/' + p.dirname; }));
@@ -179,7 +173,7 @@ function prepareRpmPackage(arch) {
const specIcon = gulp.src('resources/linux/rpm/code.xpm', { base: '.' }) const specIcon = gulp.src('resources/linux/rpm/code.xpm', { base: '.' })
.pipe(rename('SOURCES/' + product.applicationName + '.xpm')); .pipe(rename('SOURCES/' + product.applicationName + '.xpm'));
const all = es.merge(code, desktops, appdata, icon, bash_completion, zsh_completion, spec, specIcon); const all = es.merge(code, desktops, appdata, icon, /* bash_completion, zsh_completion, */ spec, specIcon);
return all.pipe(vfs.dest(getRpmBuildPath(rpmArch))); return all.pipe(vfs.dest(getRpmBuildPath(rpmArch)));
}; };
@@ -208,25 +202,21 @@ function prepareSnapPackage(arch) {
const destination = getSnapBuildPath(arch); const destination = getSnapBuildPath(arch);
return function () { return function () {
// A desktop file that is placed in snap/gui will be placed into meta/gui verbatim.
const desktop = gulp.src('resources/linux/code.desktop', { base: '.' }) const desktop = gulp.src('resources/linux/code.desktop', { base: '.' })
.pipe(rename(`snap/gui/${product.applicationName}.desktop`)); .pipe(rename(`usr/share/applications/${product.applicationName}.desktop`));
// A desktop file that is placed in snap/gui will be placed into meta/gui verbatim.
const desktopUrlHandler = gulp.src('resources/linux/code-url-handler.desktop', { base: '.' }) const desktopUrlHandler = gulp.src('resources/linux/code-url-handler.desktop', { base: '.' })
.pipe(rename(`snap/gui/${product.applicationName}-url-handler.desktop`)); .pipe(rename(`usr/share/applications/${product.applicationName}-url-handler.desktop`));
const desktops = es.merge(desktop, desktopUrlHandler) const desktops = es.merge(desktop, desktopUrlHandler)
.pipe(replace('@@NAME_LONG@@', product.nameLong)) .pipe(replace('@@NAME_LONG@@', product.nameLong))
.pipe(replace('@@NAME_SHORT@@', product.nameShort)) .pipe(replace('@@NAME_SHORT@@', product.nameShort))
.pipe(replace('@@NAME@@', product.applicationName)) .pipe(replace('@@NAME@@', product.applicationName))
.pipe(replace('@@EXEC@@', `${product.applicationName} --force-user-env`)) .pipe(replace('@@ICON@@', `/usr/share/pixmaps/${product.linuxIconName}.png`))
.pipe(replace('@@ICON@@', `\${SNAP}/meta/gui/${product.linuxIconName}.png`))
.pipe(replace('@@URLPROTOCOL@@', product.urlProtocol)); .pipe(replace('@@URLPROTOCOL@@', product.urlProtocol));
// An icon that is placed in snap/gui will be placed into meta/gui verbatim.
const icon = gulp.src('resources/linux/code.png', { base: '.' }) const icon = gulp.src('resources/linux/code.png', { base: '.' })
.pipe(rename(`snap/gui/${product.linuxIconName}.png`)); .pipe(rename(`usr/share/pixmaps/${product.linuxIconName}.png`));
const code = gulp.src(binaryDir + '/**/*', { base: binaryDir }) const code = gulp.src(binaryDir + '/**/*', { base: binaryDir })
.pipe(rename(function (p) { p.dirname = `usr/share/${product.applicationName}/${p.dirname}`; })); .pipe(rename(function (p) { p.dirname = `usr/share/${product.applicationName}/${p.dirname}`; }));
@@ -247,8 +237,7 @@ function prepareSnapPackage(arch) {
function buildSnapPackage(arch) { function buildSnapPackage(arch) {
const snapBuildPath = getSnapBuildPath(arch); const snapBuildPath = getSnapBuildPath(arch);
// Default target for snapcraft runs: pull, build, stage and prime, and finally assembles the snap. return shell.task(`cd ${snapBuildPath} && snapcraft build`);
return shell.task(`cd ${snapBuildPath} && snapcraft`);
} }
const BUILD_TARGETS = [ const BUILD_TARGETS = [

View File

@@ -6,11 +6,146 @@
'use strict'; 'use strict';
const gulp = require('gulp'); const gulp = require('gulp');
const path = require('path');
const es = require('event-stream');
const util = require('./lib/util');
const task = require('./lib/task');
const common = require('./lib/optimize');
const product = require('../product.json');
const rename = require('gulp-rename');
const filter = require('gulp-filter');
const json = require('gulp-json-editor');
const _ = require('underscore');
const deps = require('./dependencies');
const vfs = require('vinyl-fs');
const packageJson = require('../package.json');
const { compileBuildTask } = require('./gulpfile.compile');
const noop = () => { return Promise.resolve(); }; const REPO_ROOT = path.dirname(__dirname);
const commit = util.getVersion(REPO_ROOT);
const BUILD_ROOT = path.dirname(REPO_ROOT);
const WEB_FOLDER = path.join(REPO_ROOT, 'remote', 'web');
gulp.task('minify-vscode-web', noop); const productionDependencies = deps.getProductionDependencies(WEB_FOLDER);
gulp.task('vscode-web', noop);
gulp.task('vscode-web-min', noop); const nodeModules = Object.keys(product.dependencies || {})
gulp.task('vscode-web-ci', noop); .concat(_.uniq(productionDependencies.map(d => d.name)));
gulp.task('vscode-web-min-ci', noop);
const vscodeWebResources = [
// Workbench
'out-build/vs/{base,platform,editor,workbench}/**/*.{svg,png,html}',
'out-build/vs/base/browser/ui/octiconLabel/octicons/**',
'out-build/vs/**/markdown.css',
// Webview
'out-build/vs/workbench/contrib/webview/browser/pre/*.js',
// Excludes
'!out-build/vs/**/{node,electron-browser,electron-main}/**',
'!out-build/vs/editor/standalone/**',
'!out-build/vs/workbench/**/*-tb.png',
'!**/test/**'
];
const buildfile = require('../src/buildfile');
const vscodeWebEntryPoints = [
buildfile.workbenchWeb,
buildfile.serviceWorker,
buildfile.keyboardMaps,
buildfile.base
];
const optimizeVSCodeWebTask = task.define('optimize-vscode-web', task.series(
util.rimraf('out-vscode-web'),
common.optimizeTask({
src: 'out-build',
entryPoints: _.flatten(vscodeWebEntryPoints),
otherSources: [],
resources: vscodeWebResources,
loaderConfig: common.loaderConfig(nodeModules),
out: 'out-vscode-web',
bundleInfo: undefined
})
));
const minifyVSCodeWebTask = task.define('minify-vscode-web', task.series(
optimizeVSCodeWebTask,
util.rimraf('out-vscode-web-min'),
common.minifyTask('out-vscode-web', `https://ticino.blob.core.windows.net/sourcemaps/${commit}/core`)
));
gulp.task(minifyVSCodeWebTask);
function packageTask(sourceFolderName, destinationFolderName) {
const destination = path.join(BUILD_ROOT, destinationFolderName);
return () => {
const src = gulp.src(sourceFolderName + '/**', { base: '.' })
.pipe(rename(function (path) { path.dirname = path.dirname.replace(new RegExp('^' + sourceFolderName), 'out'); }))
.pipe(filter(['**', '!**/*.js.map']));
const sources = es.merge(src);
let version = packageJson.version;
const quality = product.quality;
if (quality && quality !== 'stable') {
version += '-' + quality;
}
const name = product.nameShort;
const packageJsonStream = gulp.src(['remote/web/package.json'], { base: 'remote/web' })
.pipe(json({ name, version }));
const date = new Date().toISOString();
const productJsonStream = gulp.src(['product.json'], { base: '.' })
.pipe(json({ commit, date }));
const license = gulp.src(['remote/LICENSE'], { base: 'remote' });
const dependenciesSrc = _.flatten(productionDependencies.map(d => path.relative(REPO_ROOT, d.path)).map(d => [`${d}/**`, `!${d}/**/{test,tests}/**`, `!${d}/.bin/**`]));
const deps = gulp.src(dependenciesSrc, { base: 'remote/web', dot: true })
.pipe(filter(['**', '!**/package-lock.json']))
.pipe(util.cleanNodeModules(path.join(__dirname, '.nativeignore')));
const favicon = gulp.src('resources/server/favicon.ico', { base: 'resources/server' });
let all = es.merge(
packageJsonStream,
productJsonStream,
license,
sources,
deps,
favicon
);
let result = all
.pipe(util.skipDirectories())
.pipe(util.fixWin32DirectoryPermissions());
return result.pipe(vfs.dest(destination));
};
}
const dashed = (str) => (str ? `-${str}` : ``);
['', 'min'].forEach(minified => {
const sourceFolderName = `out-vscode-web${dashed(minified)}`;
const destinationFolderName = `vscode-web`;
const vscodeWebTaskCI = task.define(`vscode-web${dashed(minified)}-ci`, task.series(
minified ? minifyVSCodeWebTask : optimizeVSCodeWebTask,
util.rimraf(path.join(BUILD_ROOT, destinationFolderName)),
packageTask(sourceFolderName, destinationFolderName)
));
gulp.task(vscodeWebTaskCI);
const vscodeWebTask = task.define(`vscode-web${dashed(minified)}`, task.series(
compileBuildTask,
vscodeWebTaskCI
));
gulp.task(vscodeWebTask);
});

View File

@@ -19,8 +19,7 @@ const ansiColors = require('ansi-colors');
const root = path.dirname(path.dirname(__dirname)); const root = path.dirname(path.dirname(__dirname));
// {{SQL CARBON EDIT}} // {{SQL CARBON EDIT}}
const quality = process.env['VSCODE_QUALITY']; const builtInExtensions = require('../builtInExtensions-insiders.json');
const builtInExtensions = quality && quality === 'stable' ? require('../builtInExtensions.json') : require('../builtInExtensions-insiders.json');
// {{SQL CARBON EDIT}} - END // {{SQL CARBON EDIT}} - END
const controlFilePath = path.join(os.homedir(), '.vscode-oss-dev', 'extensions', 'control.json'); const controlFilePath = path.join(os.homedir(), '.vscode-oss-dev', 'extensions', 'control.json');

View File

@@ -11,6 +11,7 @@ const bom = require("gulp-bom");
const sourcemaps = require("gulp-sourcemaps"); const sourcemaps = require("gulp-sourcemaps");
const tsb = require("gulp-tsb"); const tsb = require("gulp-tsb");
const path = require("path"); const path = require("path");
const _ = require("underscore");
const monacodts = require("../monaco/api"); const monacodts = require("../monaco/api");
const nls = require("./nls"); const nls = require("./nls");
const reporter_1 = require("./reporter"); const reporter_1 = require("./reporter");
@@ -21,7 +22,14 @@ const watch = require('./watch');
const reporter = reporter_1.createReporter(); const reporter = reporter_1.createReporter();
function getTypeScriptCompilerOptions(src) { function getTypeScriptCompilerOptions(src) {
const rootDir = path.join(__dirname, `../../${src}`); const rootDir = path.join(__dirname, `../../${src}`);
let options = {}; const tsconfig = require(`../../${src}/tsconfig.json`);
let options;
if (tsconfig.extends) {
options = Object.assign({}, require(path.join(rootDir, tsconfig.extends)).compilerOptions, tsconfig.compilerOptions);
}
else {
options = tsconfig.compilerOptions;
}
options.verbose = false; options.verbose = false;
options.sourceMap = true; options.sourceMap = true;
if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry
@@ -30,14 +38,15 @@ function getTypeScriptCompilerOptions(src) {
options.rootDir = rootDir; options.rootDir = rootDir;
options.baseUrl = rootDir; options.baseUrl = rootDir;
options.sourceRoot = util.toFileUri(rootDir); options.sourceRoot = util.toFileUri(rootDir);
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 0 : 1; options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 'CRLF' : 'LF';
return options; return options;
} }
function createCompile(src, build, emitError) { function createCompile(src, build, emitError) {
const projectPath = path.join(__dirname, '../../', src, 'tsconfig.json'); const opts = _.clone(getTypeScriptCompilerOptions(src));
const overrideOptions = Object.assign(Object.assign({}, getTypeScriptCompilerOptions(src)), { inlineSources: Boolean(build) }); opts.inlineSources = !!build;
const compilation = tsb.create(projectPath, overrideOptions, false, err => reporter(err)); opts.noFilesystemLookup = true;
function pipeline(token) { const ts = tsb.create(opts, true, undefined, err => reporter(err.toString()));
return function (token) {
const utf8Filter = util.filter(data => /(\/|\\)test(\/|\\).*utf8/.test(data.path)); const utf8Filter = util.filter(data => /(\/|\\)test(\/|\\).*utf8/.test(data.path));
const tsFilter = util.filter(data => /\.ts$/.test(data.path)); const tsFilter = util.filter(data => /\.ts$/.test(data.path));
const noDeclarationsFilter = util.filter(data => !(/\.d\.ts$/.test(data.path))); const noDeclarationsFilter = util.filter(data => !(/\.d\.ts$/.test(data.path)));
@@ -48,28 +57,30 @@ function createCompile(src, build, emitError) {
.pipe(utf8Filter.restore) .pipe(utf8Filter.restore)
.pipe(tsFilter) .pipe(tsFilter)
.pipe(util.loadSourcemaps()) .pipe(util.loadSourcemaps())
.pipe(compilation(token)) .pipe(ts(token))
.pipe(noDeclarationsFilter) .pipe(noDeclarationsFilter)
.pipe(build ? nls() : es.through()) .pipe(build ? nls() : es.through())
.pipe(noDeclarationsFilter.restore) .pipe(noDeclarationsFilter.restore)
.pipe(sourcemaps.write('.', { .pipe(sourcemaps.write('.', {
addComment: false, addComment: false,
includeContent: !!build, includeContent: !!build,
sourceRoot: overrideOptions.sourceRoot sourceRoot: opts.sourceRoot
})) }))
.pipe(tsFilter.restore) .pipe(tsFilter.restore)
.pipe(reporter.end(!!emitError)); .pipe(reporter.end(!!emitError));
return es.duplex(input, output); return es.duplex(input, output);
}
pipeline.tsProjectSrc = () => {
return compilation.src({ base: src });
}; };
return pipeline;
} }
const typesDts = [
'node_modules/typescript/lib/*.d.ts',
'node_modules/@types/**/*.d.ts',
'!node_modules/@types/webpack/**/*',
'!node_modules/@types/uglify-js/**/*',
];
function compileTask(src, out, build) { function compileTask(src, out, build) {
return function () { return function () {
const compile = createCompile(src, build, true); const compile = createCompile(src, build, true);
const srcPipe = gulp.src(`${src}/**`, { base: `${src}` }); const srcPipe = es.merge(gulp.src(`${src}/**`, { base: `${src}` }), gulp.src(typesDts));
let generator = new MonacoGenerator(false); let generator = new MonacoGenerator(false);
if (src === 'src') { if (src === 'src') {
generator.execute(); generator.execute();
@@ -84,8 +95,8 @@ exports.compileTask = compileTask;
function watchTask(out, build) { function watchTask(out, build) {
return function () { return function () {
const compile = createCompile('src', build); const compile = createCompile('src', build);
const src = gulp.src('src/**', { base: 'src' }); const src = es.merge(gulp.src('src/**', { base: 'src' }), gulp.src(typesDts));
const watchSrc = watch('src/**', { base: 'src', readDelay: 200 }); const watchSrc = watch('src/**', { base: 'src' });
let generator = new MonacoGenerator(true); let generator = new MonacoGenerator(true);
generator.execute(); generator.execute();
return watchSrc return watchSrc

View File

@@ -12,21 +12,27 @@ import * as bom from 'gulp-bom';
import * as sourcemaps from 'gulp-sourcemaps'; import * as sourcemaps from 'gulp-sourcemaps';
import * as tsb from 'gulp-tsb'; import * as tsb from 'gulp-tsb';
import * as path from 'path'; import * as path from 'path';
import * as _ from 'underscore';
import * as monacodts from '../monaco/api'; import * as monacodts from '../monaco/api';
import * as nls from './nls'; import * as nls from './nls';
import { createReporter } from './reporter'; import { createReporter } from './reporter';
import * as util from './util'; import * as util from './util';
import * as fancyLog from 'fancy-log'; import * as fancyLog from 'fancy-log';
import * as ansiColors from 'ansi-colors'; import * as ansiColors from 'ansi-colors';
import ts = require('typescript');
const watch = require('./watch'); const watch = require('./watch');
const reporter = createReporter(); const reporter = createReporter();
function getTypeScriptCompilerOptions(src: string): ts.CompilerOptions { function getTypeScriptCompilerOptions(src: string) {
const rootDir = path.join(__dirname, `../../${src}`); const rootDir = path.join(__dirname, `../../${src}`);
let options: ts.CompilerOptions = {}; const tsconfig = require(`../../${src}/tsconfig.json`);
let options: { [key: string]: any };
if (tsconfig.extends) {
options = Object.assign({}, require(path.join(rootDir, tsconfig.extends)).compilerOptions, tsconfig.compilerOptions);
} else {
options = tsconfig.compilerOptions;
}
options.verbose = false; options.verbose = false;
options.sourceMap = true; options.sourceMap = true;
if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry
@@ -35,17 +41,18 @@ function getTypeScriptCompilerOptions(src: string): ts.CompilerOptions {
options.rootDir = rootDir; options.rootDir = rootDir;
options.baseUrl = rootDir; options.baseUrl = rootDir;
options.sourceRoot = util.toFileUri(rootDir); options.sourceRoot = util.toFileUri(rootDir);
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 0 : 1; options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 'CRLF' : 'LF';
return options; return options;
} }
function createCompile(src: string, build: boolean, emitError?: boolean) { function createCompile(src: string, build: boolean, emitError?: boolean): (token?: util.ICancellationToken) => NodeJS.ReadWriteStream {
const projectPath = path.join(__dirname, '../../', src, 'tsconfig.json'); const opts = _.clone(getTypeScriptCompilerOptions(src));
const overrideOptions = { ...getTypeScriptCompilerOptions(src), inlineSources: Boolean(build) }; opts.inlineSources = !!build;
opts.noFilesystemLookup = true;
const compilation = tsb.create(projectPath, overrideOptions, false, err => reporter(err)); const ts = tsb.create(opts, true, undefined, err => reporter(err.toString()));
function pipeline(token?: util.ICancellationToken) { return function (token?: util.ICancellationToken) {
const utf8Filter = util.filter(data => /(\/|\\)test(\/|\\).*utf8/.test(data.path)); const utf8Filter = util.filter(data => /(\/|\\)test(\/|\\).*utf8/.test(data.path));
const tsFilter = util.filter(data => /\.ts$/.test(data.path)); const tsFilter = util.filter(data => /\.ts$/.test(data.path));
@@ -58,31 +65,39 @@ function createCompile(src: string, build: boolean, emitError?: boolean) {
.pipe(utf8Filter.restore) .pipe(utf8Filter.restore)
.pipe(tsFilter) .pipe(tsFilter)
.pipe(util.loadSourcemaps()) .pipe(util.loadSourcemaps())
.pipe(compilation(token)) .pipe(ts(token))
.pipe(noDeclarationsFilter) .pipe(noDeclarationsFilter)
.pipe(build ? nls() : es.through()) .pipe(build ? nls() : es.through())
.pipe(noDeclarationsFilter.restore) .pipe(noDeclarationsFilter.restore)
.pipe(sourcemaps.write('.', { .pipe(sourcemaps.write('.', {
addComment: false, addComment: false,
includeContent: !!build, includeContent: !!build,
sourceRoot: overrideOptions.sourceRoot sourceRoot: opts.sourceRoot
})) }))
.pipe(tsFilter.restore) .pipe(tsFilter.restore)
.pipe(reporter.end(!!emitError)); .pipe(reporter.end(!!emitError));
return es.duplex(input, output); return es.duplex(input, output);
}
pipeline.tsProjectSrc = () => {
return compilation.src({ base: src });
}; };
return pipeline;
} }
const typesDts = [
'node_modules/typescript/lib/*.d.ts',
'node_modules/@types/**/*.d.ts',
'!node_modules/@types/webpack/**/*',
'!node_modules/@types/uglify-js/**/*',
];
export function compileTask(src: string, out: string, build: boolean): () => NodeJS.ReadWriteStream { export function compileTask(src: string, out: string, build: boolean): () => NodeJS.ReadWriteStream {
return function () { return function () {
const compile = createCompile(src, build, true); const compile = createCompile(src, build, true);
const srcPipe = gulp.src(`${src}/**`, { base: `${src}` });
const srcPipe = es.merge(
gulp.src(`${src}/**`, { base: `${src}` }),
gulp.src(typesDts),
);
let generator = new MonacoGenerator(false); let generator = new MonacoGenerator(false);
if (src === 'src') { if (src === 'src') {
generator.execute(); generator.execute();
@@ -100,8 +115,11 @@ export function watchTask(out: string, build: boolean): () => NodeJS.ReadWriteSt
return function () { return function () {
const compile = createCompile('src', build); const compile = createCompile('src', build);
const src = gulp.src('src/**', { base: 'src' }); const src = es.merge(
const watchSrc = watch('src/**', { base: 'src', readDelay: 200 }); gulp.src('src/**', { base: 'src' }),
gulp.src(typesDts),
);
const watchSrc = watch('src/**', { base: 'src' });
let generator = new MonacoGenerator(true); let generator = new MonacoGenerator(true);
generator.execute(); generator.execute();

View File

@@ -2,88 +2,29 @@
* Copyright (c) Microsoft Corporation. All rights reserved. * Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information. * Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
'use strict'; 'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const fs = require("fs"); const fs = require('fs');
const path = require("path"); const path = require('path');
const vfs = require("vinyl-fs");
const filter = require("gulp-filter");
const json = require("gulp-json-editor");
const _ = require("underscore");
const util = require("./util");
const electron = require('gulp-atom-electron');
const root = path.dirname(path.dirname(__dirname)); const root = path.dirname(path.dirname(__dirname));
const product = JSON.parse(fs.readFileSync(path.join(root, 'product.json'), 'utf8'));
const commit = util.getVersion(root);
function getElectronVersion() { function getElectronVersion() {
const yarnrc = fs.readFileSync(path.join(root, '.yarnrc'), 'utf8'); const yarnrc = fs.readFileSync(path.join(root, '.yarnrc'), 'utf8');
const target = /^target "(.*)"$/m.exec(yarnrc)[1]; // @ts-ignore
return target; const target = /^target "(.*)"$/m.exec(yarnrc)[1];
}
exports.getElectronVersion = getElectronVersion; return target;
const darwinCreditsTemplate = product.darwinCredits && _.template(fs.readFileSync(path.join(root, product.darwinCredits), 'utf8'));
function darwinBundleDocumentType(extensions, icon) {
return {
name: product.nameLong + ' document',
role: 'Editor',
ostypes: ["TEXT", "utxt", "TUTX", "****"],
extensions: extensions,
iconFile: icon
};
}
exports.config = {
version: getElectronVersion(),
productAppName: product.nameLong,
companyName: 'Microsoft Corporation',
copyright: 'Copyright (C) 2019 Microsoft. All rights reserved',
darwinIcon: 'resources/darwin/code.icns',
darwinBundleIdentifier: product.darwinBundleIdentifier,
darwinApplicationCategoryType: 'public.app-category.developer-tools',
darwinHelpBookFolder: 'VS Code HelpBook',
darwinHelpBookName: 'VS Code HelpBook',
darwinBundleDocumentTypes: [
darwinBundleDocumentType(["csv", "json", "sqlplan", "sql", "xml"], 'resources/darwin/code_file.icns'),
],
darwinBundleURLTypes: [{
role: 'Viewer',
name: product.nameLong,
urlSchemes: [product.urlProtocol]
}],
darwinForceDarkModeSupport: true,
darwinCredits: darwinCreditsTemplate ? Buffer.from(darwinCreditsTemplate({ commit: commit, date: new Date().toISOString() })) : undefined,
linuxExecutableName: product.applicationName,
winIcon: 'resources/win32/code.ico',
token: process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'] || undefined,
repo: product.electronRepository || undefined
};
function getElectron(arch) {
return () => {
const electronOpts = _.extend({}, exports.config, {
platform: process.platform,
arch,
ffmpegChromium: true,
keepDefaultApp: true
});
return vfs.src('package.json')
.pipe(json({ name: product.nameShort }))
.pipe(electron(electronOpts))
.pipe(filter(['**', '!**/app/package.json']))
.pipe(vfs.dest('.build/electron'));
};
}
async function main(arch = process.arch) {
const version = getElectronVersion();
const electronPath = path.join(root, '.build', 'electron');
const versionFile = path.join(electronPath, 'version');
const isUpToDate = fs.existsSync(versionFile) && fs.readFileSync(versionFile, 'utf8') === `${version}`;
if (!isUpToDate) {
await util.rimraf(electronPath)();
await util.streamToPromise(getElectron(arch)());
}
} }
module.exports.getElectronVersion = getElectronVersion;
// returns 0 if the right version of electron is in .build/electron
// @ts-ignore
if (require.main === module) { if (require.main === module) {
main(process.argv[2]).catch(err => { const version = getElectronVersion();
console.error(err); const versionFile = path.join(root, '.build', 'electron', 'version');
process.exit(1); const isUpToDate = fs.existsSync(versionFile) && fs.readFileSync(versionFile, 'utf8') === `${version}`;
});
process.exit(isUpToDate ? 0 : 1);
} }

View File

@@ -1,100 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import * as fs from 'fs';
import * as path from 'path';
import * as vfs from 'vinyl-fs';
import * as filter from 'gulp-filter';
import * as json from 'gulp-json-editor';
import * as _ from 'underscore';
import * as util from './util';
const electron = require('gulp-atom-electron');
const root = path.dirname(path.dirname(__dirname));
const product = JSON.parse(fs.readFileSync(path.join(root, 'product.json'), 'utf8'));
const commit = util.getVersion(root);
export function getElectronVersion(): string {
const yarnrc = fs.readFileSync(path.join(root, '.yarnrc'), 'utf8');
const target = /^target "(.*)"$/m.exec(yarnrc)![1];
return target;
}
const darwinCreditsTemplate = product.darwinCredits && _.template(fs.readFileSync(path.join(root, product.darwinCredits), 'utf8'));
function darwinBundleDocumentType(extensions: string[], icon: string) {
return {
name: product.nameLong + ' document',
role: 'Editor',
ostypes: ["TEXT", "utxt", "TUTX", "****"],
extensions: extensions,
iconFile: icon
};
}
export const config = {
version: getElectronVersion(),
productAppName: product.nameLong,
companyName: 'Microsoft Corporation',
copyright: 'Copyright (C) 2019 Microsoft. All rights reserved',
darwinIcon: 'resources/darwin/code.icns',
darwinBundleIdentifier: product.darwinBundleIdentifier,
darwinApplicationCategoryType: 'public.app-category.developer-tools',
darwinHelpBookFolder: 'VS Code HelpBook',
darwinHelpBookName: 'VS Code HelpBook',
darwinBundleDocumentTypes: [
darwinBundleDocumentType(["csv", "json", "sqlplan", "sql", "xml"], 'resources/darwin/code_file.icns'),
],
darwinBundleURLTypes: [{
role: 'Viewer',
name: product.nameLong,
urlSchemes: [product.urlProtocol]
}],
darwinForceDarkModeSupport: true,
darwinCredits: darwinCreditsTemplate ? Buffer.from(darwinCreditsTemplate({ commit: commit, date: new Date().toISOString() })) : undefined,
linuxExecutableName: product.applicationName,
winIcon: 'resources/win32/code.ico',
token: process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'] || undefined,
repo: product.electronRepository || undefined
};
function getElectron(arch: string): () => NodeJS.ReadWriteStream {
return () => {
const electronOpts = _.extend({}, config, {
platform: process.platform,
arch,
ffmpegChromium: true,
keepDefaultApp: true
});
return vfs.src('package.json')
.pipe(json({ name: product.nameShort }))
.pipe(electron(electronOpts))
.pipe(filter(['**', '!**/app/package.json']))
.pipe(vfs.dest('.build/electron'));
};
}
async function main(arch = process.arch): Promise<void> {
const version = getElectronVersion();
const electronPath = path.join(root, '.build', 'electron');
const versionFile = path.join(electronPath, 'version');
const isUpToDate = fs.existsSync(versionFile) && fs.readFileSync(versionFile, 'utf8') === `${version}`;
if (!isUpToDate) {
await util.rimraf(electronPath)();
await util.streamToPromise(getElectron(arch)());
}
}
if (require.main === module) {
main(process.argv[2]).catch(err => {
console.error(err);
process.exit(1);
});
}

View File

@@ -29,18 +29,12 @@ const commit = util.getVersion(root);
const sourceMappingURLBase = `https://ticino.blob.core.windows.net/sourcemaps/${commit}`; const sourceMappingURLBase = `https://ticino.blob.core.windows.net/sourcemaps/${commit}`;
function fromLocal(extensionPath) { function fromLocal(extensionPath) {
const webpackFilename = path.join(extensionPath, 'extension.webpack.config.js'); const webpackFilename = path.join(extensionPath, 'extension.webpack.config.js');
const input = fs.existsSync(webpackFilename) if (fs.existsSync(webpackFilename)) {
? fromLocalWebpack(extensionPath) return fromLocalWebpack(extensionPath);
: fromLocalNormal(extensionPath); }
const tmLanguageJsonFilter = filter('**/*.tmLanguage.json', { restore: true }); else {
return input return fromLocalNormal(extensionPath);
.pipe(tmLanguageJsonFilter) }
.pipe(buffer())
.pipe(es.mapSync((f) => {
f.contents = Buffer.from(JSON.stringify(JSON.parse(f.contents.toString('utf8'))));
return f;
}))
.pipe(tmLanguageJsonFilter.restore);
} }
function fromLocalWebpack(extensionPath) { function fromLocalWebpack(extensionPath) {
const result = es.through(); const result = es.through();
@@ -101,7 +95,7 @@ function fromLocalWebpack(extensionPath) {
result.emit('error', compilation.warnings.join('\n')); result.emit('error', compilation.warnings.join('\n'));
} }
}; };
const webpackConfig = Object.assign(Object.assign({}, require(webpackConfigPath)), { mode: 'production' }); const webpackConfig = Object.assign({}, require(webpackConfigPath), { mode: 'production' });
const relativeOutputPath = path.relative(extensionPath, webpackConfig.output.path); const relativeOutputPath = path.relative(extensionPath, webpackConfig.output.path);
return webpackGulp(webpackConfig, webpack, webpackDone) return webpackGulp(webpackConfig, webpack, webpackDone)
.pipe(es.through(function (data) { .pipe(es.through(function (data) {
@@ -199,10 +193,13 @@ const sqlBuiltInExtensions = [
'admin-pack', 'admin-pack',
'dacpac', 'dacpac',
'schema-compare', 'schema-compare',
'cms', 'cms'
'query-history',
'liveshare'
]; ];
// make resource deployment and BDC extension only available in insiders
if (process.env['VSCODE_QUALITY'] === 'stable') {
sqlBuiltInExtensions.push('resource-deployment');
sqlBuiltInExtensions.push('big-data-cluster');
}
const builtInExtensions = process.env['VSCODE_QUALITY'] === 'stable' ? require('../builtInExtensions.json') : require('../builtInExtensions-insiders.json'); const builtInExtensions = process.env['VSCODE_QUALITY'] === 'stable' ? require('../builtInExtensions.json') : require('../builtInExtensions-insiders.json');
// {{SQL CARBON EDIT}} - End // {{SQL CARBON EDIT}} - End
function packageLocalExtensionsStream() { function packageLocalExtensionsStream() {

View File

@@ -30,20 +30,11 @@ const sourceMappingURLBase = `https://ticino.blob.core.windows.net/sourcemaps/${
function fromLocal(extensionPath: string): Stream { function fromLocal(extensionPath: string): Stream {
const webpackFilename = path.join(extensionPath, 'extension.webpack.config.js'); const webpackFilename = path.join(extensionPath, 'extension.webpack.config.js');
const input = fs.existsSync(webpackFilename) if (fs.existsSync(webpackFilename)) {
? fromLocalWebpack(extensionPath) return fromLocalWebpack(extensionPath);
: fromLocalNormal(extensionPath); } else {
return fromLocalNormal(extensionPath);
const tmLanguageJsonFilter = filter('**/*.tmLanguage.json', { restore: true }); }
return input
.pipe(tmLanguageJsonFilter)
.pipe(buffer())
.pipe(es.mapSync((f: File) => {
f.contents = Buffer.from(JSON.stringify(JSON.parse(f.contents.toString('utf8'))));
return f;
}))
.pipe(tmLanguageJsonFilter.restore);
} }
function fromLocalWebpack(extensionPath: string): Stream { function fromLocalWebpack(extensionPath: string): Stream {
@@ -235,11 +226,16 @@ const sqlBuiltInExtensions = [
'admin-pack', 'admin-pack',
'dacpac', 'dacpac',
'schema-compare', 'schema-compare',
'cms', 'cms'
'query-history',
'liveshare'
]; ];
// make resource deployment and BDC extension only available in insiders
if (process.env['VSCODE_QUALITY'] === 'stable') {
sqlBuiltInExtensions.push('resource-deployment');
sqlBuiltInExtensions.push('big-data-cluster');
}
interface IBuiltInExtension { interface IBuiltInExtension {
name: string; name: string;
version: string; version: string;
@@ -350,4 +346,4 @@ export function packageExtensionTask(extensionName: string, platform: string, ar
return result.pipe(vfs.dest(destination)); return result.pipe(vfs.dest(destination));
}; };
} }
// {{SQL CARBON EDIT}} - End // {{SQL CARBON EDIT}} - End

View File

@@ -176,7 +176,6 @@ class XLF {
this.buffer.push(line.toString()); this.buffer.push(line.toString());
} }
} }
exports.XLF = XLF;
XLF.parsePseudo = function (xlfString) { XLF.parsePseudo = function (xlfString) {
return new Promise((resolve) => { return new Promise((resolve) => {
let parser = new xml2js.Parser(); let parser = new xml2js.Parser();
@@ -249,6 +248,7 @@ XLF.parse = function (xlfString) {
}); });
}); });
}; };
exports.XLF = XLF;
class Limiter { class Limiter {
constructor(maxDegreeOfParalellism) { constructor(maxDegreeOfParalellism) {
this.maxDegreeOfParalellism = maxDegreeOfParalellism; this.maxDegreeOfParalellism = maxDegreeOfParalellism;
@@ -586,7 +586,7 @@ function createXlfFilesForExtensions() {
} }
return _xlf; return _xlf;
} }
gulp.src([`.build/extensions/${extensionName}/package.nls.json`, `.build/extensions/${extensionName}/**/nls.metadata.json`], { allowEmpty: true }).pipe(event_stream_1.through(function (file) { gulp.src([`./extensions/${extensionName}/package.nls.json`, `./extensions/${extensionName}/**/nls.metadata.json`], { allowEmpty: true }).pipe(event_stream_1.through(function (file) {
if (file.isBuffer()) { if (file.isBuffer()) {
const buffer = file.contents; const buffer = file.contents;
const basename = path.basename(file.path); const basename = path.basename(file.path);
@@ -609,7 +609,7 @@ function createXlfFilesForExtensions() {
} }
else if (basename === 'nls.metadata.json') { else if (basename === 'nls.metadata.json') {
const json = JSON.parse(buffer.toString('utf8')); const json = JSON.parse(buffer.toString('utf8'));
const relPath = path.relative(`.build/extensions/${extensionName}`, path.dirname(file.path)); const relPath = path.relative(`./extensions/${extensionName}`, path.dirname(file.path));
for (let file in json) { for (let file in json) {
const fileContent = json[file]; const fileContent = json[file];
getXlf().addFile(`extensions/${extensionName}/${relPath}/${file}`, fileContent.keys, fileContent.messages); getXlf().addFile(`extensions/${extensionName}/${relPath}/${file}`, fileContent.keys, fileContent.messages);
@@ -912,8 +912,8 @@ function pullCoreAndExtensionsXlfFiles(apiHostname, username, password, language
_coreAndExtensionResources.push(...json.workbench); _coreAndExtensionResources.push(...json.workbench);
// extensions // extensions
let extensionsToLocalize = Object.create(null); let extensionsToLocalize = Object.create(null);
glob.sync('.build/extensions/**/*.nls.json').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true); glob.sync('./extensions/**/*.nls.json').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
glob.sync('.build/extensions/*/node_modules/vscode-nls').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true); glob.sync('./extensions/*/node_modules/vscode-nls').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
Object.keys(extensionsToLocalize).forEach(extension => { Object.keys(extensionsToLocalize).forEach(extension => {
_coreAndExtensionResources.push({ name: extension, project: extensionsProject }); _coreAndExtensionResources.push({ name: extension, project: extensionsProject });
}); });
@@ -1086,7 +1086,7 @@ function prepareI18nPackFiles(externalExtensions, resultingTranslationPaths, pse
resultingTranslationPaths.push({ id: 'vscode', resourceName: 'main.i18n.json' }); resultingTranslationPaths.push({ id: 'vscode', resourceName: 'main.i18n.json' });
this.queue(translatedMainFile); this.queue(translatedMainFile);
for (let extension in extensionsPacks) { for (let extension in extensionsPacks) {
const translatedExtFile = createI18nFile(`extensions/${extension}`, extensionsPacks[extension]); const translatedExtFile = createI18nFile(`./extensions/${extension}`, extensionsPacks[extension]);
this.queue(translatedExtFile); this.queue(translatedExtFile);
const externalExtensionId = externalExtensions[extension]; const externalExtensionId = externalExtensions[extension];
if (externalExtensionId) { if (externalExtensionId) {

View File

@@ -106,10 +106,6 @@
"name": "vs/workbench/contrib/quickopen", "name": "vs/workbench/contrib/quickopen",
"project": "vscode-workbench" "project": "vscode-workbench"
}, },
{
"name": "vs/workbench/contrib/userData",
"project": "vscode-workbench"
},
{ {
"name": "vs/workbench/contrib/remote", "name": "vs/workbench/contrib/remote",
"project": "vscode-workbench" "project": "vscode-workbench"
@@ -174,10 +170,6 @@
"name": "vs/workbench/contrib/webview", "name": "vs/workbench/contrib/webview",
"project": "vscode-workbench" "project": "vscode-workbench"
}, },
{
"name": "vs/workbench/contrib/customEditor",
"project": "vscode-workbench"
},
{ {
"name": "vs/workbench/contrib/welcome", "name": "vs/workbench/contrib/welcome",
"project": "vscode-workbench" "project": "vscode-workbench"
@@ -186,10 +178,6 @@
"name": "vs/workbench/contrib/outline", "name": "vs/workbench/contrib/outline",
"project": "vscode-workbench" "project": "vscode-workbench"
}, },
{
"name": "vs/workbench/contrib/userDataSync",
"project": "vscode-workbench"
},
{ {
"name": "vs/workbench/services/actions", "name": "vs/workbench/services/actions",
"project": "vscode-workbench" "project": "vscode-workbench"
@@ -242,10 +230,6 @@
"name": "vs/workbench/services/keybinding", "name": "vs/workbench/services/keybinding",
"project": "vscode-workbench" "project": "vscode-workbench"
}, },
{
"name": "vs/workbench/services/lifecycle",
"project": "vscode-workbench"
},
{ {
"name": "vs/workbench/services/mode", "name": "vs/workbench/services/mode",
"project": "vscode-workbench" "project": "vscode-workbench"
@@ -271,7 +255,7 @@
"project": "vscode-workbench" "project": "vscode-workbench"
}, },
{ {
"name": "vs/workbench/services/workspaces", "name": "vs/workbench/services/workspace",
"project": "vscode-workbench" "project": "vscode-workbench"
}, },
{ {
@@ -285,14 +269,6 @@
{ {
"name": "vs/workbench/services/preferences", "name": "vs/workbench/services/preferences",
"project": "vscode-preferences" "project": "vscode-preferences"
},
{
"name": "vs/workbench/services/notification",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/userData",
"project": "vscode-workbench"
} }
] ]
} }

View File

@@ -709,7 +709,7 @@ export function createXlfFilesForExtensions(): ThroughStream {
} }
return _xlf; return _xlf;
} }
gulp.src([`.build/extensions/${extensionName}/package.nls.json`, `.build/extensions/${extensionName}/**/nls.metadata.json`], { allowEmpty: true }).pipe(through(function (file: File) { gulp.src([`./extensions/${extensionName}/package.nls.json`, `./extensions/${extensionName}/**/nls.metadata.json`], { allowEmpty: true }).pipe(through(function (file: File) {
if (file.isBuffer()) { if (file.isBuffer()) {
const buffer: Buffer = file.contents as Buffer; const buffer: Buffer = file.contents as Buffer;
const basename = path.basename(file.path); const basename = path.basename(file.path);
@@ -729,7 +729,7 @@ export function createXlfFilesForExtensions(): ThroughStream {
getXlf().addFile(`extensions/${extensionName}/package`, keys, messages); getXlf().addFile(`extensions/${extensionName}/package`, keys, messages);
} else if (basename === 'nls.metadata.json') { } else if (basename === 'nls.metadata.json') {
const json: BundledExtensionFormat = JSON.parse(buffer.toString('utf8')); const json: BundledExtensionFormat = JSON.parse(buffer.toString('utf8'));
const relPath = path.relative(`.build/extensions/${extensionName}`, path.dirname(file.path)); const relPath = path.relative(`./extensions/${extensionName}`, path.dirname(file.path));
for (let file in json) { for (let file in json) {
const fileContent = json[file]; const fileContent = json[file];
getXlf().addFile(`extensions/${extensionName}/${relPath}/${file}`, fileContent.keys, fileContent.messages); getXlf().addFile(`extensions/${extensionName}/${relPath}/${file}`, fileContent.keys, fileContent.messages);
@@ -1053,8 +1053,8 @@ export function pullCoreAndExtensionsXlfFiles(apiHostname: string, username: str
// extensions // extensions
let extensionsToLocalize = Object.create(null); let extensionsToLocalize = Object.create(null);
glob.sync('.build/extensions/**/*.nls.json').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true); glob.sync('./extensions/**/*.nls.json').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
glob.sync('.build/extensions/*/node_modules/vscode-nls').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true); glob.sync('./extensions/*/node_modules/vscode-nls').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
Object.keys(extensionsToLocalize).forEach(extension => { Object.keys(extensionsToLocalize).forEach(extension => {
_coreAndExtensionResources.push({ name: extension, project: extensionsProject }); _coreAndExtensionResources.push({ name: extension, project: extensionsProject });
@@ -1253,7 +1253,7 @@ export function prepareI18nPackFiles(externalExtensions: Map<string>, resultingT
this.queue(translatedMainFile); this.queue(translatedMainFile);
for (let extension in extensionsPacks) { for (let extension in extensionsPacks) {
const translatedExtFile = createI18nFile(`extensions/${extension}`, extensionsPacks[extension]); const translatedExtFile = createI18nFile(`./extensions/${extension}`, extensionsPacks[extension]);
this.queue(translatedExtFile); this.queue(translatedExtFile);
const externalExtensionId = externalExtensions[extension]; const externalExtensionId = externalExtensions[extension];

View File

@@ -5,7 +5,6 @@
'use strict'; 'use strict';
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
const es = require("event-stream"); const es = require("event-stream");
const fs = require("fs");
const gulp = require("gulp"); const gulp = require("gulp");
const concat = require("gulp-concat"); const concat = require("gulp-concat");
const minifyCSS = require("gulp-cssnano"); const minifyCSS = require("gulp-cssnano");
@@ -18,7 +17,7 @@ const fancyLog = require("fancy-log");
const ansiColors = require("ansi-colors"); const ansiColors = require("ansi-colors");
const path = require("path"); const path = require("path");
const pump = require("pump"); const pump = require("pump");
const terser = require("terser"); const uglifyes = require("uglify-es");
const VinylFile = require("vinyl"); const VinylFile = require("vinyl");
const bundle = require("./bundle"); const bundle = require("./bundle");
const i18n_1 = require("./i18n"); const i18n_1 = require("./i18n");
@@ -135,14 +134,6 @@ function optimizeTask(opts) {
if (err || !result) { if (err || !result) {
return bundlesStream.emit('error', JSON.stringify(err)); return bundlesStream.emit('error', JSON.stringify(err));
} }
if (opts.inlineAmdImages) {
try {
result = inlineAmdImages(src, result);
}
catch (err) {
return bundlesStream.emit('error', JSON.stringify(err));
}
}
toBundleStream(src, bundledFileHeader, result.files).pipe(bundlesStream); toBundleStream(src, bundledFileHeader, result.files).pipe(bundlesStream);
// Remove css inlined resources // Remove css inlined resources
const filteredResources = resources.slice(); const filteredResources = resources.slice();
@@ -178,39 +169,6 @@ function optimizeTask(opts) {
}; };
} }
exports.optimizeTask = optimizeTask; exports.optimizeTask = optimizeTask;
function inlineAmdImages(src, result) {
for (const outputFile of result.files) {
for (const sourceFile of outputFile.sources) {
if (sourceFile.path && /\.js$/.test(sourceFile.path)) {
sourceFile.contents = sourceFile.contents.replace(/\([^.]+\.registerAndGetAmdImageURL\(([^)]+)\)\)/g, (_, m0) => {
let imagePath = m0;
// remove `` or ''
if ((imagePath.charAt(0) === '`' && imagePath.charAt(imagePath.length - 1) === '`')
|| (imagePath.charAt(0) === '\'' && imagePath.charAt(imagePath.length - 1) === '\'')) {
imagePath = imagePath.substr(1, imagePath.length - 2);
}
if (!/\.(png|svg)$/.test(imagePath)) {
console.log(`original: ${_}`);
return _;
}
const repoLocation = path.join(src, imagePath);
const absoluteLocation = path.join(REPO_ROOT_PATH, repoLocation);
if (!fs.existsSync(absoluteLocation)) {
const message = `Invalid amd image url in file ${sourceFile.path}: ${imagePath}`;
console.log(message);
throw new Error(message);
}
const fileContents = fs.readFileSync(absoluteLocation);
const mime = /\.svg$/.test(imagePath) ? 'image/svg+xml' : 'image/png';
// Mark the file as inlined so we don't ship it by itself
result.cssInlinedResources.push(repoLocation);
return `("data:${mime};base64,${fileContents.toString('base64')}")`;
});
}
}
}
return result;
}
/** /**
* Wrap around uglify and allow the preserveComments function * Wrap around uglify and allow the preserveComments function
* to have a file "context" to include our copyright only once per file. * to have a file "context" to include our copyright only once per file.
@@ -241,7 +199,7 @@ function uglifyWithCopyrights() {
return false; return false;
}; };
}; };
const minify = composer(terser); const minify = composer(uglifyes);
const input = es.through(); const input = es.through();
const output = input const output = input
.pipe(flatmap((stream, f) => { .pipe(flatmap((stream, f) => {

View File

@@ -6,7 +6,6 @@
'use strict'; 'use strict';
import * as es from 'event-stream'; import * as es from 'event-stream';
import * as fs from 'fs';
import * as gulp from 'gulp'; import * as gulp from 'gulp';
import * as concat from 'gulp-concat'; import * as concat from 'gulp-concat';
import * as minifyCSS from 'gulp-cssnano'; import * as minifyCSS from 'gulp-cssnano';
@@ -20,7 +19,7 @@ import * as ansiColors from 'ansi-colors';
import * as path from 'path'; import * as path from 'path';
import * as pump from 'pump'; import * as pump from 'pump';
import * as sm from 'source-map'; import * as sm from 'source-map';
import * as terser from 'terser'; import * as uglifyes from 'uglify-es';
import * as VinylFile from 'vinyl'; import * as VinylFile from 'vinyl';
import * as bundle from './bundle'; import * as bundle from './bundle';
import { Language, processNlsFiles } from './i18n'; import { Language, processNlsFiles } from './i18n';
@@ -162,10 +161,6 @@ export interface IOptimizeTaskOpts {
* (emit bundleInfo.json file) * (emit bundleInfo.json file)
*/ */
bundleInfo: boolean; bundleInfo: boolean;
/**
* replace calls to `registerAndGetAmdImageURL` with data uris
*/
inlineAmdImages: boolean;
/** /**
* (out folder name) * (out folder name)
*/ */
@@ -199,14 +194,6 @@ export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStr
bundle.bundle(entryPoints, loaderConfig, function (err, result) { bundle.bundle(entryPoints, loaderConfig, function (err, result) {
if (err || !result) { return bundlesStream.emit('error', JSON.stringify(err)); } if (err || !result) { return bundlesStream.emit('error', JSON.stringify(err)); }
if (opts.inlineAmdImages) {
try {
result = inlineAmdImages(src, result);
} catch (err) {
return bundlesStream.emit('error', JSON.stringify(err));
}
}
toBundleStream(src, bundledFileHeader, result.files).pipe(bundlesStream); toBundleStream(src, bundledFileHeader, result.files).pipe(bundlesStream);
// Remove css inlined resources // Remove css inlined resources
@@ -251,42 +238,6 @@ export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStr
}; };
} }
function inlineAmdImages(src: string, result: bundle.IBundleResult): bundle.IBundleResult {
for (const outputFile of result.files) {
for (const sourceFile of outputFile.sources) {
if (sourceFile.path && /\.js$/.test(sourceFile.path)) {
sourceFile.contents = sourceFile.contents.replace(/\([^.]+\.registerAndGetAmdImageURL\(([^)]+)\)\)/g, (_, m0) => {
let imagePath = m0;
// remove `` or ''
if ((imagePath.charAt(0) === '`' && imagePath.charAt(imagePath.length - 1) === '`')
|| (imagePath.charAt(0) === '\'' && imagePath.charAt(imagePath.length - 1) === '\'')) {
imagePath = imagePath.substr(1, imagePath.length - 2);
}
if (!/\.(png|svg)$/.test(imagePath)) {
console.log(`original: ${_}`);
return _;
}
const repoLocation = path.join(src, imagePath);
const absoluteLocation = path.join(REPO_ROOT_PATH, repoLocation);
if (!fs.existsSync(absoluteLocation)) {
const message = `Invalid amd image url in file ${sourceFile.path}: ${imagePath}`;
console.log(message);
throw new Error(message);
}
const fileContents = fs.readFileSync(absoluteLocation);
const mime = /\.svg$/.test(imagePath) ? 'image/svg+xml' : 'image/png';
// Mark the file as inlined so we don't ship it by itself
result.cssInlinedResources.push(repoLocation);
return `("data:${mime};base64,${fileContents.toString('base64')}")`;
});
}
}
}
return result;
}
declare class FileWithCopyright extends VinylFile { declare class FileWithCopyright extends VinylFile {
public __hasOurCopyright: boolean; public __hasOurCopyright: boolean;
} }
@@ -324,7 +275,7 @@ function uglifyWithCopyrights(): NodeJS.ReadWriteStream {
}; };
}; };
const minify = (composer as any)(terser); const minify = (composer as any)(uglifyes);
const input = es.through(); const input = es.through();
const output = input const output = input
.pipe(flatmap((stream, f) => { .pipe(flatmap((stream, f) => {

View File

@@ -1,103 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const fs = require("fs");
const rollup = require("rollup");
const path = require("path");
// getting around stupid import rules
const nodeResolve = require('rollup-plugin-node-resolve');
const commonjs = require('rollup-plugin-commonjs');
async function rollupModule(options) {
const moduleName = options.moduleName;
try {
const inputFile = options.inputFile;
const outputDirectory = options.outputDirectory;
await fs.promises.mkdir(outputDirectory, {
recursive: true
});
const outputFileName = options.outputFileName;
const outputMapName = `${outputFileName}.map`;
const external = options.external || [];
const outputFilePath = path.resolve(outputDirectory, outputFileName);
const outputMapPath = path.resolve(outputDirectory, outputMapName);
const bundle = await rollup.rollup({
input: inputFile,
plugins: [
nodeResolve(),
commonjs(),
],
external,
});
const generatedBundle = await bundle.generate({
name: moduleName,
format: 'umd',
sourcemap: true
});
const result = generatedBundle.output[0];
result.code = result.code + '\n//# sourceMappingURL=' + path.basename(outputMapName);
await fs.promises.writeFile(outputFilePath, result.code);
await fs.promises.writeFile(outputMapPath, result.map);
return {
name: moduleName,
result: true
};
}
catch (ex) {
return {
name: moduleName,
result: false,
exception: ex
};
}
}
function rollupAngularSlickgrid(root) {
return new Promise(async (resolve, reject) => {
const result = await rollupModule({
moduleName: 'angular2-slickgrid',
inputFile: path.resolve(root, 'node_modules', 'angular2-slickgrid', 'out', 'index.js'),
outputDirectory: path.resolve(root, 'node_modules', 'angular2-slickgrid', 'out', 'bundles'),
outputFileName: 'angular2-slickgrid.umd.js'
});
if (!result.result) {
return reject(`angular2-slickgrid failed to bundle - ${result.exception}`);
}
resolve();
});
}
exports.rollupAngularSlickgrid = rollupAngularSlickgrid;
function rollupAngular(root) {
return new Promise(async (resolve, reject) => {
const modules = ['core', 'animations', 'common', 'compiler', 'forms', 'platform-browser', 'platform-browser-dynamic', 'router'];
const tasks = modules.map((module) => {
return rollupModule({
moduleName: `ng.${module}`,
inputFile: path.resolve(root, 'node_modules', '@angular', module, '@angular', `${module}.es5.js`),
outputDirectory: path.resolve(root, 'node_modules', '@angular', module, 'bundles'),
outputFileName: `${module}.umd.js`,
external: modules.map(mn => `@angular/${mn}`)
});
});
// array of booleans
const x = await Promise.all(tasks);
const result = x.reduce((prev, current) => {
if (!current.result) {
prev.fails.push(current.name);
prev.exceptions.push(current.exception);
prev.result = false;
}
return prev;
}, {
fails: [],
exceptions: [],
result: true,
});
if (!result.result) {
return reject(`failures: ${result.fails} - exceptions: ${JSON.stringify(result.exceptions)}`);
}
resolve();
});
}
exports.rollupAngular = rollupAngular;

View File

@@ -1,125 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as fs from 'fs';
import * as rollup from 'rollup';
import * as path from 'path';
// getting around stupid import rules
const nodeResolve = require('rollup-plugin-node-resolve');
const commonjs = require('rollup-plugin-commonjs');
export interface IRollupOptions {
moduleName: string;
inputFile: string;
outputDirectory: string;
outputFileName: string;
external?: string[];
}
async function rollupModule(options: IRollupOptions) {
const moduleName = options.moduleName;
try {
const inputFile = options.inputFile;
const outputDirectory = options.outputDirectory;
await fs.promises.mkdir(outputDirectory, {
recursive: true
});
const outputFileName = options.outputFileName;
const outputMapName = `${outputFileName}.map`;
const external = options.external || [];
const outputFilePath = path.resolve(outputDirectory, outputFileName);
const outputMapPath = path.resolve(outputDirectory, outputMapName);
const bundle = await rollup.rollup({
input: inputFile,
plugins: [
nodeResolve(),
commonjs(),
],
external,
});
const generatedBundle = await bundle.generate({
name: moduleName,
format: 'umd',
sourcemap: true
});
const result = generatedBundle.output[0];
result.code = result.code + '\n//# sourceMappingURL=' + path.basename(outputMapName);
await fs.promises.writeFile(outputFilePath, result.code);
await fs.promises.writeFile(outputMapPath, result.map);
return {
name: moduleName,
result: true
};
} catch (ex) {
return {
name: moduleName,
result: false,
exception: ex
};
}
}
export function rollupAngularSlickgrid(root: string): Promise<void> {
return new Promise(async (resolve, reject) => {
const result = await rollupModule({
moduleName: 'angular2-slickgrid',
inputFile: path.resolve(root, 'node_modules', 'angular2-slickgrid', 'out', 'index.js'),
outputDirectory: path.resolve(root, 'node_modules', 'angular2-slickgrid', 'out', 'bundles'),
outputFileName: 'angular2-slickgrid.umd.js'
});
if (!result.result) {
return reject(`angular2-slickgrid failed to bundle - ${result.exception}`);
}
resolve();
});
}
export function rollupAngular(root: string): Promise<void> {
return new Promise(async (resolve, reject) => {
const modules = ['core', 'animations', 'common', 'compiler', 'forms', 'platform-browser', 'platform-browser-dynamic', 'router'];
const tasks = modules.map((module) => {
return rollupModule({
moduleName: `ng.${module}`,
inputFile: path.resolve(root, 'node_modules', '@angular', module, '@angular', `${module}.es5.js`),
outputDirectory: path.resolve(root, 'node_modules', '@angular', module, 'bundles'),
outputFileName: `${module}.umd.js`,
external: modules.map(mn => `@angular/${mn}`)
});
});
// array of booleans
const x = await Promise.all(tasks);
const result = x.reduce<{ fails: string[]; exceptions: string[]; result: boolean }>((prev, current) => {
if (!current.result) {
prev.fails.push(current.name);
prev.exceptions.push(current.exception);
prev.result = false;
}
return prev;
}, {
fails: [],
exceptions: [],
result: true,
});
if (!result.result) {
return reject(`failures: ${result.fails} - exceptions: ${JSON.stringify(result.exceptions)}`);
}
resolve();
});
}

View File

@@ -130,7 +130,7 @@ function createESMSourcesAndResources2(options) {
write(getDestAbsoluteFilePath(file), JSON.stringify(tsConfig, null, '\t')); write(getDestAbsoluteFilePath(file), JSON.stringify(tsConfig, null, '\t'));
continue; continue;
} }
if (/\.d\.ts$/.test(file) || /\.css$/.test(file) || /\.js$/.test(file) || /\.ttf$/.test(file)) { if (/\.d\.ts$/.test(file) || /\.css$/.test(file) || /\.js$/.test(file)) {
// Transport the files directly // Transport the files directly
write(getDestAbsoluteFilePath(file), fs.readFileSync(path.join(SRC_FOLDER, file))); write(getDestAbsoluteFilePath(file), fs.readFileSync(path.join(SRC_FOLDER, file)));
continue; continue;
@@ -250,37 +250,35 @@ function transportCSS(module, enqueue, write) {
const filename = path.join(SRC_DIR, module); const filename = path.join(SRC_DIR, module);
const fileContents = fs.readFileSync(filename).toString(); const fileContents = fs.readFileSync(filename).toString();
const inlineResources = 'base64'; // see https://github.com/Microsoft/monaco-editor/issues/148 const inlineResources = 'base64'; // see https://github.com/Microsoft/monaco-editor/issues/148
const newContents = _rewriteOrInlineUrls(fileContents, inlineResources === 'base64'); const inlineResourcesLimit = 300000; //3000; // see https://github.com/Microsoft/monaco-editor/issues/336
const newContents = _rewriteOrInlineUrls(fileContents, inlineResources === 'base64', inlineResourcesLimit);
write(module, newContents); write(module, newContents);
return true; return true;
function _rewriteOrInlineUrls(contents, forceBase64) { function _rewriteOrInlineUrls(contents, forceBase64, inlineByteLimit) {
return _replaceURL(contents, (url) => { return _replaceURL(contents, (url) => {
const fontMatch = url.match(/^(.*).ttf\?(.*)$/); let imagePath = path.join(path.dirname(module), url);
if (fontMatch) { let fileContents = fs.readFileSync(path.join(SRC_DIR, imagePath));
const relativeFontPath = `${fontMatch[1]}.ttf`; // trim the query parameter if (fileContents.length < inlineByteLimit) {
const fontPath = path.join(path.dirname(module), relativeFontPath); const MIME = /\.svg$/.test(url) ? 'image/svg+xml' : 'image/png';
enqueue(fontPath); let DATA = ';base64,' + fileContents.toString('base64');
return relativeFontPath; if (!forceBase64 && /\.svg$/.test(url)) {
} // .svg => url encode as explained at https://codepen.io/tigt/post/optimizing-svgs-in-data-uris
const imagePath = path.join(path.dirname(module), url); let newText = fileContents.toString()
const fileContents = fs.readFileSync(path.join(SRC_DIR, imagePath)); .replace(/"/g, '\'')
const MIME = /\.svg$/.test(url) ? 'image/svg+xml' : 'image/png'; .replace(/</g, '%3C')
let DATA = ';base64,' + fileContents.toString('base64'); .replace(/>/g, '%3E')
if (!forceBase64 && /\.svg$/.test(url)) { .replace(/&/g, '%26')
// .svg => url encode as explained at https://codepen.io/tigt/post/optimizing-svgs-in-data-uris .replace(/#/g, '%23')
let newText = fileContents.toString() .replace(/\s+/g, ' ');
.replace(/"/g, '\'') let encodedData = ',' + newText;
.replace(/</g, '%3C') if (encodedData.length < DATA.length) {
.replace(/>/g, '%3E') DATA = encodedData;
.replace(/&/g, '%26') }
.replace(/#/g, '%23')
.replace(/\s+/g, ' ');
let encodedData = ',' + newText;
if (encodedData.length < DATA.length) {
DATA = encodedData;
} }
return '"data:' + MIME + DATA + '"';
} }
return '"data:' + MIME + DATA + '"'; enqueue(imagePath);
return url;
}); });
} }
function _replaceURL(contents, replacer) { function _replaceURL(contents, replacer) {

View File

@@ -154,7 +154,7 @@ export function createESMSourcesAndResources2(options: IOptions2): void {
continue; continue;
} }
if (/\.d\.ts$/.test(file) || /\.css$/.test(file) || /\.js$/.test(file) || /\.ttf$/.test(file)) { if (/\.d\.ts$/.test(file) || /\.css$/.test(file) || /\.js$/.test(file)) {
// Transport the files directly // Transport the files directly
write(getDestAbsoluteFilePath(file), fs.readFileSync(path.join(SRC_FOLDER, file))); write(getDestAbsoluteFilePath(file), fs.readFileSync(path.join(SRC_FOLDER, file)));
continue; continue;
@@ -290,41 +290,40 @@ function transportCSS(module: string, enqueue: (module: string) => void, write:
const filename = path.join(SRC_DIR, module); const filename = path.join(SRC_DIR, module);
const fileContents = fs.readFileSync(filename).toString(); const fileContents = fs.readFileSync(filename).toString();
const inlineResources = 'base64'; // see https://github.com/Microsoft/monaco-editor/issues/148 const inlineResources = 'base64'; // see https://github.com/Microsoft/monaco-editor/issues/148
const inlineResourcesLimit = 300000;//3000; // see https://github.com/Microsoft/monaco-editor/issues/336
const newContents = _rewriteOrInlineUrls(fileContents, inlineResources === 'base64'); const newContents = _rewriteOrInlineUrls(fileContents, inlineResources === 'base64', inlineResourcesLimit);
write(module, newContents); write(module, newContents);
return true; return true;
function _rewriteOrInlineUrls(contents: string, forceBase64: boolean): string { function _rewriteOrInlineUrls(contents: string, forceBase64: boolean, inlineByteLimit: number): string {
return _replaceURL(contents, (url) => { return _replaceURL(contents, (url) => {
const fontMatch = url.match(/^(.*).ttf\?(.*)$/); let imagePath = path.join(path.dirname(module), url);
if (fontMatch) { let fileContents = fs.readFileSync(path.join(SRC_DIR, imagePath));
const relativeFontPath = `${fontMatch[1]}.ttf`; // trim the query parameter
const fontPath = path.join(path.dirname(module), relativeFontPath);
enqueue(fontPath);
return relativeFontPath;
}
const imagePath = path.join(path.dirname(module), url); if (fileContents.length < inlineByteLimit) {
const fileContents = fs.readFileSync(path.join(SRC_DIR, imagePath)); const MIME = /\.svg$/.test(url) ? 'image/svg+xml' : 'image/png';
const MIME = /\.svg$/.test(url) ? 'image/svg+xml' : 'image/png'; let DATA = ';base64,' + fileContents.toString('base64');
let DATA = ';base64,' + fileContents.toString('base64');
if (!forceBase64 && /\.svg$/.test(url)) { if (!forceBase64 && /\.svg$/.test(url)) {
// .svg => url encode as explained at https://codepen.io/tigt/post/optimizing-svgs-in-data-uris // .svg => url encode as explained at https://codepen.io/tigt/post/optimizing-svgs-in-data-uris
let newText = fileContents.toString() let newText = fileContents.toString()
.replace(/"/g, '\'') .replace(/"/g, '\'')
.replace(/</g, '%3C') .replace(/</g, '%3C')
.replace(/>/g, '%3E') .replace(/>/g, '%3E')
.replace(/&/g, '%26') .replace(/&/g, '%26')
.replace(/#/g, '%23') .replace(/#/g, '%23')
.replace(/\s+/g, ' '); .replace(/\s+/g, ' ');
let encodedData = ',' + newText; let encodedData = ',' + newText;
if (encodedData.length < DATA.length) { if (encodedData.length < DATA.length) {
DATA = encodedData; DATA = encodedData;
}
} }
return '"data:' + MIME + DATA + '"';
} }
return '"data:' + MIME + DATA + '"';
enqueue(imagePath);
return url;
}); });
} }

View File

@@ -1,45 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const Lint = require("tslint");
class AbstractGlobalsRuleWalker extends Lint.RuleWalker {
constructor(file, program, opts, _config) {
super(file, opts);
this.program = program;
this._config = _config;
}
visitIdentifier(node) {
if (this.getDisallowedGlobals().some(disallowedGlobal => disallowedGlobal === node.text)) {
if (this._config.allowed && this._config.allowed.some(allowed => allowed === node.text)) {
return; // override
}
const checker = this.program.getTypeChecker();
const symbol = checker.getSymbolAtLocation(node);
if (symbol) {
const declarations = symbol.declarations;
if (Array.isArray(declarations) && symbol.declarations.some(declaration => {
if (declaration) {
const parent = declaration.parent;
if (parent) {
const sourceFile = parent.getSourceFile();
if (sourceFile) {
const fileName = sourceFile.fileName;
if (fileName && fileName.indexOf(this.getDefinitionPattern()) >= 0) {
return true;
}
}
}
}
return false;
})) {
this.addFailureAtNode(node, `Cannot use global '${node.text}' in '${this._config.target}'`);
}
}
}
super.visitIdentifier(node);
}
}
exports.AbstractGlobalsRuleWalker = AbstractGlobalsRuleWalker;

View File

@@ -1,57 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as ts from 'typescript';
import * as Lint from 'tslint';
interface AbstractGlobalsRuleConfig {
target: string;
allowed: string[];
}
export abstract class AbstractGlobalsRuleWalker extends Lint.RuleWalker {
constructor(file: ts.SourceFile, private program: ts.Program, opts: Lint.IOptions, private _config: AbstractGlobalsRuleConfig) {
super(file, opts);
}
protected abstract getDisallowedGlobals(): string[];
protected abstract getDefinitionPattern(): string;
visitIdentifier(node: ts.Identifier) {
if (this.getDisallowedGlobals().some(disallowedGlobal => disallowedGlobal === node.text)) {
if (this._config.allowed && this._config.allowed.some(allowed => allowed === node.text)) {
return; // override
}
const checker = this.program.getTypeChecker();
const symbol = checker.getSymbolAtLocation(node);
if (symbol) {
const declarations = symbol.declarations;
if (Array.isArray(declarations) && symbol.declarations.some(declaration => {
if (declaration) {
const parent = declaration.parent;
if (parent) {
const sourceFile = parent.getSourceFile();
if (sourceFile) {
const fileName = sourceFile.fileName;
if (fileName && fileName.indexOf(this.getDefinitionPattern()) >= 0) {
return true;
}
}
}
}
return false;
})) {
this.addFailureAtNode(node, `Cannot use global '${node.text}' in '${this._config.target}'`);
}
}
}
super.visitIdentifier(node);
}
}

View File

@@ -48,7 +48,9 @@ class DoubleQuotedStringArgRuleWalker extends Lint.RuleWalker {
const argText = arg.getText(); const argText = arg.getText();
const doubleQuotedArg = argText.length >= 2 && argText[0] === DoubleQuotedStringArgRuleWalker.DOUBLE_QUOTE && argText[argText.length - 1] === DoubleQuotedStringArgRuleWalker.DOUBLE_QUOTE; const doubleQuotedArg = argText.length >= 2 && argText[0] === DoubleQuotedStringArgRuleWalker.DOUBLE_QUOTE && argText[argText.length - 1] === DoubleQuotedStringArgRuleWalker.DOUBLE_QUOTE;
if (!doubleQuotedArg) { if (!doubleQuotedArg) {
const fix = Lint.Replacement.replaceFromTo(arg.getStart(), arg.getEnd(), `"${arg.getText().slice(1, arg.getWidth() - 1)}"`); const fix = [
Lint.Replacement.replaceFromTo(arg.getStart(), arg.getWidth(), `"${arg.getText().slice(1, arg.getWidth() - 2)}"`),
];
this.addFailure(this.createFailure(arg.getStart(), arg.getWidth(), `Argument ${this.argIndex + 1} to '${functionName}' must be double quoted.`, fix)); this.addFailure(this.createFailure(arg.getStart(), arg.getWidth(), `Argument ${this.argIndex + 1} to '${functionName}' must be double quoted.`, fix));
return; return;
} }

View File

@@ -68,7 +68,9 @@ class DoubleQuotedStringArgRuleWalker extends Lint.RuleWalker {
const doubleQuotedArg = argText.length >= 2 && argText[0] === DoubleQuotedStringArgRuleWalker.DOUBLE_QUOTE && argText[argText.length - 1] === DoubleQuotedStringArgRuleWalker.DOUBLE_QUOTE; const doubleQuotedArg = argText.length >= 2 && argText[0] === DoubleQuotedStringArgRuleWalker.DOUBLE_QUOTE && argText[argText.length - 1] === DoubleQuotedStringArgRuleWalker.DOUBLE_QUOTE;
if (!doubleQuotedArg) { if (!doubleQuotedArg) {
const fix = Lint.Replacement.replaceFromTo(arg.getStart(), arg.getEnd(), `"${arg.getText().slice(1, arg.getWidth() - 1)}"`); const fix = [
Lint.Replacement.replaceFromTo(arg.getStart(), arg.getWidth(), `"${arg.getText().slice(1, arg.getWidth() - 2)}"`),
];
this.addFailure(this.createFailure( this.addFailure(this.createFailure(
arg.getStart(), arg.getWidth(), arg.getStart(), arg.getWidth(),
`Argument ${this.argIndex! + 1} to '${functionName}' must be double quoted.`, fix)); `Argument ${this.argIndex! + 1} to '${functionName}' must be double quoted.`, fix));

View File

@@ -1,34 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const Lint = require("tslint");
const minimatch = require("minimatch");
const abstractGlobalsRule_1 = require("./abstractGlobalsRule");
class Rule extends Lint.Rules.TypedRule {
applyWithProgram(sourceFile, program) {
const configs = this.getOptions().ruleArguments;
for (const config of configs) {
if (minimatch(sourceFile.fileName, config.target)) {
return this.applyWithWalker(new NoDOMGlobalsRuleWalker(sourceFile, program, this.getOptions(), config));
}
}
return [];
}
}
exports.Rule = Rule;
class NoDOMGlobalsRuleWalker extends abstractGlobalsRule_1.AbstractGlobalsRuleWalker {
getDefinitionPattern() {
return 'lib.dom.d.ts';
}
getDisallowedGlobals() {
// intentionally not complete
return [
"window",
"document",
"HTMLElement"
];
}
}

View File

@@ -1,45 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as ts from 'typescript';
import * as Lint from 'tslint';
import * as minimatch from 'minimatch';
import { AbstractGlobalsRuleWalker } from './abstractGlobalsRule';
interface NoDOMGlobalsRuleConfig {
target: string;
allowed: string[];
}
export class Rule extends Lint.Rules.TypedRule {
applyWithProgram(sourceFile: ts.SourceFile, program: ts.Program): Lint.RuleFailure[] {
const configs = <NoDOMGlobalsRuleConfig[]>this.getOptions().ruleArguments;
for (const config of configs) {
if (minimatch(sourceFile.fileName, config.target)) {
return this.applyWithWalker(new NoDOMGlobalsRuleWalker(sourceFile, program, this.getOptions(), config));
}
}
return [];
}
}
class NoDOMGlobalsRuleWalker extends AbstractGlobalsRuleWalker {
getDefinitionPattern(): string {
return 'lib.dom.d.ts';
}
getDisallowedGlobals(): string[] {
// intentionally not complete
return [
"window",
"document",
"HTMLElement"
];
}
}

View File

@@ -1,41 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const Lint = require("tslint");
const minimatch = require("minimatch");
const abstractGlobalsRule_1 = require("./abstractGlobalsRule");
class Rule extends Lint.Rules.TypedRule {
applyWithProgram(sourceFile, program) {
const configs = this.getOptions().ruleArguments;
for (const config of configs) {
if (minimatch(sourceFile.fileName, config.target)) {
return this.applyWithWalker(new NoNodejsGlobalsRuleWalker(sourceFile, program, this.getOptions(), config));
}
}
return [];
}
}
exports.Rule = Rule;
class NoNodejsGlobalsRuleWalker extends abstractGlobalsRule_1.AbstractGlobalsRuleWalker {
getDefinitionPattern() {
return '@types/node';
}
getDisallowedGlobals() {
// https://nodejs.org/api/globals.html#globals_global_objects
return [
"NodeJS",
"Buffer",
"__dirname",
"__filename",
"clearImmediate",
"exports",
"global",
"module",
"process",
"setImmediate"
];
}
}

View File

@@ -1,52 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as ts from 'typescript';
import * as Lint from 'tslint';
import * as minimatch from 'minimatch';
import { AbstractGlobalsRuleWalker } from './abstractGlobalsRule';
interface NoNodejsGlobalsConfig {
target: string;
allowed: string[];
}
export class Rule extends Lint.Rules.TypedRule {
applyWithProgram(sourceFile: ts.SourceFile, program: ts.Program): Lint.RuleFailure[] {
const configs = <NoNodejsGlobalsConfig[]>this.getOptions().ruleArguments;
for (const config of configs) {
if (minimatch(sourceFile.fileName, config.target)) {
return this.applyWithWalker(new NoNodejsGlobalsRuleWalker(sourceFile, program, this.getOptions(), config));
}
}
return [];
}
}
class NoNodejsGlobalsRuleWalker extends AbstractGlobalsRuleWalker {
getDefinitionPattern(): string {
return '@types/node';
}
getDisallowedGlobals(): string[] {
// https://nodejs.org/api/globals.html#globals_global_objects
return [
"NodeJS",
"Buffer",
"__dirname",
"__filename",
"clearImmediate",
"exports",
"global",
"module",
"process",
"setImmediate"
];
}
}

View File

@@ -1,33 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const Lint = require("tslint");
const minimatch = require("minimatch");
class Rule extends Lint.Rules.AbstractRule {
apply(sourceFile) {
const args = this.getOptions().ruleArguments[0];
if (args.exclude.every(x => !minimatch(sourceFile.fileName, x))) {
return this.applyWithWalker(new NoSyncRuleWalker(sourceFile, this.getOptions()));
}
return [];
}
}
exports.Rule = Rule;
class NoSyncRuleWalker extends Lint.RuleWalker {
constructor(file, opts) {
super(file, opts);
}
visitCallExpression(node) {
if (node.expression && NoSyncRuleWalker.operations.some(x => node.expression.getText().indexOf(x) >= 0)) {
this.addFailureAtNode(node, `Do not use Sync operations`);
}
super.visitCallExpression(node);
}
}
NoSyncRuleWalker.operations = ['readFileSync', 'writeFileSync', 'existsSync', 'fchmodSync', 'lchmodSync',
'statSync', 'fstatSync', 'lstatSync', 'linkSync', 'symlinkSync', 'readlinkSync', 'realpathSync', 'unlinkSync', 'rmdirSync',
'mkdirSync', 'mkdtempSync', 'readdirSync', 'openSync', 'utimesSync', 'futimesSync', 'fsyncSync', 'writeSync', 'readSync',
'appendFileSync', 'accessSync', 'fdatasyncSync', 'copyFileSync'];

View File

@@ -1,45 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as ts from 'typescript';
import * as Lint from 'tslint';
import * as minimatch from 'minimatch';
interface NoSyncRuleConfig {
exclude: string[];
}
export class Rule extends Lint.Rules.AbstractRule {
apply(sourceFile: ts.SourceFile): Lint.RuleFailure[] {
const args = <NoSyncRuleConfig>this.getOptions().ruleArguments[0];
if (args.exclude.every(x => !minimatch(sourceFile.fileName, x))) {
return this.applyWithWalker(new NoSyncRuleWalker(sourceFile, this.getOptions()));
}
return [];
}
}
class NoSyncRuleWalker extends Lint.RuleWalker {
private static readonly operations = ['readFileSync', 'writeFileSync', 'existsSync', 'fchmodSync', 'lchmodSync',
'statSync', 'fstatSync', 'lstatSync', 'linkSync', 'symlinkSync', 'readlinkSync', 'realpathSync', 'unlinkSync', 'rmdirSync',
'mkdirSync', 'mkdtempSync', 'readdirSync', 'openSync', 'utimesSync', 'futimesSync', 'fsyncSync', 'writeSync', 'readSync',
'appendFileSync', 'accessSync', 'fdatasyncSync', 'copyFileSync'];
constructor(file: ts.SourceFile, opts: Lint.IOptions) {
super(file, opts);
}
visitCallExpression(node: ts.CallExpression) {
if (node.expression && NoSyncRuleWalker.operations.some(x => node.expression.getText().indexOf(x) >= 0)) {
this.addFailureAtNode(node, `Do not use Sync operations`);
}
super.visitCallExpression(node);
}
}

View File

@@ -1,49 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const ts = require("typescript");
const Lint = require("tslint");
/**
* Implementation of the no-localize-keys-with-underscore rule which verifies that keys to the localize
* calls don't contain underscores (_) since those break the localization process.
*/
class Rule extends Lint.Rules.AbstractRule {
apply(sourceFile) {
return this.applyWithWalker(new NoLocalizeKeysWithUnderscore(sourceFile, this.getOptions()));
}
}
exports.Rule = Rule;
const signatures = [
"localize",
"nls.localize"
];
class NoLocalizeKeysWithUnderscore extends Lint.RuleWalker {
constructor(file, opts) {
super(file, opts);
}
visitCallExpression(node) {
this.checkCallExpression(node);
super.visitCallExpression(node);
}
checkCallExpression(node) {
// If this isn't one of the localize functions then continue on
const functionName = node.expression.getText();
if (functionName && !signatures.some(s => s === functionName)) {
return;
}
const arg = node && node.arguments && node.arguments.length > 0 ? node.arguments[0] : undefined; // The key is the first element
// Ignore if the arg isn't a string - we expect the compiler to warn if that's an issue
if (arg && ts.isStringLiteral(arg)) {
if (arg.getText().indexOf('_') >= 0) {
const fix = [
Lint.Replacement.replaceFromTo(arg.getStart(), arg.getEnd(), `${arg.getText().replace(/_/g, '.')}`),
];
this.addFailure(this.createFailure(arg.getStart(), arg.getWidth(), `Keys for localize calls must not contain underscores. Use periods (.) instead.`, fix));
return;
}
}
}
}

View File

@@ -1,55 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as ts from 'typescript';
import * as Lint from 'tslint';
/**
* Implementation of the no-localize-keys-with-underscore rule which verifies that keys to the localize
* calls don't contain underscores (_) since those break the localization process.
*/
export class Rule extends Lint.Rules.AbstractRule {
public apply(sourceFile: ts.SourceFile): Lint.RuleFailure[] {
return this.applyWithWalker(new NoLocalizeKeysWithUnderscore(sourceFile, this.getOptions()));
}
}
const signatures = [
"localize",
"nls.localize"
];
class NoLocalizeKeysWithUnderscore extends Lint.RuleWalker {
constructor(file: ts.SourceFile, opts: Lint.IOptions) {
super(file, opts);
}
protected visitCallExpression(node: ts.CallExpression): void {
this.checkCallExpression(node);
super.visitCallExpression(node);
}
private checkCallExpression(node: ts.CallExpression): void {
// If this isn't one of the localize functions then continue on
const functionName = node.expression.getText();
if (functionName && !signatures.some(s => s === functionName)) {
return;
}
const arg = node && node.arguments && node.arguments.length > 0 ? node.arguments[0] : undefined; // The key is the first element
// Ignore if the arg isn't a string - we expect the compiler to warn if that's an issue
if(arg && ts.isStringLiteral(arg)) {
if (arg.getText().indexOf('_') >= 0) {
const fix = [
Lint.Replacement.replaceFromTo(arg.getStart(), arg.getEnd(), `${arg.getText().replace(/_/g, '.')}`),
];
this.addFailure(this.createFailure(
arg.getStart(), arg.getWidth(),
`Keys for localize calls must not contain underscores. Use periods (.) instead.`, fix));
return;
}
}
}
}

View File

@@ -11,9 +11,6 @@ const Lint = require("tslint");
*/ */
class Rule extends Lint.Rules.AbstractRule { class Rule extends Lint.Rules.AbstractRule {
apply(sourceFile) { apply(sourceFile) {
if (/\.d.ts$/.test(sourceFile.fileName)) {
return [];
}
return this.applyWithWalker(new NoUnexternalizedStringsRuleWalker(sourceFile, this.getOptions())); return this.applyWithWalker(new NoUnexternalizedStringsRuleWalker(sourceFile, this.getOptions()));
} }
} }

View File

@@ -11,9 +11,6 @@ import * as Lint from 'tslint';
*/ */
export class Rule extends Lint.Rules.AbstractRule { export class Rule extends Lint.Rules.AbstractRule {
public apply(sourceFile: ts.SourceFile): Lint.RuleFailure[] { public apply(sourceFile: ts.SourceFile): Lint.RuleFailure[] {
if (/\.d.ts$/.test(sourceFile.fileName)) {
return [];
}
return this.applyWithWalker(new NoUnexternalizedStringsRuleWalker(sourceFile, this.getOptions())); return this.applyWithWalker(new NoUnexternalizedStringsRuleWalker(sourceFile, this.getOptions()));
} }
} }

View File

@@ -1,28 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const Lint = require("tslint");
class Rule extends Lint.Rules.TypedRule {
applyWithProgram(sourceFile, program) {
if (program.getCompilerOptions().alwaysStrict) {
return this.applyWithWalker(new NoUselessStrictRuleWalker(sourceFile, this.getOptions()));
}
return [];
}
}
exports.Rule = Rule;
class NoUselessStrictRuleWalker extends Lint.RuleWalker {
visitStringLiteral(node) {
this.checkStringLiteral(node);
super.visitStringLiteral(node);
}
checkStringLiteral(node) {
const text = node.getText();
if (text === '\'use strict\'' || text === '"use strict"') {
this.addFailureAtNode(node, 'use strict directive is unnecessary');
}
}
}

View File

@@ -1,30 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as Lint from 'tslint';
import * as ts from 'typescript';
export class Rule extends Lint.Rules.TypedRule {
public applyWithProgram(sourceFile: ts.SourceFile, program: ts.Program): Lint.RuleFailure[] {
if (program.getCompilerOptions().alwaysStrict) {
return this.applyWithWalker(new NoUselessStrictRuleWalker(sourceFile, this.getOptions()));
}
return [];
}
}
class NoUselessStrictRuleWalker extends Lint.RuleWalker {
protected visitStringLiteral(node: ts.StringLiteral): void {
this.checkStringLiteral(node);
super.visitStringLiteral(node);
}
private checkStringLiteral(node: ts.StringLiteral): void {
const text = node.getText();
if (text === '\'use strict\'' || text === '"use strict"') {
this.addFailureAtNode(node, 'use strict directive is unnecessary');
}
}
}

View File

@@ -7,12 +7,10 @@ declare module "gulp-tsb" {
} }
export interface IncrementalCompiler { export interface IncrementalCompiler {
(token?: ICancellationToken): NodeJS.ReadWriteStream; (token?:ICancellationToken): NodeJS.ReadWriteStream;
src(opts?: { // program?: ts.Program;
cwd?: string;
base?: string;
}): NodeJS.ReadStream;
} }
export function create(projectPath: string, existingOptions: any, verbose?: boolean, onError?: (message: any) => void): IncrementalCompiler;
} export function create(configOrName: { [option: string]: string | number | boolean; } | string, verbose?: boolean, json?: boolean, onError?: (message: any) => void): IncrementalCompiler;
}

View File

@@ -121,7 +121,7 @@ function loadSourcemaps() {
return; return;
} }
if (!f.contents) { if (!f.contents) {
cb(undefined, f); cb(new Error('empty file'));
return; return;
} }
const contents = f.contents.toString('utf8'); const contents = f.contents.toString('utf8');
@@ -166,23 +166,20 @@ function stripSourceMappingURL() {
} }
exports.stripSourceMappingURL = stripSourceMappingURL; exports.stripSourceMappingURL = stripSourceMappingURL;
function rimraf(dir) { function rimraf(dir) {
const result = () => new Promise((c, e) => { let retries = 0;
let retries = 0; const retry = (cb) => {
const retry = () => { _rimraf(dir, { maxBusyTries: 1 }, (err) => {
_rimraf(dir, { maxBusyTries: 1 }, (err) => { if (!err) {
if (!err) { return cb();
return c(); }
} if (err.code === 'ENOTEMPTY' && ++retries < 5) {
if (err.code === 'ENOTEMPTY' && ++retries < 5) { return setTimeout(() => retry(cb), 10);
return setTimeout(() => retry(), 10); }
} return cb(err);
return e(err); });
}); };
}; retry.taskName = `clean-${path.basename(dir).toLowerCase()}`;
retry(); return retry;
});
result.taskName = `clean-${path.basename(dir).toLowerCase()}`;
return result;
} }
exports.rimraf = rimraf; exports.rimraf = rimraf;
function getVersion(root) { function getVersion(root) {
@@ -222,10 +219,3 @@ function versionStringToNumber(versionStr) {
return parseInt(match[1], 10) * 1e4 + parseInt(match[2], 10) * 1e2 + parseInt(match[3], 10); return parseInt(match[1], 10) * 1e4 + parseInt(match[2], 10) * 1e2 + parseInt(match[3], 10);
} }
exports.versionStringToNumber = versionStringToNumber; exports.versionStringToNumber = versionStringToNumber;
function streamToPromise(stream) {
return new Promise((c, e) => {
stream.on('error', err => e(err));
stream.on('end', () => c());
});
}
exports.streamToPromise = streamToPromise;

View File

@@ -165,7 +165,7 @@ export function loadSourcemaps(): NodeJS.ReadWriteStream {
} }
if (!f.contents) { if (!f.contents) {
cb(undefined, f); cb(new Error('empty file'));
return; return;
} }
@@ -218,29 +218,24 @@ export function stripSourceMappingURL(): NodeJS.ReadWriteStream {
return es.duplex(input, output); return es.duplex(input, output);
} }
export function rimraf(dir: string): () => Promise<void> { export function rimraf(dir: string): (cb: any) => void {
const result = () => new Promise<void>((c, e) => { let retries = 0;
let retries = 0;
const retry = () => { const retry = (cb: (err?: any) => void) => {
_rimraf(dir, { maxBusyTries: 1 }, (err: any) => { _rimraf(dir, { maxBusyTries: 1 }, (err: any) => {
if (!err) { if (!err) {
return c(); return cb();
} }
if (err.code === 'ENOTEMPTY' && ++retries < 5) { if (err.code === 'ENOTEMPTY' && ++retries < 5) {
return setTimeout(() => retry(), 10); return setTimeout(() => retry(cb), 10);
} }
return e(err); return cb(err);
}); });
}; };
retry.taskName = `clean-${path.basename(dir).toLowerCase()}`;
retry(); return retry;
});
result.taskName = `clean-${path.basename(dir).toLowerCase()}`;
return result;
} }
export function getVersion(root: string): string | undefined { export function getVersion(root: string): string | undefined {
@@ -286,10 +281,3 @@ export function versionStringToNumber(versionStr: string) {
return parseInt(match[1], 10) * 1e4 + parseInt(match[2], 10) * 1e2 + parseInt(match[3], 10); return parseInt(match[1], 10) * 1e4 + parseInt(match[2], 10) * 1e2 + parseInt(match[3], 10);
} }
export function streamToPromise(stream: NodeJS.ReadWriteStream): Promise<void> {
return new Promise((c, e) => {
stream.on('error', err => e(err));
stream.on('end', () => c());
});
}

View File

@@ -5,6 +5,17 @@
const es = require('event-stream'); const es = require('event-stream');
/** Ugly hack for gulp-tsb */
function handleDeletions() {
return es.mapSync(f => {
if (/\.ts$/.test(f.relative) && !f.contents) {
f.contents = Buffer.from('');
f.stat = { mtime: new Date() };
}
return f;
});
}
let watch = undefined; let watch = undefined;
@@ -13,5 +24,6 @@ if (!watch) {
} }
module.exports = function () { module.exports = function () {
return watch.apply(null, arguments); return watch.apply(null, arguments)
.pipe(handleDeletions());
}; };

View File

@@ -148,9 +148,8 @@ function getMassagedTopLevelDeclarationText(sourceFile, declaration, importName,
} }
}); });
} }
result = result.replace(/export default /g, 'export '); result = result.replace(/export default/g, 'export');
result = result.replace(/export declare /g, 'export '); result = result.replace(/export declare/g, 'export');
result = result.replace(/declare /g, '');
if (declaration.kind === ts.SyntaxKind.EnumDeclaration) { if (declaration.kind === ts.SyntaxKind.EnumDeclaration) {
result = result.replace(/const enum/, 'enum'); result = result.replace(/const enum/, 'enum');
enums.push(result); enums.push(result);
@@ -516,7 +515,7 @@ class DeclarationResolver {
'file.ts': fileContents 'file.ts': fileContents
}; };
const service = ts.createLanguageService(new TypeScriptLanguageServiceHost({}, fileMap, {})); const service = ts.createLanguageService(new TypeScriptLanguageServiceHost({}, fileMap, {}));
const text = service.getEmitOutput('file.ts', true, true).outputFiles[0].text; const text = service.getEmitOutput('file.ts', true).outputFiles[0].text;
return new CacheEntry(ts.createSourceFile(fileName, text, ts.ScriptTarget.ES5), mtime); return new CacheEntry(ts.createSourceFile(fileName, text, ts.ScriptTarget.ES5), mtime);
} }
} }

View File

@@ -178,9 +178,8 @@ function getMassagedTopLevelDeclarationText(sourceFile: ts.SourceFile, declarati
} }
}); });
} }
result = result.replace(/export default /g, 'export '); result = result.replace(/export default/g, 'export');
result = result.replace(/export declare /g, 'export '); result = result.replace(/export declare/g, 'export');
result = result.replace(/declare /g, '');
if (declaration.kind === ts.SyntaxKind.EnumDeclaration) { if (declaration.kind === ts.SyntaxKind.EnumDeclaration) {
result = result.replace(/const enum/, 'enum'); result = result.replace(/const enum/, 'enum');
@@ -617,7 +616,7 @@ export class DeclarationResolver {
'file.ts': fileContents 'file.ts': fileContents
}; };
const service = ts.createLanguageService(new TypeScriptLanguageServiceHost({}, fileMap, {})); const service = ts.createLanguageService(new TypeScriptLanguageServiceHost({}, fileMap, {}));
const text = service.getEmitOutput('file.ts', true, true).outputFiles[0].text; const text = service.getEmitOutput('file.ts', true).outputFiles[0].text;
return new CacheEntry( return new CacheEntry(
ts.createSourceFile(fileName, text, ts.ScriptTarget.ES5), ts.createSourceFile(fileName, text, ts.ScriptTarget.ES5),
mtime mtime

View File

@@ -48,7 +48,7 @@ declare namespace monaco.editor {
#include(vs/editor/standalone/common/standaloneThemeService): BuiltinTheme, IStandaloneThemeData, IColors #include(vs/editor/standalone/common/standaloneThemeService): BuiltinTheme, IStandaloneThemeData, IColors
#include(vs/editor/common/modes/supports/tokenization): ITokenThemeRule #include(vs/editor/common/modes/supports/tokenization): ITokenThemeRule
#include(vs/editor/common/services/webWorker): MonacoWebWorker, IWebWorkerOptions #include(vs/editor/common/services/webWorker): MonacoWebWorker, IWebWorkerOptions
#include(vs/editor/standalone/browser/standaloneCodeEditor): IActionDescriptor, IStandaloneEditorConstructionOptions, IDiffEditorConstructionOptions, IStandaloneCodeEditor, IStandaloneDiffEditor #include(vs/editor/standalone/browser/standaloneCodeEditor): IActionDescriptor, IEditorConstructionOptions, IDiffEditorConstructionOptions, IStandaloneCodeEditor, IStandaloneDiffEditor
export interface ICommandHandler { export interface ICommandHandler {
(...args: any[]): void; (...args: any[]): void;
} }
@@ -62,7 +62,6 @@ export interface ICommandHandler {
#includeAll(vs/editor/common/editorCommon;editorOptions.=>): IScrollEvent #includeAll(vs/editor/common/editorCommon;editorOptions.=>): IScrollEvent
#includeAll(vs/editor/common/model/textModelEvents): #includeAll(vs/editor/common/model/textModelEvents):
#includeAll(vs/editor/common/controller/cursorEvents): #includeAll(vs/editor/common/controller/cursorEvents):
#include(vs/platform/accessibility/common/accessibility): AccessibilitySupport
#includeAll(vs/editor/common/config/editorOptions): #includeAll(vs/editor/common/config/editorOptions):
#includeAll(vs/editor/browser/editorBrowser;editorCommon.=>;editorOptions.=>): #includeAll(vs/editor/browser/editorBrowser;editorCommon.=>;editorOptions.=>):
#include(vs/editor/common/config/fontInfo): FontInfo, BareFontInfo #include(vs/editor/common/config/fontInfo): FontInfo, BareFontInfo

View File

@@ -1,7 +1,7 @@
{ {
"name": "monaco-editor-core", "name": "monaco-editor-core",
"private": true, "private": true,
"version": "0.18.0", "version": "0.16.0",
"description": "A browser based code editor", "description": "A browser based code editor",
"author": "Microsoft Corporation", "author": "Microsoft Corporation",
"license": "MIT", "license": "MIT",

View File

@@ -70,7 +70,6 @@ runtime "${runtime}"`;
} }
yarnInstall(`build`); // node modules required for build yarnInstall(`build`); // node modules required for build
yarnInstall('test/automation'); // node modules required for smoketest
yarnInstall('test/smoke'); // node modules required for smoketest yarnInstall('test/smoke'); // node modules required for smoketest
yarnInstallBuildDependencies(); // node modules for watching, specific to host node version, not electron yarnInstallBuildDependencies(); // node modules for watching, specific to host node version, not electron
@@ -79,42 +78,4 @@ const processTreeDts = path.join('node_modules', 'windows-process-tree', 'typing
if (fs.existsSync(processTreeDts)) { if (fs.existsSync(processTreeDts)) {
console.log('Removing windows-process-tree.d.ts'); console.log('Removing windows-process-tree.d.ts');
fs.unlinkSync(processTreeDts); fs.unlinkSync(processTreeDts);
} }
function getInstalledVersion(packageName, cwd) {
const opts = {};
if (cwd) {
opts.cwd = cwd;
}
const result = cp.spawnSync(yarn, ['list', '--pattern', packageName], opts);
const stdout = result.stdout.toString();
const match = stdout.match(new RegExp(packageName + '@(\\S+)'));
if (!match || !match[1]) {
throw new Error('Unexpected output from yarn list: ' + stdout);
}
return match[1];
}
function assertSameVersionsBetweenFolders(packageName, otherFolder) {
const baseVersion = getInstalledVersion(packageName);
const otherVersion = getInstalledVersion(packageName, otherFolder);
if (baseVersion !== otherVersion) {
throw new Error(`Mismatched versions installed for ${packageName}: root has ${baseVersion}, ./${otherFolder} has ${otherVersion}. These should be the same!`);
}
}
// Check that modules in both the base package.json and remote/ have the same version installed
const requireSameVersionsInRemote = [
'xterm',
'xterm-addon-search',
'xterm-addon-web-links',
'node-pty',
'vscode-ripgrep'
];
requireSameVersionsInRemote.forEach(packageName => {
assertSameVersionsBetweenFolders(packageName, 'remote');
});

View File

@@ -31,4 +31,4 @@ if (!/yarn\.js$|yarnpkg$/.test(process.env['npm_execpath'])) {
if (err) { if (err) {
console.error(''); console.error('');
process.exit(1); process.exit(1);
} }

View File

@@ -132,7 +132,7 @@ exports.update = function (repoId, repoPath, dest, modifyGrammar, version = 'mas
if (packageJsonPathOverride) { if (packageJsonPathOverride) {
packageJsonPath += packageJsonPathOverride; packageJsonPath += packageJsonPathOverride;
} }
packageJsonPath += 'package.json'; packageJsonPath += '/package.json';
for (let i = 0; i < cgmanifestRead.registrations.length; i++) { for (let i = 0; i < cgmanifestRead.registrations.length; i++) {
if (cgmanifestRead.registrations[i].component.git.repositoryUrl.substr(cgmanifestRead.registrations[i].component.git.repositoryUrl.length - repoId.length, repoId.length) === repoId) { if (cgmanifestRead.registrations[i].component.git.repositoryUrl.substr(cgmanifestRead.registrations[i].component.git.repositoryUrl.length - repoId.length, repoId.length) === repoId) {
cgmanifestRead.registrations[i].component.git.commitHash = info.commitSha; cgmanifestRead.registrations[i].component.git.commitHash = info.commitSha;

View File

@@ -24,9 +24,9 @@
"@types/pump": "^1.0.1", "@types/pump": "^1.0.1",
"@types/request": "^2.47.0", "@types/request": "^2.47.0",
"@types/rimraf": "^2.0.2", "@types/rimraf": "^2.0.2",
"@types/terser": "^3.12.0",
"@types/through": "^0.0.29", "@types/through": "^0.0.29",
"@types/through2": "^2.0.34", "@types/through2": "^2.0.34",
"@types/uglify-es": "^3.0.0",
"@types/underscore": "^1.8.9", "@types/underscore": "^1.8.9",
"@types/xml2js": "0.0.33", "@types/xml2js": "0.0.33",
"applicationinsights": "1.0.8", "applicationinsights": "1.0.8",
@@ -36,20 +36,15 @@
"github-releases": "^0.4.1", "github-releases": "^0.4.1",
"gulp-bom": "^1.0.0", "gulp-bom": "^1.0.0",
"gulp-sourcemaps": "^1.11.0", "gulp-sourcemaps": "^1.11.0",
"gulp-uglify": "^3.0.0",
"iconv-lite": "0.4.23", "iconv-lite": "0.4.23",
"mime": "^1.3.4", "mime": "^1.3.4",
"minimist": "^1.2.0", "minimist": "^1.2.0",
"request": "^2.85.0", "request": "^2.85.0",
"rollup": "^1.20.3",
"rollup-plugin-commonjs": "^10.1.0",
"rollup-plugin-node-resolve": "^5.2.0",
"terser": "4.3.8",
"tslint": "^5.9.1", "tslint": "^5.9.1",
"service-downloader": "github:anthonydresser/service-downloader#0.1.7", "service-downloader": "github:anthonydresser/service-downloader#0.1.5",
"typescript": "3.7.0-dev.20191017", "typescript": "3.5.2",
"vsce": "1.48.0", "vsce": "1.48.0",
"vscode-telemetry-extractor": "^1.5.4", "vscode-telemetry-extractor": "1.5.3",
"xml2js": "^0.4.17" "xml2js": "^0.4.17"
}, },
"scripts": { "scripts": {

View File

@@ -1,57 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
const fs = require('fs').promises;
const path = require('path');
const readConfiguration = (async () => {
const parseConfigString = ((content) => {
try {
const result = JSON.parse(content);
return result;
}
catch (ex) {
console.log('Could NOT parse TEST_RUN_LIST:', content);
}
});
// Attempt to read from an enviornment variable
const testRunlist = process.env['TEST_RUN_LIST'];
if (testRunlist && testRunlist !== '') {
const result = parseConfigString(testRunlist);
if (result) {
console.log('Using the environment test run list:', result);
return result;
}
}
// Attempt to read from a config file
let testRunPath = process.env['TEST_RUN_LIST_FILE'];
if (!testRunPath || testRunPath === '') {
testRunPath = path.resolve(__dirname, '..', 'runlist.json');
}
try {
const contents = await fs.readFile(testRunPath);
return parseConfigString(contents);
}
catch (ex) {
console.log(`error reading file ${testRunPath}:`, ex);
}
});
(async () => {
const keys = process.argv.slice(2);
const configuration = await readConfiguration();
if (!configuration) {
console.log('no configuration was setup');
return;
}
const testList = [];
keys.forEach((key) => {
const arr = configuration[key];
if (arr) {
testList.push(...arr);
}
});
const result = `(${testList.join('|')})`;
console.log(result);
process.env['TEST_GREP'] = result;
})();

View File

@@ -1,67 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
const fs = require('fs').promises;
const path = require('path');
interface IntegrationTestConfig {
[key: string]: string[];
}
const readConfiguration = (async (): Promise<IntegrationTestConfig | void> => {
const parseConfigString = ((content: string): (IntegrationTestConfig | void) => {
try {
const result = JSON.parse(content);
return result as IntegrationTestConfig;
} catch (ex) {
console.log('Could NOT parse TEST_RUN_LIST:', content);
}
});
// Attempt to read from an enviornment variable
const testRunlist = process.env['TEST_RUN_LIST'];
if (testRunlist && testRunlist !== '') {
const result = parseConfigString(testRunlist);
if (result) {
console.log('Using the environment test run list:', result);
return result;
}
}
// Attempt to read from a config file
let testRunPath = process.env['TEST_RUN_LIST_FILE'];
if (!testRunPath || testRunPath === '') {
testRunPath = path.resolve(__dirname, '..', 'runlist.json');
}
try {
const contents = await fs.readFile(testRunPath);
return parseConfigString(contents);
} catch (ex) {
console.log(`error reading file ${testRunPath}:`, ex);
}
});
(async (): Promise<string | void> => {
const keys = process.argv.slice(2);
const configuration = await readConfiguration();
if (!configuration) {
console.log('no configuration was setup');
return;
}
const testList: string[] = [];
keys.forEach((key) => {
const arr = configuration[key];
if (arr) {
testList.push(...arr);
}
});
const result = `(${testList.join('|')})`;
console.log(result);
process.env['TEST_GREP'] = result;
})();

View File

@@ -29,7 +29,7 @@ dependencies = [
[[package]] [[package]]
name = "inno_updater" name = "inno_updater"
version = "0.8.2" version = "0.8.0"
dependencies = [ dependencies = [
"byteorder 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "byteorder 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"crc 1.7.0 (registry+https://github.com/rust-lang/crates.io-index)", "crc 1.7.0 (registry+https://github.com/rust-lang/crates.io-index)",

View File

@@ -20,8 +20,8 @@ Compression=lzma
SolidCompression=yes SolidCompression=yes
AppMutex={code:GetAppMutex} AppMutex={code:GetAppMutex}
SetupMutex={#AppMutex}setup SetupMutex={#AppMutex}setup
WizardImageFile="{#RepoDir}\resources\win32\inno-big-100.bmp,{#RepoDir}\resources\win32\inno-big-125.bmp,{#RepoDir}\resources\win32\inno-big-150.bmp,{#RepoDir}\resources\win32\inno-big-175.bmp,{#RepoDir}\resources\win32\inno-big-200.bmp,{#RepoDir}\resources\win32\inno-big-225.bmp,{#RepoDir}\resources\win32\inno-big-250.bmp" WizardImageFile={#RepoDir}\resources\win32\inno-big.bmp
WizardSmallImageFile="{#RepoDir}\resources\win32\inno-small-100.bmp,{#RepoDir}\resources\win32\inno-small-125.bmp,{#RepoDir}\resources\win32\inno-small-150.bmp,{#RepoDir}\resources\win32\inno-small-175.bmp,{#RepoDir}\resources\win32\inno-small-200.bmp,{#RepoDir}\resources\win32\inno-small-225.bmp,{#RepoDir}\resources\win32\inno-small-250.bmp" WizardSmallImageFile={#RepoDir}\resources\win32\inno-small.bmp
SetupIconFile={#RepoDir}\resources\win32\code.ico SetupIconFile={#RepoDir}\resources\win32\code.ico
UninstallDisplayIcon={app}\{#ExeBasename}.exe UninstallDisplayIcon={app}\{#ExeBasename}.exe
ChangesEnvironment=true ChangesEnvironment=true
@@ -100,22 +100,12 @@ Root: {#SoftwareClassesRootKey}; Subkey: "Software\Classes\{#RegValueName}Source
Root: {#SoftwareClassesRootKey}; Subkey: "Software\Classes\{#RegValueName}SourceFile\shell\open\command"; ValueType: string; ValueName: ""; ValueData: """{app}\{#ExeBasename}.exe"" ""%1""" Root: {#SoftwareClassesRootKey}; Subkey: "Software\Classes\{#RegValueName}SourceFile\shell\open\command"; ValueType: string; ValueName: ""; ValueData: """{app}\{#ExeBasename}.exe"" ""%1"""
Root: HKCU; Subkey: "Environment"; ValueType: expandsz; ValueName: "Path"; ValueData: "{olddata};{app}\bin"; Tasks: addtopath; Check: NeedsAddPath(ExpandConstant('{app}\bin')) Root: HKCU; Subkey: "Environment"; ValueType: expandsz; ValueName: "Path"; ValueData: "{olddata};{app}\bin"; Tasks: addtopath; Check: NeedsAddPath(ExpandConstant('{app}\bin'))
; .sql
Root: {#SoftwareClassesRootKey}; Subkey: "Software\Classes\.sql\OpenWithProgids"; ValueType: none; ValueName: "{#RegValueName}"; Flags: deletevalue uninsdeletevalue; Tasks: associatewithfiles Root: {#SoftwareClassesRootKey}; Subkey: "Software\Classes\.sql\OpenWithProgids"; ValueType: none; ValueName: "{#RegValueName}"; Flags: deletevalue uninsdeletevalue; Tasks: associatewithfiles
Root: {#SoftwareClassesRootKey}; Subkey: "Software\Classes\.sql\OpenWithProgids"; ValueType: string; ValueName: "{#RegValueName}.sql"; ValueData: ""; Flags: uninsdeletevalue; Tasks: associatewithfiles Root: {#SoftwareClassesRootKey}; Subkey: "Software\Classes\.sql\OpenWithProgids"; ValueType: string; ValueName: "{#RegValueName}.sql"; ValueData: ""; Flags: uninsdeletevalue; Tasks: associatewithfiles
Root: {#SoftwareClassesRootKey}; Subkey: "Software\Classes\{#RegValueName}.sql"; ValueType: string; ValueName: ""; ValueData: "{cm:SourceFile,SQL}"; Flags: uninsdeletekey; Tasks: associatewithfiles Root: {#SoftwareClassesRootKey}; Subkey: "Software\Classes\{#RegValueName}.sql"; ValueType: string; ValueName: ""; ValueData: "{cm:SourceFile,SQL}"; Flags: uninsdeletekey; Tasks: associatewithfiles
Root: {#SoftwareClassesRootKey}; Subkey: "Software\Classes\{#RegValueName}.sql"; ValueType: string; ValueName: "AppUserModelID"; ValueData: "{#AppUserId}"; Flags: uninsdeletekey; Tasks: associatewithfiles Root: {#SoftwareClassesRootKey}; Subkey: "Software\Classes\{#RegValueName}.sql"; ValueType: string; ValueName: "AppUserModelID"; ValueData: "{#AppUserId}"; Flags: uninsdeletekey; Tasks: associatewithfiles
Root: {#SoftwareClassesRootKey}; Subkey: "Software\Classes\{#RegValueName}.sql\DefaultIcon"; ValueType: string; ValueName: ""; ValueData: "{app}\resources\app\resources\win32\sql.ico"; Tasks: associatewithfiles Root: {#SoftwareClassesRootKey}; Subkey: "Software\Classes\{#RegValueName}.sql\DefaultIcon"; ValueType: string; ValueName: ""; ValueData: "{app}\resources\app\resources\win32\sql.ico"; Tasks: associatewithfiles
Root: {#SoftwareClassesRootKey}; Subkey: "Software\Classes\{#RegValueName}.sql\shell\open\command"; ValueType: string; ValueName: ""; ValueData: """{app}\{#ExeBasename}.exe"" ""%1"""; Tasks: associatewithfiles Root: {#SoftwareClassesRootKey}; Subkey: "Software\Classes\{#RegValueName}.sql\shell\open\command"; ValueType: string; ValueName: ""; ValueData: """{app}\{#ExeBasename}.exe"" ""%1"""; Tasks: associatewithfiles
; .ipynb
Root: {#SoftwareClassesRootKey}; Subkey: "Software\Classes\.ipynb\OpenWithProgids"; ValueType: none; ValueName: "{#RegValueName}"; Flags: deletevalue uninsdeletevalue; Tasks: associatewithfiles
Root: {#SoftwareClassesRootKey}; Subkey: "Software\Classes\.ipynb\OpenWithProgids"; ValueType: string; ValueName: "{#RegValueName}.ipynb"; ValueData: ""; Flags: uninsdeletevalue; Tasks: associatewithfiles
Root: {#SoftwareClassesRootKey}; Subkey: "Software\Classes\{#RegValueName}.ipynb"; ValueType: string; ValueName: ""; ValueData: "{cm:SourceFile,IPYNB}"; Flags: uninsdeletekey; Tasks: associatewithfiles
Root: {#SoftwareClassesRootKey}; Subkey: "Software\Classes\{#RegValueName}.ipynb"; ValueType: string; ValueName: "AppUserModelID"; ValueData: "{#AppUserId}"; Flags: uninsdeletekey; Tasks: associatewithfiles
Root: {#SoftwareClassesRootKey}; Subkey: "Software\Classes\{#RegValueName}.ipynb\DefaultIcon"; ValueType: string; ValueName: ""; ValueData: "{app}\resources\app\resources\win32\ipynb.ico"; Tasks: associatewithfiles
Root: {#SoftwareClassesRootKey}; Subkey: "Software\Classes\{#RegValueName}.ipynb\shell\open\command"; ValueType: string; ValueName: ""; ValueData: """{app}\{#ExeBasename}.exe"" ""%1"""; Tasks: associatewithfiles
; Environment ; Environment
#if "user" == InstallTarget #if "user" == InstallTarget
#define EnvironmentRootKey "HKCU" #define EnvironmentRootKey "HKCU"

Some files were not shown because too many files have changed in this diff Show More