Compare commits

..

39 Commits

Author SHA1 Message Date
Charles Gagnon
f0b6180e15 Fix (#7434)
* Fix query history icons

* Capitalize description text
2019-10-01 10:07:27 -07:00
Karl Burtram
cd4660abaa Bump Import extension to 0.11.0 for Oct 2019-09-30 12:43:12 -07:00
Karl Burtram
8320686170 Bump agent extension to 0.43.0 for Oct 2019-09-30 12:42:12 -07:00
Charles Gagnon
0a6779f96e Add toggle query history capture command/action (#7427)
* Add toggle query history capture command/action

* Add extension updates
2019-09-30 12:03:15 -07:00
Charles Gagnon
3e17618056 Add Clear All Query History command/action (#7408)
* Add clear all query history action/command

* Fix display issue when clearing

* Change localize ID and fix registration
2019-09-30 12:01:53 -07:00
Charles Gagnon
7d27d2f4f4 Fix endpoint text overflowing and add title tooltip (#7390)
* Fix endpoint text overflowing and add title tooltip

* Remove unneeded string interning
2019-09-27 12:43:53 -07:00
Chris LaFreniere
30ae023d73 Stop clearing out connecting and connected state when cancelling out of connection dialog (#7254)
* Stop clearing out connecting and connected state

* tweaks

* Handling cancel while connecting

* fix typo

* PR comments
2019-09-27 12:36:02 -07:00
Chris LaFreniere
c36596d3b3 Notebook Tokenization Fixes (#7375)
* Fix don't like; unclear if grammar necessary too

* Cleanup and sanity check

* Cleanup and sanity check

* Add test

* Call onBeforeAttached for 3 types of editor models
2019-09-27 12:35:53 -07:00
Anthony Dresser
afbe004c4f add sql folding (#7270) 2019-09-27 12:35:39 -07:00
Chris LaFreniere
539f2b2b2e Add Default File Type when Saving Chart (#7235)
* add file filter

* Code cleanup
2019-09-27 12:35:28 -07:00
Charles Gagnon
43e360165f Add enable logs setting for Flat File Import (#7342)
* Add config for enabling Flat File Import logging

* Move logs to default log location for extensions

* Add localized strings
2019-09-27 12:35:20 -07:00
Amir Omidi
c2727264ad EoL chars (#7225) 2019-09-27 12:35:11 -07:00
Karl Burtram
3c17ac1333 Activate XML features when SQL loads (#7228) 2019-09-27 12:35:03 -07:00
Charles Gagnon
218dad1b17 Update whoIsActive extension to use azdata (#7287)
* Update whoIsActive extension to use azdata

* Change path

* Update package-lock
2019-09-27 12:34:54 -07:00
jamesrod817
2ff4b51fe6 Tempdb (#7022)
* Server changes by James

* tempdb
2019-09-27 12:34:34 -07:00
Amir Omidi
423ad40210 Better cell selection (#6914)
* Better cell selection

* Explicit return type and undefined assignment

* More complex copy/paste

* Get TS to be less mad at me

* Remove EoL

* Fail safe if statement

* strict null check
2019-09-27 12:34:15 -07:00
Charles Gagnon
788103b2d3 Update query history README (#7164)
* Update query history README

* Fix typos
2019-09-27 12:34:07 -07:00
Charles Gagnon
c4ce709dfb Query History feature (#6579)
* Initial commit

* Fix up QueryEventType

* Making query history visible in view and open query command (#6479)

* Add QueryInfo to query event events

* Pull actual query text/connection info for displaying

* cons and expand (#6489)

* Making query history visible in view and open query command

* expand and icons

* Failure icon enabled (#6491)

* Making query history visible in view and open query command

* expand and icons

* failure icon enabled

* Minor cleanup

* Open query with connection and add run query (#6496)

* Add initial query-history extension

* Fix issues caused by master merge, cleanup and add query-history extension (#6567)

* Open query with connection and add run query

* Fix issues caused by latest master merges, cleanup and add query-history extension

* Remove child nodes (#6568)

* Open query with connection and add run query

* Fix issues caused by latest master merges, cleanup and add query-history extension

* Remove child node expansion

* Layering movement and add delete action (#6574)

* Open query with connection and add run query

* Fix issues caused by latest master merges, cleanup and add query-history extension

* Remove child node expansion

* Some layering movement and add delete action

* Move query tracking into service (#6578)

* Open query with connection and add run query

* Fix issues caused by latest master merges, cleanup and add query-history extension

* Remove child node expansion

* Some layering movement and add delete action

* Move query history tracking into service

* Add comment

* Fix actions

* Remove unnecessary type

* cleanup

* Remove unused section of README

* Fix merge issues and address PR comments

* Fix compile and tslint errors

* Change startup function name
2019-09-27 12:33:57 -07:00
Karl Burtram
e875e4a271 Bump to SQL Tools 2.0.0.17 for Notebook Agent fixes 2019-09-27 12:32:18 -07:00
Aasim Khan
ba99be6ec1 Added opening latest notebook run to context menu from notebooks pane (#7066)
* added agent notebooks, notebook history view and view materialized notebook button

* Got a basic UI running for viewing notebook history

* made some changes to make UI look good

* Added new notebook dialog

* Added new notebook Dialog

* Added create notebook dialog

* Added edit and delete notebook job

* Added some notebook history features

* Added new notebook job icons, fixed a minor bug
in openmaterializednotebookAPI and fixed the
schedule Picker API.

* Fixed Bugs in Notebook Grid expansion

* Fixed Notebook table highlighting;
grid generation is now done using code.

* fixed some UI bugs

* Added changes to reflect sqltoolservice api

* Fixed some localize keys

* Made changes in the PR and added
ability to open Template Notebooks from
notebook history view.

* Added pin and renaming to notebook history

* made some library calls async

* fixed an import bug caused by merging from master

* Validation in NotebookJobDialog

* Added entry points for scheduling notebooks
on file explorer and notebook editor

* Handled no active connections and
a small bug in collapsing grid

* fix a bug in scheduling notebook from explorer
and toolbar

* setting up agent providers from connection now

* changed modals

* Reupload edited template

* Add dialog info, solved an edit bug and localized
UI strings.

* Bug fixes in UI, notebook renaming and
editing template on the fly.

* fixed a bug that prevented editing notebook jobs from the notebook jobs table

* Fixed a cyclic dependency, made strings const and
some other changes in the PR

* Made some cyclic dependency and some fixes from PR

* made some changes mentioned in the PR

* Changed storage database health text

* Changed the sqltoolservice version to point to the latest build.

* Added open latest notebook run to notebooks view context menu

* Fixed a small compilation error

* fixed a spelling mistake in function name

* made changes mentioned in the PR and added open Notebook functionality to charts

* Changed some context menu strings and order

* made some changes from the PR and fixed an API call

* made some changes mentioned in the PR

* Changed sqltoolsservice version to point to the latest build
2019-09-27 11:28:19 -07:00
Karl Burtram
146aa48c51 Update SQL Tools to 2.0.0-release.16 for IntelliSense fix 2019-09-25 12:56:43 -07:00
Karl Burtram
60f82785c2 Bump package.json for October release 2019-09-11 15:33:55 -07:00
Aditya Bist
460c739a8d fix issue where sometimes ownerUri was null (#7094) 2019-09-05 15:57:46 -07:00
Cory Rivera
ba4c98cb0a Update CSS for inline notebook cell buttons (#7033)
(cherry picked from commit c94291af52)
2019-09-05 14:00:08 -07:00
Aditya Bist
f09241d4bf fix disconnect option for extension nodes (#7085) 2019-09-05 13:51:35 -07:00
Charles Gagnon
ca4a8bf485 Don't add unnecessary separator to OE action menu (#7071)
* Don't add unnecessary separator to OE action menu

* Fix another check
2019-09-05 09:27:25 -07:00
Maddy
965d418766 make books viewlet only available in insiders (#7055)
* make books viewlet only available in insiders

* insiders only: Books widget on dashboard
2019-09-05 09:24:03 -07:00
Karl Burtram
11b6b6f3a7 Fix agent extension version back to 0.42.0 (#7068) 2019-09-04 17:19:55 -07:00
Aditya Bist
667051c57a fix cms server deletion from memento (#7062) 2019-09-04 15:47:16 -07:00
Amir Omidi
aa0f27bf7f Remove the eol character at the end of string (#7056) 2019-09-04 15:34:34 -07:00
Aditya Bist
071585f17e removed separator from context menu (#7059)
* removed separator from context menu

* remove unused import
2019-09-04 15:34:23 -07:00
Udeesha Gautam
b5d4cead84 Trimming the text for SQLCMD button (#7050) 2019-09-04 15:26:50 -07:00
Amir Omidi
770e38e53f Installer icons (#7057) 2019-09-04 15:18:31 -07:00
Aasim Khan
73364777b5 Agent Notebooks Scheduler (#6786)
* added agent notebooks, notebook history view and view materialized notebook button

* Got a basic UI running for viewing notebook history

* made some changes to make UI look good

* Added new notebook dialog

* Added new notebook Dialog

* Added create notebook dialog

* Added edit and delete notebook job

* Added some notebook history features

* Added new notebook job icons, fixed a minor bug
in openmaterializednotebookAPI and fixed the
schedule Picker API.

* Fixed Bugs in Notebook Grid expansion

* Fixed Notebook table highlighting;
grid generation is now done using code.

* fixed some UI bugs

* Added changes to reflect sqltoolservice api

* Fixed some localize keys

* Made changes in the PR and added
ability to open Template Notebooks from
notebook history view.

* Added pin and renaming to notebook history

* made some library calls async

* fixed an import bug caused by merging from master

* Validation in NotebookJobDialog

* Added entry points for scheduling notebooks
on file explorer and notebook editor

* Handled no active connections and
a small bug in collapsing grid

* fix a bug in scheduling notebook from explorer
and toolbar

* setting up agent providers from connection now

* changed modals

* Reupload edited template

* Add dialog info, solved an edit bug and localized
UI strings.

* Bug fixes in UI, notebook renaming and
editing template on the fly.

* fixed a bug that prevented editing notebook jobs from the notebook jobs table

* Fixed a cyclic dependency, made strings const and
some other changes in the PR

* Made some cyclic dependency and some fixes from PR

* made some changes mentioned in the PR

* Changed storage database health text

* Changed the sqltoolservice version to point to the latest build.
2019-09-04 15:14:51 -07:00
Charles Gagnon
1b16ef2961 Register loadCompletionExtension command (#6985) 2019-09-04 15:14:30 -07:00
DrewSK
83c5f92b19 fix(snippets): ads parenthesis to sqlcreateindex snippet (#7020) 2019-09-04 14:40:24 -07:00
Anthony Dresser
12c9d41c99 fix index for panel push (#7035) 2019-09-04 14:37:12 -07:00
Karl Burtram
8745f38320 Update extension versions for September (#7053) 2019-09-04 13:18:25 -07:00
Chris LaFreniere
acc85c2c23 Notebooks: Fix double-click to edit with source as array (#7027)
* Fix dbl click to edit with source as array

* Fix equality check
2019-09-03 13:47:59 -07:00
3077 changed files with 134726 additions and 134970 deletions

View File

@@ -1,34 +1,49 @@
{
perform: true,
perform: false,
alwaysRequireAssignee: false,
labelsRequiringAssignee: [],
autoAssignees: {
Area - Acquisition: [],
Area - Azure: [],
Area - Backup\Restore: [],
Area - Charting\Insights: [],
Area - Connection: [],
Area - DacFX: [],
Area - Dashboard: [],
Area - Data Explorer: [],
Area - Edit Data: [],
Area - Extensibility: [],
Area - External Table: [],
Area - Fundamentals: [],
Area - Language Service: [],
Area - Localization: [],
Area - Notebooks: [],
Area - Performance: [],
Area - Query Editor: [ anthonydresser ],
Area - Query Plan: [],
Area - Reliability: [],
Area - Resource Deployment: [],
Area - Schema Compare: [],
Area - Shell: [],
Area - SQL Agent: [],
Area - SQL Import: [],
Area - SQL Profiler: [],
Area - SQL 2019: [],
Area - SSMS Integration: []
accessibility: [],
acquisition: [],
agent: [],
azure: [],
backup: [],
bcdr: [],
'chart viewer': [],
connection: [],
dacfx: [],
dashboard: [],
'data explorer': [],
documentation: [],
'edit data': [],
export: [],
extensibility: [],
extensionManager: [],
globalization: [],
grid: [],
import: [],
insights: [],
intellisense: [],
localization: [],
'managed instance': [],
notebooks: [],
'object explorer': [],
performance: [],
profiler: [],
'query editor': [],
'query execution': [],
reliability: [],
restore: [],
scripting: [],
'server group': [],
settings: [],
setup: [],
shell: [],
showplan: [],
snippet: [],
sql2019Preview: [],
sqldw: [],
supportability: [],
ux: []
}
}

View File

@@ -1,9 +0,0 @@
<!-- Thank you for submitting a Pull Request. Please:
* Read our Pull Request guidelines:
https://github.com/Microsoft/azuredatastudio/wiki/How-to-Contribute#pull-requests.
* Associate an issue with the Pull Request.
* Ensure that the code is up-to-date with the `master` branch.
* Include a description of the proposed changes and how to test them.
-->
This PR fixes #

View File

@@ -1,118 +0,0 @@
name: CI
on:
push:
branches:
- master
- release/*
pull_request:
branches:
- master
- release/*
jobs:
linux:
runs-on: ubuntu-latest
env:
CHILD_CONCURRENCY: "1"
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@v1
# TODO: rename azure-pipelines/linux/xvfb.init to github-actions
- run: |
sudo apt-get update
sudo apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0 libkrb5-dev # {{SQL CARBON EDIT}} add kerberos dep
sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
sudo chmod +x /etc/init.d/xvfb
sudo update-rc.d xvfb defaults
sudo service xvfb start
name: Setup Build Environment
- uses: actions/setup-node@v1
with:
node-version: 10
# TODO: cache node modules
- run: yarn --frozen-lockfile
name: Install Dependencies
- run: yarn electron x64
name: Download Electron
- run: yarn gulp hygiene --skip-tslint
name: Run Hygiene Checks
- run: yarn gulp tslint
name: Run TSLint Checks
- run: yarn strict-null-check # {{SQL CARBON EDIT}} add step
name: Run Strict Null Check
# - run: yarn monaco-compile-check {{SQL CARBON EDIT}} remove step
# name: Run Monaco Editor Checks
- run: yarn compile
name: Compile Sources
# - run: yarn download-builtin-extensions {{SQL CARBON EDIT}} remove step
# name: Download Built-in Extensions
- run: DISPLAY=:10 ./scripts/test.sh --tfs "Unit Tests"
name: Run Unit Tests
# - run: DISPLAY=:10 ./scripts/test-integration.sh --tfs "Integration Tests" {{SQL CARBON EDIT}} remove step
# name: Run Integration Tests
windows:
runs-on: windows-2016
env:
CHILD_CONCURRENCY: "1"
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@v1
- uses: actions/setup-node@v1
with:
node-version: 10
- uses: actions/setup-python@v1
with:
python-version: '2.x'
- run: yarn --frozen-lockfile
name: Install Dependencies
- run: yarn electron
name: Download Electron
- run: yarn gulp hygiene --skip-tslint
name: Run Hygiene Checks
- run: yarn gulp tslint
name: Run TSLint Checks
- run: yarn strict-null-check # {{SQL CARBON EDIT}} add step
name: Run Strict Null Check
# - run: yarn monaco-compile-check {{SQL CARBON EDIT}} remove step
# name: Run Monaco Editor Checks
- run: yarn compile
name: Compile Sources
# - run: yarn download-builtin-extensions {{SQL CARBON EDIT}} remove step
# name: Download Built-in Extensions
- run: .\scripts\test.bat --tfs "Unit Tests"
name: Run Unit Tests
# - run: .\scripts\test-integration.bat --tfs "Integration Tests" {{SQL CARBON EDIT}} remove step
# name: Run Integration Tests
darwin:
runs-on: macos-latest
env:
CHILD_CONCURRENCY: "1"
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@v1
- uses: actions/setup-node@v1
with:
node-version: 10
- run: yarn --frozen-lockfile
name: Install Dependencies
- run: yarn electron x64
name: Download Electron
- run: yarn gulp hygiene --skip-tslint
name: Run Hygiene Checks
- run: yarn gulp tslint
name: Run TSLint Checks
- run: yarn strict-null-check # {{SQL CARBON EDIT}} add step
name: Run Strict Null Check
# - run: yarn monaco-compile-check {{SQL CARBON EDIT}} remove step
# name: Run Monaco Editor Checks
- run: yarn compile
name: Compile Sources
# - run: yarn download-builtin-extensions {{SQL CARBON EDIT}} remove step
# name: Download Built-in Extensions
- run: ./scripts/test.sh --tfs "Unit Tests"
name: Run Unit Tests
# - run: ./scripts/test-integration.sh --tfs "Integration Tests" {{SQL CARBON EDIT}} remove step
# name: Run Integration Tests

View File

@@ -1,13 +0,0 @@
name: TSLint Enforcement
on: [pull_request]
jobs:
job:
runs-on: ubuntu-latest
timeout-minutes: 5
steps:
- uses: actions/checkout@v1
- name: TSLint
uses: aaomidi/gh-action-tslint@master
with:
token: ${{ secrets.GITHUB_TOKEN }}
tslint_config: 'tslint-sql.json'

1
.gitignore vendored
View File

@@ -30,4 +30,3 @@ coverage/
test_data/
test-results/
yarn-error.log
*.vsix

View File

@@ -1,33 +0,0 @@
/**
* @name No floating promises
* @kind problem
* @problem.severity error
* @id js/experimental/floating-promise
*/
import javascript
private predicate isEscapingPromise(PromiseDefinition promise) {
exists (DataFlow::Node escape | promise.flowsTo(escape) |
escape = any(DataFlow::InvokeNode invk).getAnArgument()
or
escape = any(DataFlow::FunctionNode fun).getAReturn()
or
escape = any(ThrowStmt t).getExpr().flow()
or
escape = any(GlobalVariable v).getAnAssignedExpr().flow()
or
escape = any(DataFlow::PropWrite write).getRhs()
or
exists(WithStmt with, Assignment assign |
with.mayAffect(assign.getLhs()) and
assign.getRhs().flow() = escape
)
)
}
from PromiseDefinition promise
where
not exists(promise.getAMethodCall(any(string m | m = "then" or m = "catch" or m = "finally"))) and
not exists (AwaitExpr e | promise.flowsTo(e.getOperand().flow())) and
not isEscapingPromise(promise)
select promise, "This promise appears to be a floating promise"

View File

@@ -1,6 +0,0 @@
{
"useTabs": true,
"printWidth": 120,
"semi": true,
"singleQuote": true
}

75
.vscode/launch.json vendored
View File

@@ -16,7 +16,6 @@
"request": "attach",
"name": "Attach to Extension Host",
"port": 5870,
"timeout": 30000,
"restart": true,
"outFiles": [
"${workspaceFolder}/out/**/*.js"
@@ -68,15 +67,15 @@
"name": "Launch azuredatastudio",
"windows": {
"runtimeExecutable": "${workspaceFolder}/scripts/sql.bat",
"timeout": 45000
"timeout": 20000
},
"osx": {
"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh",
"timeout": 45000
"timeout": 20000
},
"linux": {
"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh",
"timeout": 45000
"timeout": 20000
},
"env": {
"VSCODE_EXTHOST_WILL_SEND_SOCKET": null
@@ -128,33 +127,6 @@
"webRoot": "${workspaceFolder}",
"timeout": 45000
},
{
"type": "chrome",
"request": "launch",
"name": "Launch ADS (Web) (TBD)",
"runtimeExecutable": "yarn",
"runtimeArgs": [
"web"
],
},
{
"type": "chrome",
"request": "launch",
"name": "Launch ADS (Web, Chrome) (TBD)",
"url": "http://localhost:8080",
"preLaunchTask": "Run web"
},
{
"type": "node",
"request": "launch",
"name": "Git Unit Tests",
"program": "${workspaceFolder}/extensions/git/node_modules/mocha/bin/_mocha",
"stopOnEntry": false,
"cwd": "${workspaceFolder}/extensions/git",
"outFiles": [
"${workspaceFolder}/extensions/git/out/**/*.js"
]
},
{
"name": "Launch Built-in Extension",
"type": "extensionHost",
@@ -193,10 +165,7 @@
"cwd": "${workspaceFolder}",
"outFiles": [
"${workspaceFolder}/out/**/*.js"
],
"env": {
"MOCHA_COLORS": "true"
}
]
},
{
"type": "chrome",
@@ -214,22 +183,6 @@
"webRoot": "${workspaceFolder}",
"timeout": 45000
},
{
"type": "chrome",
"request": "launch",
"name": "Run Extension Integration Tests",
"windows": {
"runtimeExecutable": "${workspaceFolder}/scripts/sql-test-integration.bat"
},
"osx": {
"runtimeExecutable": "${workspaceFolder}/scripts/sql-test-integration.sh"
},
"linux": {
"runtimeExecutable": "${workspaceFolder}/scripts/sql-test-integration.sh"
},
"webRoot": "${workspaceFolder}",
"timeout": 45000
},
],
"compounds": [
{
@@ -246,13 +199,6 @@
"Run Extension Unit Tests"
]
},
{
"name": "Debug Extension Integration Tests",
"configurations": [
"Attach to Extension Host",
"Run Extension Integration Tests"
]
},
{
"name": "Debug azuredatastudio Main and Renderer",
"configurations": [
@@ -261,25 +207,18 @@
]
},
{
"name": "Debug Renderer and search processes",
"name": "Search and Renderer processes",
"configurations": [
"Launch azuredatastudio",
"Attach to Search Process"
]
},
{
"name": "Debug Renderer and Extension Host processes",
"name": "Renderer and Extension Host processes",
"configurations": [
"Launch azuredatastudio",
"Attach to Extension Host"
]
},
{
"name": "Attach Renderer and Extension Host",
"configurations": [
"Attach to azuredatastudio",
"Attach to Extension Host"
]
}
]
}
}

View File

@@ -60,6 +60,5 @@
"remote.extensionKind": {
"msjsdiag.debugger-for-chrome": "workspace"
},
"gulp.autoDetect": "off",
"files.insertFinalNewline": true
}
}

31
.vscode/tasks.json vendored
View File

@@ -5,10 +5,7 @@
"type": "npm",
"script": "watch",
"label": "Build VS Code",
"group": {
"kind": "build",
"isDefault": true
},
"group": "build",
"isBackground": true,
"presentation": {
"reveal": "never"
@@ -48,7 +45,7 @@
{
"type": "npm",
"script": "strict-null-check-watch",
"label": "TS - Strict Null Checks",
"label": "TS - Strict Null Cheks",
"isBackground": true,
"presentation": {
"reveal": "never"
@@ -90,8 +87,8 @@
"problemMatcher": []
},
{
"type": "npm",
"script": "electron",
"type": "gulp",
"task": "electron",
"label": "Download electron"
},
{
@@ -99,24 +96,6 @@
"task": "hygiene",
"problemMatcher": []
},
{
"type": "shell",
"command": "yarn web -- --no-launch",
"label": "Run web",
"isBackground": true,
// This section to make error go away when launching the debug config
"problemMatcher": {
"pattern": {
"regexp": ""
},
"background": {
"beginsPattern": ".*node .*",
"endsPattern": "Web UI available at .*"
}
},
"presentation": {
"reveal": "never"
}
},
]
}

View File

@@ -1,3 +1,3 @@
disturl "https://atom.io/download/electron"
target "6.0.12"
target "4.2.9"
runtime "electron"

View File

@@ -1,33 +1,5 @@
# Change Log
## Version 1.12.2
* Release date: October 11, 2019
* Release status: General Availability
* Hotfix release (1.12.2): `Disable automatically starting the EH in inspect mode` https://github.com/microsoft/azuredatastudio/commit/c9bef82ace6c67190d0e83820011a2bbd1f793c1
## Version 1.12.1
* Release date: October 7, 2019
* Release status: General Availability
* Hotfix release: `Notebooks: Ensure quotes and backslashes are escaped properly in text editor model` https://github.com/microsoft/azuredatastudio/pull/7540
## Version 1.12.0
* Release date: October 2, 2019
* Release status: General Availability
## What's new in this version
* Announcing the Query History panel
* Improved Query Results Grid copy selection support
* TempDB page added to Server Reports extension
* PowerShell extension update
* Resolved [bugs and issues](https://github.com/microsoft/azuredatastudio/milestone/42?closed=1).
## Version 1.11.0
* Release date: September 10, 2019
* Release status: General Availability
## What's new in this version
* Resolved [bugs and issues](https://github.com/microsoft/azuredatastudio/milestone/41?closed=1).
## Version 1.10.0
* Release date: August 14, 2019
* Release status: General Availability
@@ -225,7 +197,7 @@ We would like to thank all our users who raised issues, and in particular the fo
## What's new in this version
* Announcing the SQL Server 2019 Preview extension.
* Support for SQL Server 2019 preview features including Big Data Cluster support.
* Support for SQL Server 2019 preview features including big data cluster support.
* Azure Data Studio Notebooks
* The Azure Resource Explorer viewlet lets you browse data-related endpoints for your Azure accounts and create connections to them in Object Explorer. In this release Azure SQL Databases and servers are supported.
* SQL Server Polybase Create External Table Wizard

View File

@@ -2,7 +2,6 @@
[![Join the chat at https://gitter.im/Microsoft/sqlopsstudio](https://badges.gitter.im/Microsoft/sqlopsstudio.svg)](https://gitter.im/Microsoft/sqlopsstudio?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
[![Build Status](https://dev.azure.com/azuredatastudio/azuredatastudio/_apis/build/status/Azure%20Data%20Studio%20CI?branchName=master)](https://dev.azure.com/azuredatastudio/azuredatastudio/_build/latest?definitionId=4&branchName=master)
[![Twitter Follow](https://img.shields.io/twitter/follow/azuredatastudio?style=social)](https://twitter.com/azuredatastudio)
Azure Data Studio is a data management tool that enables you to work with SQL Server, Azure SQL DB and SQL DW from Windows, macOS and Linux.
@@ -10,13 +9,13 @@ Azure Data Studio is a data management tool that enables you to work with SQL Se
Platform | Link
-- | --
Windows User Installer | https://go.microsoft.com/fwlink/?linkid=2105135
Windows System Installer | https://go.microsoft.com/fwlink/?linkid=2105134
Windows ZIP | https://go.microsoft.com/fwlink/?linkid=2104938
macOS ZIP | https://go.microsoft.com/fwlink/?linkid=2105133
Linux TAR.GZ | https://go.microsoft.com/fwlink/?linkid=2105132
Linux RPM | https://go.microsoft.com/fwlink/?linkid=2104937
Linux DEB | https://go.microsoft.com/fwlink/?linkid=2105131
Windows User Installer | https://go.microsoft.com/fwlink/?linkid=2100710
Windows System Installer | https://go.microsoft.com/fwlink/?linkid=2100711
Windows ZIP | https://go.microsoft.com/fwlink/?linkid=2100712
macOS ZIP | https://go.microsoft.com/fwlink/?linkid=2100809
Linux TAR.GZ | https://go.microsoft.com/fwlink/?linkid=2100714
Linux RPM | https://go.microsoft.com/fwlink/?linkid=2100810
Linux DEB | https://go.microsoft.com/fwlink/?linkid=2100672
Go to our [download page](https://aka.ms/azuredatastudio) for more specific instructions.
@@ -69,9 +68,6 @@ The [Microsoft Enterprise and Developer Privacy Statement](https://privacy.micro
## Contributions and "Thank You"
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
* dzsquared for `fix(snippets): ads parenthesis to sqlcreateindex snippet #7020`
* devmattrick for `Update row count as updates are received #6642`
* mottykohn for `In Message panel onclick scroll to line #6417`
* Stevoni for `Corrected Keyboard Shortcut Execution Issue #5480`
* yamatoya for `fix the format #4899`
* GeoffYoung for `Fix sqlDropColumn description #4422`

View File

@@ -36,7 +36,6 @@ expressly granted herein, whether by implication, estoppel or otherwise.
jquery-ui: https://github.com/jquery/jquery-ui
jquery.event.drag: https://github.com/devongovett/jquery.event.drag
jschardet: https://github.com/aadsm/jschardet
jupyter-powershell: https://github.com/vors/jupyter-powershell
JupyterLab: https://github.com/jupyterlab/jupyterlab
make-error: https://github.com/JsCommunity/make-error
minimist: https://github.com/substack/minimist
@@ -1176,35 +1175,7 @@ That's all there is to it!
=========================================
END OF jschardet NOTICES AND INFORMATION
%% jupyter-powershell NOTICES AND INFORMATION BEGIN HERE
=========================================
The MIT License (MIT)
Copyright (c) 2016 Sergei Vorobev
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
=========================================
END OF jupyter-powershell NOTICES AND INFORMATION
%% JupyterLab NOTICES AND INFORMATION BEGIN HERE
=========================================
Copyright (c) 2015 Project Jupyter Contributors
All rights reserved.

View File

@@ -0,0 +1,84 @@
steps:
- script: |
export CXX="g++-4.9" CC="gcc-4.9" DISPLAY=:10
sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
sudo chmod +x /etc/init.d/xvfb
sudo update-rc.d xvfb defaults
sudo service xvfb start
sudo apt-get install -y libkrb5-dev
# sh -e /etc/init.d/xvfb start
# sleep 3
displayName: "Linux preinstall"
condition: eq(variables['Agent.OS'], 'Linux')
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: ".yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
vstsFeed: "$(build-cache)"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
inputs:
versionSpec: "1.10.1"
- script: |
yarn --frozen-lockfile
displayName: Install Dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
env:
GITHUB_TOKEN: $(GITHUB_TOKEN)
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: ".yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
vstsFeed: "$(build-cache)"
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- script: |
yarn gulp electron-x64
displayName: Download Electron
env:
GITHUB_TOKEN: $(GITHUB_TOKEN)
- script: |
yarn gulp hygiene
displayName: Run Hygiene Checks
- script: |
yarn tslint
displayName: "Run TSLint"
- script: |
yarn strict-null-check
displayName: "Run Strict Null Check"
- script: |
yarn compile
displayName: "Compile"
- script: |
DISPLAY=:10 ./scripts/test.sh --reporter mocha-junit-reporter
displayName: "Tests"
condition: and(succeeded(), eq(variables['Agent.OS'], 'Linux'))
- script: |
DISPLAY=:10 ./scripts/test.sh --reporter mocha-junit-reporter --coverage
displayName: "Tests"
condition: and(succeeded(), ne(variables['Agent.OS'], 'Linux'))
- task: PublishTestResults@2
inputs:
testResultsFiles: "**/test-results.xml"
condition: succeededOrFailed()
- task: PublishCodeCoverageResults@1
inputs:
codeCoverageTool: "cobertura"
summaryFileLocation: $(System.DefaultWorkingDirectory)/.build/coverage/cobertura-coverage.xml
reportDirectory: $(System.DefaultWorkingDirectory)/.build/coverage/lcov-reports
condition: ne(variables['Agent.OS'], 'Linux')

View File

@@ -0,0 +1,65 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: ".yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
vstsFeed: "$(build-cache)"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
inputs:
versionSpec: "1.10.1"
- script: |
yarn --frozen-lockfile
displayName: Install Dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
env:
GITHUB_TOKEN: $(GITHUB_TOKEN)
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: ".yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
vstsFeed: "$(build-cache)"
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- script: |
yarn gulp electron-x64
displayName: "Electron"
env:
GITHUB_TOKEN: $(GITHUB_TOKEN)
- script: |
yarn gulp hygiene
displayName: Run Hygiene Checks
- script: |
yarn tslint
displayName: "Run TSLint"
- script: |
yarn strict-null-check
displayName: "Run Strict Null Check"
- script: |
yarn compile
displayName: "Compile"
- script: |
.\scripts\test.bat --reporter mocha-junit-reporter --coverage
displayName: "Test"
- task: PublishTestResults@2
inputs:
testResultsFiles: "test-results.xml"
condition: succeededOrFailed()
- task: PublishCodeCoverageResults@1
inputs:
codeCoverageTool: "cobertura"
summaryFileLocation: $(System.DefaultWorkingDirectory)\.build\coverage\cobertura-coverage.xml
reportDirectory: $(System.DefaultWorkingDirectory)\.build\coverage\lcov-report

View File

@@ -1,18 +1,22 @@
trigger:
- master
- release/*
jobs:
- job: Windows
pool:
vmImage: VS2017-Win2016
steps:
- template: build/azure-pipelines/win32/continuous-build-win32.yml
- job: Windows
pool:
vmImage: VS2017-Win2016
steps:
- template: azure-pipelines-windows.yml
- job: Linux
pool:
vmImage: 'Ubuntu-16.04'
steps:
- template: build/azure-pipelines/linux/continuous-build-linux.yml
- job: Linux
pool:
vmImage: "Ubuntu-16.04"
steps:
- template: azure-pipelines-linux-mac.yml
- job: macOS
pool:
vmImage: macOS 10.13
steps:
- template: build/azure-pipelines/darwin/continuous-build-darwin.yml
- job: macOS
pool:
vmImage: macOS 10.13
steps:
- template: azure-pipelines-linux-mac.yml

View File

@@ -1 +1 @@
2019-08-30T20:24:23.714Z
2019-07-11T05:47:05.444Z

View File

@@ -1,9 +0,0 @@
#!/usr/bin/env bash
set -e
REPO="$(pwd)"
# Publish webview contents
PACKAGEJSON="$REPO/package.json"
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")
node build/azure-pipelines/common/publish-webview.js "$REPO/src/vs/workbench/contrib/webview/browser/pre/"

View File

@@ -1,87 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as azure from 'azure-storage';
import * as mime from 'mime';
import * as minimist from 'minimist';
import { basename, join } from 'path';
const fileNames = [
'fake.html',
'host.js',
'index.html',
'main.js',
'service-worker.js'
];
async function assertContainer(blobService: azure.BlobService, container: string): Promise<void> {
await new Promise((c, e) => blobService.createContainerIfNotExists(container, { publicAccessLevel: 'blob' }, err => err ? e(err) : c()));
}
async function doesBlobExist(blobService: azure.BlobService, container: string, blobName: string): Promise<boolean | undefined> {
const existsResult = await new Promise<azure.BlobService.BlobResult>((c, e) => blobService.doesBlobExist(container, blobName, (err, r) => err ? e(err) : c(r)));
return existsResult.exists;
}
async function uploadBlob(blobService: azure.BlobService, container: string, blobName: string, file: string): Promise<void> {
const blobOptions: azure.BlobService.CreateBlockBlobRequestOptions = {
contentSettings: {
contentType: mime.lookup(file),
cacheControl: 'max-age=31536000, public'
}
};
await new Promise((c, e) => blobService.createBlockBlobFromLocalFile(container, blobName, file, blobOptions, err => err ? e(err) : c()));
}
async function publish(commit: string, files: readonly string[]): Promise<void> {
console.log('Publishing...');
console.log('Commit:', commit);
const storageAccount = process.env['AZURE_WEBVIEW_STORAGE_ACCOUNT']!;
const blobService = azure.createBlobService(storageAccount, process.env['AZURE_WEBVIEW_STORAGE_ACCESS_KEY']!)
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
await assertContainer(blobService, commit);
for (const file of files) {
const blobName = basename(file);
const blobExists = await doesBlobExist(blobService, commit, blobName);
if (blobExists) {
console.log(`Blob ${commit}, ${blobName} already exists, not publishing again.`);
continue;
}
console.log('Uploading blob to Azure storage...');
await uploadBlob(blobService, commit, blobName, file);
}
console.log('Blobs successfully uploaded.');
}
function main(): void {
const commit = process.env['BUILD_SOURCEVERSION'];
if (!commit) {
console.warn('Skipping publish due to missing BUILD_SOURCEVERSION');
return;
}
const opts = minimist(process.argv.slice(2));
const [directory] = opts._;
const files = fileNames.map(fileName => join(directory, fileName));
publish(commit, files).catch(err => {
console.error(err);
process.exit(1);
});
}
if (process.argv.length < 3) {
console.error('Usage: node publish.js <directory>');
process.exit(-1);
}
main();

View File

@@ -4,64 +4,46 @@ steps:
versionSpec: "10.15.1"
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: '$(build-cache)' # {{SQL CARBON EDIT}} update build cache
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 # {{SQL CARBON EDIT}} update version
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: '$(ArtifactFeed)'
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
versionSpec: "1.10.1"
- script: |
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
yarn --frozen-lockfile
displayName: Install Dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
env:
GITHUB_TOKEN: $(GITHUB_TOKEN) # {{SQL CARBON EDIT}} add github token
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: '$(build-cache)' # {{SQL CARBON EDIT}} update build cache
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: '$(ArtifactFeed)'
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- script: |
yarn electron x64
yarn gulp electron-x64
displayName: Download Electron
env:
GITHUB_TOKEN: $(GITHUB_TOKEN) # {{SQL CARBON EDIT}} add github token
- script: |
yarn gulp hygiene --skip-tslint
yarn gulp hygiene
displayName: Run Hygiene Checks
- script: |
yarn gulp tslint
displayName: Run TSLint Checks
- script: | # {{SQL CARBON EDIT}} add step
yarn strict-null-check
displayName: Run Strict Null Check.
- script: | # {{SQL CARBON EDIT}} add step
yarn tslint
displayName: Run TSLint (gci)
# - script: | {{SQL CARBON EDIT}} remove step
# yarn monaco-compile-check
# displayName: Run Monaco Editor Checks
yarn monaco-compile-check
displayName: Run Monaco Editor Checks
- script: |
yarn compile
displayName: Compile Sources
# - script: | {{SQL CARBON EDIT}} remove step
# yarn download-builtin-extensions
# displayName: Download Built-in Extensions
- script: |
yarn download-builtin-extensions
displayName: Download Built-in Extensions
- script: |
./scripts/test.sh --tfs "Unit Tests"
displayName: Run Unit Tests
# - script: | {{SQL CARBON EDIT}} remove step
# ./scripts/test-integration.sh --tfs "Integration Tests"
# displayName: Run Integration Tests
- script: |
./scripts/test-integration.sh --tfs "Integration Tests"
displayName: Run Integration Tests
- task: PublishTestResults@2
displayName: Publish Tests Results
inputs:
testResultsFiles: '*-results.xml'
searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
condition: succeededOrFailed()
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0 # {{SQL CARBON EDIT}} add task
displayName: 'Component Detection'
inputs:
alertWarningLevel: High
failOnAlert: true

View File

@@ -25,7 +25,7 @@ steps:
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
versionSpec: "1.10.1"
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
@@ -102,28 +102,20 @@ steps:
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
# Figure out the full absolute path of the product we just built
# including the remote server and configure the integration tests
# to run with these builds instead of running out of sources.
set -e
APP_ROOT=$(agent.builddirectory)/VSCode-darwin
APP_NAME="`ls $APP_ROOT | head -n 1`"
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME/Contents/MacOS/Electron" \
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-darwin" \
./scripts/test-integration.sh --build --tfs "Integration Tests"
displayName: Run integration tests
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
# Web Smoke Tests disabled due to https://github.com/microsoft/vscode/issues/80308
# - script: |
# set -e
# cd test/smoke
# yarn compile
# cd -
# yarn smoketest --web --headless
# continueOnError: true
# displayName: Run web smoke tests
# condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
cd test/smoke
yarn compile
cd -
yarn smoketest --web --headless
continueOnError: true
displayName: Run web smoke tests
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e

View File

@@ -1,9 +1,6 @@
pool:
vmImage: 'Ubuntu-16.04'
trigger: none
pr: none
steps:
- task: NodeTool@0
inputs:
@@ -34,3 +31,13 @@ steps:
git push origin HEAD:electron-6.0.x
displayName: Sync & Merge Exploration
trigger: none
pr: none
schedules:
- cron: "0 5 * * Mon-Fri"
displayName: Mon-Fri at 7:00
branches:
include:
- master

View File

@@ -1,39 +0,0 @@
trigger:
branches:
include: ['master']
pr: none
jobs:
- job: ExplorationMerge
pool:
vmImage: Ubuntu-16.04
steps:
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
- script: |
set -e
cat << EOF > ~/.netrc
machine mssqltools.visualstudio.com
login azuredatastudio
password $(DEVOPS_PASSWORD)
EOF
git config user.email "andresse@microsoft.com"
git config user.name "AzureDataStudio"
git remote add explore "$ADS_EXPLORE_REPO"
git fetch explore
git checkout -b merge-branch explore/master
git merge origin/master
git push explore HEAD:master
displayName: Sync & Merge Explore
env:
ADS_EXPLORE_REPO: $(ADS_EXPLORE_REPO)
DEVOPS_PASSWORD: $(DEVOPS_PASSWORD)

View File

@@ -2,7 +2,7 @@ steps:
- script: |
set -e
sudo apt-get update
sudo apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0 libkrb5-dev #{{SQL CARBON EDIT}} add kerberos dep
sudo apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0
sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
sudo chmod +x /etc/init.d/xvfb
sudo update-rc.d xvfb defaults
@@ -12,64 +12,46 @@ steps:
versionSpec: "10.15.1"
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: '$(build-cache)' # {{SQL CARBON EDIT}} update build cache
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 # {{SQL CARBON EDIT}} update version
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: '$(ArtifactFeed)'
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
versionSpec: "1.10.1"
- script: |
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
yarn --frozen-lockfile
displayName: Install Dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
env:
GITHUB_TOKEN: $(GITHUB_TOKEN) # {{SQL CARBON EDIT}} add github token
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: '$(build-cache)' # {{SQL CARBON EDIT}} update build cache
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: '$(ArtifactFeed)'
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- script: |
yarn electron x64
yarn gulp electron-x64
displayName: Download Electron
env:
GITHUB_TOKEN: $(GITHUB_TOKEN) # {{SQL CARBON EDIT}} add github token
- script: |
yarn gulp hygiene --skip-tslint
yarn gulp hygiene
displayName: Run Hygiene Checks
- script: |
yarn gulp tslint
displayName: Run TSLint Checks
- script: | # {{SQL CARBON EDIT}} add gci checks
yarn tslint
displayName: Run TSLint (gci)
- script: | # {{SQL CARBON EDIT}} add strict null check
yarn strict-null-check
displayName: Run Strict Null Check
# - script: | {{SQL CARBON EDIT}} remove monaco editor checks
# yarn monaco-compile-check
# displayName: Run Monaco Editor Checks
yarn monaco-compile-check
displayName: Run Monaco Editor Checks
- script: |
yarn compile
displayName: Compile Sources
# - script: | {{SQL CARBON EDIT}} remove step
# yarn download-builtin-extensions
# displayName: Download Built-in Extensions
- script: |
yarn download-builtin-extensions
displayName: Download Built-in Extensions
- script: |
DISPLAY=:10 ./scripts/test.sh --tfs "Unit Tests"
displayName: Run Unit Tests
# - script: | {{SQL CARBON EDIT}} remove step
# DISPLAY=:10 ./scripts/test-integration.sh --tfs "Integration Tests"
# displayName: Run Integration Tests
- script: |
DISPLAY=:10 ./scripts/test-integration.sh --tfs "Integration Tests"
displayName: Run Integration Tests
- task: PublishTestResults@2
displayName: Publish Tests Results
inputs:
testResultsFiles: '*-results.xml'
searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
condition: succeededOrFailed()
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0 # {{SQL CARBON EDIT}} add task
displayName: 'Component Detection'
inputs:
alertWarningLevel: High
failOnAlert: true

View File

@@ -25,7 +25,7 @@ steps:
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
versionSpec: "1.10.1"
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'

View File

@@ -25,7 +25,7 @@ steps:
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
versionSpec: "1.10.1"
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
@@ -105,14 +105,7 @@ steps:
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
# Figure out the full absolute path of the product we just built
# including the remote server and configure the integration tests
# to run with these builds instead of running out of sources.
set -e
APP_ROOT=$(agent.builddirectory)/VSCode-linux-x64
APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-linux-x64" \
DISPLAY=:10 ./scripts/test-integration.sh --build --tfs "Integration Tests"
# yarn smoketest -- --build "$(agent.builddirectory)/VSCode-linux-x64"
displayName: Run integration tests

View File

@@ -5,7 +5,7 @@ steps:
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
versionSpec: "1.10.1"
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
@@ -51,4 +51,4 @@ steps:
# Publish snap package
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "linux-snap-x64" package "$SNAP_FILENAME" "$VERSION" true "$SNAP_PATH"
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "linux-snap-x64" package "$SNAP_FILENAME" "$VERSION" true "$SNAP_PATH"

View File

@@ -56,7 +56,7 @@ jobs:
- template: linux/snap-build-linux.yml
- job: LinuxArmhf
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_LINUX_ARMHF'], 'true'))
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_LINUX_ARMHF'], 'true'), ne(variables['VSCODE_QUALITY'], 'stable'))
pool:
vmImage: 'Ubuntu-16.04'
variables:
@@ -78,7 +78,7 @@ jobs:
- template: linux/product-build-linux-multiarch.yml
- job: LinuxAlpine
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_LINUX_ALPINE'], 'true'))
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_LINUX_ALPINE'], 'true'), ne(variables['VSCODE_QUALITY'], 'stable'))
pool:
vmImage: 'Ubuntu-16.04'
variables:

View File

@@ -20,7 +20,7 @@ steps:
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
versionSpec: "1.10.1"
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- task: AzureKeyVault@1
@@ -87,10 +87,9 @@ steps:
- script: |
set -e
yarn gulp hygiene --skip-tslint
yarn gulp tslint
yarn gulp hygiene
yarn monaco-compile-check
displayName: Run hygiene, tslint and monaco compile checks
displayName: Run hygiene checks
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
@@ -99,13 +98,6 @@ steps:
displayName: Extract Telemetry
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- script: |
set -e
AZURE_WEBVIEW_STORAGE_ACCESS_KEY="$(vscode-webview-storage-key)" \
./build/azure-pipelines/common/publish-webview.sh
displayName: Publish Webview
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- script: |
set -e
yarn gulp compile-build

View File

@@ -0,0 +1,36 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const cp = require("child_process");
let tag = '';
try {
tag = cp
.execSync('git describe --tags `git rev-list --tags --max-count=1`')
.toString()
.trim();
if (!isValidTag(tag)) {
throw Error(`Invalid tag ${tag}`);
}
}
catch (err) {
console.error(err);
console.error('Failed to update types');
process.exit(1);
}
function isValidTag(t) {
if (t.split('.').length !== 3) {
return false;
}
const [major, minor, bug] = t.split('.');
// Only release for tags like 1.34.0
if (bug !== '0') {
return false;
}
if (parseInt(major, 10) === NaN || parseInt(minor, 10) === NaN) {
return false;
}
return true;
}

View File

@@ -13,23 +13,7 @@ steps:
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
- bash: |
TAG_VERSION=$(git describe --tags `git rev-list --tags --max-count=1`)
CHANNEL="G1C14HJ2F"
if [ "$TAG_VERSION" == "1.999.0" ]; then
MESSAGE="<!here>. Someone pushed 1.999.0 tag. Please delete it ASAP from remote and local."
curl -X POST -H "Authorization: Bearer $(SLACK_TOKEN)" \
-H 'Content-type: application/json; charset=utf-8' \
--data '{"channel":"'"$CHANNEL"'", "link_names": true, "text":"'"$MESSAGE"'"}' \
https://slack.com/api/chat.postMessage
exit 1
fi
displayName: Check 1.999.0 tag
versionSpec: "1.10.1"
- bash: |
# Install build dependencies

View File

@@ -0,0 +1,62 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const fs = require("fs");
const cp = require("child_process");
const path = require("path");
let tag = '';
try {
tag = cp
.execSync('git describe --tags `git rev-list --tags --max-count=1`')
.toString()
.trim();
const dtsUri = `https://raw.githubusercontent.com/microsoft/vscode/${tag}/src/vs/vscode.d.ts`;
const outPath = path.resolve(process.cwd(), 'DefinitelyTyped/types/vscode/index.d.ts');
cp.execSync(`curl ${dtsUri} --output ${outPath}`);
updateDTSFile(outPath, tag);
console.log(`Done updating vscode.d.ts at ${outPath}`);
}
catch (err) {
console.error(err);
console.error('Failed to update types');
process.exit(1);
}
function updateDTSFile(outPath, tag) {
const oldContent = fs.readFileSync(outPath, 'utf-8');
const newContent = getNewFileContent(oldContent, tag);
fs.writeFileSync(outPath, newContent);
}
function getNewFileContent(content, tag) {
const oldheader = [
`/*---------------------------------------------------------------------------------------------`,
` * Copyright (c) Microsoft Corporation. All rights reserved.`,
` * Licensed under the Source EULA. See License.txt in the project root for license information.`,
` *--------------------------------------------------------------------------------------------*/`
].join('\n');
return getNewFileHeader(tag) + content.slice(oldheader.length);
}
function getNewFileHeader(tag) {
const [major, minor] = tag.split('.');
const shorttag = `${major}.${minor}`;
const header = [
`// Type definitions for Visual Studio Code ${shorttag}`,
`// Project: https://github.com/microsoft/vscode`,
`// Definitions by: Visual Studio Code Team, Microsoft <https://github.com/Microsoft>`,
`// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped`,
``,
`/*---------------------------------------------------------------------------------------------`,
` * Copyright (c) Microsoft Corporation. All rights reserved.`,
` * Licensed under the Source EULA.`,
` * See https://github.com/Microsoft/vscode/blob/master/LICENSE.txt for license information.`,
` *--------------------------------------------------------------------------------------------*/`,
``,
`/**`,
` * Type Definition for Visual Studio Code ${shorttag} Extension API`,
` * See https://code.visualstudio.com/api for more information`,
` */`
].join('\n');
return header;
}

View File

@@ -5,7 +5,7 @@ steps:
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
versionSpec: "1.10.1"
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'

View File

@@ -25,7 +25,7 @@ steps:
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
versionSpec: "1.10.1"
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'

View File

@@ -2,70 +2,52 @@ steps:
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 # {{SQL CARBON EDIT}} update version
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
versionSpec: "1.10.1"
- task: UsePythonVersion@0
inputs:
versionSpec: '2.x'
addToPath: true
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: '$(build-cache)' # {{SQL CARBON EDIT}} update build cache
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: '$(ArtifactFeed)'
- powershell: |
yarn --frozen-lockfile
env:
CHILD_CONCURRENCY: "1"
GITHUB_TOKEN: $(GITHUB_TOKEN) # {{SQL CARBON EDIT}} add github token
displayName: Install Dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: '$(build-cache)' # {{SQL CARBON EDIT}} update build cache
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: '$(ArtifactFeed)'
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- powershell: |
yarn electron
env:
GITHUB_TOKEN: $(GITHUB_TOKEN) # {{SQL CARBON EDIT}} add github token
- script: |
yarn gulp hygiene --skip-tslint
yarn gulp electron
displayName: Download Electron
- powershell: |
yarn gulp hygiene
displayName: Run Hygiene Checks
- script: |
yarn gulp tslint
displayName: Run TSLint Checks
- script: | # {{SQL CARBON EDIT}} add step
yarn tslint
displayName: Run TSLint (gci)
- script: | # {{SQL CARBON EDIT}} add step
yarn strict-null-check
displayName: Run Strict Null Check
# - powershell: | {{SQL CARBON EDIT}} remove step
# yarn monaco-compile-check
# displayName: Run Monaco Editor Checks
- powershell: |
yarn monaco-compile-check
displayName: Run Monaco Editor Checks
- powershell: |
yarn compile
displayName: Compile Sources
# - powershell: | {{SQL CARBON EDIT}} remove step
# yarn download-builtin-extensions
# displayName: Download Built-in Extensions
- powershell: |
yarn download-builtin-extensions
displayName: Download Built-in Extensions
- powershell: |
.\scripts\test.bat --tfs "Unit Tests"
displayName: Run Unit Tests
# - powershell: | {{SQL CARBON EDIT}} remove step
# .\scripts\test-integration.bat --tfs "Integration Tests"
# displayName: Run Integration Tests
- powershell: |
.\scripts\test-integration.bat --tfs "Integration Tests"
displayName: Run Integration Tests
- task: PublishTestResults@2
displayName: Publish Tests Results
inputs:
testResultsFiles: '*-results.xml'
searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
condition: succeededOrFailed()
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0 # {{SQL CARBON EDIT}} add task
displayName: 'Component Detection'
inputs:
alertWarningLevel: High
failOnAlert: true

@@ -25,7 +25,7 @@ steps:
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
versionSpec: "1.10.1"
- task: UsePythonVersion@0
inputs:
@@ -107,21 +107,16 @@ steps:
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn electron $(VSCODE_ARCH) }
exec { yarn gulp "electron-$(VSCODE_ARCH)" }
exec { .\scripts\test.bat --build --tfs "Unit Tests" }
displayName: Run unit tests
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- powershell: |
# Figure out the full absolute path of the product we just built
# including the remote server and configure the integration tests
# to run with these builds instead of running out of sources.
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$AppRoot = "$(agent.builddirectory)\VSCode-win32-$(VSCODE_ARCH)"
$AppProductJson = Get-Content -Raw -Path "$AppRoot\resources\app\product.json" | ConvertFrom-Json
$AppNameShort = $AppProductJson.nameShort
exec { $env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe"; $env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\vscode-reh-win32-$(VSCODE_ARCH)"; .\scripts\test-integration.bat --build --tfs "Integration Tests" }
exec { yarn gulp "electron-$(VSCODE_ARCH)" }
exec { .\scripts\test-integration.bat --build --tfs "Integration Tests" }
displayName: Run integration tests
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

@@ -1,7 +1,7 @@
[
{
"name": "Microsoft.sqlservernotebook",
"version": "0.3.2",
"version": "0.2.1",
"repo": "https://github.com/Microsoft/azuredatastudio"
}
]

@@ -1,7 +1,2 @@
[
{
"name": "Microsoft.sqlservernotebook",
"version": "0.3.2",
"repo": "https://github.com/Microsoft/azuredatastudio"
}
]

@@ -41,7 +41,12 @@ var editorEntryPoints = [
];
var editorResources = [
'out-editor-build/vs/base/browser/ui/codiconLabel/**/*.ttf'
'out-build/vs/{base,editor}/**/*.{svg,png}',
'!out-build/vs/base/browser/ui/splitview/**/*',
'!out-build/vs/base/browser/ui/toolbar/**/*',
'!out-build/vs/base/browser/ui/octiconLabel/**/*',
'!out-build/vs/workbench/**',
'!**/test/**'
];
var BUNDLED_FILE_HEADER = [
@@ -84,6 +89,9 @@ const extractEditorSrcTask = task.define('extract-editor-src', () => {
`lib.dom.d.ts`,
`lib.webworker.importscripts.d.ts`
],
redirects: {
'vs/base/browser/ui/octiconLabel/octiconLabel': 'vs/base/browser/ui/octiconLabel/octiconLabel.mock',
},
shakeLevel: 2, // 0-Files, 1-InnerFile, 2-ClassMembers
importIgnorePattern: /(^vs\/css!)|(promise-polyfill\/polyfill)/,
destRoot: path.join(root, 'out-editor-src')

@@ -21,15 +21,10 @@ const nlsDev = require('vscode-nls-dev');
const root = path.dirname(__dirname);
const commit = util.getVersion(root);
const plumber = require('gulp-plumber');
const _ = require('underscore');
const ext = require('./lib/extensions');
const extensionsPath = path.join(path.dirname(__dirname), 'extensions');
// {{SQL CARBON EDIT}}
const sqlLocalizedExtensions = [
'dacpac',
'schema-compare'
];
// {{SQL CARBON EDIT}}
const compilations = glob.sync('**/tsconfig.json', {
cwd: extensionsPath,
@@ -42,38 +37,38 @@ const tasks = compilations.map(function (tsconfigFile) {
const absolutePath = path.join(extensionsPath, tsconfigFile);
const relativeDirname = path.dirname(tsconfigFile);
const overrideOptions = {};
overrideOptions.sourceMap = true;
const tsconfig = require(absolutePath);
const tsOptions = _.assign({}, tsconfig.extends ? require(path.join(extensionsPath, relativeDirname, tsconfig.extends)).compilerOptions : {}, tsconfig.compilerOptions);
tsOptions.verbose = false;
tsOptions.sourceMap = true;
const name = relativeDirname.replace(/\//g, '-');
const root = path.join('extensions', relativeDirname);
const srcBase = path.join(root, 'src');
const src = path.join(srcBase, '**');
const srcOpts = { cwd: path.dirname(__dirname), base: srcBase };
const out = path.join(root, 'out');
const baseUrl = getBaseUrl(out);
let headerId, headerOut;
let index = relativeDirname.indexOf('/');
if (index < 0) {
headerId = 'microsoft.' + relativeDirname; // {{SQL CARBON EDIT}}
headerId = 'vscode.' + relativeDirname;
headerOut = 'out';
} else {
headerId = 'microsoft.' + relativeDirname.substr(0, index); // {{SQL CARBON EDIT}}
headerId = 'vscode.' + relativeDirname.substr(0, index);
headerOut = relativeDirname.substr(index + 1) + '/out';
}
function createPipeline(build, emitError) {
const reporter = createReporter();
overrideOptions.inlineSources = Boolean(build);
overrideOptions.base = path.dirname(absolutePath);
tsOptions.inlineSources = !!build;
tsOptions.base = path.dirname(absolutePath);
const compilation = tsb.create(absolutePath, overrideOptions, false, err => reporter(err.toString()));
const compilation = tsb.create(tsOptions, null, null, err => reporter(err.toString()));
const pipeline = function () {
return function () {
const input = es.through();
const tsFilter = filter(['**/*.ts', '!**/lib/lib*.d.ts', '!**/node_modules/**'], { restore: true });
const output = input
@@ -103,19 +98,15 @@ const tasks = compilations.map(function (tsconfigFile) {
return es.duplex(input, output);
};
// add src-stream for project files
pipeline.tsProjectSrc = () => {
return compilation.src(srcOpts);
};
return pipeline;
}
const srcOpts = { cwd: path.dirname(__dirname), base: srcBase };
const cleanTask = task.define(`clean-extension-${name}`, util.rimraf(out));
const compileTask = task.define(`compile-extension:${name}`, task.series(cleanTask, () => {
const pipeline = createPipeline(sqlLocalizedExtensions.includes(name), true); // {{SQL CARBON EDIT}}
const input = pipeline.tsProjectSrc();
const pipeline = createPipeline(false, true);
const input = gulp.src(src, srcOpts);
return input
.pipe(pipeline())
@@ -124,8 +115,8 @@ const tasks = compilations.map(function (tsconfigFile) {
const watchTask = task.define(`watch-extension:${name}`, task.series(cleanTask, () => {
const pipeline = createPipeline(false);
const input = pipeline.tsProjectSrc();
const watchInput = watcher(src, { ...srcOpts, ...{ readDelay: 200 } });
const input = gulp.src(src, srcOpts);
const watchInput = watcher(src, srcOpts);
return watchInput
.pipe(util.incremental(pipeline, input))
@@ -134,7 +125,7 @@ const tasks = compilations.map(function (tsconfigFile) {
const compileBuildTask = task.define(`compile-build-extension-${name}`, task.series(cleanTask, () => {
const pipeline = createPipeline(true, true);
const input = pipeline.tsProjectSrc();
const input = gulp.src(src, srcOpts);
return input
.pipe(pipeline())
@@ -165,8 +156,8 @@ const cleanExtensionsBuildTask = task.define('clean-extensions-build', util.rimr
const compileExtensionsBuildTask = task.define('compile-extensions-build', task.series(
cleanExtensionsBuildTask,
task.define('bundle-extensions-build', () => ext.packageLocalExtensionsStream().pipe(gulp.dest('.build'))),
task.define('bundle-marketplace-extensions-build', () => ext.packageMarketplaceExtensionsStream().pipe(gulp.dest('.build')))
task.define('bundle-marketplace-extensions-build', () => ext.packageMarketplaceExtensionsStream().pipe(gulp.dest('.build'))),
));
gulp.task(compileExtensionsBuildTask);
exports.compileExtensionsBuildTask = compileExtensionsBuildTask;
exports.compileExtensionsBuildTask = compileExtensionsBuildTask;

@@ -17,7 +17,6 @@ const vfs = require('vinyl-fs');
const path = require('path');
const fs = require('fs');
const pall = require('p-all');
const task = require('./lib/task');
/**
* Hygiene works by creating cascading subsets of all our files and
@@ -56,10 +55,8 @@ const indentationFilter = [
'!src/vs/base/node/terminateProcess.sh',
'!src/vs/base/node/cpuUsage.sh',
'!test/assert.js',
'!build/testSetup.js',
// except specific folders
'!test/automation/out/**',
'!test/smoke/out/**',
'!extensions/vscode-api-tests/testWorkspace/**',
'!extensions/vscode-api-tests/testWorkspace2/**',
@@ -73,7 +70,7 @@ const indentationFilter = [
'!**/yarn-error.log',
// except multiple specific folders
'!**/codicon/**',
'!**/octicons/**',
'!**/fixtures/**',
'!**/lib/**',
'!extensions/**/out/**',
@@ -103,8 +100,7 @@ const indentationFilter = [
'!extensions/admin-tool-ext-win/ssmsmin/**',
'!extensions/resource-deployment/notebooks/**',
'!extensions/mssql/notebooks/**',
'!extensions/big-data-cluster/src/bigDataCluster/controller/apiGenerated.ts',
'!extensions/big-data-cluster/src/bigDataCluster/controller/clusterApiGenerated2.ts'
'!extensions/big-data-cluster/src/bigDataCluster/controller/apiGenerated.ts'
];
const copyrightFilter = [
@@ -135,10 +131,9 @@ const copyrightFilter = [
'!extensions/html-language-features/server/src/modes/typescript/*',
'!extensions/*/server/bin/*',
'!src/vs/editor/test/node/classification/typescript-test.ts',
'!scripts/code-web.js',
// {{SQL CARBON EDIT}}
'!extensions/notebook/src/intellisense/text.ts',
'!extensions/mssql/src/hdfs/webhdfs.ts',
'!extensions/mssql/src/objectExplorerNodeProvider/webhdfs.ts',
'!src/sql/workbench/parts/notebook/browser/outputs/tableRenderers.ts',
'!src/sql/workbench/parts/notebook/common/models/url.ts',
'!src/sql/workbench/parts/notebook/browser/models/renderMimeInterfaces.ts',
@@ -193,43 +188,25 @@ const tslintBaseFilter = [
'!extensions/vscode-api-tests/testWorkspace2/**',
'!extensions/**/*.test.ts',
'!extensions/html-language-features/server/lib/jquery.d.ts',
'!extensions/big-data-cluster/src/bigDataCluster/controller/apiGenerated.ts', // {{SQL CARBON EDIT}},
'!extensions/big-data-cluster/src/bigDataCluster/controller/tokenApiGenerated.ts' // {{SQL CARBON EDIT}},
// {{SQL CARBON EDIT}}
'!extensions/big-data-cluster/src/bigDataCluster/controller/apiGenerated.ts'
];
// {{SQL CARBON EDIT}}
const sqlFilter = [
'src/sql/**',
'extensions/**',
// Ignore VS Code extensions
'!extensions/bat/**',
'!extensions/configuration-editing/**',
'!extensions/docker/**',
'!extensions/extension-editing/**',
'!extensions/git/**',
'!extensions/git-ui/**',
'!extensions/image-preview/**',
'!extensions/insights-default/**',
'!extensions/json/**',
'!extensions/json-language-features/**',
'!extensions/markdown-basics/**',
'!extensions/markdown-language-features/**',
'!extensions/merge-conflict/**',
'!extensions/powershell/**',
'!extensions/python/**',
'!extensions/r/**',
'!extensions/theme-*/**',
'!extensions/vscode-*/**',
'!extensions/xml/**',
'!extensions/xml-language-features/**',
'!extensions/yarml/**',
const useStrictFilter = [
'src/**'
];
const sqlFilter = [
'src/sql/**'
];
// {{SQL CARBON EDIT}}
const tslintCoreFilter = [
'src/**/*.ts',
'test/**/*.ts',
'!extensions/**/*.ts',
'!test/automation/**',
'!test/smoke/**',
...tslintBaseFilter
];
@@ -238,7 +215,6 @@ const tslintExtensionsFilter = [
'extensions/**/*.ts',
'!src/**/*.ts',
'!test/**/*.ts',
'test/automation/**/*.ts',
...tslintBaseFilter
];
@@ -249,12 +225,6 @@ const tslintHygieneFilter = [
...tslintBaseFilter
];
const fileLengthFilter = filter([
'**',
'!extensions/import/*.docx',
'!extensions/admin-tool-ext-win/license/**'
], {restore: true});
const copyrightHeaderLines = [
'/*---------------------------------------------------------------------------------------------',
' * Copyright (c) Microsoft Corporation. All rights reserved.',
@@ -287,33 +257,6 @@ gulp.task('tslint', () => {
]).pipe(es.through());
});
function checkPackageJSON(actualPath) {
const actual = require(path.join(__dirname, '..', actualPath));
const rootPackageJSON = require('../package.json');
for (let depName in actual.dependencies) {
const depVersion = actual.dependencies[depName];
const rootDepVersion = rootPackageJSON.dependencies[depName];
if (!rootDepVersion) {
// missing in root is allowed
continue;
}
if (depVersion !== rootDepVersion) {
this.emit('error', `The dependency ${depName} in '${actualPath}' (${depVersion}) is different than in the root package.json (${rootDepVersion})`);
}
}
}
const checkPackageJSONTask = task.define('check-package-json', () => {
return gulp.src('package.json')
.pipe(es.through(function() {
checkPackageJSON.call(this, 'remote/package.json');
checkPackageJSON.call(this, 'remote/web/package.json');
}));
});
gulp.task(checkPackageJSONTask);
function hygiene(some) {
let errorCount = 0;
@@ -363,6 +306,23 @@ function hygiene(some) {
this.emit('data', file);
});
// {{SQL CARBON EDIT}}
// Check for unnecessary 'use strict' lines. These are automatically added by the alwaysStrict compiler option so don't need to be added manually
const useStrict = es.through(function (file) {
const lines = file.__lines;
// Only take the first 10 lines to reduce false positives- the compiler will throw an error if it's not the first non-comment line in a file
// (10 is used to account for copyright and extraneous newlines)
lines.slice(0, 10).forEach((line, i) => {
if (/\s*'use\s*strict\s*'/.test(line)) {
console.error(file.relative + '(' + (i + 1) + ',1): Unnecessary \'use strict\' - this is already added by the compiler');
errorCount++;
}
});
this.emit('data', file);
});
// {{SQL CARBON EDIT}} END
const formatting = es.map(function (file, cb) {
tsfmt.processString(file.path, file.contents.toString('utf8'), {
verify: false,
@@ -393,23 +353,6 @@ function hygiene(some) {
});
});
const filelength = es.through(function (file) {
const fileName = path.basename(file.relative);
const fileDir = path.dirname(file.relative);
//check the filename is < 50 characters (basename gets the filename with extension).
if (fileName.length > 50) {
console.error(`File name '${fileName}' under ${fileDir} is too long. Rename file to have less than 50 characters.`);
errorCount++;
}
if (file.relative.length > 150) {
console.error(`File path ${file.relative} exceeds acceptable file-length. Rename the path to have less than 150 characters.`);
errorCount++;
}
this.emit('data', file);
});
const tslintConfiguration = tslint.Configuration.findConfiguration('tslint.json', '.');
const tslintOptions = { fix: false, formatter: 'json' };
const tsLinter = new tslint.Linter(tslintOptions);
@@ -428,22 +371,20 @@ function hygiene(some) {
input = some;
}
// {{SQL CARBON EDIT}} Linting for SQL
const tslintSqlConfiguration = tslint.Configuration.findConfiguration('tslint-sql.json', '.');
const tslintSqlOptions = { fix: false, formatter: 'json' };
const sqlTsLinter = new tslint.Linter(tslintSqlOptions);
const sqlTsl = es.through(function (file) { //TODO restore
const sqlTsl = es.through(function (file) {
const contents = file.contents.toString('utf8');
sqlTsLinter.lint(file.relative, contents, tslintSqlConfiguration.results);
this.emit('data', file);
});
const productJsonFilter = filter('product.json', { restore: true });
const result = input
.pipe(fileLengthFilter)
.pipe(filelength)
.pipe(fileLengthFilter.restore)
.pipe(filter(f => !f.stat.isDirectory()))
.pipe(productJsonFilter)
.pipe(process.env['BUILD_SOURCEVERSION'] ? es.through() : productJson)
@@ -453,16 +394,15 @@ function hygiene(some) {
.pipe(filter(copyrightFilter))
.pipe(copyrights);
let typescript = result
const typescript = result
.pipe(filter(tslintHygieneFilter))
.pipe(formatting);
if (!process.argv.some(arg => arg === '--skip-tslint')) {
typescript = typescript.pipe(tsl);
typescript = typescript
.pipe(filter(sqlFilter)) // {{SQL CARBON EDIT}}
.pipe(sqlTsl);
}
.pipe(formatting)
.pipe(tsl)
// {{SQL CARBON EDIT}}
.pipe(filter(useStrictFilter))
.pipe(useStrict)
.pipe(filter(sqlFilter))
.pipe(sqlTsl);
const javascript = result
.pipe(filter(eslintFilter))
@@ -548,7 +488,7 @@ function createGitIndexVinyls(paths) {
.then(r => r.filter(p => !!p));
}
gulp.task('hygiene', task.series(checkPackageJSONTask, () => hygiene()));
gulp.task('hygiene', () => hygiene());
// this allows us to run hygiene as a git pre-commit hook
if (require.main === module) {

@@ -4,6 +4,7 @@
*--------------------------------------------------------------------------------------------*/
'use strict';
const gulp = require('gulp');
const util = require('./lib/util');
const tsfmt = require('typescript-formatter');
@@ -21,78 +22,72 @@ gulp.task('fmt', () => formatStagedFiles());
const formatFiles = (some) => {
const formatting = es.map(function (file, cb) {
tsfmt.processString(file.path, file.contents.toString('utf8'), {
replace: true,
tsfmt: true,
tslint: true,
tsconfig: true
// verbose: true
}).then(result => {
console.info('ran formatting on file ' + file.path + ' result: ' + result.message);
if (result.error) {
console.error(result.message);
}
cb(null, file);
tsfmt.processString(file.path, file.contents.toString('utf8'), {
replace: true,
tsfmt: true,
tslint: true,
tsconfig: true
// verbose: true
}).then(result => {
console.info('ran formatting on file ' + file.path + ' result: ' + result.message);
if (result.error) {
console.error(result.message);
}
cb(null, file);
}, err => {
cb(err);
}, err => {
cb(err);
});
});
});
return gulp.src(some, {
base: '.'
})
.pipe(filter(f => !f.stat.isDirectory()))
.pipe(formatting);
return gulp.src(some, { base: '.' })
.pipe(filter(f => !f.stat.isDirectory()))
.pipe(formatting);
};
const formatStagedFiles = () => {
const cp = require('child_process');
cp.exec('git diff --name-only', {
maxBuffer: 2000 * 1024
}, (err, out) => {
if (err) {
console.error();
console.error(err);
process.exit(1);
}
cp.exec('git diff --name-only', { maxBuffer: 2000 * 1024 }, (err, out) => {
if (err) {
console.error();
console.error(err);
process.exit(1);
}
const some = out
.split(/\r?\n/)
.filter(l => !!l)
.filter(l => l.match(/.*.ts$/i));
const some = out
.split(/\r?\n/)
.filter(l => !!l)
.filter(l => l.match(/.*.ts$/i));
formatFiles(some).on('error', err => {
console.error();
console.error(err);
process.exit(1);
formatFiles(some).on('error', err => {
console.error();
console.error(err);
process.exit(1);
});
});
});
cp.exec('git diff --cached --name-only', {
maxBuffer: 2000 * 1024
}, (err, out) => {
if (err) {
console.error();
console.error(err);
process.exit(1);
}
cp.exec('git diff --cached --name-only', { maxBuffer: 2000 * 1024 }, (err, out) => {
if (err) {
console.error();
console.error(err);
process.exit(1);
}
const some = out
.split(/\r?\n/)
.filter(l => !!l)
.filter(l => l.match(/.*.ts$/i));
const some = out
.split(/\r?\n/)
.filter(l => !!l)
.filter(l => l.match(/.*.ts$/i));
formatFiles(some).on('error', err => {
console.error();
console.error(err);
process.exit(1);
formatFiles(some).on('error', err => {
console.error();
console.error(err);
process.exit(1);
});
});
});
};
function installService() {
let config = require('../extensions/mssql/config.json');
let config = require('../extensions/mssql/src/config.json');
return platformInfo.getCurrent().then(p => {
let runtime = p.runtimeId;
// fix path since it won't be correct
@@ -114,7 +109,7 @@ gulp.task('install-sqltoolsservice', () => {
});
function installSsmsMin() {
const config = require('../extensions/admin-tool-ext-win/config.json');
const config = require('../extensions/admin-tool-ext-win/src/config.json');
return platformInfo.getCurrent().then(p => {
const runtime = p.runtimeId;
// fix path since it won't be correct

@@ -31,7 +31,7 @@ const crypto = require('crypto');
const i18n = require('./lib/i18n');
const ext = require('./lib/extensions'); // {{SQL CARBON EDIT}}
const deps = require('./dependencies');
const { config } = require('./lib/electron');
const getElectronVersion = require('./lib/electron').getElectronVersion;
const createAsar = require('./lib/asar').createAsar;
const { compileBuildTask } = require('./gulpfile.compile');
const { compileExtensionsBuildTask } = require('./gulpfile.extensions');
@@ -60,7 +60,8 @@ const nodeModules = [
const vscodeEntryPoints = _.flatten([
buildfile.entrypoint('vs/workbench/workbench.desktop.main'),
buildfile.base,
buildfile.workbenchDesktop,
buildfile.serviceWorker,
buildfile.workbench,
buildfile.code
]);
@@ -78,7 +79,7 @@ const vscodeResources = [
'out-build/vs/base/common/performance.js',
'out-build/vs/base/node/languagePacks.js',
'out-build/vs/base/node/{stdForkStart.js,terminateProcess.sh,cpuUsage.sh,ps.sh}',
'out-build/vs/base/browser/ui/codiconLabel/codicon/**',
'out-build/vs/base/browser/ui/octiconLabel/octicons/**',
'out-build/vs/workbench/browser/media/*-theme.css',
'out-build/vs/workbench/contrib/debug/**/*.json',
'out-build/vs/workbench/contrib/externalTerminal/**/*.scpt',
@@ -86,6 +87,7 @@ const vscodeResources = [
'out-build/vs/workbench/contrib/webview/electron-browser/pre/*.js',
'out-build/vs/**/markdown.css',
'out-build/vs/workbench/contrib/tasks/**/*.json',
'out-build/vs/workbench/contrib/welcome/walkThrough/**/*.md',
'out-build/vs/platform/files/**/*.exe',
'out-build/vs/platform/files/**/*.md',
'out-build/vs/code/electron-browser/workbench/**',
@@ -123,7 +125,6 @@ const optimizeVSCodeTask = task.define('optimize-vscode', task.series(
resources: vscodeResources,
loaderConfig: common.loaderConfig(nodeModules),
out: 'out-vscode',
inlineAmdImages: true,
bundleInfo: undefined
})
));
@@ -143,6 +144,73 @@ const minifyVSCodeTask = task.define('minify-vscode', task.series(
));
gulp.task(minifyVSCodeTask);
// Package
// @ts-ignore JSON checking: darwinCredits is optional
const darwinCreditsTemplate = product.darwinCredits && _.template(fs.readFileSync(path.join(root, product.darwinCredits), 'utf8'));
function darwinBundleDocumentType(extensions, icon) {
return {
name: product.nameLong + ' document',
role: 'Editor',
ostypes: ["TEXT", "utxt", "TUTX", "****"],
extensions: extensions,
iconFile: icon
};
}
const config = {
version: getElectronVersion(),
productAppName: product.nameLong,
companyName: 'Microsoft Corporation',
copyright: 'Copyright (C) 2019 Microsoft. All rights reserved',
darwinIcon: 'resources/darwin/code.icns',
darwinBundleIdentifier: product.darwinBundleIdentifier,
darwinApplicationCategoryType: 'public.app-category.developer-tools',
darwinHelpBookFolder: 'VS Code HelpBook',
darwinHelpBookName: 'VS Code HelpBook',
darwinBundleDocumentTypes: [
// {{SQL CARBON EDIT}} - Remove most document types and replace with ours
darwinBundleDocumentType(["csv", "json", "sqlplan", "sql", "xml"], 'resources/darwin/code_file.icns'),
],
darwinBundleURLTypes: [{
role: 'Viewer',
name: product.nameLong,
urlSchemes: [product.urlProtocol]
}],
darwinForceDarkModeSupport: true,
darwinCredits: darwinCreditsTemplate ? Buffer.from(darwinCreditsTemplate({ commit: commit, date: new Date().toISOString() })) : undefined,
linuxExecutableName: product.applicationName,
winIcon: 'resources/win32/code.ico',
token: process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'] || undefined,
// @ts-ignore JSON checking: electronRepository is optional
repo: product.electronRepository || undefined
};
function getElectron(arch) {
return () => {
const electronOpts = _.extend({}, config, {
platform: process.platform,
arch,
ffmpegChromium: true,
keepDefaultApp: true
});
return gulp.src('package.json')
.pipe(json({ name: product.nameShort }))
.pipe(electron(electronOpts))
.pipe(filter(['**', '!**/app/package.json']))
.pipe(vfs.dest('.build/electron'));
};
}
gulp.task(task.define('electron', task.series(util.rimraf('.build/electron'), getElectron(process.arch))));
gulp.task(task.define('electron-ia32', task.series(util.rimraf('.build/electron'), getElectron('ia32'))));
gulp.task(task.define('electron-x64', task.series(util.rimraf('.build/electron'), getElectron('x64'))));
gulp.task(task.define('electron-arm', task.series(util.rimraf('.build/electron'), getElectron('armv7l'))));
gulp.task(task.define('electron-arm64', task.series(util.rimraf('.build/electron'), getElectron('arm64'))));
/**
* Compute checksums for some files.
*
@@ -200,7 +268,7 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op
// {{SQL CARBON EDIT}}
ext.packageBuiltInExtensions();
const extensions = gulp.src(['.build/extensions/**', '!.build/extensions/node_modules/**'], { base: '.build', dot: true }); // {{SQL CARBON EDIT}} - don't package the node_modules directory
const extensions = gulp.src('.build/extensions/**', { base: '.build', dot: true });
const sources = es.merge(src, extensions)
.pipe(filter(['**', '!**/*.js.map'], { dot: true }));
@@ -400,7 +468,7 @@ gulp.task(task.define(
optimizeVSCodeTask,
function () {
const pathToMetadata = './out-vscode/nls.metadata.json';
const pathToExtensions = '.build/extensions/*';
const pathToExtensions = './extensions/*';
const pathToSetup = 'build/win32/**/{Default.isl,messages.en.isl}';
return es.merge(
@@ -421,7 +489,7 @@ gulp.task(task.define(
optimizeVSCodeTask,
function () {
const pathToMetadata = './out-vscode/nls.metadata.json';
const pathToExtensions = '.build/extensions/*';
const pathToExtensions = './extensions/*';
const pathToSetup = 'build/win32/**/{Default.isl,messages.en.isl}';
return es.merge(

@@ -43,8 +43,7 @@ function prepareDebPackage(arch) {
.pipe(replace('@@NAME_LONG@@', product.nameLong))
.pipe(replace('@@NAME_SHORT@@', product.nameShort))
.pipe(replace('@@NAME@@', product.applicationName))
.pipe(replace('@@EXEC@@', `/usr/share/${product.applicationName}/${product.applicationName}`))
.pipe(replace('@@ICON@@', `/usr/share/pixmaps/${product.linuxIconName}.png`))
.pipe(replace('@@ICON@@', product.linuxIconName))
.pipe(replace('@@URLPROTOCOL@@', product.urlProtocol));
const appdata = gulp.src('resources/linux/code.appdata.xml', { base: '.' })
@@ -137,7 +136,6 @@ function prepareRpmPackage(arch) {
.pipe(replace('@@NAME_LONG@@', product.nameLong))
.pipe(replace('@@NAME_SHORT@@', product.nameShort))
.pipe(replace('@@NAME@@', product.applicationName))
.pipe(replace('@@EXEC@@', `/usr/share/${product.applicationName}/${product.applicationName}`))
.pipe(replace('@@ICON@@', product.linuxIconName))
.pipe(replace('@@URLPROTOCOL@@', product.urlProtocol));
@@ -208,25 +206,21 @@ function prepareSnapPackage(arch) {
const destination = getSnapBuildPath(arch);
return function () {
// A desktop file that is placed in snap/gui will be placed into meta/gui verbatim.
const desktop = gulp.src('resources/linux/code.desktop', { base: '.' })
.pipe(rename(`snap/gui/${product.applicationName}.desktop`));
.pipe(rename(`usr/share/applications/${product.applicationName}.desktop`));
// A desktop file that is placed in snap/gui will be placed into meta/gui verbatim.
const desktopUrlHandler = gulp.src('resources/linux/code-url-handler.desktop', { base: '.' })
.pipe(rename(`snap/gui/${product.applicationName}-url-handler.desktop`));
.pipe(rename(`usr/share/applications/${product.applicationName}-url-handler.desktop`));
const desktops = es.merge(desktop, desktopUrlHandler)
.pipe(replace('@@NAME_LONG@@', product.nameLong))
.pipe(replace('@@NAME_SHORT@@', product.nameShort))
.pipe(replace('@@NAME@@', product.applicationName))
.pipe(replace('@@EXEC@@', `${product.applicationName} --force-user-env`))
.pipe(replace('@@ICON@@', `\${SNAP}/meta/gui/${product.linuxIconName}.png`))
.pipe(replace('@@ICON@@', `/usr/share/pixmaps/${product.linuxIconName}.png`))
.pipe(replace('@@URLPROTOCOL@@', product.urlProtocol));
// An icon that is placed in snap/gui will be placed into meta/gui verbatim.
const icon = gulp.src('resources/linux/code.png', { base: '.' })
.pipe(rename(`snap/gui/${product.linuxIconName}.png`));
.pipe(rename(`usr/share/pixmaps/${product.linuxIconName}.png`));
const code = gulp.src(binaryDir + '/**/*', { base: binaryDir })
.pipe(rename(function (p) { p.dirname = `usr/share/${product.applicationName}/${p.dirname}`; }));
@@ -247,8 +241,7 @@ function prepareSnapPackage(arch) {
function buildSnapPackage(arch) {
const snapBuildPath = getSnapBuildPath(arch);
// Default target for snapcraft runs: pull, build, stage and prime, and finally assembles the snap.
return shell.task(`cd ${snapBuildPath} && snapcraft`);
return shell.task(`cd ${snapBuildPath} && snapcraft build`);
}
const BUILD_TARGETS = [

@@ -6,11 +6,150 @@
'use strict';
const gulp = require('gulp');
const path = require('path');
const es = require('event-stream');
const util = require('./lib/util');
const task = require('./lib/task');
const common = require('./lib/optimize');
const product = require('../product.json');
const rename = require('gulp-rename');
const filter = require('gulp-filter');
const json = require('gulp-json-editor');
const _ = require('underscore');
const deps = require('./dependencies');
const vfs = require('vinyl-fs');
const packageJson = require('../package.json');
const { compileBuildTask } = require('./gulpfile.compile');
const noop = () => { return Promise.resolve(); };
const REPO_ROOT = path.dirname(__dirname);
const commit = util.getVersion(REPO_ROOT);
const BUILD_ROOT = path.dirname(REPO_ROOT);
const WEB_FOLDER = path.join(REPO_ROOT, 'remote', 'web');
gulp.task('minify-vscode-web', noop);
gulp.task('vscode-web', noop);
gulp.task('vscode-web-min', noop);
gulp.task('vscode-web-ci', noop);
gulp.task('vscode-web-min-ci', noop);
const productionDependencies = deps.getProductionDependencies(WEB_FOLDER);
const nodeModules = Object.keys(product.dependencies || {})
.concat(_.uniq(productionDependencies.map(d => d.name)));
const vscodeWebResources = [
// Workbench
'out-build/vs/{base,platform,editor,workbench}/**/*.{svg,png,html}',
'out-build/vs/base/browser/ui/octiconLabel/octicons/**',
'out-build/vs/**/markdown.css',
// Webview
'out-build/vs/workbench/contrib/webview/browser/pre/*.js',
// Extension Worker
'out-build/vs/workbench/services/extensions/worker/extensionHostWorkerMain.js',
// Excludes
'!out-build/vs/**/{node,electron-browser,electron-main}/**',
'!out-build/vs/editor/standalone/**',
'!out-build/vs/workbench/**/*-tb.png',
'!**/test/**'
];
const buildfile = require('../src/buildfile');
const vscodeWebEntryPoints = [
buildfile.workbenchWeb,
buildfile.serviceWorker,
buildfile.workerExtensionHost,
buildfile.keyboardMaps,
buildfile.base
];
const optimizeVSCodeWebTask = task.define('optimize-vscode-web', task.series(
util.rimraf('out-vscode-web'),
common.optimizeTask({
src: 'out-build',
entryPoints: _.flatten(vscodeWebEntryPoints),
otherSources: [],
resources: vscodeWebResources,
loaderConfig: common.loaderConfig(nodeModules),
out: 'out-vscode-web',
bundleInfo: undefined
})
));
const minifyVSCodeWebTask = task.define('minify-vscode-web', task.series(
optimizeVSCodeWebTask,
util.rimraf('out-vscode-web-min'),
common.minifyTask('out-vscode-web', `https://ticino.blob.core.windows.net/sourcemaps/${commit}/core`)
));
gulp.task(minifyVSCodeWebTask);
function packageTask(sourceFolderName, destinationFolderName) {
const destination = path.join(BUILD_ROOT, destinationFolderName);
return () => {
const src = gulp.src(sourceFolderName + '/**', { base: '.' })
.pipe(rename(function (path) { path.dirname = path.dirname.replace(new RegExp('^' + sourceFolderName), 'out'); }))
.pipe(filter(['**', '!**/*.js.map']));
const sources = es.merge(src);
let version = packageJson.version;
const quality = product.quality;
if (quality && quality !== 'stable') {
version += '-' + quality;
}
const name = product.nameShort;
const packageJsonStream = gulp.src(['remote/web/package.json'], { base: 'remote/web' })
.pipe(json({ name, version }));
const date = new Date().toISOString();
const productJsonStream = gulp.src(['product.json'], { base: '.' })
.pipe(json({ commit, date }));
const license = gulp.src(['remote/LICENSE'], { base: 'remote' });
const dependenciesSrc = _.flatten(productionDependencies.map(d => path.relative(REPO_ROOT, d.path)).map(d => [`${d}/**`, `!${d}/**/{test,tests}/**`, `!${d}/.bin/**`]));
const deps = gulp.src(dependenciesSrc, { base: 'remote/web', dot: true })
.pipe(filter(['**', '!**/package-lock.json']))
.pipe(util.cleanNodeModules(path.join(__dirname, '.nativeignore')));
const favicon = gulp.src('resources/server/favicon.ico', { base: 'resources/server' });
let all = es.merge(
packageJsonStream,
productJsonStream,
license,
sources,
deps,
favicon
);
let result = all
.pipe(util.skipDirectories())
.pipe(util.fixWin32DirectoryPermissions());
return result.pipe(vfs.dest(destination));
};
}
const dashed = (str) => (str ? `-${str}` : ``);
['', 'min'].forEach(minified => {
const sourceFolderName = `out-vscode-web${dashed(minified)}`;
const destinationFolderName = `vscode-web`;
const vscodeWebTaskCI = task.define(`vscode-web${dashed(minified)}-ci`, task.series(
minified ? minifyVSCodeWebTask : optimizeVSCodeWebTask,
util.rimraf(path.join(BUILD_ROOT, destinationFolderName)),
packageTask(sourceFolderName, destinationFolderName)
));
gulp.task(vscodeWebTaskCI);
const vscodeWebTask = task.define(`vscode-web${dashed(minified)}`, task.series(
compileBuildTask,
vscodeWebTaskCI
));
gulp.task(vscodeWebTask);
});

@@ -19,8 +19,7 @@ const ansiColors = require('ansi-colors');
const root = path.dirname(path.dirname(__dirname));
// {{SQL CARBON EDIT}}
const quality = process.env['VSCODE_QUALITY'];
const builtInExtensions = quality && quality === 'stable' ? require('../builtInExtensions.json') : require('../builtInExtensions-insiders.json');
const builtInExtensions = require('../builtInExtensions-insiders.json');
// {{SQL CARBON EDIT}} - END
const controlFilePath = path.join(os.homedir(), '.vscode-oss-dev', 'extensions', 'control.json');

@@ -11,6 +11,7 @@ const bom = require("gulp-bom");
const sourcemaps = require("gulp-sourcemaps");
const tsb = require("gulp-tsb");
const path = require("path");
const _ = require("underscore");
const monacodts = require("../monaco/api");
const nls = require("./nls");
const reporter_1 = require("./reporter");
@@ -21,7 +22,14 @@ const watch = require('./watch');
const reporter = reporter_1.createReporter();
function getTypeScriptCompilerOptions(src) {
const rootDir = path.join(__dirname, `../../${src}`);
let options = {};
const tsconfig = require(`../../${src}/tsconfig.json`);
let options;
if (tsconfig.extends) {
options = Object.assign({}, require(path.join(rootDir, tsconfig.extends)).compilerOptions, tsconfig.compilerOptions);
}
else {
options = tsconfig.compilerOptions;
}
options.verbose = false;
options.sourceMap = true;
if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry
@@ -30,14 +38,15 @@ function getTypeScriptCompilerOptions(src) {
options.rootDir = rootDir;
options.baseUrl = rootDir;
options.sourceRoot = util.toFileUri(rootDir);
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 0 : 1;
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 'CRLF' : 'LF';
return options;
}
function createCompile(src, build, emitError) {
const projectPath = path.join(__dirname, '../../', src, 'tsconfig.json');
const overrideOptions = Object.assign(Object.assign({}, getTypeScriptCompilerOptions(src)), { inlineSources: Boolean(build) });
const compilation = tsb.create(projectPath, overrideOptions, false, err => reporter(err));
function pipeline(token) {
const opts = _.clone(getTypeScriptCompilerOptions(src));
opts.inlineSources = !!build;
opts.noFilesystemLookup = true;
const ts = tsb.create(opts, true, undefined, err => reporter(err.toString()));
return function (token) {
const utf8Filter = util.filter(data => /(\/|\\)test(\/|\\).*utf8/.test(data.path));
const tsFilter = util.filter(data => /\.ts$/.test(data.path));
const noDeclarationsFilter = util.filter(data => !(/\.d\.ts$/.test(data.path)));
@@ -48,28 +57,30 @@ function createCompile(src, build, emitError) {
.pipe(utf8Filter.restore)
.pipe(tsFilter)
.pipe(util.loadSourcemaps())
.pipe(compilation(token))
.pipe(ts(token))
.pipe(noDeclarationsFilter)
.pipe(build ? nls() : es.through())
.pipe(noDeclarationsFilter.restore)
.pipe(sourcemaps.write('.', {
addComment: false,
includeContent: !!build,
sourceRoot: overrideOptions.sourceRoot
sourceRoot: opts.sourceRoot
}))
.pipe(tsFilter.restore)
.pipe(reporter.end(!!emitError));
return es.duplex(input, output);
}
pipeline.tsProjectSrc = () => {
return compilation.src({ base: src });
};
return pipeline;
}
const typesDts = [
'node_modules/typescript/lib/*.d.ts',
'node_modules/@types/**/*.d.ts',
'!node_modules/@types/webpack/**/*',
'!node_modules/@types/uglify-js/**/*',
];
function compileTask(src, out, build) {
return function () {
const compile = createCompile(src, build, true);
const srcPipe = gulp.src(`${src}/**`, { base: `${src}` });
const srcPipe = es.merge(gulp.src(`${src}/**`, { base: `${src}` }), gulp.src(typesDts));
let generator = new MonacoGenerator(false);
if (src === 'src') {
generator.execute();
@@ -84,8 +95,8 @@ exports.compileTask = compileTask;
function watchTask(out, build) {
return function () {
const compile = createCompile('src', build);
const src = gulp.src('src/**', { base: 'src' });
const watchSrc = watch('src/**', { base: 'src', readDelay: 200 });
const src = es.merge(gulp.src('src/**', { base: 'src' }), gulp.src(typesDts));
const watchSrc = watch('src/**', { base: 'src' });
let generator = new MonacoGenerator(true);
generator.execute();
return watchSrc

@@ -12,21 +12,27 @@ import * as bom from 'gulp-bom';
import * as sourcemaps from 'gulp-sourcemaps';
import * as tsb from 'gulp-tsb';
import * as path from 'path';
import * as _ from 'underscore';
import * as monacodts from '../monaco/api';
import * as nls from './nls';
import { createReporter } from './reporter';
import * as util from './util';
import * as fancyLog from 'fancy-log';
import * as ansiColors from 'ansi-colors';
import ts = require('typescript');
const watch = require('./watch');
const reporter = createReporter();
function getTypeScriptCompilerOptions(src: string): ts.CompilerOptions {
function getTypeScriptCompilerOptions(src: string) {
const rootDir = path.join(__dirname, `../../${src}`);
let options: ts.CompilerOptions = {};
const tsconfig = require(`../../${src}/tsconfig.json`);
let options: { [key: string]: any };
if (tsconfig.extends) {
options = Object.assign({}, require(path.join(rootDir, tsconfig.extends)).compilerOptions, tsconfig.compilerOptions);
} else {
options = tsconfig.compilerOptions;
}
options.verbose = false;
options.sourceMap = true;
if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry
@@ -35,17 +41,18 @@ function getTypeScriptCompilerOptions(src: string): ts.CompilerOptions {
options.rootDir = rootDir;
options.baseUrl = rootDir;
options.sourceRoot = util.toFileUri(rootDir);
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 0 : 1;
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 'CRLF' : 'LF';
return options;
}
function createCompile(src: string, build: boolean, emitError?: boolean) {
const projectPath = path.join(__dirname, '../../', src, 'tsconfig.json');
const overrideOptions = { ...getTypeScriptCompilerOptions(src), inlineSources: Boolean(build) };
function createCompile(src: string, build: boolean, emitError?: boolean): (token?: util.ICancellationToken) => NodeJS.ReadWriteStream {
const opts = _.clone(getTypeScriptCompilerOptions(src));
opts.inlineSources = !!build;
opts.noFilesystemLookup = true;
const compilation = tsb.create(projectPath, overrideOptions, false, err => reporter(err));
const ts = tsb.create(opts, true, undefined, err => reporter(err.toString()));
function pipeline(token?: util.ICancellationToken) {
return function (token?: util.ICancellationToken) {
const utf8Filter = util.filter(data => /(\/|\\)test(\/|\\).*utf8/.test(data.path));
const tsFilter = util.filter(data => /\.ts$/.test(data.path));
@@ -58,31 +65,39 @@ function createCompile(src: string, build: boolean, emitError?: boolean) {
.pipe(utf8Filter.restore)
.pipe(tsFilter)
.pipe(util.loadSourcemaps())
.pipe(compilation(token))
.pipe(ts(token))
.pipe(noDeclarationsFilter)
.pipe(build ? nls() : es.through())
.pipe(noDeclarationsFilter.restore)
.pipe(sourcemaps.write('.', {
addComment: false,
includeContent: !!build,
sourceRoot: overrideOptions.sourceRoot
sourceRoot: opts.sourceRoot
}))
.pipe(tsFilter.restore)
.pipe(reporter.end(!!emitError));
return es.duplex(input, output);
}
pipeline.tsProjectSrc = () => {
return compilation.src({ base: src });
};
return pipeline;
}
const typesDts = [
'node_modules/typescript/lib/*.d.ts',
'node_modules/@types/**/*.d.ts',
'!node_modules/@types/webpack/**/*',
'!node_modules/@types/uglify-js/**/*',
];
export function compileTask(src: string, out: string, build: boolean): () => NodeJS.ReadWriteStream {
return function () {
const compile = createCompile(src, build, true);
const srcPipe = gulp.src(`${src}/**`, { base: `${src}` });
const srcPipe = es.merge(
gulp.src(`${src}/**`, { base: `${src}` }),
gulp.src(typesDts),
);
let generator = new MonacoGenerator(false);
if (src === 'src') {
generator.execute();
@@ -100,8 +115,11 @@ export function watchTask(out: string, build: boolean): () => NodeJS.ReadWriteSt
return function () {
const compile = createCompile('src', build);
const src = gulp.src('src/**', { base: 'src' });
const watchSrc = watch('src/**', { base: 'src', readDelay: 200 });
const src = es.merge(
gulp.src('src/**', { base: 'src' }),
gulp.src(typesDts),
);
const watchSrc = watch('src/**', { base: 'src' });
let generator = new MonacoGenerator(true);
generator.execute();

@@ -2,88 +2,29 @@
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const fs = require("fs");
const path = require("path");
const vfs = require("vinyl-fs");
const filter = require("gulp-filter");
const json = require("gulp-json-editor");
const _ = require("underscore");
const util = require("./util");
const electron = require('gulp-atom-electron');
const fs = require('fs');
const path = require('path');
const root = path.dirname(path.dirname(__dirname));
const product = JSON.parse(fs.readFileSync(path.join(root, 'product.json'), 'utf8'));
const commit = util.getVersion(root);
function getElectronVersion() {
const yarnrc = fs.readFileSync(path.join(root, '.yarnrc'), 'utf8');
const target = /^target "(.*)"$/m.exec(yarnrc)[1];
return target;
}
exports.getElectronVersion = getElectronVersion;
const darwinCreditsTemplate = product.darwinCredits && _.template(fs.readFileSync(path.join(root, product.darwinCredits), 'utf8'));
function darwinBundleDocumentType(extensions, icon) {
return {
name: product.nameLong + ' document',
role: 'Editor',
ostypes: ["TEXT", "utxt", "TUTX", "****"],
extensions: extensions,
iconFile: icon
};
}
exports.config = {
version: getElectronVersion(),
productAppName: product.nameLong,
companyName: 'Microsoft Corporation',
copyright: 'Copyright (C) 2019 Microsoft. All rights reserved',
darwinIcon: 'resources/darwin/code.icns',
darwinBundleIdentifier: product.darwinBundleIdentifier,
darwinApplicationCategoryType: 'public.app-category.developer-tools',
darwinHelpBookFolder: 'VS Code HelpBook',
darwinHelpBookName: 'VS Code HelpBook',
darwinBundleDocumentTypes: [
darwinBundleDocumentType(["csv", "json", "sqlplan", "sql", "xml"], 'resources/darwin/code_file.icns'),
],
darwinBundleURLTypes: [{
role: 'Viewer',
name: product.nameLong,
urlSchemes: [product.urlProtocol]
}],
darwinForceDarkModeSupport: true,
darwinCredits: darwinCreditsTemplate ? Buffer.from(darwinCreditsTemplate({ commit: commit, date: new Date().toISOString() })) : undefined,
linuxExecutableName: product.applicationName,
winIcon: 'resources/win32/code.ico',
token: process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'] || undefined,
repo: product.electronRepository || undefined
};
function getElectron(arch) {
return () => {
const electronOpts = _.extend({}, exports.config, {
platform: process.platform,
arch,
ffmpegChromium: true,
keepDefaultApp: true
});
return vfs.src('package.json')
.pipe(json({ name: product.nameShort }))
.pipe(electron(electronOpts))
.pipe(filter(['**', '!**/app/package.json']))
.pipe(vfs.dest('.build/electron'));
};
}
async function main(arch = process.arch) {
const version = getElectronVersion();
const electronPath = path.join(root, '.build', 'electron');
const versionFile = path.join(electronPath, 'version');
const isUpToDate = fs.existsSync(versionFile) && fs.readFileSync(versionFile, 'utf8') === `${version}`;
if (!isUpToDate) {
await util.rimraf(electronPath)();
await util.streamToPromise(getElectron(arch)());
}
const yarnrc = fs.readFileSync(path.join(root, '.yarnrc'), 'utf8');
// @ts-ignore
const target = /^target "(.*)"$/m.exec(yarnrc)[1];
return target;
}
module.exports.getElectronVersion = getElectronVersion;
// returns 0 if the right version of electron is in .build/electron
// @ts-ignore
if (require.main === module) {
main(process.argv[2]).catch(err => {
console.error(err);
process.exit(1);
});
const version = getElectronVersion();
const versionFile = path.join(root, '.build', 'electron', 'version');
const isUpToDate = fs.existsSync(versionFile) && fs.readFileSync(versionFile, 'utf8') === `${version}`;
process.exit(isUpToDate ? 0 : 1);
}

@@ -1,100 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import * as fs from 'fs';
import * as path from 'path';
import * as vfs from 'vinyl-fs';
import * as filter from 'gulp-filter';
import * as json from 'gulp-json-editor';
import * as _ from 'underscore';
import * as util from './util';
const electron = require('gulp-atom-electron');
const root = path.dirname(path.dirname(__dirname));
const product = JSON.parse(fs.readFileSync(path.join(root, 'product.json'), 'utf8'));
const commit = util.getVersion(root);
export function getElectronVersion(): string {
const yarnrc = fs.readFileSync(path.join(root, '.yarnrc'), 'utf8');
const target = /^target "(.*)"$/m.exec(yarnrc)![1];
return target;
}
const darwinCreditsTemplate = product.darwinCredits && _.template(fs.readFileSync(path.join(root, product.darwinCredits), 'utf8'));
function darwinBundleDocumentType(extensions: string[], icon: string) {
return {
name: product.nameLong + ' document',
role: 'Editor',
ostypes: ["TEXT", "utxt", "TUTX", "****"],
extensions: extensions,
iconFile: icon
};
}
export const config = {
version: getElectronVersion(),
productAppName: product.nameLong,
companyName: 'Microsoft Corporation',
copyright: 'Copyright (C) 2019 Microsoft. All rights reserved',
darwinIcon: 'resources/darwin/code.icns',
darwinBundleIdentifier: product.darwinBundleIdentifier,
darwinApplicationCategoryType: 'public.app-category.developer-tools',
darwinHelpBookFolder: 'VS Code HelpBook',
darwinHelpBookName: 'VS Code HelpBook',
darwinBundleDocumentTypes: [
darwinBundleDocumentType(["csv", "json", "sqlplan", "sql", "xml"], 'resources/darwin/code_file.icns'),
],
darwinBundleURLTypes: [{
role: 'Viewer',
name: product.nameLong,
urlSchemes: [product.urlProtocol]
}],
darwinForceDarkModeSupport: true,
darwinCredits: darwinCreditsTemplate ? Buffer.from(darwinCreditsTemplate({ commit: commit, date: new Date().toISOString() })) : undefined,
linuxExecutableName: product.applicationName,
winIcon: 'resources/win32/code.ico',
token: process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'] || undefined,
repo: product.electronRepository || undefined
};
function getElectron(arch: string): () => NodeJS.ReadWriteStream {
return () => {
const electronOpts = _.extend({}, config, {
platform: process.platform,
arch,
ffmpegChromium: true,
keepDefaultApp: true
});
return vfs.src('package.json')
.pipe(json({ name: product.nameShort }))
.pipe(electron(electronOpts))
.pipe(filter(['**', '!**/app/package.json']))
.pipe(vfs.dest('.build/electron'));
};
}
async function main(arch = process.arch): Promise<void> {
const version = getElectronVersion();
const electronPath = path.join(root, '.build', 'electron');
const versionFile = path.join(electronPath, 'version');
const isUpToDate = fs.existsSync(versionFile) && fs.readFileSync(versionFile, 'utf8') === `${version}`;
if (!isUpToDate) {
await util.rimraf(electronPath)();
await util.streamToPromise(getElectron(arch)());
}
}
if (require.main === module) {
main(process.argv[2]).catch(err => {
console.error(err);
process.exit(1);
});
}

@@ -101,7 +101,7 @@ function fromLocalWebpack(extensionPath) {
result.emit('error', compilation.warnings.join('\n'));
}
};
const webpackConfig = Object.assign(Object.assign({}, require(webpackConfigPath)), { mode: 'production' });
const webpackConfig = Object.assign({}, require(webpackConfigPath), { mode: 'production' });
const relativeOutputPath = path.relative(extensionPath, webpackConfig.output.path);
return webpackGulp(webpackConfig, webpack, webpackDone)
.pipe(es.through(function (data) {
@@ -200,9 +200,13 @@ const sqlBuiltInExtensions = [
'dacpac',
'schema-compare',
'cms',
'query-history',
'liveshare'
'query-history'
];
// make resource deployment and BDC extension only available in insiders
if (process.env['VSCODE_QUALITY'] === 'stable') {
sqlBuiltInExtensions.push('resource-deployment');
sqlBuiltInExtensions.push('big-data-cluster');
}
const builtInExtensions = process.env['VSCODE_QUALITY'] === 'stable' ? require('../builtInExtensions.json') : require('../builtInExtensions-insiders.json');
// {{SQL CARBON EDIT}} - End
function packageLocalExtensionsStream() {

@@ -236,10 +236,16 @@ const sqlBuiltInExtensions = [
'dacpac',
'schema-compare',
'cms',
'query-history',
'liveshare'
'query-history'
];
// make resource deployment and BDC extension only available in insiders
if (process.env['VSCODE_QUALITY'] === 'stable') {
sqlBuiltInExtensions.push('resource-deployment');
sqlBuiltInExtensions.push('big-data-cluster');
}
interface IBuiltInExtension {
name: string;
version: string;
@@ -350,4 +356,4 @@ export function packageExtensionTask(extensionName: string, platform: string, ar
return result.pipe(vfs.dest(destination));
};
}
// {{SQL CARBON EDIT}} - End
// {{SQL CARBON EDIT}} - End

@@ -176,7 +176,6 @@ class XLF {
this.buffer.push(line.toString());
}
}
exports.XLF = XLF;
XLF.parsePseudo = function (xlfString) {
return new Promise((resolve) => {
let parser = new xml2js.Parser();
@@ -249,6 +248,7 @@ XLF.parse = function (xlfString) {
});
});
};
exports.XLF = XLF;
class Limiter {
constructor(maxDegreeOfParalellism) {
this.maxDegreeOfParalellism = maxDegreeOfParalellism;
@@ -586,7 +586,7 @@ function createXlfFilesForExtensions() {
}
return _xlf;
}
gulp.src([`.build/extensions/${extensionName}/package.nls.json`, `.build/extensions/${extensionName}/**/nls.metadata.json`], { allowEmpty: true }).pipe(event_stream_1.through(function (file) {
gulp.src([`./extensions/${extensionName}/package.nls.json`, `./extensions/${extensionName}/**/nls.metadata.json`], { allowEmpty: true }).pipe(event_stream_1.through(function (file) {
if (file.isBuffer()) {
const buffer = file.contents;
const basename = path.basename(file.path);
@@ -609,7 +609,7 @@ function createXlfFilesForExtensions() {
}
else if (basename === 'nls.metadata.json') {
const json = JSON.parse(buffer.toString('utf8'));
const relPath = path.relative(`.build/extensions/${extensionName}`, path.dirname(file.path));
const relPath = path.relative(`./extensions/${extensionName}`, path.dirname(file.path));
for (let file in json) {
const fileContent = json[file];
getXlf().addFile(`extensions/${extensionName}/${relPath}/${file}`, fileContent.keys, fileContent.messages);
@@ -912,8 +912,8 @@ function pullCoreAndExtensionsXlfFiles(apiHostname, username, password, language
_coreAndExtensionResources.push(...json.workbench);
// extensions
let extensionsToLocalize = Object.create(null);
glob.sync('.build/extensions/**/*.nls.json').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
glob.sync('.build/extensions/*/node_modules/vscode-nls').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
glob.sync('./extensions/**/*.nls.json').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
glob.sync('./extensions/*/node_modules/vscode-nls').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
Object.keys(extensionsToLocalize).forEach(extension => {
_coreAndExtensionResources.push({ name: extension, project: extensionsProject });
});
@@ -1086,7 +1086,7 @@ function prepareI18nPackFiles(externalExtensions, resultingTranslationPaths, pse
resultingTranslationPaths.push({ id: 'vscode', resourceName: 'main.i18n.json' });
this.queue(translatedMainFile);
for (let extension in extensionsPacks) {
const translatedExtFile = createI18nFile(`extensions/${extension}`, extensionsPacks[extension]);
const translatedExtFile = createI18nFile(`./extensions/${extension}`, extensionsPacks[extension]);
this.queue(translatedExtFile);
const externalExtensionId = externalExtensions[extension];
if (externalExtensionId) {

@@ -106,10 +106,6 @@
"name": "vs/workbench/contrib/quickopen",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/userData",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/remote",
"project": "vscode-workbench"
@@ -174,10 +170,6 @@
"name": "vs/workbench/contrib/webview",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/customEditor",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/welcome",
"project": "vscode-workbench"
@@ -186,10 +178,6 @@
"name": "vs/workbench/contrib/outline",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/userDataSync",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/actions",
"project": "vscode-workbench"
@@ -242,10 +230,6 @@
"name": "vs/workbench/services/keybinding",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/lifecycle",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/mode",
"project": "vscode-workbench"
@@ -271,7 +255,7 @@
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/workspaces",
"name": "vs/workbench/services/workspace",
"project": "vscode-workbench"
},
{
@@ -289,10 +273,6 @@
{
"name": "vs/workbench/services/notification",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/userData",
"project": "vscode-workbench"
}
]
}

@@ -709,7 +709,7 @@ export function createXlfFilesForExtensions(): ThroughStream {
}
return _xlf;
}
gulp.src([`.build/extensions/${extensionName}/package.nls.json`, `.build/extensions/${extensionName}/**/nls.metadata.json`], { allowEmpty: true }).pipe(through(function (file: File) {
gulp.src([`./extensions/${extensionName}/package.nls.json`, `./extensions/${extensionName}/**/nls.metadata.json`], { allowEmpty: true }).pipe(through(function (file: File) {
if (file.isBuffer()) {
const buffer: Buffer = file.contents as Buffer;
const basename = path.basename(file.path);
@@ -729,7 +729,7 @@ export function createXlfFilesForExtensions(): ThroughStream {
getXlf().addFile(`extensions/${extensionName}/package`, keys, messages);
} else if (basename === 'nls.metadata.json') {
const json: BundledExtensionFormat = JSON.parse(buffer.toString('utf8'));
const relPath = path.relative(`.build/extensions/${extensionName}`, path.dirname(file.path));
const relPath = path.relative(`./extensions/${extensionName}`, path.dirname(file.path));
for (let file in json) {
const fileContent = json[file];
getXlf().addFile(`extensions/${extensionName}/${relPath}/${file}`, fileContent.keys, fileContent.messages);
@@ -1053,8 +1053,8 @@ export function pullCoreAndExtensionsXlfFiles(apiHostname: string, username: str
// extensions
let extensionsToLocalize = Object.create(null);
glob.sync('.build/extensions/**/*.nls.json').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
glob.sync('.build/extensions/*/node_modules/vscode-nls').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
glob.sync('./extensions/**/*.nls.json').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
glob.sync('./extensions/*/node_modules/vscode-nls').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
Object.keys(extensionsToLocalize).forEach(extension => {
_coreAndExtensionResources.push({ name: extension, project: extensionsProject });
@@ -1253,7 +1253,7 @@ export function prepareI18nPackFiles(externalExtensions: Map<string>, resultingT
this.queue(translatedMainFile);
for (let extension in extensionsPacks) {
const translatedExtFile = createI18nFile(`extensions/${extension}`, extensionsPacks[extension]);
const translatedExtFile = createI18nFile(`./extensions/${extension}`, extensionsPacks[extension]);
this.queue(translatedExtFile);
const externalExtensionId = externalExtensions[extension];

View File

@@ -5,7 +5,6 @@
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const es = require("event-stream");
const fs = require("fs");
const gulp = require("gulp");
const concat = require("gulp-concat");
const minifyCSS = require("gulp-cssnano");
@@ -18,7 +17,7 @@ const fancyLog = require("fancy-log");
const ansiColors = require("ansi-colors");
const path = require("path");
const pump = require("pump");
const terser = require("terser");
const uglifyes = require("uglify-es");
const VinylFile = require("vinyl");
const bundle = require("./bundle");
const i18n_1 = require("./i18n");
@@ -135,14 +134,6 @@ function optimizeTask(opts) {
if (err || !result) {
return bundlesStream.emit('error', JSON.stringify(err));
}
if (opts.inlineAmdImages) {
try {
result = inlineAmdImages(src, result);
}
catch (err) {
return bundlesStream.emit('error', JSON.stringify(err));
}
}
toBundleStream(src, bundledFileHeader, result.files).pipe(bundlesStream);
// Remove css inlined resources
const filteredResources = resources.slice();
@@ -178,39 +169,6 @@ function optimizeTask(opts) {
};
}
exports.optimizeTask = optimizeTask;
function inlineAmdImages(src, result) {
for (const outputFile of result.files) {
for (const sourceFile of outputFile.sources) {
if (sourceFile.path && /\.js$/.test(sourceFile.path)) {
sourceFile.contents = sourceFile.contents.replace(/\([^.]+\.registerAndGetAmdImageURL\(([^)]+)\)\)/g, (_, m0) => {
let imagePath = m0;
// remove `` or ''
if ((imagePath.charAt(0) === '`' && imagePath.charAt(imagePath.length - 1) === '`')
|| (imagePath.charAt(0) === '\'' && imagePath.charAt(imagePath.length - 1) === '\'')) {
imagePath = imagePath.substr(1, imagePath.length - 2);
}
if (!/\.(png|svg)$/.test(imagePath)) {
console.log(`original: ${_}`);
return _;
}
const repoLocation = path.join(src, imagePath);
const absoluteLocation = path.join(REPO_ROOT_PATH, repoLocation);
if (!fs.existsSync(absoluteLocation)) {
const message = `Invalid amd image url in file ${sourceFile.path}: ${imagePath}`;
console.log(message);
throw new Error(message);
}
const fileContents = fs.readFileSync(absoluteLocation);
const mime = /\.svg$/.test(imagePath) ? 'image/svg+xml' : 'image/png';
// Mark the file as inlined so we don't ship it by itself
result.cssInlinedResources.push(repoLocation);
return `("data:${mime};base64,${fileContents.toString('base64')}")`;
});
}
}
}
return result;
}
/**
* Wrap around uglify and allow the preserveComments function
* to have a file "context" to include our copyright only once per file.
@@ -241,7 +199,7 @@ function uglifyWithCopyrights() {
return false;
};
};
const minify = composer(terser);
const minify = composer(uglifyes);
const input = es.through();
const output = input
.pipe(flatmap((stream, f) => {

View File

@@ -6,7 +6,6 @@
'use strict';
import * as es from 'event-stream';
import * as fs from 'fs';
import * as gulp from 'gulp';
import * as concat from 'gulp-concat';
import * as minifyCSS from 'gulp-cssnano';
@@ -20,7 +19,7 @@ import * as ansiColors from 'ansi-colors';
import * as path from 'path';
import * as pump from 'pump';
import * as sm from 'source-map';
import * as terser from 'terser';
import * as uglifyes from 'uglify-es';
import * as VinylFile from 'vinyl';
import * as bundle from './bundle';
import { Language, processNlsFiles } from './i18n';
@@ -162,10 +161,6 @@ export interface IOptimizeTaskOpts {
* (emit bundleInfo.json file)
*/
bundleInfo: boolean;
/**
* replace calls to `registerAndGetAmdImageURL` with data uris
*/
inlineAmdImages: boolean;
/**
* (out folder name)
*/
@@ -199,14 +194,6 @@ export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStr
bundle.bundle(entryPoints, loaderConfig, function (err, result) {
if (err || !result) { return bundlesStream.emit('error', JSON.stringify(err)); }
if (opts.inlineAmdImages) {
try {
result = inlineAmdImages(src, result);
} catch (err) {
return bundlesStream.emit('error', JSON.stringify(err));
}
}
toBundleStream(src, bundledFileHeader, result.files).pipe(bundlesStream);
// Remove css inlined resources
@@ -251,42 +238,6 @@ export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStr
};
}
function inlineAmdImages(src: string, result: bundle.IBundleResult): bundle.IBundleResult {
for (const outputFile of result.files) {
for (const sourceFile of outputFile.sources) {
if (sourceFile.path && /\.js$/.test(sourceFile.path)) {
sourceFile.contents = sourceFile.contents.replace(/\([^.]+\.registerAndGetAmdImageURL\(([^)]+)\)\)/g, (_, m0) => {
let imagePath = m0;
// remove `` or ''
if ((imagePath.charAt(0) === '`' && imagePath.charAt(imagePath.length - 1) === '`')
|| (imagePath.charAt(0) === '\'' && imagePath.charAt(imagePath.length - 1) === '\'')) {
imagePath = imagePath.substr(1, imagePath.length - 2);
}
if (!/\.(png|svg)$/.test(imagePath)) {
console.log(`original: ${_}`);
return _;
}
const repoLocation = path.join(src, imagePath);
const absoluteLocation = path.join(REPO_ROOT_PATH, repoLocation);
if (!fs.existsSync(absoluteLocation)) {
const message = `Invalid amd image url in file ${sourceFile.path}: ${imagePath}`;
console.log(message);
throw new Error(message);
}
const fileContents = fs.readFileSync(absoluteLocation);
const mime = /\.svg$/.test(imagePath) ? 'image/svg+xml' : 'image/png';
// Mark the file as inlined so we don't ship it by itself
result.cssInlinedResources.push(repoLocation);
return `("data:${mime};base64,${fileContents.toString('base64')}")`;
});
}
}
}
return result;
}
declare class FileWithCopyright extends VinylFile {
public __hasOurCopyright: boolean;
}
@@ -324,7 +275,7 @@ function uglifyWithCopyrights(): NodeJS.ReadWriteStream {
};
};
const minify = (composer as any)(terser);
const minify = (composer as any)(uglifyes);
const input = es.through();
const output = input
.pipe(flatmap((stream, f) => {
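The inlineAmdImages step shown above rewrites registerAndGetAmdImageURL(...) call sites into base64 data URIs so that small PNG/SVG assets ship inside the bundle instead of as separate files. A minimal sketch of the core conversion (names illustrative, error handling omitted):

import * as fs from 'fs';

// Turn an image on disk into a data URI; SVGs get their own MIME type, everything
// else is treated as PNG, mirroring the bundler logic above.
function toDataUri(absolutePath: string): string {
	const mime = /\.svg$/.test(absolutePath) ? 'image/svg+xml' : 'image/png';
	const contents = fs.readFileSync(absolutePath);
	return `data:${mime};base64,${contents.toString('base64')}`;
}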

View File

@@ -1,103 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const fs = require("fs");
const rollup = require("rollup");
const path = require("path");
// getting around stupid import rules
const nodeResolve = require('rollup-plugin-node-resolve');
const commonjs = require('rollup-plugin-commonjs');
async function rollupModule(options) {
const moduleName = options.moduleName;
try {
const inputFile = options.inputFile;
const outputDirectory = options.outputDirectory;
await fs.promises.mkdir(outputDirectory, {
recursive: true
});
const outputFileName = options.outputFileName;
const outputMapName = `${outputFileName}.map`;
const external = options.external || [];
const outputFilePath = path.resolve(outputDirectory, outputFileName);
const outputMapPath = path.resolve(outputDirectory, outputMapName);
const bundle = await rollup.rollup({
input: inputFile,
plugins: [
nodeResolve(),
commonjs(),
],
external,
});
const generatedBundle = await bundle.generate({
name: moduleName,
format: 'umd',
sourcemap: true
});
const result = generatedBundle.output[0];
result.code = result.code + '\n//# sourceMappingURL=' + path.basename(outputMapName);
await fs.promises.writeFile(outputFilePath, result.code);
await fs.promises.writeFile(outputMapPath, result.map);
return {
name: moduleName,
result: true
};
}
catch (ex) {
return {
name: moduleName,
result: false,
exception: ex
};
}
}
function rollupAngularSlickgrid(root) {
return new Promise(async (resolve, reject) => {
const result = await rollupModule({
moduleName: 'angular2-slickgrid',
inputFile: path.resolve(root, 'node_modules', 'angular2-slickgrid', 'out', 'index.js'),
outputDirectory: path.resolve(root, 'node_modules', 'angular2-slickgrid', 'out', 'bundles'),
outputFileName: 'angular2-slickgrid.umd.js'
});
if (!result.result) {
return reject(`angular2-slickgrid failed to bundle - ${result.exception}`);
}
resolve();
});
}
exports.rollupAngularSlickgrid = rollupAngularSlickgrid;
function rollupAngular(root) {
return new Promise(async (resolve, reject) => {
const modules = ['core', 'animations', 'common', 'compiler', 'forms', 'platform-browser', 'platform-browser-dynamic', 'router'];
const tasks = modules.map((module) => {
return rollupModule({
moduleName: `ng.${module}`,
inputFile: path.resolve(root, 'node_modules', '@angular', module, '@angular', `${module}.es5.js`),
outputDirectory: path.resolve(root, 'node_modules', '@angular', module, 'bundles'),
outputFileName: `${module}.umd.js`,
external: modules.map(mn => `@angular/${mn}`)
});
});
// array of booleans
const x = await Promise.all(tasks);
const result = x.reduce((prev, current) => {
if (!current.result) {
prev.fails.push(current.name);
prev.exceptions.push(current.exception);
prev.result = false;
}
return prev;
}, {
fails: [],
exceptions: [],
result: true,
});
if (!result.result) {
return reject(`failures: ${result.fails} - exceptions: ${JSON.stringify(result.exceptions)}`);
}
resolve();
});
}
exports.rollupAngular = rollupAngular;

View File

@@ -1,125 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as fs from 'fs';
import * as rollup from 'rollup';
import * as path from 'path';
// getting around stupid import rules
const nodeResolve = require('rollup-plugin-node-resolve');
const commonjs = require('rollup-plugin-commonjs');
export interface IRollupOptions {
moduleName: string;
inputFile: string;
outputDirectory: string;
outputFileName: string;
external?: string[];
}
async function rollupModule(options: IRollupOptions) {
const moduleName = options.moduleName;
try {
const inputFile = options.inputFile;
const outputDirectory = options.outputDirectory;
await fs.promises.mkdir(outputDirectory, {
recursive: true
});
const outputFileName = options.outputFileName;
const outputMapName = `${outputFileName}.map`;
const external = options.external || [];
const outputFilePath = path.resolve(outputDirectory, outputFileName);
const outputMapPath = path.resolve(outputDirectory, outputMapName);
const bundle = await rollup.rollup({
input: inputFile,
plugins: [
nodeResolve(),
commonjs(),
],
external,
});
const generatedBundle = await bundle.generate({
name: moduleName,
format: 'umd',
sourcemap: true
});
const result = generatedBundle.output[0];
result.code = result.code + '\n//# sourceMappingURL=' + path.basename(outputMapName);
await fs.promises.writeFile(outputFilePath, result.code);
await fs.promises.writeFile(outputMapPath, result.map);
return {
name: moduleName,
result: true
};
} catch (ex) {
return {
name: moduleName,
result: false,
exception: ex
};
}
}
export function rollupAngularSlickgrid(root: string): Promise<void> {
return new Promise(async (resolve, reject) => {
const result = await rollupModule({
moduleName: 'angular2-slickgrid',
inputFile: path.resolve(root, 'node_modules', 'angular2-slickgrid', 'out', 'index.js'),
outputDirectory: path.resolve(root, 'node_modules', 'angular2-slickgrid', 'out', 'bundles'),
outputFileName: 'angular2-slickgrid.umd.js'
});
if (!result.result) {
return reject(`angular2-slickgrid failed to bundle - ${result.exception}`);
}
resolve();
});
}
export function rollupAngular(root: string): Promise<void> {
return new Promise(async (resolve, reject) => {
const modules = ['core', 'animations', 'common', 'compiler', 'forms', 'platform-browser', 'platform-browser-dynamic', 'router'];
const tasks = modules.map((module) => {
return rollupModule({
moduleName: `ng.${module}`,
inputFile: path.resolve(root, 'node_modules', '@angular', module, '@angular', `${module}.es5.js`),
outputDirectory: path.resolve(root, 'node_modules', '@angular', module, 'bundles'),
outputFileName: `${module}.umd.js`,
external: modules.map(mn => `@angular/${mn}`)
});
});
// array of booleans
const x = await Promise.all(tasks);
const result = x.reduce<{ fails: string[]; exceptions: string[]; result: boolean }>((prev, current) => {
if (!current.result) {
prev.fails.push(current.name);
prev.exceptions.push(current.exception);
prev.result = false;
}
return prev;
}, {
fails: [],
exceptions: [],
result: true,
});
if (!result.result) {
return reject(`failures: ${result.fails} - exceptions: ${JSON.stringify(result.exceptions)}`);
}
resolve();
});
}
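The rollup helper above follows the standard rollup 1.x JavaScript API: bundle a single entry point, then emit a UMD build whose name becomes the global export when no module loader is present. A stripped-down sketch of the same flow (function and paths are illustrative):

import * as rollup from 'rollup';

// Bundle one ES module entry point to a UMD file on disk (assumes rollup ^1.x).
async function bundleToUmd(input: string, name: string, file: string): Promise<void> {
	const build = await rollup.rollup({ input });
	await build.write({ file, format: 'umd', name, sourcemap: true });
}

// e.g. bundleToUmd('out/index.js', 'angular2-slickgrid', 'out/bundles/angular2-slickgrid.umd.js');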

View File

@@ -130,7 +130,7 @@ function createESMSourcesAndResources2(options) {
write(getDestAbsoluteFilePath(file), JSON.stringify(tsConfig, null, '\t'));
continue;
}
if (/\.d\.ts$/.test(file) || /\.css$/.test(file) || /\.js$/.test(file) || /\.ttf$/.test(file)) {
if (/\.d\.ts$/.test(file) || /\.css$/.test(file) || /\.js$/.test(file)) {
// Transport the files directly
write(getDestAbsoluteFilePath(file), fs.readFileSync(path.join(SRC_FOLDER, file)));
continue;
@@ -250,37 +250,35 @@ function transportCSS(module, enqueue, write) {
const filename = path.join(SRC_DIR, module);
const fileContents = fs.readFileSync(filename).toString();
const inlineResources = 'base64'; // see https://github.com/Microsoft/monaco-editor/issues/148
const newContents = _rewriteOrInlineUrls(fileContents, inlineResources === 'base64');
const inlineResourcesLimit = 300000; //3000; // see https://github.com/Microsoft/monaco-editor/issues/336
const newContents = _rewriteOrInlineUrls(fileContents, inlineResources === 'base64', inlineResourcesLimit);
write(module, newContents);
return true;
function _rewriteOrInlineUrls(contents, forceBase64) {
function _rewriteOrInlineUrls(contents, forceBase64, inlineByteLimit) {
return _replaceURL(contents, (url) => {
const fontMatch = url.match(/^(.*).ttf\?(.*)$/);
if (fontMatch) {
const relativeFontPath = `${fontMatch[1]}.ttf`; // trim the query parameter
const fontPath = path.join(path.dirname(module), relativeFontPath);
enqueue(fontPath);
return relativeFontPath;
}
const imagePath = path.join(path.dirname(module), url);
const fileContents = fs.readFileSync(path.join(SRC_DIR, imagePath));
const MIME = /\.svg$/.test(url) ? 'image/svg+xml' : 'image/png';
let DATA = ';base64,' + fileContents.toString('base64');
if (!forceBase64 && /\.svg$/.test(url)) {
// .svg => url encode as explained at https://codepen.io/tigt/post/optimizing-svgs-in-data-uris
let newText = fileContents.toString()
.replace(/"/g, '\'')
.replace(/</g, '%3C')
.replace(/>/g, '%3E')
.replace(/&/g, '%26')
.replace(/#/g, '%23')
.replace(/\s+/g, ' ');
let encodedData = ',' + newText;
if (encodedData.length < DATA.length) {
DATA = encodedData;
let imagePath = path.join(path.dirname(module), url);
let fileContents = fs.readFileSync(path.join(SRC_DIR, imagePath));
if (fileContents.length < inlineByteLimit) {
const MIME = /\.svg$/.test(url) ? 'image/svg+xml' : 'image/png';
let DATA = ';base64,' + fileContents.toString('base64');
if (!forceBase64 && /\.svg$/.test(url)) {
// .svg => url encode as explained at https://codepen.io/tigt/post/optimizing-svgs-in-data-uris
let newText = fileContents.toString()
.replace(/"/g, '\'')
.replace(/</g, '%3C')
.replace(/>/g, '%3E')
.replace(/&/g, '%26')
.replace(/#/g, '%23')
.replace(/\s+/g, ' ');
let encodedData = ',' + newText;
if (encodedData.length < DATA.length) {
DATA = encodedData;
}
}
return '"data:' + MIME + DATA + '"';
}
return '"data:' + MIME + DATA + '"';
enqueue(imagePath);
return url;
});
}
function _replaceURL(contents, replacer) {

View File

@@ -154,7 +154,7 @@ export function createESMSourcesAndResources2(options: IOptions2): void {
continue;
}
if (/\.d\.ts$/.test(file) || /\.css$/.test(file) || /\.js$/.test(file) || /\.ttf$/.test(file)) {
if (/\.d\.ts$/.test(file) || /\.css$/.test(file) || /\.js$/.test(file)) {
// Transport the files directly
write(getDestAbsoluteFilePath(file), fs.readFileSync(path.join(SRC_FOLDER, file)));
continue;
@@ -290,41 +290,40 @@ function transportCSS(module: string, enqueue: (module: string) => void, write:
const filename = path.join(SRC_DIR, module);
const fileContents = fs.readFileSync(filename).toString();
const inlineResources = 'base64'; // see https://github.com/Microsoft/monaco-editor/issues/148
const inlineResourcesLimit = 300000;//3000; // see https://github.com/Microsoft/monaco-editor/issues/336
const newContents = _rewriteOrInlineUrls(fileContents, inlineResources === 'base64');
const newContents = _rewriteOrInlineUrls(fileContents, inlineResources === 'base64', inlineResourcesLimit);
write(module, newContents);
return true;
function _rewriteOrInlineUrls(contents: string, forceBase64: boolean): string {
function _rewriteOrInlineUrls(contents: string, forceBase64: boolean, inlineByteLimit: number): string {
return _replaceURL(contents, (url) => {
const fontMatch = url.match(/^(.*).ttf\?(.*)$/);
if (fontMatch) {
const relativeFontPath = `${fontMatch[1]}.ttf`; // trim the query parameter
const fontPath = path.join(path.dirname(module), relativeFontPath);
enqueue(fontPath);
return relativeFontPath;
}
let imagePath = path.join(path.dirname(module), url);
let fileContents = fs.readFileSync(path.join(SRC_DIR, imagePath));
const imagePath = path.join(path.dirname(module), url);
const fileContents = fs.readFileSync(path.join(SRC_DIR, imagePath));
const MIME = /\.svg$/.test(url) ? 'image/svg+xml' : 'image/png';
let DATA = ';base64,' + fileContents.toString('base64');
if (fileContents.length < inlineByteLimit) {
const MIME = /\.svg$/.test(url) ? 'image/svg+xml' : 'image/png';
let DATA = ';base64,' + fileContents.toString('base64');
if (!forceBase64 && /\.svg$/.test(url)) {
// .svg => url encode as explained at https://codepen.io/tigt/post/optimizing-svgs-in-data-uris
let newText = fileContents.toString()
.replace(/"/g, '\'')
.replace(/</g, '%3C')
.replace(/>/g, '%3E')
.replace(/&/g, '%26')
.replace(/#/g, '%23')
.replace(/\s+/g, ' ');
let encodedData = ',' + newText;
if (encodedData.length < DATA.length) {
DATA = encodedData;
if (!forceBase64 && /\.svg$/.test(url)) {
// .svg => url encode as explained at https://codepen.io/tigt/post/optimizing-svgs-in-data-uris
let newText = fileContents.toString()
.replace(/"/g, '\'')
.replace(/</g, '%3C')
.replace(/>/g, '%3E')
.replace(/&/g, '%26')
.replace(/#/g, '%23')
.replace(/\s+/g, ' ');
let encodedData = ',' + newText;
if (encodedData.length < DATA.length) {
DATA = encodedData;
}
}
return '"data:' + MIME + DATA + '"';
}
return '"data:' + MIME + DATA + '"';
enqueue(imagePath);
return url;
});
}
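The change above gates inlining on file size: anything under inlineResourcesLimit bytes becomes a data URI (base64, or URL-encoded text for SVGs when that is smaller), while larger assets keep their original url() reference and are enqueued to be copied alongside the CSS. A compact sketch of that decision, with the limit as a parameter:

// Return either a data URI (small files) or the original url() target (large files).
function inlineIfSmall(url: string, contents: Buffer, inlineByteLimit: number): string {
	if (contents.length >= inlineByteLimit) {
		return url; // too big to inline; ship the file separately
	}
	const mime = /\.svg$/.test(url) ? 'image/svg+xml' : 'image/png';
	return `data:${mime};base64,${contents.toString('base64')}`;
}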

View File

@@ -48,7 +48,9 @@ class DoubleQuotedStringArgRuleWalker extends Lint.RuleWalker {
const argText = arg.getText();
const doubleQuotedArg = argText.length >= 2 && argText[0] === DoubleQuotedStringArgRuleWalker.DOUBLE_QUOTE && argText[argText.length - 1] === DoubleQuotedStringArgRuleWalker.DOUBLE_QUOTE;
if (!doubleQuotedArg) {
const fix = Lint.Replacement.replaceFromTo(arg.getStart(), arg.getEnd(), `"${arg.getText().slice(1, arg.getWidth() - 1)}"`);
const fix = [
Lint.Replacement.replaceFromTo(arg.getStart(), arg.getWidth(), `"${arg.getText().slice(1, arg.getWidth() - 2)}"`),
];
this.addFailure(this.createFailure(arg.getStart(), arg.getWidth(), `Argument ${this.argIndex + 1} to '${functionName}' must be double quoted.`, fix));
return;
}

View File

@@ -68,7 +68,9 @@ class DoubleQuotedStringArgRuleWalker extends Lint.RuleWalker {
const doubleQuotedArg = argText.length >= 2 && argText[0] === DoubleQuotedStringArgRuleWalker.DOUBLE_QUOTE && argText[argText.length - 1] === DoubleQuotedStringArgRuleWalker.DOUBLE_QUOTE;
if (!doubleQuotedArg) {
const fix = Lint.Replacement.replaceFromTo(arg.getStart(), arg.getEnd(), `"${arg.getText().slice(1, arg.getWidth() - 1)}"`);
const fix = [
Lint.Replacement.replaceFromTo(arg.getStart(), arg.getWidth(), `"${arg.getText().slice(1, arg.getWidth() - 2)}"`),
];
this.addFailure(this.createFailure(
arg.getStart(), arg.getWidth(),
`Argument ${this.argIndex! + 1} to '${functionName}' must be double quoted.`, fix));
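Both copies of this rule build an auto-fix that rewrites a single-quoted argument into the double-quoted form the rule demands. A self-contained sketch of constructing such a fix with the tslint 5.x Replacement API (the surrounding node handling is omitted):

import * as ts from 'typescript';
import * as Lint from 'tslint';

// Produce a fix that keeps the string contents but swaps the surrounding quotes.
function doubleQuoteFix(arg: ts.StringLiteral): Lint.Replacement {
	const newText = `"${arg.getText().slice(1, -1)}"`; // strip old quotes, wrap in double quotes
	return Lint.Replacement.replaceFromTo(arg.getStart(), arg.getEnd(), newText);
}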

View File

@@ -1,33 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const Lint = require("tslint");
const minimatch = require("minimatch");
class Rule extends Lint.Rules.AbstractRule {
apply(sourceFile) {
const args = this.getOptions().ruleArguments[0];
if (args.exclude.every(x => !minimatch(sourceFile.fileName, x))) {
return this.applyWithWalker(new NoSyncRuleWalker(sourceFile, this.getOptions()));
}
return [];
}
}
exports.Rule = Rule;
class NoSyncRuleWalker extends Lint.RuleWalker {
constructor(file, opts) {
super(file, opts);
}
visitCallExpression(node) {
if (node.expression && NoSyncRuleWalker.operations.some(x => node.expression.getText().indexOf(x) >= 0)) {
this.addFailureAtNode(node, `Do not use Sync operations`);
}
super.visitCallExpression(node);
}
}
NoSyncRuleWalker.operations = ['readFileSync', 'writeFileSync', 'existsSync', 'fchmodSync', 'lchmodSync',
'statSync', 'fstatSync', 'lstatSync', 'linkSync', 'symlinkSync', 'readlinkSync', 'realpathSync', 'unlinkSync', 'rmdirSync',
'mkdirSync', 'mkdtempSync', 'readdirSync', 'openSync', 'utimesSync', 'futimesSync', 'fsyncSync', 'writeSync', 'readSync',
'appendFileSync', 'accessSync', 'fdatasyncSync', 'copyFileSync'];

View File

@@ -1,45 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as ts from 'typescript';
import * as Lint from 'tslint';
import * as minimatch from 'minimatch';
interface NoSyncRuleConfig {
exclude: string[];
}
export class Rule extends Lint.Rules.AbstractRule {
apply(sourceFile: ts.SourceFile): Lint.RuleFailure[] {
const args = <NoSyncRuleConfig>this.getOptions().ruleArguments[0];
if (args.exclude.every(x => !minimatch(sourceFile.fileName, x))) {
return this.applyWithWalker(new NoSyncRuleWalker(sourceFile, this.getOptions()));
}
return [];
}
}
class NoSyncRuleWalker extends Lint.RuleWalker {
private static readonly operations = ['readFileSync', 'writeFileSync', 'existsSync', 'fchmodSync', 'lchmodSync',
'statSync', 'fstatSync', 'lstatSync', 'linkSync', 'symlinkSync', 'readlinkSync', 'realpathSync', 'unlinkSync', 'rmdirSync',
'mkdirSync', 'mkdtempSync', 'readdirSync', 'openSync', 'utimesSync', 'futimesSync', 'fsyncSync', 'writeSync', 'readSync',
'appendFileSync', 'accessSync', 'fdatasyncSync', 'copyFileSync'];
constructor(file: ts.SourceFile, opts: Lint.IOptions) {
super(file, opts);
}
visitCallExpression(node: ts.CallExpression) {
if (node.expression && NoSyncRuleWalker.operations.some(x => node.expression.getText().indexOf(x) >= 0)) {
this.addFailureAtNode(node, `Do not use Sync operations`);
}
super.visitCallExpression(node);
}
}
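The exclude option on this rule is a plain list of globs checked with minimatch; a file is only walked when it matches none of them. The check reduces to something like:

import * as minimatch from 'minimatch';

// True when the file should be skipped by the rule (matches at least one exclude glob).
function isExcluded(fileName: string, excludeGlobs: string[]): boolean {
	return excludeGlobs.some(pattern => minimatch(fileName, pattern));
}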

View File

@@ -1,49 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const ts = require("typescript");
const Lint = require("tslint");
/**
* Implementation of the no-localize-keys-with-underscore rule which verifies that keys to the localize
* calls don't contain underscores (_) since those break the localization process.
*/
class Rule extends Lint.Rules.AbstractRule {
apply(sourceFile) {
return this.applyWithWalker(new NoLocalizeKeysWithUnderscore(sourceFile, this.getOptions()));
}
}
exports.Rule = Rule;
const signatures = [
"localize",
"nls.localize"
];
class NoLocalizeKeysWithUnderscore extends Lint.RuleWalker {
constructor(file, opts) {
super(file, opts);
}
visitCallExpression(node) {
this.checkCallExpression(node);
super.visitCallExpression(node);
}
checkCallExpression(node) {
// If this isn't one of the localize functions then continue on
const functionName = node.expression.getText();
if (functionName && !signatures.some(s => s === functionName)) {
return;
}
const arg = node && node.arguments && node.arguments.length > 0 ? node.arguments[0] : undefined; // The key is the first element
// Ignore if the arg isn't a string - we expect the compiler to warn if that's an issue
if (arg && ts.isStringLiteral(arg)) {
if (arg.getText().indexOf('_') >= 0) {
const fix = [
Lint.Replacement.replaceFromTo(arg.getStart(), arg.getEnd(), `${arg.getText().replace(/_/g, '.')}`),
];
this.addFailure(this.createFailure(arg.getStart(), arg.getWidth(), `Keys for localize calls must not contain underscores. Use periods (.) instead.`, fix));
return;
}
}
}
}

View File

@@ -1,55 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as ts from 'typescript';
import * as Lint from 'tslint';
/**
* Implementation of the no-localize-keys-with-underscore rule which verifies that keys to the localize
* calls don't contain underscores (_) since those break the localization process.
*/
export class Rule extends Lint.Rules.AbstractRule {
public apply(sourceFile: ts.SourceFile): Lint.RuleFailure[] {
return this.applyWithWalker(new NoLocalizeKeysWithUnderscore(sourceFile, this.getOptions()));
}
}
const signatures = [
"localize",
"nls.localize"
];
class NoLocalizeKeysWithUnderscore extends Lint.RuleWalker {
constructor(file: ts.SourceFile, opts: Lint.IOptions) {
super(file, opts);
}
protected visitCallExpression(node: ts.CallExpression): void {
this.checkCallExpression(node);
super.visitCallExpression(node);
}
private checkCallExpression(node: ts.CallExpression): void {
// If this isn't one of the localize functions then continue on
const functionName = node.expression.getText();
if (functionName && !signatures.some(s => s === functionName)) {
return;
}
const arg = node && node.arguments && node.arguments.length > 0 ? node.arguments[0] : undefined; // The key is the first element
// Ignore if the arg isn't a string - we expect the compiler to warn if that's an issue
if(arg && ts.isStringLiteral(arg)) {
if (arg.getText().indexOf('_') >= 0) {
const fix = [
Lint.Replacement.replaceFromTo(arg.getStart(), arg.getEnd(), `${arg.getText().replace(/_/g, '.')}`),
];
this.addFailure(this.createFailure(
arg.getStart(), arg.getWidth(),
`Keys for localize calls must not contain underscores. Use periods (.) instead.`, fix));
return;
}
}
}
}
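The fix in this rule simply rewrites every underscore in the localize key to a period, which keeps the key unique while satisfying the localization pipeline. An illustrative before/after (the key name is invented):

// before: localize('query_history_capture', "Capture Query History")
// after:  localize('query.history.capture', "Capture Query History")
const fixedKey = 'query_history_capture'.replace(/_/g, '.'); // -> 'query.history.capture'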

View File

@@ -11,9 +11,6 @@ const Lint = require("tslint");
*/
class Rule extends Lint.Rules.AbstractRule {
apply(sourceFile) {
if (/\.d.ts$/.test(sourceFile.fileName)) {
return [];
}
return this.applyWithWalker(new NoUnexternalizedStringsRuleWalker(sourceFile, this.getOptions()));
}
}

View File

@@ -11,9 +11,6 @@ import * as Lint from 'tslint';
*/
export class Rule extends Lint.Rules.AbstractRule {
public apply(sourceFile: ts.SourceFile): Lint.RuleFailure[] {
if (/\.d.ts$/.test(sourceFile.fileName)) {
return [];
}
return this.applyWithWalker(new NoUnexternalizedStringsRuleWalker(sourceFile, this.getOptions()));
}
}

View File

@@ -1,28 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const Lint = require("tslint");
class Rule extends Lint.Rules.TypedRule {
applyWithProgram(sourceFile, program) {
if (program.getCompilerOptions().alwaysStrict) {
return this.applyWithWalker(new NoUselessStrictRuleWalker(sourceFile, this.getOptions()));
}
return [];
}
}
exports.Rule = Rule;
class NoUselessStrictRuleWalker extends Lint.RuleWalker {
visitStringLiteral(node) {
this.checkStringLiteral(node);
super.visitStringLiteral(node);
}
checkStringLiteral(node) {
const text = node.getText();
if (text === '\'use strict\'' || text === '"use strict"') {
this.addFailureAtNode(node, 'use strict directive is unnecessary');
}
}
}

View File

@@ -1,30 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as Lint from 'tslint';
import * as ts from 'typescript';
export class Rule extends Lint.Rules.TypedRule {
public applyWithProgram(sourceFile: ts.SourceFile, program: ts.Program): Lint.RuleFailure[] {
if (program.getCompilerOptions().alwaysStrict) {
return this.applyWithWalker(new NoUselessStrictRuleWalker(sourceFile, this.getOptions()));
}
return [];
}
}
class NoUselessStrictRuleWalker extends Lint.RuleWalker {
protected visitStringLiteral(node: ts.StringLiteral): void {
this.checkStringLiteral(node);
super.visitStringLiteral(node);
}
private checkStringLiteral(node: ts.StringLiteral): void {
const text = node.getText();
if (text === '\'use strict\'' || text === '"use strict"') {
this.addFailureAtNode(node, 'use strict directive is unnecessary');
}
}
}

View File

@@ -7,12 +7,10 @@ declare module "gulp-tsb" {
}
export interface IncrementalCompiler {
(token?: ICancellationToken): NodeJS.ReadWriteStream;
src(opts?: {
cwd?: string;
base?: string;
}): NodeJS.ReadStream;
(token?:ICancellationToken): NodeJS.ReadWriteStream;
// program?: ts.Program;
}
export function create(projectPath: string, existingOptions: any, verbose?: boolean, onError?: (message: any) => void): IncrementalCompiler;
}
export function create(configOrName: { [option: string]: string | number | boolean; } | string, verbose?: boolean, json?: boolean, onError?: (message: any) => void): IncrementalCompiler;
}

View File

@@ -121,7 +121,7 @@ function loadSourcemaps() {
return;
}
if (!f.contents) {
cb(undefined, f);
cb(new Error('empty file'));
return;
}
const contents = f.contents.toString('utf8');
@@ -166,23 +166,20 @@ function stripSourceMappingURL() {
}
exports.stripSourceMappingURL = stripSourceMappingURL;
function rimraf(dir) {
const result = () => new Promise((c, e) => {
let retries = 0;
const retry = () => {
_rimraf(dir, { maxBusyTries: 1 }, (err) => {
if (!err) {
return c();
}
if (err.code === 'ENOTEMPTY' && ++retries < 5) {
return setTimeout(() => retry(), 10);
}
return e(err);
});
};
retry();
});
result.taskName = `clean-${path.basename(dir).toLowerCase()}`;
return result;
let retries = 0;
const retry = (cb) => {
_rimraf(dir, { maxBusyTries: 1 }, (err) => {
if (!err) {
return cb();
}
if (err.code === 'ENOTEMPTY' && ++retries < 5) {
return setTimeout(() => retry(cb), 10);
}
return cb(err);
});
};
retry.taskName = `clean-${path.basename(dir).toLowerCase()}`;
return retry;
}
exports.rimraf = rimraf;
function getVersion(root) {
@@ -222,10 +219,3 @@ function versionStringToNumber(versionStr) {
return parseInt(match[1], 10) * 1e4 + parseInt(match[2], 10) * 1e2 + parseInt(match[3], 10);
}
exports.versionStringToNumber = versionStringToNumber;
function streamToPromise(stream) {
return new Promise((c, e) => {
stream.on('error', err => e(err));
stream.on('end', () => c());
});
}
exports.streamToPromise = streamToPromise;

View File

@@ -165,7 +165,7 @@ export function loadSourcemaps(): NodeJS.ReadWriteStream {
}
if (!f.contents) {
cb(undefined, f);
cb(new Error('empty file'));
return;
}
@@ -218,29 +218,24 @@ export function stripSourceMappingURL(): NodeJS.ReadWriteStream {
return es.duplex(input, output);
}
export function rimraf(dir: string): () => Promise<void> {
const result = () => new Promise<void>((c, e) => {
let retries = 0;
export function rimraf(dir: string): (cb: any) => void {
let retries = 0;
const retry = () => {
_rimraf(dir, { maxBusyTries: 1 }, (err: any) => {
if (!err) {
return c();
}
const retry = (cb: (err?: any) => void) => {
_rimraf(dir, { maxBusyTries: 1 }, (err: any) => {
if (!err) {
return cb();
}
if (err.code === 'ENOTEMPTY' && ++retries < 5) {
return setTimeout(() => retry(), 10);
}
if (err.code === 'ENOTEMPTY' && ++retries < 5) {
return setTimeout(() => retry(cb), 10);
}
return e(err);
});
};
retry();
});
result.taskName = `clean-${path.basename(dir).toLowerCase()}`;
return result;
return cb(err);
});
};
retry.taskName = `clean-${path.basename(dir).toLowerCase()}`;
return retry;
}
export function getVersion(root: string): string | undefined {
@@ -286,10 +281,3 @@ export function versionStringToNumber(versionStr: string) {
return parseInt(match[1], 10) * 1e4 + parseInt(match[2], 10) * 1e2 + parseInt(match[3], 10);
}
export function streamToPromise(stream: NodeJS.ReadWriteStream): Promise<void> {
return new Promise((c, e) => {
stream.on('error', err => e(err));
stream.on('end', () => c());
});
}
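With the callback-style signature above, the value returned by rimraf(dir) can be handed directly to gulp as a task function, and the attached taskName controls how the task is reported. A hypothetical usage sketch (the task name and folder are made up):

import * as gulp from 'gulp';
import { rimraf } from './build/lib/util';

// Registers a clean task that retries on ENOTEMPTY a few times before failing.
gulp.task('clean-out', rimraf('out'));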

View File

@@ -5,6 +5,17 @@
const es = require('event-stream');
/** Ugly hack for gulp-tsb */
function handleDeletions() {
return es.mapSync(f => {
if (/\.ts$/.test(f.relative) && !f.contents) {
f.contents = Buffer.from('');
f.stat = { mtime: new Date() };
}
return f;
});
}
let watch = undefined;
@@ -13,5 +24,6 @@ if (!watch) {
}
module.exports = function () {
return watch.apply(null, arguments);
return watch.apply(null, arguments)
.pipe(handleDeletions());
};

View File

@@ -148,9 +148,8 @@ function getMassagedTopLevelDeclarationText(sourceFile, declaration, importName,
}
});
}
result = result.replace(/export default /g, 'export ');
result = result.replace(/export declare /g, 'export ');
result = result.replace(/declare /g, '');
result = result.replace(/export default/g, 'export');
result = result.replace(/export declare/g, 'export');
if (declaration.kind === ts.SyntaxKind.EnumDeclaration) {
result = result.replace(/const enum/, 'enum');
enums.push(result);
@@ -516,7 +515,7 @@ class DeclarationResolver {
'file.ts': fileContents
};
const service = ts.createLanguageService(new TypeScriptLanguageServiceHost({}, fileMap, {}));
const text = service.getEmitOutput('file.ts', true, true).outputFiles[0].text;
const text = service.getEmitOutput('file.ts', true).outputFiles[0].text;
return new CacheEntry(ts.createSourceFile(fileName, text, ts.ScriptTarget.ES5), mtime);
}
}

View File

@@ -178,9 +178,8 @@ function getMassagedTopLevelDeclarationText(sourceFile: ts.SourceFile, declarati
}
});
}
result = result.replace(/export default /g, 'export ');
result = result.replace(/export declare /g, 'export ');
result = result.replace(/declare /g, '');
result = result.replace(/export default/g, 'export');
result = result.replace(/export declare/g, 'export');
if (declaration.kind === ts.SyntaxKind.EnumDeclaration) {
result = result.replace(/const enum/, 'enum');
@@ -617,7 +616,7 @@ export class DeclarationResolver {
'file.ts': fileContents
};
const service = ts.createLanguageService(new TypeScriptLanguageServiceHost({}, fileMap, {}));
const text = service.getEmitOutput('file.ts', true, true).outputFiles[0].text;
const text = service.getEmitOutput('file.ts', true).outputFiles[0].text;
return new CacheEntry(
ts.createSourceFile(fileName, text, ts.ScriptTarget.ES5),
mtime

View File

@@ -48,7 +48,7 @@ declare namespace monaco.editor {
#include(vs/editor/standalone/common/standaloneThemeService): BuiltinTheme, IStandaloneThemeData, IColors
#include(vs/editor/common/modes/supports/tokenization): ITokenThemeRule
#include(vs/editor/common/services/webWorker): MonacoWebWorker, IWebWorkerOptions
#include(vs/editor/standalone/browser/standaloneCodeEditor): IActionDescriptor, IStandaloneEditorConstructionOptions, IDiffEditorConstructionOptions, IStandaloneCodeEditor, IStandaloneDiffEditor
#include(vs/editor/standalone/browser/standaloneCodeEditor): IActionDescriptor, IEditorConstructionOptions, IDiffEditorConstructionOptions, IStandaloneCodeEditor, IStandaloneDiffEditor
export interface ICommandHandler {
(...args: any[]): void;
}
@@ -62,7 +62,6 @@ export interface ICommandHandler {
#includeAll(vs/editor/common/editorCommon;editorOptions.=>): IScrollEvent
#includeAll(vs/editor/common/model/textModelEvents):
#includeAll(vs/editor/common/controller/cursorEvents):
#include(vs/platform/accessibility/common/accessibility): AccessibilitySupport
#includeAll(vs/editor/common/config/editorOptions):
#includeAll(vs/editor/browser/editorBrowser;editorCommon.=>;editorOptions.=>):
#include(vs/editor/common/config/fontInfo): FontInfo, BareFontInfo

View File

@@ -1,7 +1,7 @@
{
"name": "monaco-editor-core",
"private": true,
"version": "0.18.0",
"version": "0.16.0",
"description": "A browser based code editor",
"author": "Microsoft Corporation",
"license": "MIT",

View File

@@ -70,7 +70,6 @@ runtime "${runtime}"`;
}
yarnInstall(`build`); // node modules required for build
yarnInstall('test/automation'); // node modules required for smoketest
yarnInstall('test/smoke'); // node modules required for smoketest
yarnInstallBuildDependencies(); // node modules for watching, specific to host node version, not electron

View File

@@ -31,4 +31,4 @@ if (!/yarn\.js$|yarnpkg$/.test(process.env['npm_execpath'])) {
if (err) {
console.error('');
process.exit(1);
}
}

View File

@@ -24,9 +24,9 @@
"@types/pump": "^1.0.1",
"@types/request": "^2.47.0",
"@types/rimraf": "^2.0.2",
"@types/terser": "^3.12.0",
"@types/through": "^0.0.29",
"@types/through2": "^2.0.34",
"@types/uglify-es": "^3.0.0",
"@types/underscore": "^1.8.9",
"@types/xml2js": "0.0.33",
"applicationinsights": "1.0.8",
@@ -36,18 +36,13 @@
"github-releases": "^0.4.1",
"gulp-bom": "^1.0.0",
"gulp-sourcemaps": "^1.11.0",
"gulp-uglify": "^3.0.0",
"iconv-lite": "0.4.23",
"mime": "^1.3.4",
"minimist": "^1.2.0",
"request": "^2.85.0",
"rollup": "^1.20.3",
"rollup-plugin-commonjs": "^10.1.0",
"rollup-plugin-node-resolve": "^5.2.0",
"terser": "4.3.8",
"tslint": "^5.9.1",
"service-downloader": "github:anthonydresser/service-downloader#0.1.7",
"typescript": "3.7.0-dev.20191017",
"typescript": "3.5.2",
"vsce": "1.48.0",
"vscode-telemetry-extractor": "^1.5.4",
"xml2js": "^0.4.17"

View File

@@ -1,57 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
const fs = require('fs').promises;
const path = require('path');
const readConfiguration = (async () => {
const parseConfigString = ((content) => {
try {
const result = JSON.parse(content);
return result;
}
catch (ex) {
console.log('Could NOT parse TEST_RUN_LIST:', content);
}
});
// Attempt to read from an environment variable
const testRunlist = process.env['TEST_RUN_LIST'];
if (testRunlist && testRunlist !== '') {
const result = parseConfigString(testRunlist);
if (result) {
console.log('Using the environment test run list:', result);
return result;
}
}
// Attempt to read from a config file
let testRunPath = process.env['TEST_RUN_LIST_FILE'];
if (!testRunPath || testRunPath === '') {
testRunPath = path.resolve(__dirname, '..', 'runlist.json');
}
try {
const contents = await fs.readFile(testRunPath);
return parseConfigString(contents);
}
catch (ex) {
console.log(`error reading file ${testRunPath}:`, ex);
}
});
(async () => {
const keys = process.argv.slice(2);
const configuration = await readConfiguration();
if (!configuration) {
console.log('no configuration was setup');
return;
}
const testList = [];
keys.forEach((key) => {
const arr = configuration[key];
if (arr) {
testList.push(...arr);
}
});
const result = `(${testList.join('|')})`;
console.log(result);
process.env['TEST_GREP'] = result;
})();

View File

@@ -1,67 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
const fs = require('fs').promises;
const path = require('path');
interface IntegrationTestConfig {
[key: string]: string[];
}
const readConfiguration = (async (): Promise<IntegrationTestConfig | void> => {
const parseConfigString = ((content: string): (IntegrationTestConfig | void) => {
try {
const result = JSON.parse(content);
return result as IntegrationTestConfig;
} catch (ex) {
console.log('Could NOT parse TEST_RUN_LIST:', content);
}
});
// Attempt to read from an environment variable
const testRunlist = process.env['TEST_RUN_LIST'];
if (testRunlist && testRunlist !== '') {
const result = parseConfigString(testRunlist);
if (result) {
console.log('Using the environment test run list:', result);
return result;
}
}
// Attempt to read from a config file
let testRunPath = process.env['TEST_RUN_LIST_FILE'];
if (!testRunPath || testRunPath === '') {
testRunPath = path.resolve(__dirname, '..', 'runlist.json');
}
try {
const contents = await fs.readFile(testRunPath);
return parseConfigString(contents);
} catch (ex) {
console.log(`error reading file ${testRunPath}:`, ex);
}
});
(async (): Promise<string | void> => {
const keys = process.argv.slice(2);
const configuration = await readConfiguration();
if (!configuration) {
console.log('no configuration was setup');
return;
}
const testList: string[] = [];
keys.forEach((key) => {
const arr = configuration[key];
if (arr) {
testList.push(...arr);
}
});
const result = `(${testList.join('|')})`;
console.log(result);
process.env['TEST_GREP'] = result;
})();
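For context, the configuration this script consumes is just a map from suite keys to test-title fragments; the keys passed on the command line pick which fragments end up in the grep pattern, which the script also assigns to TEST_GREP. A hypothetical example (all names invented):

// Equivalent of a TEST_RUN_LIST value or runlist.json contents:
const exampleRunList: { [key: string]: string[] } = {
	notebook: ['Notebook opens', 'Notebook runs all cells'],
	queryEditor: ['Query editor executes a query']
};
// Running the script with the arguments "notebook queryEditor" would print
// (Notebook opens|Notebook runs all cells|Query editor executes a query)
// for downstream use as a test grep pattern.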

View File

@@ -29,7 +29,7 @@ dependencies = [
[[package]]
name = "inno_updater"
version = "0.8.2"
version = "0.8.0"
dependencies = [
"byteorder 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"crc 1.7.0 (registry+https://github.com/rust-lang/crates.io-index)",

View File

@@ -20,8 +20,8 @@ Compression=lzma
SolidCompression=yes
AppMutex={code:GetAppMutex}
SetupMutex={#AppMutex}setup
WizardImageFile="{#RepoDir}\resources\win32\inno-big-100.bmp,{#RepoDir}\resources\win32\inno-big-125.bmp,{#RepoDir}\resources\win32\inno-big-150.bmp,{#RepoDir}\resources\win32\inno-big-175.bmp,{#RepoDir}\resources\win32\inno-big-200.bmp,{#RepoDir}\resources\win32\inno-big-225.bmp,{#RepoDir}\resources\win32\inno-big-250.bmp"
WizardSmallImageFile="{#RepoDir}\resources\win32\inno-small-100.bmp,{#RepoDir}\resources\win32\inno-small-125.bmp,{#RepoDir}\resources\win32\inno-small-150.bmp,{#RepoDir}\resources\win32\inno-small-175.bmp,{#RepoDir}\resources\win32\inno-small-200.bmp,{#RepoDir}\resources\win32\inno-small-225.bmp,{#RepoDir}\resources\win32\inno-small-250.bmp"
WizardImageFile="{#RepoDir}\resources\win32\sql-big.bmp"
WizardSmallImageFile="{#RepoDir}\resources\win32\sql-small.bmp"
SetupIconFile={#RepoDir}\resources\win32\code.ico
UninstallDisplayIcon={app}\{#ExeBasename}.exe
ChangesEnvironment=true

Binary file not shown.

View File

@@ -68,11 +68,6 @@
dependencies:
"@types/node" "*"
"@types/estree@0.0.39":
version "0.0.39"
resolved "https://registry.yarnpkg.com/@types/estree/-/estree-0.0.39.tgz#e177e699ee1b8c22d23174caaa7422644389509f"
integrity sha512-EYNwp3bU+98cpU4lAWYYL7Zz+2gryWH1qbdDTidVd6hkiR6weksdbMadyXKXNPEkQFhXM+hVO9ZygomHXp+AIw==
"@types/events@*":
version "1.2.0"
resolved "https://registry.yarnpkg.com/@types/events/-/events-1.2.0.tgz#81a6731ce4df43619e5c8c945383b3e62a89ea86"
@@ -197,11 +192,6 @@
resolved "https://registry.yarnpkg.com/@types/node/-/node-10.14.13.tgz#ac786d623860adf39a3f51d629480aacd6a6eec7"
integrity sha512-yN/FNNW1UYsRR1wwAoyOwqvDuLDtVXnaJTZ898XIw/Q5cCaeVAlVwvsmXLX5PuiScBYwZsZU4JYSHB3TvfdwvQ==
"@types/node@^12.7.5":
version "12.7.8"
resolved "https://registry.yarnpkg.com/@types/node/-/node-12.7.8.tgz#cb1bf6800238898bc2ff6ffa5702c3cadd350708"
integrity sha512-FMdVn84tJJdV+xe+53sYiZS4R5yn1mAIxfj+DVoNiQjTYz1+OYmjwEZr1ev9nU0axXwda0QDbYl06QHanRVH3A==
"@types/pump@^1.0.1":
version "1.0.1"
resolved "https://registry.yarnpkg.com/@types/pump/-/pump-1.0.1.tgz#ae8157cefef04d1a4d24c1cc91d403c2f5da5cd0"
@@ -219,13 +209,6 @@
"@types/node" "*"
"@types/tough-cookie" "*"
"@types/resolve@0.0.8":
version "0.0.8"
resolved "https://registry.yarnpkg.com/@types/resolve/-/resolve-0.0.8.tgz#f26074d238e02659e323ce1a13d041eee280e194"
integrity sha512-auApPaJf3NPfe18hSoJkp8EbZzer2ISk7o8mCC3M9he/a04+gbMF97NkpD2S8riMGvm4BMRI59/SZQSaLTKpsQ==
dependencies:
"@types/node" "*"
"@types/rimraf@^2.0.2":
version "2.0.2"
resolved "https://registry.yarnpkg.com/@types/rimraf/-/rimraf-2.0.2.tgz#7f0fc3cf0ff0ad2a99bb723ae1764f30acaf8b6e"
@@ -234,13 +217,6 @@
"@types/glob" "*"
"@types/node" "*"
"@types/terser@^3.12.0":
version "3.12.0"
resolved "https://registry.yarnpkg.com/@types/terser/-/terser-3.12.0.tgz#25e020fe9a7a6ae92ce46261f00ced67de6c12ac"
integrity sha512-J0Wy8A7ULEqVJftkWhrXZbH0iBk4tYuTj0gBiiveKaY9deNi6cCsxl0ApJ27ojqwYv51bvEw85lOb8Wt4ng9zA==
dependencies:
terser "*"
"@types/through2@^2.0.34":
version "2.0.34"
resolved "https://registry.yarnpkg.com/@types/through2/-/through2-2.0.34.tgz#9c2a259a238dace2a05a2f8e94b786961bc27ac4"
@@ -260,6 +236,20 @@
resolved "https://registry.yarnpkg.com/@types/tough-cookie/-/tough-cookie-2.3.2.tgz#e0d481d8bb282ad8a8c9e100ceb72c995fb5e709"
integrity sha512-vOVmaruQG5EatOU/jM6yU2uCp3Lz6mK1P5Ztu4iJjfM4SVHU9XYktPUQtKlIXuahqXHdEyUarMrBEwg5Cwu+bA==
"@types/uglify-es@^3.0.0":
version "3.0.0"
resolved "https://registry.yarnpkg.com/@types/uglify-es/-/uglify-es-3.0.0.tgz#2c5e70b43c0e86643ac1c223f61df15fa0b87bc2"
integrity sha512-Oc/c7pGIQL0MVhC6g+VftWiDQethKsT4c3fQKYm6nOprkvkx9s1MLrnJprDTKlZL3ZJulMpCF9Qn7s6u3uCNxQ==
dependencies:
"@types/uglify-js" "*"
"@types/uglify-js@*":
version "3.0.3"
resolved "https://registry.yarnpkg.com/@types/uglify-js/-/uglify-js-3.0.3.tgz#801a5ca1dc642861f47c46d14b700ed2d610840b"
integrity sha512-MAT0BW2ruO0LhQKjvlipLGCF/Yx0y/cj+tT67tK3QIQDrM2+9R78HgJ54VlrE8AbfjYJJBCQCEPM5ZblPVTuww==
dependencies:
source-map "^0.6.1"
"@types/uglify-js@^2":
version "2.6.32"
resolved "https://registry.yarnpkg.com/@types/uglify-js/-/uglify-js-2.6.32.tgz#1b60906946fcf6ee4ceafa812d2b86f1358da904"
@@ -317,25 +307,13 @@ acorn@4.X:
resolved "https://registry.yarnpkg.com/acorn/-/acorn-4.0.13.tgz#105495ae5361d697bd195c825192e1ad7f253787"
integrity sha1-EFSVrlNh1pe9GVyCUZLhrX8lN4c=
acorn@^7.0.0:
version "7.1.0"
resolved "https://registry.yarnpkg.com/acorn/-/acorn-7.1.0.tgz#949d36f2c292535da602283586c2477c57eb2d6c"
integrity sha512-kL5CuoXA/dgxlBbVrflsflzQ3PAas7RYZB52NOm/6839iVYJgKMJ3cQJD+t2i5+qFa8h3MDpEOJiS64E8JLnSQ==
agent-base@4:
agent-base@4, agent-base@^4.1.0:
version "4.2.1"
resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-4.2.1.tgz#d89e5999f797875674c07d87f260fc41e83e8ca9"
integrity sha512-JVwXMr9nHYTUXsBFKUqhJwvlcYU/blreOEUkhNR2eXZIvwd+c+o5V4MgDPKWnMS/56awN3TRzIP+KoPn+roQtg==
dependencies:
es6-promisify "^5.0.0"
agent-base@^4.3.0:
version "4.3.0"
resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-4.3.0.tgz#8165f01c436009bccad0b1d122f05ed770efc6ee"
integrity sha512-salcGninV0nPrwpGNn4VTXBb1SOuXQBiqbrNXoeizJsHrsL6ERFM2Ne3JUSBWRE6aeNJI2ROP/WEEIDUiDe3cg==
dependencies:
es6-promisify "^5.0.0"
ajv@^4.9.1:
version "4.11.8"
resolved "https://registry.yarnpkg.com/ajv/-/ajv-4.11.8.tgz#82ffb02b29e662ae53bdc20af15947706739c536"
@@ -455,11 +433,6 @@ array-differ@^3.0.0:
resolved "https://registry.yarnpkg.com/array-differ/-/array-differ-3.0.0.tgz#3cbb3d0f316810eafcc47624734237d6aee4ae6b"
integrity sha512-THtfYS6KtME/yIAhKjZ2ul7XI96lQGHRputJQHO80LAWQnuGP4iCIN8vdMRboGbIEYBwU33q8Tch1os2+X0kMg==
array-each@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/array-each/-/array-each-1.0.1.tgz#a794af0c05ab1752846ee753a1f211a05ba0c44f"
integrity sha1-p5SvDAWrF1KEbudTofIRoFugxE8=
array-union@^1.0.1:
version "1.0.2"
resolved "https://registry.yarnpkg.com/array-union/-/array-union-1.0.2.tgz#9a34410e4f4e3da23dea375be5be70f24778ec39"
@@ -713,11 +686,6 @@ buffer-fill@^1.0.0:
resolved "https://registry.yarnpkg.com/buffer-fill/-/buffer-fill-1.0.0.tgz#f8f78b76789888ef39f205cd637f68e702122b2c"
integrity sha1-+PeLdniYiO858gXNY39o5wISKyw=
buffer-from@^1.0.0:
version "1.1.1"
resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.1.tgz#32713bc028f75c02fdb710d7c7bcec1f2c6070ef"
integrity sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A==
buffer@^5.2.1:
version "5.2.1"
resolved "https://registry.yarnpkg.com/buffer/-/buffer-5.2.1.tgz#dd57fa0f109ac59c602479044dca7b8b3d0b71d6"
@@ -731,11 +699,6 @@ builtin-modules@^1.1.1:
resolved "https://registry.yarnpkg.com/builtin-modules/-/builtin-modules-1.1.1.tgz#270f076c5a72c02f5b65a47df94c5fe3a278892f"
integrity sha1-Jw8HbFpywC9bZaR9+Uxf46J4iS8=
builtin-modules@^3.1.0:
version "3.1.0"
resolved "https://registry.yarnpkg.com/builtin-modules/-/builtin-modules-3.1.0.tgz#aad97c15131eb76b65b50ef208e7584cd76a7484"
integrity sha512-k0KL0aWZuBt2lrxrcASWDfwOLMnodeQjodT/1SxEQAXsHANgo6ZC/VEaSEHCXt7aSTZ4/4H5LKa+tBXmW7Vtvw==
cache-base@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/cache-base/-/cache-base-1.0.1.tgz#0a7f46416831c8b662ee36fe4e7c59d76f666ab2"
@@ -906,11 +869,6 @@ commander@^2.12.1, commander@^2.8.1:
resolved "https://registry.yarnpkg.com/commander/-/commander-2.19.0.tgz#f6198aa84e5b83c46054b94ddedbfed5ee9ff12a"
integrity sha512-6tvAOO+D6OENvRAh524Dh9jcfKTYDQAqvqezbCW82xj5X0pSrcpxtvRKHLG0yBY6SD7PSDrJaj+0AiOcKVd1Xg==
commander@^2.20.0, commander@~2.20.0:
version "2.20.0"
resolved "https://registry.yarnpkg.com/commander/-/commander-2.20.0.tgz#d58bb2b5c1ee8f87b0d340027e9e94e222c5a422"
integrity sha512-7j2y+40w61zy6YC2iRNpUe/NwhNyoXrYpHMrSunaMG64nRnaf96zO/KMQR4OyN/UnE5KLyEBnKHd4aG3rskjpQ==
commander@~2.8.1:
version "2.8.1"
resolved "https://registry.yarnpkg.com/commander/-/commander-2.8.1.tgz#06be367febfda0c330aa1e2a072d3dc9762425d4"
@@ -1281,11 +1239,6 @@ esprima@^4.0.0:
resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71"
integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==
estree-walker@^0.6.1:
version "0.6.1"
resolved "https://registry.yarnpkg.com/estree-walker/-/estree-walker-0.6.1.tgz#53049143f40c6eb918b23671d1fe3219f3a1b362"
integrity sha512-SqmZANLWS0mnatqbSfRP5g8OXZC12Fgg1IwNtLsyHDzJizORW4khDfjPqJZsemPWBB2uqykUah5YpQ6epsqC/w==
esutils@^2.0.2:
version "2.0.2"
resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.2.tgz#0abf4f1caa5bcb1f7a9d8acc6dea4faaa04bac9b"
@@ -1666,22 +1619,6 @@ gulp-sourcemaps@^1.11.0:
through2 "2.X"
vinyl "1.X"
gulp-uglify@^3.0.0:
version "3.0.2"
resolved "https://registry.yarnpkg.com/gulp-uglify/-/gulp-uglify-3.0.2.tgz#5f5b2e8337f879ca9dec971feb1b82a5a87850b0"
integrity sha512-gk1dhB74AkV2kzqPMQBLA3jPoIAPd/nlNzP2XMDSG8XZrqnlCiDGAqC+rZOumzFvB5zOphlFh6yr3lgcAb/OOg==
dependencies:
array-each "^1.0.1"
extend-shallow "^3.0.2"
gulplog "^1.0.0"
has-gulplog "^0.1.0"
isobject "^3.0.1"
make-error-cause "^1.1.1"
safe-buffer "^5.1.2"
through2 "^2.0.0"
uglify-js "^3.0.5"
vinyl-sourcemaps-apply "^0.2.0"
gulp-util@^3.0.0:
version "3.0.8"
resolved "https://registry.yarnpkg.com/gulp-util/-/gulp-util-3.0.8.tgz#0054e1e744502e27c04c187c3ecc505dd54bbb4f"
@@ -1871,11 +1808,11 @@ http-signature@~1.2.0:
sshpk "^1.7.0"
https-proxy-agent@^2.2.1:
version "2.2.4"
resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-2.2.4.tgz#4ee7a737abd92678a293d9b34a1af4d0d08c787b"
integrity sha512-OmvfoQ53WLjtA9HeYP9RNrWMJzzAz1JGaSFr1nijg0PVR1JaD/xbJq1mdEIIlxGpXp9eSe/O2LgU9DJmTPd0Eg==
version "2.2.1"
resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-2.2.1.tgz#51552970fa04d723e04c56d04178c3f92592bbc0"
integrity sha512-HPCTS1LW51bcyMYbxUIOO4HEOlQ1/1qRaFWcyxvwaqUS9TY88aoEuHUY33kuAh1YhVVaDQhLZsnPd+XNARWZlQ==
dependencies:
agent-base "^4.3.0"
agent-base "^4.1.0"
debug "^3.1.0"
iconv-lite@0.4.23:
@@ -2048,11 +1985,6 @@ is-glob@^4.0.1:
dependencies:
is-extglob "^2.1.1"
is-module@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/is-module/-/is-module-1.0.0.tgz#3258fb69f78c14d5b815d664336b4cffb6441591"
integrity sha1-Mlj7afeMFNW4FdZkM2tM/7ZEFZE=
is-natural-number@^4.0.1:
version "4.0.1"
resolved "https://registry.yarnpkg.com/is-natural-number/-/is-natural-number-4.0.1.tgz#ab9d76e1db4ced51e35de0c72ebecf09f734cde8"
@@ -2101,13 +2033,6 @@ is-plain-object@^2.0.1, is-plain-object@^2.0.3, is-plain-object@^2.0.4:
dependencies:
isobject "^3.0.1"
is-reference@^1.1.2:
version "1.1.4"
resolved "https://registry.yarnpkg.com/is-reference/-/is-reference-1.1.4.tgz#3f95849886ddb70256a3e6d062b1a68c13c51427"
integrity sha512-uJA/CDPO3Tao3GTrxYn6AwkM4nUPJiGGYu5+cB8qbC7WGFlrKZbiRo7SFKxUAEpFUfiHofWCXBUNhvYJMh+6zw==
dependencies:
"@types/estree" "0.0.39"
is-relative@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/is-relative/-/is-relative-1.0.0.tgz#a1bb6935ce8c5dba1e8b9754b9b2dcc020e2260d"
@@ -2384,16 +2309,9 @@ lodash.templatesettings@^3.0.0:
lodash.escape "^3.0.0"
lodash@^4.15.0, lodash@^4.17.10:
version "4.17.15"
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.15.tgz#b447f6670a0455bbfeedd11392eff330ea097548"
integrity sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==
magic-string@^0.25.2:
version "0.25.3"
resolved "https://registry.yarnpkg.com/magic-string/-/magic-string-0.25.3.tgz#34b8d2a2c7fec9d9bdf9929a3fd81d271ef35be9"
integrity sha512-6QK0OpF/phMz0Q2AxILkX2mFhi7m+WMwTRg0LQKq/WBB0cDP4rYH3Wp4/d3OTXlrPLVJT/RFqj8tFeAR4nk8AA==
dependencies:
sourcemap-codec "^1.4.4"
version "4.17.11"
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.11.tgz#b39ea6229ef607ecd89e2c8df12536891cac9b8d"
integrity sha512-cQKh8igo5QUhZ7lg38DYWAxMvjSAKG0A8wGSVimP07SIUEK2UO+arSRKbRZWtelMtN5V0Hkwh5ryOto/SshYIg==
make-dir@^1.0.0:
version "1.3.0"
@@ -2402,18 +2320,6 @@ make-dir@^1.0.0:
dependencies:
pify "^3.0.0"
make-error-cause@^1.1.1:
version "1.2.2"
resolved "https://registry.yarnpkg.com/make-error-cause/-/make-error-cause-1.2.2.tgz#df0388fcd0b37816dff0a5fb8108939777dcbc9d"
integrity sha1-3wOI/NCzeBbf8KX7gQiTl3fcvJ0=
dependencies:
make-error "^1.2.0"
make-error@^1.2.0:
version "1.3.5"
resolved "https://registry.yarnpkg.com/make-error/-/make-error-1.3.5.tgz#efe4e81f6db28cadd605c70f29c831b58ef776c8"
integrity sha512-c3sIjNUow0+8swNwVpqoH4YCShKNFkMaw6oH1mNS2haDZQqkeZFlHS3dhoeEbKKmJB4vXpJucU6oH75aDYeE9g==
map-cache@^0.2.2:
version "0.2.2"
resolved "https://registry.yarnpkg.com/map-cache/-/map-cache-0.2.2.tgz#c32abd0bd6525d9b051645bb4f26ac5dc98a0dbf"
@@ -2813,7 +2719,7 @@ path-is-inside@^1.0.1:
resolved "https://registry.yarnpkg.com/path-is-inside/-/path-is-inside-1.0.2.tgz#365417dede44430d1c11af61027facf074bdfc53"
integrity sha1-NlQX3t5EQw0cEa9hAn+s8HS9/FM=
path-parse@^1.0.5, path-parse@^1.0.6:
path-parse@^1.0.5:
version "1.0.6"
resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.6.tgz#d62dbb5679405d72c4737ec58600e9ddcf06d24c"
integrity sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==
@@ -3072,13 +2978,6 @@ resolve-url@^0.2.1:
resolved "https://registry.yarnpkg.com/resolve-url/-/resolve-url-0.2.1.tgz#2c637fe77c893afd2a663fe21aa9080068e2052a"
integrity sha1-LGN/53yJOv0qZj/iGqkIAGjiBSo=
resolve@^1.11.0, resolve@^1.11.1:
version "1.12.0"
resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.12.0.tgz#3fc644a35c84a48554609ff26ec52b66fa577df6"
integrity sha512-B/dOmuoAik5bKcD6s6nXDCjzUKnaDvdkRyAk6rsmsKLipWj4797iothd7jmmUhWTfinVMU+wc56rYKsit2Qy4w==
dependencies:
path-parse "^1.0.6"
resolve@^1.3.2:
version "1.8.1"
resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.8.1.tgz#82f1ec19a423ac1fbd080b0bab06ba36e84a7a26"
@@ -3108,44 +3007,6 @@ rimraf@^2.2.8, rimraf@^2.6.1:
dependencies:
glob "^7.1.3"
rollup-plugin-commonjs@^10.1.0:
version "10.1.0"
resolved "https://registry.yarnpkg.com/rollup-plugin-commonjs/-/rollup-plugin-commonjs-10.1.0.tgz#417af3b54503878e084d127adf4d1caf8beb86fb"
integrity sha512-jlXbjZSQg8EIeAAvepNwhJj++qJWNJw1Cl0YnOqKtP5Djx+fFGkp3WRh+W0ASCaFG5w1jhmzDxgu3SJuVxPF4Q==
dependencies:
estree-walker "^0.6.1"
is-reference "^1.1.2"
magic-string "^0.25.2"
resolve "^1.11.0"
rollup-pluginutils "^2.8.1"
rollup-plugin-node-resolve@^5.2.0:
version "5.2.0"
resolved "https://registry.yarnpkg.com/rollup-plugin-node-resolve/-/rollup-plugin-node-resolve-5.2.0.tgz#730f93d10ed202473b1fb54a5997a7db8c6d8523"
integrity sha512-jUlyaDXts7TW2CqQ4GaO5VJ4PwwaV8VUGA7+km3n6k6xtOEacf61u0VXwN80phY/evMcaS+9eIeJ9MOyDxt5Zw==
dependencies:
"@types/resolve" "0.0.8"
builtin-modules "^3.1.0"
is-module "^1.0.0"
resolve "^1.11.1"
rollup-pluginutils "^2.8.1"
rollup-pluginutils@^2.8.1:
version "2.8.2"
resolved "https://registry.yarnpkg.com/rollup-pluginutils/-/rollup-pluginutils-2.8.2.tgz#72f2af0748b592364dbd3389e600e5a9444a351e"
integrity sha512-EEp9NhnUkwY8aif6bxgovPHMoMoNr2FulJziTndpt5H9RdwC47GSGuII9XxpSdzVGM0GWrNPHV6ie1LTNJPaLQ==
dependencies:
estree-walker "^0.6.1"
rollup@^1.20.3:
version "1.21.4"
resolved "https://registry.yarnpkg.com/rollup/-/rollup-1.21.4.tgz#00a41a30f90095db890301b226cbe2918e4cf54d"
integrity sha512-Pl512XVCmVzgcBz5h/3Li4oTaoDcmpuFZ+kdhS/wLreALz//WuDAMfomD3QEYl84NkDu6Z6wV9twlcREb4qQsw==
dependencies:
"@types/estree" "0.0.39"
"@types/node" "^12.7.5"
acorn "^7.0.0"
run-parallel@^1.1.9:
version "1.1.9"
resolved "https://registry.yarnpkg.com/run-parallel/-/run-parallel-1.1.9.tgz#c9dd3a7cf9f4b2c4b6244e173a6ed866e61dd679"
@@ -3156,12 +3017,7 @@ safe-buffer@^5.0.1, safe-buffer@^5.1.1:
resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.1.tgz#893312af69b2123def71f57889001671eeb2c853"
integrity sha512-kKvNJn6Mm93gAczWVJg7wH+wGYWNrDHdWvpUmHyEsgCtIwwo3bqPtV4tR5tuPaUhTOo/kvhVwd8XwwOllGYkbg==
safe-buffer@^5.1.2:
version "5.2.0"
resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.0.tgz#b74daec49b1148f88c64b68d49b1e815c1f2f519"
integrity sha512-fZEwUGbVl7kouZs1jCdMLdt95hdIv0ZeHg6L7qPeciMZhZ+/gdesW4wgTARkrFWEpspjEATAzUGPG8N2jJiwbg==
safe-buffer@~5.1.0, safe-buffer@~5.1.1:
safe-buffer@^5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1:
version "5.1.2"
resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d"
integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==
@@ -3307,34 +3163,21 @@ source-map-resolve@^0.5.0, source-map-resolve@^0.5.2:
source-map-url "^0.4.0"
urix "^0.1.0"
source-map-support@~0.5.12:
version "0.5.13"
resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.13.tgz#31b24a9c2e73c2de85066c0feb7d44767ed52932"
integrity sha512-SHSKFHadjVA5oR4PPqhtAVdcBWwRYVd6g6cAXnIbRiIwc2EhPrTuKUBdSLvlEKyIP3GCf89fltvcZiP9MMFA1w==
dependencies:
buffer-from "^1.0.0"
source-map "^0.6.0"
source-map-url@^0.4.0:
version "0.4.0"
resolved "https://registry.yarnpkg.com/source-map-url/-/source-map-url-0.4.0.tgz#3e935d7ddd73631b97659956d55128e87b5084a3"
integrity sha1-PpNdfd1zYxuXZZlW1VEo6HtQhKM=
source-map@^0.5.1, source-map@^0.5.6:
source-map@^0.5.6:
version "0.5.7"
resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc"
integrity sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=
source-map@^0.6.0, source-map@^0.6.1, source-map@~0.6.0, source-map@~0.6.1:
source-map@^0.6.1, source-map@~0.6.0:
version "0.6.1"
resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263"
integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==
sourcemap-codec@^1.4.4:
version "1.4.6"
resolved "https://registry.yarnpkg.com/sourcemap-codec/-/sourcemap-codec-1.4.6.tgz#e30a74f0402bad09807640d39e971090a08ce1e9"
integrity sha512-1ZooVLYFxC448piVLBbtOxFcXwnymH9oUF8nRd3CuYDVvkRBxRl6pB4Mtas5a4drtL+E8LDgFkQNcgIw6tc8Hg==
sparkles@^1.0.0:
version "1.0.1"
resolved "https://registry.yarnpkg.com/sparkles/-/sparkles-1.0.1.tgz#008db65edce6c50eec0c5e228e1945061dd0437c"
@@ -3480,24 +3323,6 @@ tar@^4:
safe-buffer "^5.1.2"
yallist "^3.0.2"
terser@*:
version "4.2.1"
resolved "https://registry.yarnpkg.com/terser/-/terser-4.2.1.tgz#1052cfe17576c66e7bc70fcc7119f22b155bdac1"
integrity sha512-cGbc5utAcX4a9+2GGVX4DsenG6v0x3glnDi5hx8816X1McEAwPlPgRtXPJzSBsbpILxZ8MQMT0KvArLuE0HP5A==
dependencies:
commander "^2.20.0"
source-map "~0.6.1"
source-map-support "~0.5.12"
terser@4.3.8:
version "4.3.8"
resolved "https://registry.yarnpkg.com/terser/-/terser-4.3.8.tgz#707f05f3f4c1c70c840e626addfdb1c158a17136"
integrity sha512-otmIRlRVmLChAWsnSFNO0Bfk6YySuBp6G9qrHiJwlLDd4mxe2ta4sjI7TzIR+W1nBMjilzrMcPOz9pSusgx3hQ==
dependencies:
commander "^2.20.0"
source-map "~0.6.1"
source-map-support "~0.5.12"
through2@2.X, through2@^2.0.0, through2@^2.0.3:
version "2.0.3"
resolved "https://registry.yarnpkg.com/through2/-/through2-2.0.3.tgz#0004569b37c7c74ba39c43f3ced78d1ad94140be"
@@ -3650,10 +3475,10 @@ typed-rest-client@^0.9.0:
tunnel "0.0.4"
underscore "1.8.3"
typescript@3.7.0-dev.20191017:
version "3.7.0-dev.20191017"
resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.7.0-dev.20191017.tgz#e61440dd445edea6d7b9a699e7c5d5fbcd1906f2"
integrity sha512-Yi0lCPEN0cn9Gp8TEEkPpgKNR5SWAmx9Hmzzz+oEuivw6amURqRGynaLyFZkMA9iMsvYG5LLqhdlFO3uu5ZT/w==
typescript@3.5.2:
version "3.5.2"
resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.5.2.tgz#a09e1dc69bc9551cadf17dba10ee42cf55e5d56c"
integrity sha512-7KxJovlYhTX5RaRbUdkAXN1KUZ8PwWlTzQdHV6xNqvuFOs7+WBo10TQUqT19Q/Jz2hk5v9TQDIhyLhhJY4p5AA==
typescript@^3.0.1:
version "3.5.3"
@@ -3670,14 +3495,6 @@ uc.micro@^1.0.1, uc.micro@^1.0.5:
resolved "https://registry.yarnpkg.com/uc.micro/-/uc.micro-1.0.5.tgz#0c65f15f815aa08b560a61ce8b4db7ffc3f45376"
integrity sha512-JoLI4g5zv5qNyT09f4YAvEZIIV1oOjqnewYg5D38dkQljIzpPT296dbIGvKro3digYI1bkb7W6EP1y4uDlmzLg==
uglify-js@^3.0.5:
version "3.6.0"
resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.6.0.tgz#704681345c53a8b2079fb6cec294b05ead242ff5"
integrity sha512-W+jrUHJr3DXKhrsS7NUVxn3zqMOFn0hL/Ei6v0anCIMoKC93TjcflTagwIHLW7SfMFfiQuktQyFVCFHGUE0+yg==
dependencies:
commander "~2.20.0"
source-map "~0.6.1"
unbzip2-stream@^1.0.9:
version "1.3.3"
resolved "https://registry.yarnpkg.com/unbzip2-stream/-/unbzip2-stream-1.3.3.tgz#d156d205e670d8d8c393e1c02ebd506422873f6a"
@@ -3773,13 +3590,6 @@ verror@1.10.0:
core-util-is "1.0.2"
extsprintf "^1.2.0"
vinyl-sourcemaps-apply@^0.2.0:
version "0.2.1"
resolved "https://registry.yarnpkg.com/vinyl-sourcemaps-apply/-/vinyl-sourcemaps-apply-0.2.1.tgz#ab6549d61d172c2b1b87be5c508d239c8ef87705"
integrity sha1-q2VJ1h0XLCsbh75cUI0jnI74dwU=
dependencies:
source-map "^0.5.1"
vinyl@1.X:
version "1.2.0"
resolved "https://registry.yarnpkg.com/vinyl/-/vinyl-1.2.0.tgz#5c88036cf565e5df05558bfc911f8656df218884"
@@ -3822,9 +3632,9 @@ vsce@1.48.0:
yazl "^2.2.2"
vscode-ripgrep@^1.5.6:
version "1.5.7"
resolved "https://registry.yarnpkg.com/vscode-ripgrep/-/vscode-ripgrep-1.5.7.tgz#acb6b548af488a4bca5d0f1bb5faf761343289ce"
integrity sha512-/Vsz/+k8kTvui0q3O74pif9FK0nKopgFTiGNVvxicZANxtSA8J8gUE9GQ/4dpi7D/2yI/YVORszwVskFbz46hQ==
version "1.5.6"
resolved "https://registry.yarnpkg.com/vscode-ripgrep/-/vscode-ripgrep-1.5.6.tgz#93bf5c99ca5f8248950a305e224f6ca153c30af4"
integrity sha512-WRIM9XpUj6dsfdAmuI3ANbmT1ysPUVsYy/2uCLDHJa9kbiB4T7uGvFnnc0Rgx2qQnyRAwL7PeWaFgUljPPxf2g==
vscode-telemetry-extractor@^1.5.4:
version "1.5.4"


@@ -46,6 +46,56 @@
"THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE."
]
},
{
// Reason: The npm module does not contain a repository field.
"name": "xterm-addon-search",
"fullLicenseText": [
"Copyright (c) 2017, The xterm.js authors (https://github.com/xtermjs/xterm.js)",
"",
"Permission is hereby granted, free of charge, to any person obtaining a copy",
"of this software and associated documentation files (the \"Software\"), to deal",
"in the Software without restriction, including without limitation the rights",
"to use, copy, modify, merge, publish, distribute, sublicense, and/or sell",
"copies of the Software, and to permit persons to whom the Software is",
"furnished to do so, subject to the following conditions:",
"",
"The above copyright notice and this permission notice shall be included in",
"all copies or substantial portions of the Software.",
"",
"THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR",
"IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,",
"FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE",
"AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER",
"LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,",
"OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN",
"THE SOFTWARE."
]
},
{
// Reason: The npm module does not contain a repository field.
"name": "xterm-addon-web-links",
"fullLicenseText": [
"Copyright (c) 2017, The xterm.js authors (https://github.com/xtermjs/xterm.js)",
"",
"Permission is hereby granted, free of charge, to any person obtaining a copy",
"of this software and associated documentation files (the \"Software\"), to deal",
"in the Software without restriction, including without limitation the rights",
"to use, copy, modify, merge, publish, distribute, sublicense, and/or sell",
"copies of the Software, and to permit persons to whom the Software is",
"furnished to do so, subject to the following conditions:",
"",
"The above copyright notice and this permission notice shall be included in",
"all copies or substantial portions of the Software.",
"",
"THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR",
"IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,",
"FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE",
"AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER",
"LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,",
"OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN",
"THE SOFTWARE."
]
},
{
// Reason: The license at https://git.coolaj86.com/coolaj86/atob.js/src/branch/master/LICENSE
// cannot be found by the OSS tool automatically.


@@ -6,7 +6,7 @@
"git": {
"name": "chromium",
"repositoryUrl": "https://chromium.googlesource.com/chromium/src",
"commitHash": "91f08db83c2ce8c722ddf0911ead8f7c473bedfa"
"commitHash": "c6a08e5368de4352903e702cde750b33239a50ab"
}
},
"licenseDetail": [
@@ -40,7 +40,7 @@
"SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
],
"isOnlyProductionDependency": true,
"version": "76.0.3809.146"
"version": "69.0.3497.128"
},
{
"component": {
@@ -48,11 +48,11 @@
"git": {
"name": "nodejs",
"repositoryUrl": "https://github.com/nodejs/node",
"commitHash": "64219741218aa87e259cf8257596073b8e747f0a"
"commitHash": "8c70b2084ce5f76ea1e3b3c4ccdeee4483fe338b"
}
},
"isOnlyProductionDependency": true,
"version": "12.4.0"
"version": "10.11.0"
},
{
"component": {
@@ -60,12 +60,12 @@
"git": {
"name": "electron",
"repositoryUrl": "https://github.com/electron/electron",
"commitHash": "1e50380fab37f407c4d357e1e30ecbc3d5a703b8"
"commitHash": "3d4d6454007f14fa9a5f0e1fa49206fb91b676cc"
}
},
"isOnlyProductionDependency": true,
"license": "MIT",
"version": "6.0.12"
"version": "4.2.9"
},
{
"component": {
@@ -96,13 +96,25 @@
"component": {
"type": "git",
"git": {
"name": "vscode-codicons",
"repositoryUrl": "https://github.com/microsoft/vscode-codicons",
"commitHash": "7f14c092f65f658cd520df3f13765efe828d0ba4"
"name": "vscode-octicons-font",
"repositoryUrl": "https://github.com/Microsoft/vscode-octicons-font",
"commitHash": "415cd5b42ab699b6b46c0bf011ada0a2ae50bfb4"
}
},
"license": "MIT and Creative Commons Attribution 4.0",
"version": "0.0.1"
"license": "MIT",
"version": "1.3.1"
},
{
"component": {
"type": "git",
"git": {
"name": "octicons",
"repositoryUrl": "https://github.com/primer/octicons",
"commitHash": "d120bf97bc9a12fb415f69fedaf31fe58427ca56"
}
},
"license": "MIT",
"version": "8.3.0"
},
{
"component": {


@@ -1,86 +1,84 @@
{
"name": "admin-tool-ext-win",
"displayName": "%adminToolExtWin.displayName%",
"description": "%adminToolExtWin.description%",
"version": "0.0.2",
"publisher": "Microsoft",
"preview": true,
"license": "https://raw.githubusercontent.com/Microsoft/azuredatastudio/master/extensions/admin-tool-ext-win/license/Azure%20Data%20Studio%20Extension%20-%20Standalone%20(free)%20Use%20Terms.txt",
"icon": "images/sqlserver.png",
"aiKey": "AIF-444c3af9-8e69-4462-ab49-4191e6ad1916",
"engines": {
"vscode": "^1.30.1",
"azdata": ">=1.8.0"
},
"activationEvents": [
"*"
],
"main": "./out/main",
"repository": {
"type": "git",
"url": "https://github.com/Microsoft/azuredatastudio.git"
},
"extensionDependencies": [
"Microsoft.mssql"
],
"contributes": {
"commands": [
{
"command": "adminToolExtWin.launchSsmsMinPropertiesDialog",
"title": "%adminToolExtWin.propertiesMenuItem%",
"category": "AdminToolExtWin"
},
{
"command": "adminToolExtWin.launchSsmsMinGswDialog",
"title": "%adminToolExtWin.launchGswMenuItem%",
"category": "AdminToolExtWin"
}
],
"menus": {
"commandPalette": [
{
"command": "adminToolExtWin.launchSsmsMinPropertiesDialog",
"when": "false"
},
{
"command": "adminToolExtWin.launchSsmsMinGswDialog",
"when": "false"
}
],
"objectExplorer/item/context": [
{
"command": "adminToolExtWin.launchSsmsMinGswDialog",
"when": "isWindows && connectionProvider == MSSQL && nodeType && nodeType == Database && mssql:engineedition != 11",
"group": "z-AdminToolExt@1"
},
{
"command": "adminToolExtWin.launchSsmsMinPropertiesDialog",
"when": "isWindows && connectionProvider == MSSQL && serverInfo && !isCloud && nodeType && nodeType == Server && mssql:engineedition != 11",
"group": "z-AdminToolExt@2"
},
{
"command": "adminToolExtWin.launchSsmsMinPropertiesDialog",
"when": "isWindows && connectionProvider == MSSQL && serverInfo && nodeType && mssql:engineedition != 11 && nodeType =~ /^(Database|Table|Column|Index|Statistic|View|ServerLevelLogin|ServerLevelServerRole|ServerLevelCredential|ServerLevelServerAudit|ServerLevelServerAuditSpecification|StoredProcedure|ScalarValuedFunction|TableValuedFunction|AggregateFunction|Synonym|Assembly|UserDefinedDataType|UserDefinedType|UserDefinedTableType|Sequence|User|DatabaseRole|ApplicationRole|Schema|SecurityPolicy|ServerLevelLinkedServer)$/",
"group": "z-AdminToolExt@2"
}
]
},
"outputChannels": [
"admin-tool-ext-win"
]
},
"dependencies": {
"ads-extension-telemetry": "github:Charles-Gagnon/ads-extension-telemetry#0.1.0",
"vscode-nls": "^3.2.1"
},
"devDependencies": {
"mocha-junit-reporter": "^1.17.0",
"mocha-multi-reporters": "^1.1.7",
"vscode": "^1.1.36"
},
"__metadata": {
"id": "41",
"publisherDisplayName": "Microsoft",
"publisherId": "Microsoft"
}
}
"name": "admin-tool-ext-win",
"displayName": "%adminToolExtWin.displayName%",
"description": "%adminToolExtWin.description%",
"version": "0.0.1",
"publisher": "Microsoft",
"preview": true,
"license": "https://raw.githubusercontent.com/Microsoft/azuredatastudio/master/extensions/admin-tool-ext-win/license/Azure%20Data%20Studio%20Extension%20-%20Standalone%20(free)%20Use%20Terms.txt",
"icon": "images/sqlserver.png",
"aiKey": "AIF-5574968e-856d-40d2-af67-c89a14e76412",
"engines": {
"vscode": "^1.30.1",
"azdata": ">=1.8.0"
},
"activationEvents": [
"*"
],
"main": "./out/main",
"repository": {
"type": "git",
"url": "https://github.com/Microsoft/azuredatastudio.git"
},
"extensionDependencies": [
"Microsoft.mssql"
],
"contributes": {
"commands": [
{
"command": "adminToolExtWin.launchSsmsMinPropertiesDialog",
"title": "%adminToolExtWin.propertiesMenuItem%",
"category": "AdminToolExtWin"
},
{
"command": "adminToolExtWin.launchSsmsMinGswDialog",
"title": "%adminToolExtWin.launchGswMenuItem%",
"category": "AdminToolExtWin"
}
],
"menus": {
"commandPalette": [
{
"command": "adminToolExtWin.launchSsmsMinPropertiesDialog",
"when": "false"
},
{
"command": "adminToolExtWin.launchSsmsMinGswDialog",
"when": "false"
}
],
"objectExplorer/item/context": [
{
"command": "adminToolExtWin.launchSsmsMinGswDialog",
"when": "isWindows && connectionProvider == MSSQL && nodeType && nodeType == Database",
"group": "z-AdminToolExt@1"
},
{
"command": "adminToolExtWin.launchSsmsMinPropertiesDialog",
"when": "isWindows && connectionProvider == MSSQL && serverInfo && !isCloud && nodeType && nodeType == Server",
"group": "z-AdminToolExt@2"
},
{
"command": "adminToolExtWin.launchSsmsMinPropertiesDialog",
"when": "isWindows && connectionProvider == MSSQL && serverInfo && nodeType && nodeType =~ /^(Database|Table|Column|Index|Statistic|View|ServerLevelLogin|ServerLevelServerRole|ServerLevelCredential|ServerLevelServerAudit|ServerLevelServerAuditSpecification|StoredProcedure|ScalarValuedFunction|TableValuedFunction|AggregateFunction|Synonym|Assembly|UserDefinedDataType|UserDefinedType|UserDefinedTableType|Sequence|User|DatabaseRole|ApplicationRole|Schema|SecurityPolicy|ServerLevelLinkedServer)$/",
"group": "z-AdminToolExt@2"
}
]
},
"outputChannels": [
"admin-tool-ext-win"
]
},
"dependencies": {
"ads-extension-telemetry": "github:Charles-Gagnon/ads-extension-telemetry#0.1.0",
"vscode-nls": "^3.2.1"
},
"devDependencies": {
"vscode": "1.0.1"
},
"__metadata": {
"id": "41",
"publisherDisplayName": "Microsoft",
"publisherId": "Microsoft"
}
}


@@ -10,9 +10,8 @@ import * as vscode from 'vscode';
import { TelemetryReporter, TelemetryViews } from './telemetry';
import { doubleEscapeSingleQuotes, backEscapeDoubleQuotes, getTelemetryErrorType } from './utils';
import { ChildProcess, exec } from 'child_process';
import { promises as fs } from 'fs';
const localize = nls.loadMessageBundle();
const ssmsMinVer = JSON.parse(JSON.stringify(require('./config.json'))).version;
let exePath: string;
const runningProcesses: Map<number, ChildProcess> = new Map<number, ChildProcess>();
@@ -67,8 +66,6 @@ export interface LaunchSsmsDialogParams {
export async function activate(context: vscode.ExtensionContext): Promise<void> {
// This is for Windows-specific support so do nothing on other platforms
if (process.platform === 'win32') {
const rawConfig = await fs.readFile(path.join(context.extensionPath, 'config.json'));
const ssmsMinVer = JSON.parse(rawConfig.toString()).version;
exePath = path.join(context.extensionPath, 'ssmsmin', 'Windows', ssmsMinVer, 'ssmsmin.exe');
registerCommands(context);
}
@@ -99,7 +96,7 @@ function registerCommands(context: vscode.ExtensionContext): void {
async function handleLaunchSsmsMinPropertiesDialogCommand(connectionContext?: azdata.ObjectExplorerContext): Promise<void> {
if (!connectionContext) {
TelemetryReporter.sendErrorEvent(TelemetryViews.SsmsMinProperties, 'NoConnectionContext');
vscode.window.showErrorMessage(localize('adminToolExtWin.noConnectionContextForProp', "No ConnectionContext provided for handleLaunchSsmsMinPropertiesDialogCommand"));
vscode.window.showErrorMessage(localize('adminToolExtWin.noConnectionContextForProp', 'No ConnectionContext provided for handleLaunchSsmsMinPropertiesDialogCommand'));
return;
}
@@ -111,7 +108,7 @@ async function handleLaunchSsmsMinPropertiesDialogCommand(connectionContext?: az
nodeType = connectionContext.nodeInfo.nodeType;
} else {
TelemetryReporter.sendErrorEvent(TelemetryViews.SsmsMinProperties, 'NoOENode');
vscode.window.showErrorMessage(localize('adminToolExtWin.noOENode', "Could not determine Object Explorer node from connectionContext : {0}", JSON.stringify(connectionContext)));
vscode.window.showErrorMessage(localize('adminToolExtWin.noOENode', 'Could not determine Object Explorer node from connectionContext : {0}', JSON.stringify(connectionContext)));
return;
}
@@ -128,7 +125,7 @@ async function handleLaunchSsmsMinGswDialogCommand(connectionContext?: azdata.Ob
const action = 'GenerateScripts';
if (!connectionContext) {
TelemetryReporter.sendErrorEvent(TelemetryViews.SsmsMinGsw, 'NoConnectionContext');
vscode.window.showErrorMessage(localize('adminToolExtWin.noConnectionContextForGsw', "No ConnectionContext provided for handleLaunchSsmsMinPropertiesDialogCommand"));
vscode.window.showErrorMessage(localize('adminToolExtWin.noConnectionContextForGsw', 'No ConnectionContext provided for handleLaunchSsmsMinPropertiesDialogCommand'));
}
launchSsmsDialog(
@@ -145,7 +142,7 @@ async function handleLaunchSsmsMinGswDialogCommand(connectionContext?: azdata.Ob
async function launchSsmsDialog(action: string, connectionContext: azdata.ObjectExplorerContext): Promise<void> {
if (!connectionContext.connectionProfile) {
TelemetryReporter.sendErrorEvent(TelemetryViews.SsmsMinDialog, 'NoConnectionProfile');
vscode.window.showErrorMessage(localize('adminToolExtWin.noConnectionProfile', "No connectionProfile provided from connectionContext : {0}", JSON.stringify(connectionContext)));
vscode.window.showErrorMessage(localize('adminToolExtWin.noConnectionProfile', 'No connectionProfile provided from connectionContext : {0}', JSON.stringify(connectionContext)));
return;
}
@@ -159,7 +156,7 @@ async function launchSsmsDialog(action: string, connectionContext: azdata.Object
}
else {
TelemetryReporter.sendErrorEvent(TelemetryViews.SsmsMinDialog, 'NoOENode');
vscode.window.showErrorMessage(localize('adminToolExtWin.noOENode', "Could not determine Object Explorer node from connectionContext : {0}", JSON.stringify(connectionContext)));
vscode.window.showErrorMessage(localize('adminToolExtWin.noOENode', 'Could not determine Object Explorer node from connectionContext : {0}', JSON.stringify(connectionContext)));
return;
}
@@ -191,7 +188,7 @@ async function launchSsmsDialog(action: string, connectionContext: azdata.Object
}).withConnectionInfo(connectionContext.connectionProfile)
.send();
vscode.window.setStatusBarMessage(localize('adminToolExtWin.launchingDialogStatus', "Launching dialog..."), 3000);
vscode.window.setStatusBarMessage(localize('adminToolExtWin.launchingDialogStatus', 'Launching dialog...'), 3000);
// This will be an async call since we pass in the callback
const proc: ChildProcess = exec(
@@ -212,7 +209,7 @@ async function launchSsmsDialog(action: string, connectionContext: azdata.Object
if (err !== '') {
vscode.window.showErrorMessage(localize(
'adminToolExtWin.ssmsMinError',
"Error calling SsmsMin with args \'{0}\' - {1}", args, err));
'Error calling SsmsMin with args \'{0}\' - {1}', args, err));
}
});
@@ -262,4 +259,4 @@ export async function buildUrn(node: azdata.objectexplorer.ObjectExplorerNode):
}
return ['Server'].concat(urnNodes).join('/');
}
}
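
The hunk near the top of this file swaps how the extension resolves the bundled SsmsMin version: one side requires ./config.json synchronously at module load, while the other reads it with the promise-based fs API inside activate(). A minimal sketch of the async variant, assuming only that config.json exposes a top-level version field as shown in the hunk:

import { promises as fs } from 'fs';
import * as path from 'path';
import * as vscode from 'vscode';

export async function activate(context: vscode.ExtensionContext): Promise<void> {
    // Read config.json lazily at activation instead of requiring it at module load.
    const rawConfig = await fs.readFile(path.join(context.extensionPath, 'config.json'));
    const ssmsMinVer: string = JSON.parse(rawConfig.toString()).version;
    // Path layout copied from the diff above; purely illustrative here.
    const exePath = path.join(context.extensionPath, 'ssmsmin', 'Windows', ssmsMinVer, 'ssmsmin.exe');
    console.log(`SsmsMin executable would be resolved at ${exePath}`);
}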


@@ -6,7 +6,7 @@
const path = require('path');
const testRunner = require('vscode/lib/testrunner');
const suite = 'admin-tool-ext-win Extension Tests';
const suite = 'Database Admin Tool Extensions for Windows';
const options: any = {
ui: 'bdd',
@@ -14,24 +14,6 @@ const options: any = {
timeout: 600000
};
// set relevant mocha options from the environment
if (process.env.ADS_TEST_GREP) {
options.grep = process.env.ADS_TEST_GREP;
console.log(`setting options.grep to: ${options.grep}`);
}
if (process.env.ADS_TEST_INVERT_GREP) {
options.invert = parseInt(process.env.ADS_TEST_INVERT_GREP);
console.log(`setting options.invert to: ${options.invert}`);
}
if (process.env.ADS_TEST_TIMEOUT) {
options.timeout = parseInt(process.env.ADS_TEST_TIMEOUT);
console.log(`setting options.timeout to: ${options.timeout}`);
}
if (process.env.ADS_TEST_RETRIES) {
options.retries = parseInt(process.env.ADS_TEST_RETRIES);
console.log(`setting options.retries to: ${options.retries}`);
}
if (process.env.BUILD_ARTIFACTSTAGINGDIRECTORY) {
options.reporter = 'mocha-multi-reporters';
options.reporterOptions = {
@@ -45,4 +27,4 @@ if (process.env.BUILD_ARTIFACTSTAGINGDIRECTORY) {
testRunner.configure(options);
export = testRunner;
export = testRunner;

File diff suppressed because it is too large

View File

@@ -1,98 +1,93 @@
{
"name": "agent",
"displayName": "SQL Server Agent",
"description": "Manage and troubleshoot SQL Server Agent jobs",
"version": "0.44.0",
"publisher": "Microsoft",
"preview": true,
"license": "https://raw.githubusercontent.com/Microsoft/azuredatastudio/master/LICENSE.txt",
"icon": "images/sqlserver.png",
"aiKey": "AIF-444c3af9-8e69-4462-ab49-4191e6ad1916",
"engines": {
"vscode": "^1.25.0"
},
"activationEvents": [
"*"
],
"main": "./out/main",
"repository": {
"type": "git",
"url": "https://github.com/Microsoft/azuredatastudio.git"
},
"extensionDependencies": [
"Microsoft.mssql"
],
"contributes": {
"outputChannels": [
"sqlagent"
],
"dashboard.tabs": [
{
"id": "data-management-agent",
"description": "Manage and troubleshoot SQL Agent jobs",
"provider": "MSSQL",
"title": "SQL Agent",
"when": "connectionProvider == 'MSSQL' && !mssql:iscloud && mssql:engineedition != 11",
"container": {
"controlhost-container": {
"type": "agent"
}
}
}
],
"commands": [
{
"command": "agent.openNotebookDialog",
"title": "Schedule Notebook",
"icon": {
"dark": "resources/dark/open_notebook_inverse.svg",
"light": "resources/light/open_notebook.svg"
}
},
{
"command": "agent.reuploadTemplate",
"title": "Update Template",
"icon": {
"dark": "resources/dark/open_notebook_inverse.svg",
"light": "resources/light/open_notebook.svg"
}
}
],
"menus": {
"notebook/toolbar": [
{
"command": "agent.openNotebookDialog",
"when": "providerId == sql && !agent:trackedTemplate"
},
{
"command": "agent.reuploadTemplate",
"when": "agent:trackedTemplate"
}
],
"explorer/context": [
{
"command": "agent.openNotebookDialog",
"when": "resourceExtname == .ipynb"
}
]
}
},
"dependencies": {
"vscode-nls": "^3.2.1"
},
"devDependencies": {
"@types/mocha": "^5.2.5",
"@types/node": "^10.14.8",
"mocha": "^5.2.0",
"mocha-junit-reporter": "^1.17.0",
"mocha-multi-reporters": "^1.1.7",
"should": "^13.2.1",
"typemoq": "^2.1.0",
"vscode": "^1.1.36"
},
"__metadata": {
"id": "10",
"publisherDisplayName": "Microsoft",
"publisherId": "Microsoft"
"name": "agent",
"displayName": "SQL Server Agent",
"description": "Manage and troubleshoot SQL Server Agent jobs",
"version": "0.43.0",
"publisher": "Microsoft",
"preview": true,
"license": "https://raw.githubusercontent.com/Microsoft/azuredatastudio/master/LICENSE.txt",
"icon": "images/sqlserver.png",
"aiKey": "AIF-5574968e-856d-40d2-af67-c89a14e76412",
"engines": {
"vscode": "^1.25.0"
},
"activationEvents": [
"*"
],
"main": "./out/main",
"repository": {
"type": "git",
"url": "https://github.com/Microsoft/azuredatastudio.git"
},
"extensionDependencies": [
"Microsoft.mssql"
],
"contributes": {
"outputChannels": [
"sqlagent"
],
"dashboard.tabs": [
{
"id": "data-management-agent",
"description": "Manage and troubleshoot SQL Agent jobs",
"provider": "MSSQL",
"title": "SQL Agent",
"when": "connectionProvider == 'MSSQL' && !mssql:iscloud",
"container": {
"controlhost-container": {
"type": "agent"
}
}
}
],
"commands": [
{
"command": "agent.openNotebookDialog",
"title": "Schedule Notebook",
"icon": {
"dark": "resources/dark/open_notebook_inverse.svg",
"light": "resources/light/open_notebook.svg"
}
},
{
"command": "agent.reuploadTemplate",
"title": "Update Template",
"icon": {
"dark": "resources/dark/open_notebook_inverse.svg",
"light": "resources/light/open_notebook.svg"
}
}
],
"menus": {
"notebook/toolbar": [
{
"command": "agent.openNotebookDialog",
"when": "providerId == sql && !agent:trackedTemplate"
},
{
"command": "agent.reuploadTemplate",
"when": "agent:trackedTemplate"
}
],
"explorer/context": [
{
"command": "agent.openNotebookDialog",
"when": "resourceExtname == .ipynb"
}
]
}
},
"dependencies": {
"vscode-nls": "^3.2.1"
},
"devDependencies": {
"mocha-junit-reporter": "^1.17.0",
"mocha-multi-reporters": "^1.1.7",
"@types/mocha": "^5.2.5",
"@types/node": "^8.10.25",
"mocha": "^5.2.0",
"should": "^13.2.1",
"typemoq": "^2.1.0",
"vscode": "1.1.5"
}
}


@@ -6,7 +6,9 @@
'use strict';
import * as azdata from 'azdata';
import { promises as fs } from 'fs';
import * as fs from 'fs';
import { promisify } from 'util';
export class AgentUtils {
@@ -50,11 +52,18 @@ export class AgentUtils {
}
export async function exists(path: string): Promise<boolean> {
try {
await fs.access(path);
return true;
} catch (e) {
return false;
}
export function exists(path: string): Promise<boolean> {
return promisify(fs.exists)(path);
}
export function mkdir(path: string): Promise<void> {
return promisify(fs.mkdir)(path);
}
export function unlink(path: string): Promise<void> {
return promisify(fs.unlink)(path);
}
export function writeFile(path: string, data: string): Promise<void> {
return promisify(fs.writeFile)(path, data);
}
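
The two sides of this hunk take different routes to a promise-based fs layer: one uses fs.promises and answers the existence question with fs.access inside a try/catch, the other wraps the callback API with util.promisify. A hypothetical caller of the helpers shown above, with the import path assumed for illustration:

import { exists, mkdir, writeFile } from './agentUtils'; // hypothetical module path

async function saveTemplate(dir: string, fileName: string, contents: string): Promise<void> {
    // Create the target folder on first use, then write the notebook template.
    if (!(await exists(dir))) {
        await mkdir(dir);
    }
    await writeFile(`${dir}/${fileName}`, contents);
}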


@@ -14,9 +14,9 @@ import { JobData } from './jobData';
const localize = nls.loadMessageBundle();
export class AlertData implements IAgentDialogData {
public static readonly AlertTypeSqlServerEventString: string = localize('alertData.DefaultAlertTypString', "SQL Server event alert");
public static readonly AlertTypePerformanceConditionString: string = localize('alertDialog.PerformanceCondition', "SQL Server performance condition alert");
public static readonly AlertTypeWmiEventString: string = localize('alertDialog.WmiEvent', "WMI event alert");
public static readonly AlertTypeSqlServerEventString: string = localize('alertData.DefaultAlertTypString', 'SQL Server event alert');
public static readonly AlertTypePerformanceConditionString: string = localize('alertDialog.PerformanceCondition', 'SQL Server performance condition alert');
public static readonly AlertTypeWmiEventString: string = localize('alertDialog.WmiEvent', 'WMI event alert');
public static readonly DefaultAlertTypeString: string = AlertData.AlertTypeSqlServerEventString;
ownerUri: string;


@@ -14,12 +14,12 @@ const localize = nls.loadMessageBundle();
export class JobData implements IAgentDialogData {
private readonly JobCompletionActionCondition_Always: string = localize('jobData.whenJobCompletes', "When the job completes");
private readonly JobCompletionActionCondition_OnFailure: string = localize('jobData.whenJobFails', "When the job fails");
private readonly JobCompletionActionCondition_OnSuccess: string = localize('jobData.whenJobSucceeds', "When the job succeeds");
private readonly JobCompletionActionCondition_Always: string = localize('jobData.whenJobCompletes', 'When the job completes');
private readonly JobCompletionActionCondition_OnFailure: string = localize('jobData.whenJobFails', 'When the job fails');
private readonly JobCompletionActionCondition_OnSuccess: string = localize('jobData.whenJobSucceeds', 'When the job succeeds');
// Error Messages
private readonly CreateJobErrorMessage_NameIsEmpty = localize('jobData.jobNameRequired', "Job name must be provided");
private readonly CreateJobErrorMessage_NameIsEmpty = localize('jobData.jobNameRequired', 'Job name must be provided');
private _ownerUri: string;
private _jobCategories: string[];
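
The AlertData and JobData hunks above only flip the quoting of the default strings handed to vscode-nls; the call shape stays the same: a message key, a default message, and optional arguments substituted into {0}-style placeholders. A minimal sketch of that pattern, with the key and message invented purely for illustration:

import * as nls from 'vscode-nls';

const localize = nls.loadMessageBundle();

// '{0}' is replaced by the trailing argument, as in the error messages earlier in this diff.
const message = localize('example.jobNotFound', "Could not find job: {0}", 'DailyBackup');
console.log(message);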

Some files were not shown because too many files have changed in this diff