Mirror of https://github.com/ckaczor/azuredatastudio.git
synced 2026-02-19 18:46:52 -05:00
Compare commits
124 Commits
Commits included in this comparison, by SHA1:

59c4b8e90c, 0e9f1ad6d1, e3ef2d53f9, dc69b012a8, f7dc9ec2be, d04451985c, b490d53284, bc766698ee,
ed7521e846, 63affd4d9a, 5e62aea5d5, dfa33a85b2, e09e7be757, dd8207dd3f, 0f9f17fb1b, 1de05a2339,
c6fec97819, 52a223e9ff, ddb442f1e1, bf21db99c4, aef6511ba6, 2d71397ffe, faaf667439, 3946cee33c,
b75e688a7e, 4750d5c0dd, 7f6ddc897b, e49dd12951, 730367494b, 399406b732, 25352fa39c, f691701c1c,
c615affdc3, 1d75fcfed5, 866ced5c08, 8a0c183114, a7ad526ff2, 70a63830bd, e2b2cfd7ca, 27b3904f2f,
f8fb0b8c49, db5b68e04a, d582a955dd, cc232f195f, 82e0ede921, a5db46b961, 555e6a10e9, 4c66b04572,
a187e8f2de, 2ec720d5b9, c61c53976a, 932fc44566, 22093ec406, 4a1537aac7, 43e8fde775, 02770e21ee,
04ede021f3, 2e9555f24a, d36918081a, 6b189be781, b3d3906f76, 388b87a519, 836550e1b8, dc42beb237,
d820137267, 1040d5bc4c, 08ae25d7ba, e0d8b67fe4, 2781279644, 7e8dccec82, faf4c9232b, 2a0d37aee0,
c54f991e60, c6a8b80ce5, af96cb92db, 335146c65e, 71db7ed101, 12e0f24ef8, c1317e520e, d797ef433a,
8ee8975f17, b1d909e829, 8111c536e1, 0e24fc09b5, 558dfd693c, ca81290aa1, 1d55facfe3, 70b0d89b5d,
624c07947c, 89db1266d2, 036545b74b, 8db452dcd6, b612218b02, 1eaa6be392, 448d21849b, 5f2fa391c4,
efd70310ba, 4983dcbd9f, 969694e9a2, 846da0a918, 488ae411fa, c770aca1d3, 58b9e9dc3d, 104ab95080,
633afd16bb, 23d79c2090, ce114ae191, 5385b02b61, ed40a0b91e, f03a129186, 99fd1fbb5e, c015338ed4,
9cddd91037, cd71be33b3, 27c206a25b, da00d83ace, ec1f4ab077, 95efa2b788, fabc17a3f8, 0b41baaa0c,
fff2bd5089, 1c4b1a178c, c68b359aaa, 676296fbbb
@@ -740,6 +740,8 @@
"angular2-grid",
"html-query-plan",
"turndown",
"gridstack",
"gridstack/**",
"mark.js",
"vscode-textmate",
"vscode-oniguruma",
CHANGELOG.md (10 changed lines)
@@ -1,5 +1,15 @@
# Change Log

## Version 1.29.0
* Release date: May 19, 2021
* Release status: General Availability
* New Notebook Features:
  * Added runs with a parameters option.
* Extension Updates:
  * SQL Database Projects
  * Schema Compare
* Bug Fixes

## Version 1.28.0
* Release date: April 16, 2021
* Release status: General Availability
README.md (14 changed lines)
@@ -131,10 +131,10 @@ Copyright (c) Microsoft Corporation. All rights reserved.

Licensed under the [Source EULA](LICENSE.txt).

[win-user]: https://go.microsoft.com/fwlink/?linkid=2160781
[win-system]: https://go.microsoft.com/fwlink/?linkid=2160780
[win-zip]: https://go.microsoft.com/fwlink/?linkid=2160923
[osx-zip]: https://go.microsoft.com/fwlink/?linkid=2160874
[linux-zip]: https://go.microsoft.com/fwlink/?linkid=2160782
[linux-rpm]: https://go.microsoft.com/fwlink/?linkid=2160875
[linux-deb]: https://go.microsoft.com/fwlink/?linkid=2160876
[win-user]: https://go.microsoft.com/fwlink/?linkid=2163435
[win-system]: https://go.microsoft.com/fwlink/?linkid=2163531
[win-zip]: https://go.microsoft.com/fwlink/?linkid=2163529
[osx-zip]: https://go.microsoft.com/fwlink/?linkid=2163528
[linux-zip]: https://go.microsoft.com/fwlink/?linkid=2163530
[linux-rpm]: https://go.microsoft.com/fwlink/?linkid=2163437
[linux-deb]: https://go.microsoft.com/fwlink/?linkid=2163436
@@ -29,6 +29,7 @@ expressly granted herein, whether by implication, estoppel or otherwise.
gc-signals: https://github.com/Microsoft/node-gc-signals
getmac: https://github.com/bevry/getmac
graceful-fs: https://github.com/isaacs/node-graceful-fs
gridstack: https://github.com/gridstack/gridstack.js
html-query-plan: https://github.com/JustinPealing/html-query-plan
http-proxy-agent: https://github.com/TooTallNate/node-https-proxy-agent
https-proxy-agent: https://github.com/TooTallNate/node-https-proxy-agent
@@ -493,6 +494,32 @@ IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
=========================================
END OF graceful-fs NOTICES AND INFORMATION

%% gridstack NOTICES AND INFORMATION BEGIN HERE
=========================================
The MIT License (MIT)

Copyright (c) 2014-2020 Alain Dumesny, Dylan Weiss, Pavel Reznikov

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
=========================================
END OF gridstack NOTICES AND INFORMATION

%% html-query-plan NOTICES AND INFORMATION BEGIN HERE
=========================================
The MIT License (MIT)
@@ -9,6 +9,7 @@ const path = require("path");
const es = require("event-stream");
const fs = require("fs");
const files = [
'.build/langpacks/**/*.vsix',
'.build/extensions/**/*.vsix',
'.build/win32-x64/**/*.{exe,zip}',
'.build/linux/sha256hashes.txt',

@@ -11,6 +11,7 @@ import * as es from 'event-stream';
import * as fs from 'fs';

const files = [
'.build/langpacks/**/*.vsix', // langpacks
'.build/extensions/**/*.vsix', // external extensions
'.build/win32-x64/**/*.{exe,zip}', // windows binaries
'.build/linux/sha256hashes.txt', // linux hashes
@@ -1,5 +1,5 @@
pool:
vmImage: 'Ubuntu-16.04'
vmImage: 'Ubuntu-18.04'

trigger:
branches:

@@ -1,5 +1,5 @@
#Download base image ubuntu 16.04
FROM ubuntu:16.04
#Download base image ubuntu 18.04
FROM ubuntu:18.04

# Update Software repository
RUN apt-get update

@@ -7,7 +7,7 @@ SERVER_BUILD_NAME="azuredatastudio-server-$PLATFORM_LINUX"

# create docker
mkdir -p $REPO/.build/docker
docker build -t azuredatastudio-server -f $REPO/build/azure-pipelines/docker/Dockerfile $ROOT/$SERVER_BUILD_NAME
docker build -t azuredatastudio-server -f $REPO/build/azure-pipelines/docker/Dockerfile $ROOT/$SERVER_BUILD_NAME-web
docker save azuredatastudio-server | gzip > $REPO/.build/docker/azuredatastudio-server-docker.tar.gz

node build/azure-pipelines/common/copyArtifacts.js
@@ -79,7 +79,7 @@ steps:
set -e
for f in $(Build.SourcesDirectory)/.build/drop/linux/server/*.tar.gz
do
tar -C $(agent.builddirectory) -zxvf $f
tar -C $(Build.SourcesDirectory)/../ -zxvf $f
rm $f
done
displayName: Unzip artifacts

@@ -1,5 +1,5 @@
#Download base image ubuntu 16.04
FROM ubuntu:16.04
#Download base image ubuntu 18.04
FROM ubuntu:18.04

# Update Software repository
RUN apt-get update && apt-get upgrade -y

@@ -14,13 +14,22 @@ TARBALL_PATH="$REPO/.build/linux/archive/$TARBALL_FILENAME"
rm -rf $ROOT/code-*.tar.*
(cd $ROOT && tar -czf $TARBALL_PATH $BUILDNAME)

# Publish Remote Extension Host
LEGACY_SERVER_BUILD_NAME="azuredatastudio-reh-$PLATFORM_LINUX"
SERVER_BUILD_NAME="azuredatastudio-server-$PLATFORM_LINUX"
SERVER_TARBALL_FILENAME="azuredatastudio-server-$PLATFORM_LINUX.tar.gz"
SERVER_TARBALL_PATH="$REPO/.build/linux/server/$SERVER_TARBALL_FILENAME"
# # Publish Remote Extension Host
# LEGACY_SERVER_BUILD_NAME="azuredatastudio-reh-$PLATFORM_LINUX"
# SERVER_BUILD_NAME="azuredatastudio-server-$PLATFORM_LINUX"
# SERVER_TARBALL_FILENAME="azuredatastudio-server-$PLATFORM_LINUX.tar.gz"
# SERVER_TARBALL_PATH="$REPO/.build/linux/server/$SERVER_TARBALL_FILENAME"

# rm -rf $ROOT/azuredatastudio-server-*.tar.*
# (cd $ROOT && mv $LEGACY_SERVER_BUILD_NAME $SERVER_BUILD_NAME && tar --owner=0 --group=0 -czf $SERVER_TARBALL_PATH $SERVER_BUILD_NAME)

# Publish Remote Extension Host (Web)
LEGACY_SERVER_BUILD_NAME_WEB="azuredatastudio-reh-web-$PLATFORM_LINUX"
SERVER_BUILD_NAME_WEB="azuredatastudio-server-$PLATFORM_LINUX-web"
SERVER_TARBALL_FILENAME_WEB="azuredatastudio-server-$PLATFORM_LINUX-web.tar.gz"
SERVER_TARBALL_PATH_WEB="$REPO/.build/linux/server/$SERVER_TARBALL_FILENAME_WEB"

rm -rf $ROOT/azuredatastudio-server-*.tar.*
(cd $ROOT && mv $LEGACY_SERVER_BUILD_NAME $SERVER_BUILD_NAME && tar --owner=0 --group=0 -czf $SERVER_TARBALL_PATH $SERVER_BUILD_NAME)
(cd $ROOT && mv vscode-reh-web-linux-x64 $SERVER_BUILD_NAME_WEB && tar --owner=0 --group=0 -czf $SERVER_TARBALL_PATH_WEB $SERVER_BUILD_NAME_WEB)

node build/azure-pipelines/common/copyArtifacts.js
@@ -95,6 +95,7 @@ steps:
set -e
yarn gulp vscode-linux-x64-min-ci
yarn gulp vscode-web-min-ci
yarn gulp vscode-reh-web-linux-x64-min
displayName: Build
env:
VSCODE_MIXIN_PASSWORD: $(github-distro-mixin-password)
@@ -106,6 +107,11 @@ steps:
yarn gulp package-external-extensions
displayName: Package External extensions

- script: |
set -e
yarn gulp package-langpacks
displayName: Package Langpacks

- script: |
set -e
service xvfb start
@@ -190,7 +196,7 @@ steps:
inputs:
ConnectedServiceName: 'Code Signing'
FolderPath: '$(Build.SourcesDirectory)/.build'
Pattern: 'extensions/*.vsix'
Pattern: 'extensions/*.vsix,langpacks/*.vsix'
signConfigType: inlineSignParams
inlineOperation: |
[
@@ -215,9 +221,22 @@ steps:
}
]
SessionTimeout: 120
displayName: 'Signing Extensions'
displayName: 'Signing Extensions and Langpacks'
condition: and(succeeded(), eq(variables['signed'], true))

# - script: |
#   set -e
#   cd ./extensions/mssql/node_modules/@microsoft/ads-kerberos
#   # npx node-gyp rebuild
#   yarn install
#   displayName: Recompile native node modules

# - script: |
#   set -e
#   VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
#   yarn gulp vscode-reh-web-linux-x64-min
#   displayName: Build web server

- script: |
set -e
./build/azure-pipelines/linux/createDrop.sh
@@ -7,7 +7,7 @@ resources:
jobs:
- job: Compile
pool:
vmImage: 'Ubuntu-16.04'
vmImage: 'Ubuntu-18.04'
container: linux-x64
steps:
- script: |
@@ -15,6 +15,7 @@ jobs:
echo "##vso[build.addbuildtag]$(VSCODE_QUALITY)"
displayName: Add Quality Build Tag
- template: sql-product-compile.yml
timeoutInMinutes: 90

- job: macOS
condition: and(succeeded(), eq(variables['VSCODE_BUILD_MACOS'], 'true'), ne(variables['VSCODE_QUALITY'], 'saw'))
@@ -24,7 +25,7 @@ jobs:
- Compile
steps:
- template: darwin/sql-product-build-darwin.yml
timeoutInMinutes: 180
timeoutInMinutes: 90

- job: macOS_Signing
condition: and(succeeded(), eq(variables['VSCODE_BUILD_MACOS'], 'true'), eq(variables['signed'], true), ne(variables['VSCODE_QUALITY'], 'saw'))
@@ -39,7 +40,7 @@ jobs:
- job: Linux
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX'], 'true'))
pool:
vmImage: 'Ubuntu-16.04'
vmImage: 'Ubuntu-18.04'
container: linux-x64
dependsOn:
- Compile
@@ -47,12 +48,12 @@ jobs:
- template: linux/sql-product-build-linux.yml
parameters:
extensionsToUnitTest: ["admin-tool-ext-win", "agent", "azdata", "azurecore", "cms", "dacpac", "import", "schema-compare", "notebook", "resource-deployment", "machine-learning", "sql-database-projects", "data-workspace"]
timeoutInMinutes: 70
timeoutInMinutes: 90

- job: LinuxWeb
condition: and(succeeded(), eq(variables['VSCODE_BUILD_WEB'], 'true'), ne(variables['VSCODE_QUALITY'], 'saw'))
pool:
vmImage: 'Ubuntu-16.04'
vmImage: 'Ubuntu-18.04'
container: linux-x64
variables:
VSCODE_ARCH: x64
@@ -60,16 +61,18 @@ jobs:
- Compile
steps:
- template: web/sql-product-build-web.yml
timeoutInMinutes: 90

# - job: Docker
#   condition: and(succeeded(), eq(variables['VSCODE_BUILD_DOCKER'], 'true'))
#   pool:
#     vmImage: 'Ubuntu-16.04'
#   container: linux-x64
#   dependsOn:
#   - Linux
#   steps:
#   - template: docker/sql-product-build-docker.yml
- job: Docker
condition: and(succeeded(), eq(variables['VSCODE_BUILD_DOCKER'], 'true'))
pool:
vmImage: 'Ubuntu-18.04'
container: linux-x64
dependsOn:
- Linux
steps:
- template: docker/sql-product-build-docker.yml
timeoutInMinutes: 90

- job: Windows
condition: and(succeeded(), eq(variables['VSCODE_BUILD_WIN32'], 'true'))
@@ -79,7 +82,7 @@ jobs:
- Compile
steps:
- template: win32/sql-product-build-win32.yml
timeoutInMinutes: 70
timeoutInMinutes: 90

- job: Windows_Test
condition: and(succeeded(), eq(variables['VSCODE_BUILD_WIN32'], 'true'))
@@ -90,11 +93,12 @@ jobs:
- Windows
steps:
- template: win32/sql-product-test-win32.yml
timeoutInMinutes: 90

- job: Release
condition: and(succeeded(), or(eq(variables['VSCODE_RELEASE'], 'true'), and(eq(variables['VSCODE_QUALITY'], 'insider'), eq(variables['Build.Reason'], 'Schedule'))))
pool:
vmImage: 'Ubuntu-16.04'
vmImage: 'Ubuntu-18.04'
dependsOn:
- macOS
- Linux

@@ -99,3 +99,4 @@ steps:
# VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
# ./build/azure-pipelines/web/publish.sh
# displayName: Publish
@@ -16,7 +16,7 @@ steps:
inputs:
azureSubscription: 'ClientToolsInfra_670062 (88d5392f-a34f-4769-b405-f597fc533613)'
KeyVaultName: ado-secrets
SecretsFilter: 'github-distro-mixin-password'
SecretsFilter: 'github-distro-mixin-password,standalone-extensions-uri'
displayName: 'Azure Key Vault: Get Secrets'

- task: DownloadPipelineArtifact@2
@@ -102,8 +102,6 @@ steps:
$ErrorActionPreference = "Stop"
exec { yarn gulp "package-rebuild-extensions" }
exec { yarn gulp "vscode-win32-x64-min-ci" }
exec { yarn gulp "vscode-reh-win32-x64-min" }
exec { yarn gulp "vscode-reh-web-win32-x64-min" }
exec { yarn gulp "vscode-win32-x64-code-helper" }
exec { yarn gulp "vscode-win32-x64-inno-updater" }
displayName: Build
@@ -121,6 +119,19 @@ steps:
vstsPackageVersion: '*'
condition: and(succeeded(), eq(variables['VSCODE_QUALITY'], 'saw'))

- powershell: |
# Install TSGOps specific extensions
$ErrorActionPreference = "Stop"
$tempFilePath = (New-TemporaryFile).FullName
$zipFilePath = $tempFilePath + ".zip"
$extensionUri = "$(standalone-extensions-uri)"
$adsExtensionPath = "$(agent.builddirectory)\azuredatastudio-win32-x64\resources\app\extensions"
Invoke-WebRequest -Uri $extensionUri -OutFile $tempFilePath
Move-Item $tempFilePath $zipFilePath
Expand-Archive $zipFilePath -DestinationPath $adsExtensionPath
displayName: Install SAW Extensions
condition: and(succeeded(), eq(variables['VSCODE_QUALITY'], 'saw'))

# - powershell: | @anthonydresser unit tests timeout never existing the node process
# . build/azure-pipelines/win32/exec.ps1
# $ErrorActionPreference = "Stop"
@@ -26,8 +26,17 @@ const ext = require('./lib/extensions');
const extensionsPath = path.join(path.dirname(__dirname), 'extensions');
// {{SQL CARBON EDIT}}
const sqlLocalizedExtensions = [
'admin-tool-ext-win',
'agent',
'cms',
'dacpac',
'schema-compare'
'import',
'machine-learning',
'profiler',
'schema-compare',
'server-report',
'sql-assessment',
'sql-database-projects'
];
// {{SQL CARBON EDIT}}
@@ -6,21 +6,35 @@
'use strict';

const gulp = require('gulp');

const path = require('path');
const es = require('event-stream');
const util = require('./lib/util');
const task = require('./lib/task');
const common = require('./lib/optimize');
const product = require('../product.json');
const rename = require('gulp-rename');
const replace = require('gulp-replace');
const filter = require('gulp-filter');
const _ = require('underscore');
const { getProductionDependencies } = require('./lib/dependencies');
const vfs = require('vinyl-fs');
const packageJson = require('../package.json');
const flatmap = require('gulp-flatmap');
const gunzip = require('gulp-gunzip');
const File = require('vinyl');
const fs = require('fs');
const rename = require('gulp-rename');
const filter = require('gulp-filter');
const glob = require('glob');
const { compileBuildTask } = require('./gulpfile.compile');
const { compileExtensionsBuildTask } = require('./gulpfile.extensions');
const { vscodeWebEntryPoints, vscodeWebResourceIncludes, createVSCodeWebFileContentMapper } = require('./gulpfile.vscode.web');
const cp = require('child_process');

const REPO_ROOT = path.dirname(__dirname);
const commit = util.getVersion(REPO_ROOT);
const BUILD_ROOT = path.dirname(REPO_ROOT);
const REMOTE_FOLDER = path.join(REPO_ROOT, 'remote');

// Targets

const BUILD_TARGETS = [
{ platform: 'win32', arch: 'ia32', pkgTarget: 'node8-win-x86' },
@@ -33,21 +47,92 @@ const BUILD_TARGETS = [
{ platform: 'linux', arch: 'alpine', pkgTarget: 'node8-linux-alpine' },
];

const noop = () => { return Promise.resolve(); };
const serverResources = [

gulp.task('vscode-reh-win32-ia32-min', noop);
gulp.task('vscode-reh-win32-x64-min', noop);
gulp.task('vscode-reh-darwin-min', noop);
gulp.task('vscode-reh-linux-x64-min', noop);
gulp.task('vscode-reh-linux-armhf-min', noop);
gulp.task('vscode-reh-linux-arm64-min', noop);
gulp.task('vscode-reh-linux-alpine-min', noop);
// Bootstrap
'out-build/bootstrap.js',
'out-build/bootstrap-fork.js',
'out-build/bootstrap-amd.js',
'out-build/bootstrap-node.js',
'out-build/paths.js',

gulp.task('vscode-reh-web-win32-ia32-min', noop);
gulp.task('vscode-reh-web-win32-x64-min', noop);
gulp.task('vscode-reh-web-darwin-min', noop);
gulp.task('vscode-reh-web-linux-x64-min', noop);
gulp.task('vscode-reh-web-linux-alpine-min', noop);
// Performance
'out-build/vs/base/common/performance.js',

// main entry points
'out-build/vs/server/cli.js',
'out-build/vs/server/main.js',

// Watcher
'out-build/vs/platform/files/**/*.exe',
'out-build/vs/platform/files/**/*.md',

// Uri transformer
'out-build/vs/server/uriTransformer.js',

// Process monitor
'out-build/vs/base/node/cpuUsage.sh',
'out-build/vs/base/node/ps.sh',

'!**/test/**'
];

let serverWithWebResources = undefined;
try {
serverWithWebResources = [

// Include all of server...
...serverResources,

// ...and all of web
...vscodeWebResourceIncludes
];
} catch (err) {
serverWithWebResources = [
// Include all of server...
...serverResources
];
}

const serverEntryPoints = [
{
name: 'vs/server/remoteExtensionHostAgent',
exclude: ['vs/css', 'vs/nls']
},
{
name: 'vs/server/remoteCli',
exclude: ['vs/css', 'vs/nls']
},
{
name: 'vs/server/remoteExtensionHostProcess',
exclude: ['vs/css', 'vs/nls']
},
{
name: 'vs/platform/files/node/watcher/unix/watcherApp',
exclude: ['vs/css', 'vs/nls']
},
{
name: 'vs/platform/files/node/watcher/nsfw/watcherApp',
exclude: ['vs/css', 'vs/nls']
}
];

let serverWithWebEntryPoints = undefined;
try {
serverWithWebEntryPoints = [

// Include all of server
...serverEntryPoints,

// Include workbench web
...vscodeWebEntryPoints
];
} catch (err) {
serverWithWebEntryPoints = [
// Include all of server
...serverEntryPoints
];
}

function getNodeVersion() {
const yarnrc = fs.readFileSync(path.join(REPO_ROOT, 'remote', '.yarnrc'), 'utf8');
@@ -116,6 +201,270 @@ function nodejs(platform, arch) {
.pipe(rename('node'));
}

function packageTask(type, platform, arch, sourceFolderName, destinationFolderName) {
const destination = path.join(BUILD_ROOT, destinationFolderName);

return () => {
const json = require('gulp-json-editor');

const src = gulp.src(sourceFolderName + '/**', { base: '.' })
.pipe(rename(function (path) { path.dirname = path.dirname.replace(new RegExp('^' + sourceFolderName), 'out'); }))
.pipe(util.setExecutableBit(['**/*.sh']))
.pipe(filter(['**', '!**/*.js.map']));

const workspaceExtensionPoints = ['debuggers', 'jsonValidation'];
const isUIExtension = (manifest) => {
switch (manifest.extensionKind) {
case 'ui': return true;
case 'workspace': return false;
default: {
if (manifest.main) {
return false;
}
if (manifest.contributes && Object.keys(manifest.contributes).some(key => workspaceExtensionPoints.indexOf(key) !== -1)) {
return false;
}
// Default is UI Extension
return true;
}
}
};
const localWorkspaceExtensions = glob.sync('extensions/*/package.json')
.filter((extensionPath) => {
if (type === 'reh-web') {
return true; // web: ship all extensions for now
}

const manifest = JSON.parse(fs.readFileSync(path.join(REPO_ROOT, extensionPath)).toString());
return !isUIExtension(manifest);
}).map((extensionPath) => path.basename(path.dirname(extensionPath)))
.filter(name => name !== 'vscode-api-tests' && name !== 'vscode-test-resolver'); // Do not ship the test extensions
const marketplaceExtensions = JSON.parse(fs.readFileSync(path.join(REPO_ROOT, 'product.json'), 'utf8')).builtInExtensions
.filter(entry => !entry.platforms || new Set(entry.platforms).has(platform))
.filter(entry => !entry.clientOnly)
.map(entry => entry.name);
const extensionPaths = [...localWorkspaceExtensions, ...marketplaceExtensions]
.map(name => `.build/extensions/${name}/**`);

const extensions = gulp.src(extensionPaths, { base: '.build', dot: true });
const extensionsCommonDependencies = gulp.src('.build/extensions/node_modules/**', { base: '.build', dot: true });
const sources = es.merge(src, extensions, extensionsCommonDependencies)
.pipe(filter(['**', '!**/*.js.map'], { dot: true }));

let version = packageJson.version;
const quality = product.quality;

if (quality && quality !== 'stable') {
version += '-' + quality;
}

const name = product.nameShort;
const packageJsonStream = gulp.src(['remote/package.json'], { base: 'remote' })
.pipe(json({ name, version }));

const date = new Date().toISOString();

const productJsonStream = gulp.src(['product.json'], { base: '.' })
.pipe(json({ commit, date }));

const license = gulp.src(['remote/LICENSE'], { base: 'remote' });

const jsFilter = util.filter(data => !data.isDirectory() && /\.js$/.test(data.path));

const productionDependencies = getProductionDependencies(REMOTE_FOLDER);
const dependenciesSrc = _.flatten(productionDependencies.map(d => path.relative(REPO_ROOT, d.path)).map(d => [`${d}/**`, `!${d}/**/{test,tests}/**`, `!${d}/.bin/**`]));
const deps = gulp.src(dependenciesSrc, { base: 'remote', dot: true })
// filter out unnecessary files, no source maps in server build
.pipe(filter(['**', '!**/package-lock.json', '!**/yarn.lock', '!**/*.js.map']))
.pipe(util.cleanNodeModules(path.join(__dirname, '.moduleignore')))
.pipe(jsFilter)
.pipe(util.stripSourceMappingURL())
.pipe(jsFilter.restore);

const nodePath = `.build/node/v${nodeVersion}/${platform}-${platform === 'darwin' ? 'x64' : arch}`;
const node = gulp.src(`${nodePath}/**`, { base: nodePath, dot: true });

let web = [];
if (type === 'reh-web') {
web = [
'resources/server/favicon.ico',
'resources/server/code-192.png',
'resources/server/code-512.png',
'resources/server/manifest.json'
].map(resource => gulp.src(resource, { base: '.' }).pipe(rename(resource)));
}

let all = es.merge(
packageJsonStream,
productJsonStream,
license,
sources,
deps,
node,
...web
);

let result = all
.pipe(util.skipDirectories())
.pipe(util.fixWin32DirectoryPermissions());

if (platform === 'win32') {
result = es.merge(result,
gulp.src('resources/server/bin/code.cmd', { base: '.' })
.pipe(replace('@@VERSION@@', version))
.pipe(replace('@@COMMIT@@', commit))
.pipe(replace('@@APPNAME@@', product.applicationName))
.pipe(rename(`bin/${product.applicationName}.cmd`)),
// gulp.src('resources/server/bin/helpers/browser.cmd', { base: '.' })
// .pipe(replace('@@VERSION@@', version))
// .pipe(replace('@@COMMIT@@', commit))
// .pipe(replace('@@APPNAME@@', product.applicationName))
// .pipe(rename(`bin/helpers/browser.cmd`)),
gulp.src('resources/server/bin/server.cmd', { base: '.' })
.pipe(rename(`server.cmd`))
);
} else if (platform === 'linux' || platform === 'darwin') {
result = es.merge(result,
gulp.src('resources/server/bin/code.sh', { base: '.' })
.pipe(replace('@@VERSION@@', version))
.pipe(replace('@@COMMIT@@', commit))
.pipe(replace('@@APPNAME@@', product.applicationName))
.pipe(rename(`bin/${product.applicationName}`))
.pipe(util.setExecutableBit()),
// gulp.src('resources/server/bin/helpers/browser.sh', { base: '.' })
// .pipe(replace('@@VERSION@@', version))
// .pipe(replace('@@COMMIT@@', commit))
// .pipe(replace('@@APPNAME@@', product.applicationName))
// .pipe(rename(`bin/helpers/browser.sh`))
// .pipe(util.setExecutableBit()),
gulp.src('resources/server/bin/server.sh', { base: '.' })
.pipe(rename(`server.sh`))
.pipe(util.setExecutableBit())
);
}

return result.pipe(vfs.dest(destination));
};
}

function copyConfigTask(folder) {
const destination = path.join(BUILD_ROOT, folder);
return () => {
const json = require('gulp-json-editor');

return gulp.src(['remote/pkg-package.json'], { base: 'remote' })
.pipe(rename(path => path.basename += '.' + folder))
.pipe(json(obj => {
const pkg = obj.pkg;
pkg.scripts = pkg.scripts && pkg.scripts.map(p => path.join(destination, p));
pkg.assets = pkg.assets && pkg.assets.map(p => path.join(destination, p));
return obj;
}))
.pipe(vfs.dest('out-vscode-reh-pkg'));
};
}

function copyNativeTask(folder) {
const destination = path.join(BUILD_ROOT, folder);
return () => {
const nativeLibraries = gulp.src(['remote/node_modules/**/*.node']);
const license = gulp.src(['remote/LICENSE']);

const result = es.merge(
nativeLibraries,
license
);

return result
.pipe(rename({ dirname: '' }))
.pipe(vfs.dest(destination));
};
}

function packagePkgTask(platform, arch, pkgTarget) {
const folder = path.join(BUILD_ROOT, 'vscode-reh') + (platform ? '-' + platform : '') + (arch ? '-' + arch : '');
return () => {
const cwd = process.cwd();
const config = path.join(cwd, 'out-vscode-reh-pkg', 'pkg-package.vscode-reh-' + platform + '-' + arch + '.json');
process.chdir(folder);
console.log(`TODO`, pkgTarget, config);
return null;
// return pkg.exec(['-t', pkgTarget, '-d', '-c', config, '-o', path.join(folder + '-pkg', platform === 'win32' ? 'vscode-reh.exe' : 'vscode-reh'), './out/remoteExtensionHostAgent.js'])
// .then(() => process.chdir(cwd));
};
}

['reh', 'reh-web'].forEach(type => {
const optimizeTask = task.define(`optimize-vscode-${type}`, task.series(
util.rimraf(`out-vscode-${type}`),
common.optimizeTask({
src: 'out-build',
entryPoints: _.flatten(type === 'reh' ? serverEntryPoints : serverWithWebEntryPoints),
otherSources: [],
resources: type === 'reh' ? serverResources : serverWithWebResources,
loaderConfig: common.loaderConfig(),
out: `out-vscode-${type}`,
inlineAmdImages: true,
bundleInfo: undefined,
fileContentMapper: createVSCodeWebFileContentMapper ? createVSCodeWebFileContentMapper('.build/extensions') : undefined
})
));

const minifyTask = task.define(`minify-vscode-${type}`, task.series(
optimizeTask,
util.rimraf(`out-vscode-${type}-min`),
common.minifyTask(`out-vscode-${type}`, `https://ticino.blob.core.windows.net/sourcemaps/${commit}/core`)
));
gulp.task(minifyTask);

BUILD_TARGETS.forEach(buildTarget => {
const dashed = (str) => (str ? `-${str}` : ``);
const platform = buildTarget.platform;
const arch = buildTarget.arch;
const pkgTarget = buildTarget.pkgTarget;

const copyPkgConfigTask = task.define(`copy-pkg-config${dashed(platform)}${dashed(arch)}`, task.series(
util.rimraf(`out-vscode-${type}-pkg`),
copyConfigTask(`vscode-${type}${dashed(platform)}${dashed(arch)}`)
));

const copyPkgNativeTask = task.define(`copy-pkg-native${dashed(platform)}${dashed(arch)}`, task.series(
util.rimraf(path.join(BUILD_ROOT, `vscode-${type}${dashed(platform)}${dashed(arch)}-pkg`)),
copyNativeTask(`vscode-${type}${dashed(platform)}${dashed(arch)}-pkg`)
));

['', 'min'].forEach(minified => {
const sourceFolderName = `out-vscode-${type}${dashed(minified)}`;
const destinationFolderName = `vscode-${type}${dashed(platform)}${dashed(arch)}`;

const serverTaskCI = task.define(`vscode-${type}${dashed(platform)}${dashed(arch)}${dashed(minified)}-ci`, task.series(
gulp.task(`node-${platform}-${platform === 'darwin' ? 'x64' : arch}`),
util.rimraf(path.join(BUILD_ROOT, destinationFolderName)),
packageTask(type, platform, arch, sourceFolderName, destinationFolderName)
));
gulp.task(serverTaskCI);

const serverTask = task.define(`vscode-${type}${dashed(platform)}${dashed(arch)}${dashed(minified)}`, task.series(
compileBuildTask,
compileExtensionsBuildTask,
minified ? minifyTask : optimizeTask,
serverTaskCI
));
gulp.task(serverTask);

const serverPkgTask = task.define(`vscode-${type}${dashed(platform)}${dashed(arch)}${dashed(minified)}-pkg`, task.series(
task.parallel(
serverTask,
copyPkgConfigTask,
copyPkgNativeTask
),
packagePkgTask(platform, arch, pkgTarget)
));
gulp.task(serverPkgTask);
});
});
});

function mixinServer(watch) {
const packageJSONPath = path.join(path.dirname(__dirname), 'package.json');
function exec(cmdLine) {
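The server `packageTask` above decides which local extensions ship with the remote/server builds by classifying each manifest as a UI or a workspace extension. A minimal, standalone sketch of that classification follows; the two sample manifests are hypothetical and only the rules (explicit `extensionKind`, a `main` entry point, and the workspace-only contribution points) come from the gulpfile:

```js
// Sketch of the extension-kind check used by packageTask (assumptions noted above).
const workspaceExtensionPoints = ['debuggers', 'jsonValidation'];

const isUIExtension = (manifest) => {
	switch (manifest.extensionKind) {
		case 'ui': return true;
		case 'workspace': return false;
		default:
			if (manifest.main) { return false; } // has node code, so it runs in the workspace
			if (manifest.contributes &&
				Object.keys(manifest.contributes).some(key => workspaceExtensionPoints.includes(key))) {
				return false; // contributes workspace-only extension points
			}
			return true; // default is a UI extension
	}
};

// Hypothetical manifests, for illustration only.
console.log(isUIExtension({ contributes: { themes: [] } }));  // true  -> excluded from the 'reh' server build
console.log(isUIExtension({ main: './out/extension.js' }));   // false -> included ('reh-web' ships everything)
```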
@@ -10,6 +10,7 @@ const es = require('event-stream');
const filter = require('gulp-filter');
const path = require('path');
const ext = require('./lib/extensions');
const loc = require('./lib/locFunc');
const task = require('./lib/task');
const glob = require('glob');
const vsce = require('vsce');
@@ -115,6 +116,31 @@ gulp.task('package-external-extensions', task.series(
})
));

gulp.task('package-langpacks', task.series(
task.define('bundle-external-langpack-build', () => loc.packageLangpacksStream().pipe(gulp.dest('.build/external'))),
task.define('create-external-langpack-vsix-build', () => {
const vsixes = glob.sync('.build/external/langpacks/*/package.json').map(manifestPath => {
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
}).map(element => {
const pkgJson = require(path.join(element.path, 'package.json'));
const vsixDirectory = path.join(root, '.build', 'langpacks');
mkdirp.sync(vsixDirectory);
const packagePath = path.join(vsixDirectory, `${pkgJson.name}-${pkgJson.version}.vsix`);
console.info('Creating vsix for ' + element.path + ' result:' + packagePath);
return vsce.createVSIX({
cwd: element.path,
packagePath: packagePath,
useYarn: true
});
});

return Promise.all(vsixes);
})
));

gulp.task('package-rebuild-extensions', task.series(
task.define('clean-rebuild-extensions', () => ext.cleanRebuildExtensions('.build/extensions')),
task.define('rebuild-extensions-build', () => ext.packageRebuildExtensionsStream().pipe(gulp.dest('.build'))),
build/lib/locFunc.js (new file, 49 lines)
@@ -0,0 +1,49 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.packageLangpacksStream = void 0;
const es = require("event-stream");
const path = require("path");
const fs = require("fs");
const stats_1 = require("./stats");
const File = require("vinyl");
const glob = require("glob");
const rename = require("gulp-rename");
const root = path.dirname(path.dirname(__dirname));
// Modified packageLocalExtensionsStream from extensions.ts, but for langpacks.
function packageLangpacksStream() {
const langpackDescriptions = glob.sync('i18n/*/package.json')
.map(manifestPath => {
const langpackPath = path.dirname(path.join(root, manifestPath));
const langpackName = path.basename(langpackPath);
return { name: langpackName, path: langpackPath };
});
const builtLangpacks = langpackDescriptions.map(langpack => {
return fromLocalNormal(langpack.path)
.pipe(rename(p => p.dirname = `langpacks/${langpack.name}/${p.dirname}`));
});
return es.merge(builtLangpacks);
}
exports.packageLangpacksStream = packageLangpacksStream;
//copied from extensions.
function fromLocalNormal(extensionPath) {
const result = es.through();
const vsce = require('vsce');
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn })
.then(fileNames => {
const files = fileNames
.map(fileName => path.join(extensionPath, fileName))
.map(filePath => new File({
path: filePath,
stat: fs.statSync(filePath),
base: extensionPath,
contents: fs.createReadStream(filePath)
}));
es.readArray(files).pipe(result);
})
.catch(err => result.emit('error', err));
return result.pipe(stats_1.createStatsStream(path.basename(extensionPath)));
}

build/lib/locFunc.ts (new file, 56 lines)
@@ -0,0 +1,56 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/

import * as es from 'event-stream';
import * as path from 'path';
import * as fs from 'fs';
import { createStatsStream } from './stats';
import * as File from 'vinyl';
import { Stream } from 'stream';
import * as glob from 'glob';
import rename = require('gulp-rename');

const root = path.dirname(path.dirname(__dirname));

// Modified packageLocalExtensionsStream from extensions.ts, but for langpacks.
export function packageLangpacksStream(): NodeJS.ReadWriteStream {
const langpackDescriptions = (<string[]>glob.sync('i18n/*/package.json'))
.map(manifestPath => {
const langpackPath = path.dirname(path.join(root, manifestPath));
const langpackName = path.basename(langpackPath);
return { name: langpackName, path: langpackPath };
})

const builtLangpacks = langpackDescriptions.map(langpack => {
return fromLocalNormal(langpack.path)
.pipe(rename(p => p.dirname = `langpacks/${langpack.name}/${p.dirname}`));
});

return es.merge(builtLangpacks);
}

//copied from extensions.
function fromLocalNormal(extensionPath: string): Stream {
const result = es.through();

const vsce = require('vsce') as typeof import('vsce');

vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn })
.then(fileNames => {
const files = fileNames
.map(fileName => path.join(extensionPath, fileName))
.map(filePath => new File({
path: filePath,
stat: fs.statSync(filePath),
base: extensionPath,
contents: fs.createReadStream(filePath) as any
}));

es.readArray(files).pipe(result);
})
.catch(err => result.emit('error', err));

return result.pipe(createStatsStream(path.basename(extensionPath)));
}
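The new `packageLangpacksStream` helper is consumed by the `package-langpacks` gulp task shown earlier (`yarn gulp package-langpacks`). A minimal sketch of wiring the stream into a local gulp task and inspecting the staged output; the task name `stage-langpacks` is hypothetical, while the `i18n/*/package.json` sources and the `.build/external/langpacks/<name>/` layout come from the code above:

```js
// Minimal sketch, assuming the build/lib/locFunc.js module from this change is on disk.
const gulp = require('gulp');
const glob = require('glob');
const loc = require('./lib/locFunc');

// Stage every language pack under .build/external, mirroring 'package-langpacks'.
gulp.task('stage-langpacks', () =>
	loc.packageLangpacksStream().pipe(gulp.dest('.build/external')));

// List what was staged; each langpack ends up in its own folder.
gulp.task('list-langpacks', (done) => {
	glob.sync('.build/external/langpacks/*/package.json')
		.forEach(manifest => console.log('staged langpack:', manifest));
	done();
});
```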
@@ -1307,9 +1307,9 @@ lodash.unescape@4.0.1:
integrity sha1-vyJJiGzlFM2hEvrpIYzcBlIR/Jw=

lodash@^4.17.10, lodash@^4.17.15:
version "4.17.20"
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.20.tgz#b44a9b6297bcb698f1c51a3545a2b3b368d59c52"
integrity sha512-PlhdFcillOINfeV7Ni6oF1TAEayyZBoZ8bcshTHqOYJYlrqzRK5hagpagky5o4HfCzzd1TRkXPMFq6cKk9rGmA==
version "4.17.21"
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c"
integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==

lru-cache@^6.0.0:
version "6.0.0"

@@ -643,9 +643,9 @@ json5@^2.1.2:
minimist "^1.2.5"

lodash@^4.16.4, lodash@^4.17.13:
version "4.17.19"
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.19.tgz#e48ddedbe30b3321783c5b4301fbd353bc1e4a4b"
integrity sha512-JNvd8XER9GQX0v2qJgsaN/mzFCNA5BRe/j8JN9d+tWyGLSodKQHKFicdwNYzWwI3wjRnaKPsGj1XkBjx/F96DQ==
version "4.17.21"
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c"
integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==

make-dir@^2.1.0:
version "2.1.0"

@@ -509,9 +509,9 @@ json5@^2.1.2:
minimist "^1.2.5"

lodash@^4.16.4, lodash@^4.17.13, lodash@^4.17.4:
version "4.17.19"
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.19.tgz#e48ddedbe30b3321783c5b4301fbd353bc1e4a4b"
integrity sha512-JNvd8XER9GQX0v2qJgsaN/mzFCNA5BRe/j8JN9d+tWyGLSodKQHKFicdwNYzWwI3wjRnaKPsGj1XkBjx/F96DQ==
version "4.17.21"
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c"
integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==

make-dir@^2.1.0:
version "2.1.0"
@@ -117,7 +117,8 @@
"os.environ[\"AZDATA_PASSWORD\"] = os.environ[\"AZDATA_NB_VAR_CONTROLLER_PASSWORD\"]\n",
"os.environ[\"KUBECONFIG\"] = controller_kubeconfig\n",
"os.environ[\"KUBECTL_CONTEXT\"] = controller_kubectl_context\n",
"cmd = f'azdata login -e {controller_endpoint} -u {controller_username}'\n",
"endpoint_option = f' -e {controller_endpoint}' if controller_endpoint else \"\"\n",
"cmd = f'azdata login --namespace {arc_data_controller_namespace} -u {controller_username}{endpoint_option}'\n",
"out=run_command()"
],
"metadata": {

@@ -520,7 +520,7 @@
},
{
"name": "azdata",
"version": "20.3.2"
"version": "20.3.4"
}
],
"when": true
@@ -572,6 +572,7 @@
"source": {
"providerId": "arc.controllers",
"variableNames": {
"namespace": "AZDATA_NB_VAR_ARC_DATA_CONTROLLER_NAMESPACE",
"endpoint": "AZDATA_NB_VAR_CONTROLLER_ENDPOINT",
"username": "AZDATA_NB_VAR_CONTROLLER_USERNAME",
"kubeConfig": "AZDATA_NB_VAR_CONTROLLER_KUBECONFIG",
@@ -772,7 +773,7 @@
},
{
"name": "azdata",
"version": "20.3.2"
"version": "20.3.4"
}
],
"when": "true"
@@ -1001,7 +1002,7 @@
},
{
"name": "azdata",
"version": "20.3.2"
"version": "20.3.4"
}
],
"when": "mi-type=arc-mi"
@@ -23,7 +23,6 @@ export const properties = localize('arc.properties', "Properties");
export const settings = localize('arc.settings', "Settings");
export const security = localize('arc.security', "Security");
export const computeAndStorage = localize('arc.computeAndStorage', "Compute + Storage");
export const nodeParameters = localize('arc.nodeParameters', "Node Parameters");
export const coordinatorNodeParameters = localize('arc.coordinatorNodeParameters', "Coordinator Node Parameters");
export const workerNodeParameters = localize('arc.workerNodeParameters', "Worker Node Parameters");
export const compute = localize('arc.compute', "Compute");
@@ -70,7 +69,7 @@ export const addingWorkerNodes = localize('arc.addingWorkerNodes', "adding worke
export const workerNodesDescription = localize('arc.workerNodesDescription', "Expand your server group and scale your database by adding worker nodes.");
export const workerNodesConfigurationInformation = localize('arc.workerNodesConfigurationInformation', "You can configure the number of CPU cores and storage size that will apply to all worker nodes. Adjust the number of CPU cores and memory settings for your server group. To reset the requests and/or limits, pass in empty value.");
export const coordinatorNodeConfigurationInformation = localize('arc.coordinatorNodeConfigurationInformation', "You can configure the number of CPU cores and storage size that will apply to the coordinator node. Adjust the number of CPU cores and memory settings for your server group. To reset the requests and/or limits, pass in empty value.");
export const workerNodesInformation = localize('arc.workerNodeInformation', "In preview it is not possible to reduce the number of worker nodes. Please refer to documentation linked above for more information.");
export const workerNodesInformation = localize('arc.workerNodeInformation', "It is possible to scale in and out your server group by reducing or increasing the number of worker nodes.");
export const vCores = localize('arc.vCores', "vCores");
export const ram = localize('arc.ram', "RAM");
export const refresh = localize('arc.refresh', "Refresh");
@@ -146,7 +145,6 @@ export const databaseName = localize('arc.databaseName', "Database name");
export const enterNewPassword = localize('arc.enterNewPassword', "Enter a new password");
export const confirmNewPassword = localize('arc.confirmNewPassword', "Confirm the new password");
export const learnAboutPostgresClients = localize('arc.learnAboutPostgresClients', "Learn more about Azure PostgreSQL Hyperscale client interfaces");
export const nodeParametersDescription = localize('arc.nodeParametersDescription', " These server parameters of the Coordinator node and the Worker nodes can be set to custom (non-default) values. Search to find parameters.");
export const coordinatorNodeParametersDescription = localize('arc.coordinatorNodeParametersDescription', " These server parameters of the Coordinator node can be set to custom (non-default) values. Search to find parameters.");
export const workerNodesParametersDescription = localize('arc.workerNodesParametersDescription', " These server parameters of the Worker nodes can be set to custom (non-default) values. Search to find parameters.");
export const learnAboutNodeParameters = localize('arc.learnAboutNodeParameters', "Learn more about database engine settings for Azure Arc enabled PostgreSQL Hyperscale");
@@ -181,6 +179,7 @@ export const condition = localize('arc.condition', "Condition");
export const details = localize('arc.details', "Details");
export const lastTransition = localize('arc.lastTransition', "Last transition");
export const noExternalEndpoint = localize('arc.noExternalEndpoint', "No External Endpoint has been configured so this information isn't available.");
export const noWorkerPods = localize('arc.noWorkerPods', "No worker pods in this configuration.");
export const podsReady = localize('arc.podsReady', "pods ready");
export const podsPresent = localize('arc.podsPresent', "Pods Present");
export const podsUsedDescription = localize('arc.podsUsedDescription', "Select a pod in the dropdown below for detailed health information.");
@@ -155,36 +155,18 @@ export class PostgresModel extends ResourceModel {
const provider = azdata.dataprotocol.getProvider<azdata.QueryProvider>(this._connectionProfile!.providerName, azdata.DataProviderType.QueryProvider);
const ownerUri = await azdata.connection.getUriForConnection(this._activeConnectionId);

const engineSettings = await provider.runQueryAndReturn(ownerUri, 'select name, setting, short_desc,min_val, max_val, enumvals, vartype from pg_settings');
if (!engineSettings) {
throw new Error('Could not fetch engine settings');
}

const skippedEngineSettings: String[] = [
'archive_command', 'archive_timeout', 'log_directory', 'log_file_mode', 'log_filename', 'restore_command',
'shared_preload_libraries', 'synchronous_commit', 'ssl', 'unix_socket_permissions', 'wal_level'
];

this.workerNodesEngineSettings = [];

engineSettings.rows.forEach(row => {
let rowValues = row.map(c => c.displayValue);
let name = rowValues.shift();
if (!skippedEngineSettings.includes(name!)) {
let result: EngineSettingsModel = {
parameterName: name,
value: rowValues.shift(),
description: rowValues.shift(),
min: rowValues.shift(),
max: rowValues.shift(),
options: rowValues.shift(),
type: rowValues.shift()
};

this.workerNodesEngineSettings.push(result);
}
});
await this.createCoordinatorEngineSettings(provider, ownerUri, skippedEngineSettings);

const scale = this._config?.spec.scale;
const nodes = (scale?.workers ?? scale?.shards ?? 0);
if (nodes !== 0) {
await this.createWorkerEngineSettings(provider, ownerUri, skippedEngineSettings);
}

this.engineSettingsLastUpdated = new Date();
this._engineSettingsPromise.resolve();
@@ -196,6 +178,64 @@ export class PostgresModel extends ResourceModel {
}
}

private async createCoordinatorEngineSettings(provider: azdata.QueryProvider, ownerUri: string, skip: String[]): Promise<void> {
const engineSettingsCoordinator = await provider.runQueryAndReturn(ownerUri, 'select name, setting, short_desc,min_val, max_val, enumvals, vartype from pg_settings');

this.coordinatorNodeEngineSettings = [];
engineSettingsCoordinator.rows.forEach(row => {
let rowValues = row.map(c => c.displayValue);
let name = rowValues.shift();
if (!skip.includes(name!)) {
let result: EngineSettingsModel = {
parameterName: name,
value: rowValues.shift(),
description: rowValues.shift(),
min: rowValues.shift(),
max: rowValues.shift(),
options: rowValues.shift(),
type: rowValues.shift()
};

this.coordinatorNodeEngineSettings.push(result);
}
});

}

private async createWorkerEngineSettings(provider: azdata.QueryProvider, ownerUri: string, skip: String[]): Promise<void> {

const engineSettingsWorker = await provider.runQueryAndReturn(ownerUri,
`with settings as (select nodename, success, result from run_command_on_workers('select json_agg(pg_settings) from pg_settings') order by success desc, nodename asc)
select * from settings limit case when exists(select 1 from settings where success) then 1 end`);

if (engineSettingsWorker.rows[0][1].displayValue === 'False') {
let errorString = engineSettingsWorker.rows.map(row => row[2].displayValue);
throw new Error(errorString.join('\n'));
}

let engineSettingsWorkerJSON = JSON.parse(engineSettingsWorker.rows[0][2].displayValue);
this.workerNodesEngineSettings = [];

for (let i = 0; i < engineSettingsWorkerJSON.length; i++) {
let rowValues = engineSettingsWorkerJSON[i];
let name = rowValues.name;
if (!skip.includes(name!)) {
let result: EngineSettingsModel = {
parameterName: name,
value: rowValues.setting,
description: rowValues.short_desc,
min: rowValues.min_val,
max: rowValues.max_val,
options: rowValues.enumvals,
type: rowValues.vartype
};

this.workerNodesEngineSettings.push(result);
}
}

}

protected createConnectionProfile(): azdata.IConnectionProfile {
const ipAndPort = parseIpAndPort(this.config?.status.primaryEndpoint || '');
return {
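The worker query above aggregates each worker's `pg_settings` rows into JSON via `run_command_on_workers`, and each element is then copied into the `EngineSettingsModel` shape. A small sketch of that mapping; the sample row values are hypothetical, while the column names, skip-list idea, and target fields come from the code above:

```js
// Minimal sketch of the row-to-model mapping used by createWorkerEngineSettings.
const skip = ['archive_command', 'wal_level']; // abbreviated skip list from the model

const sampleWorkerRow = { // hypothetical pg_settings entry returned by json_agg(pg_settings)
	name: 'max_connections',
	setting: '100',
	short_desc: 'Sets the maximum number of concurrent connections.',
	min_val: '1',
	max_val: '262143',
	enumvals: null,
	vartype: 'integer'
};

function toEngineSettingsModel(row) {
	return {
		parameterName: row.name,
		value: row.setting,
		description: row.short_desc,
		min: row.min_val,
		max: row.max_val,
		options: row.enumvals,
		type: row.vartype
	};
}

const models = [sampleWorkerRow]
	.filter(row => !skip.includes(row.name))
	.map(toEngineSettingsModel);
console.log(models);
```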
@@ -30,6 +30,7 @@ export class ArcControllersOptionsSourceProvider implements rd.IOptionsSourcePro
|
||||
const controller = (await getRegisteredDataControllers(this._treeProvider)).find(ci => ci.label === controllerLabel);
|
||||
throwUnless(controller !== undefined, loc.noControllerInfoFound(controllerLabel));
|
||||
switch (variableName) {
|
||||
case 'namespace': return controller.info.namespace || '';
|
||||
case 'endpoint': return controller.info.endpoint || '';
|
||||
case 'username': return controller.info.username;
|
||||
case 'kubeConfig': return controller.info.kubeConfigFilePath;
|
||||
@@ -50,6 +51,7 @@ export class ArcControllersOptionsSourceProvider implements rd.IOptionsSourcePro
|
||||
|
||||
public getIsPassword(variableName: string): boolean {
|
||||
switch (variableName) {
|
||||
case 'namespace': return false;
|
||||
case 'endpoint': return false;
|
||||
case 'username': return false;
|
||||
case 'kubeConfig': return false;
|
||||
|
||||
@@ -33,6 +33,7 @@ export class FakeAzdataApi implements azdataExt.IAzdataApi {
|
||||
adminPassword?: boolean,
|
||||
coresLimit?: string,
|
||||
coresRequest?: string,
|
||||
coordinatorEngineSettings?: string,
|
||||
engineSettings?: string,
|
||||
extensions?: string,
|
||||
memoryLimit?: string,
|
||||
@@ -40,6 +41,7 @@ export class FakeAzdataApi implements azdataExt.IAzdataApi {
|
||||
noWait?: boolean,
|
||||
port?: number,
|
||||
replaceEngineSettings?: boolean,
|
||||
workerEngineSettings?: string,
|
||||
workers?: number
|
||||
},
|
||||
_additionalEnvVars?: azdataExt.AdditionalEnvVars
|
||||
|
||||
@@ -19,6 +19,11 @@ import { AzureArcTreeDataProvider } from '../../ui/tree/azureArcTreeDataProvider
import { FakeControllerModel } from '../mocks/fakeControllerModel';
import { FakeAzdataApi } from '../mocks/fakeAzdataApi';

export const FakeStorageVolume: azdataExt.StorageVolume[] = [{
	className: '',
	size: ''
}];

export const FakePostgresServerShowOutput: azdataExt.AzdataOutput<azdataExt.PostgresServerShowResult> = {
	logs: [],
	stdout: [],
@@ -39,7 +44,11 @@ export const FakePostgresServerShowOutput: azdataExt.AzdataOutput<azdataExt.Post
	engine: {
		extensions: [{ name: '' }],
		settings: {
			default: { ['']: '' }
			default: { ['']: '' },
			roles: {
				coordinator: { ['']: '' },
				worker: { ['']: '' }
			}
		},
		version: ''
	},
@@ -553,7 +562,7 @@ describe('PostgresModel', function (): void {
	sinon.stub(azdata.dataprotocol, 'getProvider').returns(providerMock.object);

	await postgresModel.getEngineSettings();
	should(postgresModel.workerNodesEngineSettings.pop()).be.match(engineSettingsModelCompare);
	should(postgresModel.coordinatorNodeEngineSettings.pop()).be.match(engineSettingsModelCompare);
});

});
@@ -244,6 +244,7 @@ export class PostgresComputeAndStoragePage extends DashboardPage {
	// Worker node count
	this.workerCountBox = this.modelView.modelBuilder.inputBox().withProps({
		readOnly: false,
		min: 0,
		inputType: 'number',
		placeHolder: loc.loading,
		required: true
@@ -544,7 +545,6 @@ export class PostgresComputeAndStoragePage extends DashboardPage {
	let scale = this._postgresModel.config?.spec.scale;
	this.currentConfiguration.workers = scale?.workers ?? scale?.shards ?? 0;

	this.workerCountBox.min = this.currentConfiguration.workers;
	this.workerCountBox.placeHolder = '';
	this.workerCountBox.value = this.currentConfiguration.workers.toString();
	this.saveArgs.workers = undefined;
@@ -35,33 +35,28 @@ export class PostgresCoordinatorNodeParametersPage extends PostgresParametersPag
	return this._postgresModel.coordinatorNodeEngineSettings;
}

protected async saveParameterEdits(): Promise<void> {
	/* TODO add correct azdata call for editing coordinator parameters
	await this._azdataApi.azdata.arc.postgres.server.edit(
		this._postgresModel.info.name,
		{ engineSettings: engineSettings.toString() },
		this._postgresModel.controllerModel.azdataAdditionalEnvVars,
		session);
	*/
protected async saveParameterEdits(engineSettings: string): Promise<void> {
	await this._azdataApi.azdata.arc.postgres.server.edit(
		this._postgresModel.info.name,
		{ coordinatorEngineSettings: engineSettings },
		this._postgresModel.controllerModel.azdataAdditionalEnvVars,
		this._postgresModel.controllerModel.controllerContext);

}

protected async resetAllParameters(): Promise<void> {
	/* TODO add correct azdata call for editing coordinator parameters
	await this._azdataApi.azdata.arc.postgres.server.edit(
		this._postgresModel.info.name,
		{ engineSettings: `''`, replaceEngineSettings: true },
		this._postgresModel.controllerModel.azdataAdditionalEnvVars,
		session);
	*/
	await this._azdataApi.azdata.arc.postgres.server.edit(
		this._postgresModel.info.name,
		{ coordinatorEngineSettings: `''`, replaceEngineSettings: true },
		this._postgresModel.controllerModel.azdataAdditionalEnvVars,
		this._postgresModel.controllerModel.controllerContext);
}

protected async resetParameter(): Promise<void> {
	/* TODO add correct azdata call for editing coordinator parameters
	await this._azdataApi.azdata.arc.postgres.server.edit(
		this._postgresModel.info.name,
		{ engineSettings: parameterName + '=' },
		this._postgresModel.controllerModel.azdataAdditionalEnvVars,
		session);
	*/
protected async resetParameter(parameterName: string): Promise<void> {
	await this._azdataApi.azdata.arc.postgres.server.edit(
		this._postgresModel.info.name,
		{ coordinatorEngineSettings: parameterName + '=' },
		this._postgresModel.controllerModel.azdataAdditionalEnvVars,
		this._postgresModel.controllerModel.controllerContext);
}
}
@@ -17,6 +17,7 @@ import { PostgresComputeAndStoragePage } from './postgresComputeAndStoragePage';
import { PostgresWorkerNodeParametersPage } from './postgresWorkerNodeParametersPage';
import { PostgresPropertiesPage } from './postgresPropertiesPage';
import { PostgresResourceHealthPage } from './postgresResourceHealthPage';
import { PostgresCoordinatorNodeParametersPage } from './postgresCoordinatorNodeParametersPage';

export class PostgresDashboard extends Dashboard {
	constructor(private _context: vscode.ExtensionContext, private _controllerModel: ControllerModel, private _postgresModel: PostgresModel) {
@@ -36,8 +37,7 @@ export class PostgresDashboard extends Dashboard {
	const connectionStringsPage = new PostgresConnectionStringsPage(modelView, this.dashboard, this._postgresModel);
	const computeAndStoragePage = new PostgresComputeAndStoragePage(modelView, this.dashboard, this._postgresModel);
	const propertiesPage = new PostgresPropertiesPage(modelView, this.dashboard, this._controllerModel, this._postgresModel);
	// TODO Add dashboard once backend is able to be connected for per role server parameter edits.
	// const coordinatorNodeParametersPage = new PostgresCoordinatorNodeParametersPage(modelView, this._postgresModel);
	const coordinatorNodeParametersPage = new PostgresCoordinatorNodeParametersPage(modelView, this.dashboard, this._postgresModel);
	const workerNodeParametersPage = new PostgresWorkerNodeParametersPage(modelView, this.dashboard, this._postgresModel);
	const diagnoseAndSolveProblemsPage = new PostgresDiagnoseAndSolveProblemsPage(modelView, this.dashboard, this._context, this._controllerModel, this._postgresModel);
	const supportRequestPage = new PostgresSupportRequestPage(modelView, this.dashboard, this._controllerModel, this._postgresModel);
@@ -51,6 +51,7 @@ export class PostgresDashboard extends Dashboard {
	propertiesPage.tab,
	connectionStringsPage.tab,
	computeAndStoragePage.tab,
	coordinatorNodeParametersPage.tab,
	workerNodeParametersPage.tab
]
},
@@ -269,6 +269,13 @@ export abstract class PostgresParametersPage extends DashboardPage {

this.disposables.push(
	this.connectToServerButton.onDidClick(async () => {
		let scale = this._postgresModel.config?.spec.scale;
		let nodes = (scale?.workers ?? scale?.shards ?? 0);
		if (this.title === loc.workerNodeParameters && nodes === 0) {
			vscode.window.showInformationMessage(loc.noWorkerPods);
			return;
		}

		this.connectToServerButton!.enabled = false;
		if (!vscode.extensions.getExtension(loc.postgresExtension)) {
			const response = await vscode.window.showErrorMessage(loc.missingExtension('PostgreSQL'), loc.yes, loc.no);
@@ -437,11 +444,13 @@ export abstract class PostgresParametersPage extends DashboardPage {
let valueComponent: azdata.Component;
if (engineSetting.type === 'enum') {
	// If type is enum, component should be drop down menu
	let options = engineSetting.options?.slice(1, -1).split(',');
	let values: string[] = [];
	options!.forEach(option => {
		values.push(option.slice(option.indexOf('"') + 1, -1));
	});
	if (typeof engineSetting.options === 'string') {
		let options = engineSetting.options?.slice(1, -1).split(',');
		values = options.map(option => option.slice(option.indexOf('"') + 1, -1));
	} else if (engineSetting.options) {
		values = engineSetting.options;
	}

	let valueBox = this.modelView.modelBuilder.dropDown().withProps({
		values: values,
@@ -16,13 +16,11 @@ export class PostgresWorkerNodeParametersPage extends PostgresParametersPage {
}

protected get title(): string {
	// TODO update to loc.workerNodeParameters
	return loc.nodeParameters;
	return loc.workerNodeParameters;
}

protected get id(): string {
	// TODO update to 'postgres-worker-node-parameters'
	return 'postgres-nodes-parameters';
	return 'postgres-worker-node-parameters';
}

protected get icon(): { dark: string; light: string; } {
@@ -30,8 +28,7 @@ export class PostgresWorkerNodeParametersPage extends PostgresParametersPage {
}

protected get description(): string {
	// TODO update to loc.workerNodesParametersDescription
	return loc.nodeParametersDescription;
	return loc.workerNodesParametersDescription;
}


@@ -42,7 +39,7 @@ export class PostgresWorkerNodeParametersPage extends PostgresParametersPage {
protected async saveParameterEdits(engineSettings: string): Promise<void> {
	await this._azdataApi.azdata.arc.postgres.server.edit(
		this._postgresModel.info.name,
		{ engineSettings: engineSettings },
		{ workerEngineSettings: engineSettings },
		this._postgresModel.controllerModel.azdataAdditionalEnvVars,
		this._postgresModel.controllerModel.controllerContext);
}
@@ -50,7 +47,7 @@ export class PostgresWorkerNodeParametersPage extends PostgresParametersPage {
protected async resetAllParameters(): Promise<void> {
	await this._azdataApi.azdata.arc.postgres.server.edit(
		this._postgresModel.info.name,
		{ engineSettings: `''`, replaceEngineSettings: true },
		{ workerEngineSettings: `''`, replaceEngineSettings: true },
		this._postgresModel.controllerModel.azdataAdditionalEnvVars,
		this._postgresModel.controllerModel.controllerContext);
}
@@ -58,7 +55,7 @@ export class PostgresWorkerNodeParametersPage extends PostgresParametersPage {
protected async resetParameter(parameterName: string): Promise<void> {
	await this._azdataApi.azdata.arc.postgres.server.edit(
		this._postgresModel.info.name,
		{ engineSettings: parameterName + '=' },
		{ workerEngineSettings: parameterName + '=' },
		this._postgresModel.controllerModel.azdataAdditionalEnvVars,
		this._postgresModel.controllerModel.controllerContext);
}
@@ -847,9 +847,9 @@ lodash.get@^4.4.2:
|
||||
integrity sha1-LRd/ZS+jHpObRDjVNBSZ36OCXpk=
|
||||
|
||||
lodash@^4.16.4, lodash@^4.17.13, lodash@^4.17.4:
|
||||
version "4.17.19"
|
||||
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.19.tgz#e48ddedbe30b3321783c5b4301fbd353bc1e4a4b"
|
||||
integrity sha512-JNvd8XER9GQX0v2qJgsaN/mzFCNA5BRe/j8JN9d+tWyGLSodKQHKFicdwNYzWwI3wjRnaKPsGj1XkBjx/F96DQ==
|
||||
version "4.17.21"
|
||||
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c"
|
||||
integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==
|
||||
|
||||
make-dir@^2.1.0:
|
||||
version "2.1.0"
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"name": "azdata",
|
||||
"displayName": "%azdata.displayName%",
|
||||
"description": "%azdata.description%",
|
||||
"version": "0.6.2",
|
||||
"version": "0.6.4",
|
||||
"publisher": "Microsoft",
|
||||
"preview": true,
|
||||
"license": "https://raw.githubusercontent.com/Microsoft/azuredatastudio/main/LICENSE.txt",
|
||||
|
||||
@@ -123,6 +123,7 @@ export function getAzdataApi(localAzdataDiscovered: Promise<IAzdataTool | undefi
	adminPassword?: boolean;
	coresLimit?: string;
	coresRequest?: string;
	coordinatorEngineSettings?: string;
	engineSettings?: string;
	extensions?: string;
	memoryLimit?: string;
@@ -130,6 +131,7 @@ export function getAzdataApi(localAzdataDiscovered: Promise<IAzdataTool | undefi
	noWait?: boolean;
	port?: number;
	replaceEngineSettings?: boolean;
	workerEngineSettings?: string;
	workers?: number;
},
additionalEnvVars?: azdataExt.AdditionalEnvVars,

@@ -20,7 +20,7 @@ import * as loc from './localizedConstants';
/**
 * The minimum required azdata CLI version for this extension to function properly
 */
export const MIN_AZDATA_VERSION = new SemVer('20.3.3');
export const MIN_AZDATA_VERSION = new SemVer('20.3.4');

export const enum AzdataDeployOption {
	dontPrompt = 'dontPrompt',
@@ -125,6 +125,7 @@ export class AzdataTool implements azdataExt.IAzdataApi {
	adminPassword?: boolean,
	coresLimit?: string,
	coresRequest?: string,
	coordinatorEngineSettings?: string,
	engineSettings?: string,
	extensions?: string,
	memoryLimit?: string,
@@ -132,6 +133,7 @@ export class AzdataTool implements azdataExt.IAzdataApi {
	noWait?: boolean,
	port?: number,
	replaceEngineSettings?: boolean,
	workerEngineSettings?: string,
	workers?: number
},
additionalEnvVars?: azdataExt.AdditionalEnvVars,
@@ -140,6 +142,7 @@ export class AzdataTool implements azdataExt.IAzdataApi {
if (args.adminPassword) { argsArray.push('--admin-password'); }
if (args.coresLimit) { argsArray.push('--cores-limit', args.coresLimit); }
if (args.coresRequest) { argsArray.push('--cores-request', args.coresRequest); }
if (args.coordinatorEngineSettings) { argsArray.push('--coordinator-engine-settings', args.coordinatorEngineSettings); }
if (args.engineSettings) { argsArray.push('--engine-settings', args.engineSettings); }
if (args.extensions) { argsArray.push('--extensions', args.extensions); }
if (args.memoryLimit) { argsArray.push('--memory-limit', args.memoryLimit); }
@@ -147,7 +150,8 @@ export class AzdataTool implements azdataExt.IAzdataApi {
if (args.noWait) { argsArray.push('--no-wait'); }
if (args.port) { argsArray.push('--port', args.port.toString()); }
if (args.replaceEngineSettings) { argsArray.push('--replace-engine-settings'); }
if (args.workers) { argsArray.push('--workers', args.workers.toString()); }
if (args.workerEngineSettings) { argsArray.push('--worker-engine-settings', args.workerEngineSettings); }
if (args.workers !== undefined) { argsArray.push('--workers', args.workers.toString()); }
return this.executeCommand<void>(argsArray, additionalEnvVars, azdataContext);
}
}
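To make the flag assembly above concrete, here is a hedged sketch of the flags those if-chains would append for one hypothetical edit request; the values are illustrative only, while the flag names come from the hunk above:

```typescript
// Hypothetical input to the postgres server edit call, and the CLI flags the
// checks above would append for it (sample values are illustrative only).
const editArgs = {
	coordinatorEngineSettings: 'max_connections=200',
	workerEngineSettings: 'work_mem=8MB',
	workers: 3
};
// Expected flags, in the order the checks run above:
// ['--coordinator-engine-settings', 'max_connections=200',
//  '--worker-engine-settings', 'work_mem=8MB',
//  '--workers', '3']
```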
@@ -189,7 +189,11 @@ declare module 'azdata-ext' {
	name: string // "citus"
}],
settings: {
	default: { [key: string]: string } // { "max_connections": "101", "work_mem": "4MB" }
	default: { [key: string]: string }, // { "max_connections": "101", "work_mem": "4MB" }
	roles: {
		coordinator: { [key: string]: string },
		worker: { [key: string]: string }
	}
},
version: string // "12"
},
@@ -289,6 +293,7 @@ declare module 'azdata-ext' {
	adminPassword?: boolean,
	coresLimit?: string,
	coresRequest?: string,
	coordinatorEngineSettings?: string,
	engineSettings?: string,
	extensions?: string,
	memoryLimit?: string,
@@ -296,6 +301,7 @@ declare module 'azdata-ext' {
	noWait?: boolean,
	port?: number,
	replaceEngineSettings?: boolean,
	workerEngineSettings?: string,
	workers?: number
},
additionalEnvVars?: AdditionalEnvVars,
@@ -16,6 +16,8 @@ const externals = {
|
||||
'bufferutil': 'commonjs bufferutil',
|
||||
'utf-8-validate': 'commonjs utf-8-validate',
|
||||
'keytar': 'commonjs keytar',
|
||||
'@azure/arm-subscriptions': 'commonjs @azure/arm-subscriptions',
|
||||
'@azure/arm-resourcegraph': 'commonjs @azure/arm-resourcegraph'
|
||||
};
|
||||
|
||||
// conditionally add ws if we are going to be running in a node environment
|
||||
|
||||
@@ -73,6 +73,10 @@ export abstract class AzureAuth implements vscode.Disposable {
|
||||
this.resources = this.resources.concat(this.metadata.settings.azureDevOpsResource);
|
||||
}
|
||||
|
||||
if (this.metadata.settings.azureLogAnalyticsResource) {
|
||||
this.resources = this.resources.concat(this.metadata.settings.azureLogAnalyticsResource);
|
||||
}
|
||||
|
||||
this.scopes = [...this.metadata.settings.scopes];
|
||||
this.scopesString = this.scopes.join(' ');
|
||||
}
|
||||
|
||||
@@ -17,7 +17,8 @@ const enum SettingIds {
|
||||
sql = 'sql',
|
||||
ossrdbms = 'ossrdbms',
|
||||
vault = 'vault',
|
||||
ado = 'ado'
|
||||
ado = 'ado',
|
||||
ala = 'ala'
|
||||
}
|
||||
|
||||
const publicAzureSettings: ProviderSettings = {
|
||||
@@ -68,6 +69,11 @@ const publicAzureSettings: ProviderSettings = {
|
||||
endpoint: '499b84ac-1321-427f-aa17-267ca6975798',
|
||||
azureResourceId: AzureResource.AzureDevOps,
|
||||
},
|
||||
azureLogAnalyticsResource: {
|
||||
id: SettingIds.ala,
|
||||
endpoint: 'https://api.loganalytics.io',
|
||||
azureResourceId: AzureResource.AzureLogAnalytics,
|
||||
},
|
||||
redirectUri: 'https://vscode-redirect.azurewebsites.net/',
|
||||
scopes: [
|
||||
'openid', 'email', 'profile', 'offline_access',
|
||||
@@ -117,6 +123,11 @@ const usGovAzureSettings: ProviderSettings = {
|
||||
endpoint: 'https://vault.usgovcloudapi.net',
|
||||
azureResourceId: AzureResource.AzureKeyVault
|
||||
},
|
||||
azureLogAnalyticsResource: {
|
||||
id: SettingIds.ala,
|
||||
endpoint: 'https://api.loganalytics.us',
|
||||
azureResourceId: AzureResource.AzureLogAnalytics,
|
||||
},
|
||||
redirectUri: 'https://vscode-redirect.azurewebsites.net/',
|
||||
scopes: [
|
||||
'openid', 'email', 'profile', 'offline_access',
|
||||
@@ -165,6 +176,11 @@ const usNatAzureSettings: ProviderSettings = {
|
||||
endpoint: 'https://vault.cloudapi.eaglex.ic.gov',
|
||||
azureResourceId: AzureResource.AzureKeyVault
|
||||
},
|
||||
azureLogAnalyticsResource: {
|
||||
id: SettingIds.ala,
|
||||
endpoint: 'https://api.loganalytics.azure.eaglex.ic.gov',
|
||||
azureResourceId: AzureResource.AzureLogAnalytics,
|
||||
},
|
||||
redirectUri: 'https://vscode-redirect.azurewebsites.net/',
|
||||
scopes: [
|
||||
'openid', 'email', 'profile', 'offline_access',
|
||||
@@ -237,6 +253,11 @@ const chinaAzureSettings: ProviderSettings = {
|
||||
endpoint: 'https://vault.azure.cn',
|
||||
azureResourceId: AzureResource.AzureKeyVault
|
||||
},
|
||||
azureLogAnalyticsResource: {
|
||||
id: SettingIds.ala,
|
||||
endpoint: 'https://api.loganalytics.azure.cn',
|
||||
azureResourceId: AzureResource.AzureLogAnalytics,
|
||||
},
|
||||
redirectUri: 'https://vscode-redirect.azurewebsites.net/'
|
||||
|
||||
}
|
||||
|
||||
@@ -91,6 +91,32 @@ declare module 'azureResource' {
|
||||
}
|
||||
|
||||
export interface AzureSqlManagedInstance extends AzureGraphResource {
|
||||
sku: {
|
||||
capacity: number;
|
||||
family: string;
|
||||
name: string;
|
||||
tier: 'GeneralPurpose' | 'BusinessCritical';
|
||||
},
|
||||
properties: {
|
||||
provisioningState: string,
|
||||
storageAccountType: string,
|
||||
maintenanceConfigurationId: string,
|
||||
state: string,
|
||||
licenseType: string,
|
||||
zoneRedundant: false,
|
||||
fullyQualifiedDomainName: string,
|
||||
collation: string,
|
||||
administratorLogin: string,
|
||||
minimalTlsVersion: string,
|
||||
subnetId: string,
|
||||
publicDataEndpointEnabled: boolean,
|
||||
storageSizeInGB: number,
|
||||
timezoneId: string,
|
||||
proxyOverride: string,
|
||||
vCores: number,
|
||||
dnsZone: string,
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export interface ManagedDatabase {
|
||||
|
||||
@@ -181,17 +181,22 @@ export function registerAzureResourceCommands(appContext: AppContext, azureViewT
|
||||
vscode.commands.executeCommand('workbench.actions.modal.linkedAccount');
|
||||
});
|
||||
|
||||
vscode.commands.registerCommand('azure.resource.connectsqlserver', async (node?: TreeNode) => {
|
||||
vscode.commands.registerCommand('azure.resource.connectsqlserver', async (node?: TreeNode | azdata.ObjectExplorerContext) => {
|
||||
if (!node) {
|
||||
return;
|
||||
}
|
||||
|
||||
const treeItem: azdata.TreeItem = await node.getTreeItem();
|
||||
if (!treeItem.payload) {
|
||||
return;
|
||||
let connectionProfile: azdata.IConnectionProfile = undefined;
|
||||
if (node instanceof TreeNode) {
|
||||
const treeItem: azdata.TreeItem = await node.getTreeItem();
|
||||
if (!treeItem.payload) {
|
||||
return;
|
||||
}
|
||||
// Ensure connection is saved to the Connections list, then open connection dialog
|
||||
connectionProfile = Object.assign({}, treeItem.payload, { saveProfile: true });
|
||||
} else if (node.isConnectionNode) {
|
||||
connectionProfile = Object.assign({}, node.connectionProfile, { saveProfile: true });
|
||||
}
|
||||
// Ensure connection is saved to the Connections list, then open connection dialog
|
||||
let connectionProfile = Object.assign({}, treeItem.payload, { saveProfile: true });
|
||||
|
||||
const conn = await azdata.connection.openConnectionDialog(undefined, connectionProfile, { saveConnection: true, showDashboard: true });
|
||||
if (conn) {
|
||||
vscode.commands.executeCommand('workbench.view.connections');
|
||||
|
||||
@@ -103,7 +103,7 @@ export class AzureTerminalService implements IAzureTerminalService {
|
||||
}
|
||||
}
|
||||
|
||||
const shells = [new ShellType('PowerShell', 'pwsh'), new ShellType('Bash', 'bash'),];
|
||||
const shells = [new ShellType('PowerShell', 'pwsh'), new ShellType('Bash', 'bash')];
|
||||
const idx = shells.findIndex(s => s.value === preferredShell);
|
||||
|
||||
const prefShell = shells.splice(idx, 1);
|
||||
@@ -157,7 +157,7 @@ class AzureTerminal implements vscode.Pseudoterminal {
|
||||
}
|
||||
|
||||
async open(initialDimensions: vscode.TerminalDimensions): Promise<void> {
|
||||
this.setDimensions(initialDimensions);
|
||||
await this.setDimensions(initialDimensions);
|
||||
}
|
||||
|
||||
close(): void {
|
||||
@@ -167,14 +167,19 @@ class AzureTerminal implements vscode.Pseudoterminal {
|
||||
this.socket.removeAllListeners('message');
|
||||
this.socket.removeAllListeners('close');
|
||||
|
||||
this.socket.terminate();
|
||||
this.socket.close();
|
||||
|
||||
if (this.intervalTimer) {
|
||||
clearInterval(this.intervalTimer);
|
||||
}
|
||||
}
|
||||
|
||||
private areSameDimensions(oldDimensions: vscode.TerminalDimensions | undefined, newDimensions: vscode.TerminalDimensions): boolean {
|
||||
return oldDimensions?.columns === newDimensions.columns && oldDimensions?.rows === newDimensions.rows;
|
||||
}
|
||||
|
||||
async setDimensions(dimensions: vscode.TerminalDimensions): Promise<void> {
|
||||
if (!dimensions) {
|
||||
if (!dimensions || this.areSameDimensions(this.terminalDimensions, dimensions)) {
|
||||
return;
|
||||
}
|
||||
this.terminalDimensions = dimensions;
|
||||
|
||||
extensions/azurecore/src/azurecore.d.ts
@@ -118,6 +118,11 @@ declare module 'azurecore' {
|
||||
*/
|
||||
azureDevOpsResource?: Resource;
|
||||
|
||||
/**
|
||||
* Information that describes the Azure Log Analytics resource
|
||||
*/
|
||||
azureLogAnalyticsResource?: Resource;
|
||||
|
||||
/**
|
||||
* A list of tenant IDs to authenticate against. If defined, then these IDs will be used
|
||||
* instead of querying the tenants endpoint of the armResource
|
||||
|
||||
@@ -879,9 +879,9 @@ lodash.get@^4.4.2:
|
||||
integrity sha1-LRd/ZS+jHpObRDjVNBSZ36OCXpk=
|
||||
|
||||
lodash@^4.16.4, lodash@^4.17.13, lodash@^4.17.4:
|
||||
version "4.17.19"
|
||||
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.19.tgz#e48ddedbe30b3321783c5b4301fbd353bc1e4a4b"
|
||||
integrity sha512-JNvd8XER9GQX0v2qJgsaN/mzFCNA5BRe/j8JN9d+tWyGLSodKQHKFicdwNYzWwI3wjRnaKPsGj1XkBjx/F96DQ==
|
||||
version "4.17.21"
|
||||
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c"
|
||||
integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==
|
||||
|
||||
make-dir@^2.1.0:
|
||||
version "2.1.0"
|
||||
|
||||
@@ -248,7 +248,7 @@
|
||||
},
|
||||
{
|
||||
"name": "azdata-old",
|
||||
"version": "20.3.2"
|
||||
"version": "20.3.4"
|
||||
}
|
||||
],
|
||||
"when": "target=new-aks&&version=bdc2019"
|
||||
@@ -266,7 +266,7 @@
|
||||
},
|
||||
{
|
||||
"name": "azdata-old",
|
||||
"version": "20.3.2"
|
||||
"version": "20.3.4"
|
||||
}
|
||||
],
|
||||
"when": "target=existing-aks&&version=bdc2019"
|
||||
@@ -284,7 +284,7 @@
|
||||
},
|
||||
{
|
||||
"name": "azdata-old",
|
||||
"version": "20.3.2"
|
||||
"version": "20.3.4"
|
||||
}
|
||||
],
|
||||
"when": "target=existing-kubeadm&&version=bdc2019"
|
||||
@@ -302,7 +302,7 @@
|
||||
},
|
||||
{
|
||||
"name": "azdata-old",
|
||||
"version": "20.3.2"
|
||||
"version": "20.3.4"
|
||||
}
|
||||
],
|
||||
"when": "target=existing-aro&&version=bdc2019"
|
||||
@@ -320,7 +320,7 @@
|
||||
},
|
||||
{
|
||||
"name": "azdata-old",
|
||||
"version": "20.3.2"
|
||||
"version": "20.3.4"
|
||||
}
|
||||
],
|
||||
"when": "target=existing-openshift&&version=bdc2019"
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
"extends": "../shared.tsconfig.json",
|
||||
"compileOnSave": true,
|
||||
"compilerOptions": {
|
||||
"outDir": "./out",
|
||||
"module": "commonjs",
|
||||
"target": "es6",
|
||||
"lib": [
|
||||
|
||||
@@ -710,9 +710,9 @@ jsprim@^1.2.2:
|
||||
verror "1.10.0"
|
||||
|
||||
lodash@^4.16.4, lodash@^4.17.13, lodash@^4.17.4:
|
||||
version "4.17.19"
|
||||
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.19.tgz#e48ddedbe30b3321783c5b4301fbd353bc1e4a4b"
|
||||
integrity sha512-JNvd8XER9GQX0v2qJgsaN/mzFCNA5BRe/j8JN9d+tWyGLSodKQHKFicdwNYzWwI3wjRnaKPsGj1XkBjx/F96DQ==
|
||||
version "4.17.21"
|
||||
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c"
|
||||
integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==
|
||||
|
||||
make-dir@^2.1.0:
|
||||
version "2.1.0"
|
||||
|
||||
@@ -690,9 +690,9 @@ lodash.get@^4.4.2:
|
||||
integrity sha1-LRd/ZS+jHpObRDjVNBSZ36OCXpk=
|
||||
|
||||
lodash@^4.16.4, lodash@^4.17.13, lodash@^4.17.4:
|
||||
version "4.17.19"
|
||||
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.19.tgz#e48ddedbe30b3321783c5b4301fbd353bc1e4a4b"
|
||||
integrity sha512-JNvd8XER9GQX0v2qJgsaN/mzFCNA5BRe/j8JN9d+tWyGLSodKQHKFicdwNYzWwI3wjRnaKPsGj1XkBjx/F96DQ==
|
||||
version "4.17.21"
|
||||
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c"
|
||||
integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==
|
||||
|
||||
make-dir@^2.1.0:
|
||||
version "2.1.0"
|
||||
|
||||
@@ -18,8 +18,8 @@ export const OpenWorkspace = localize('dataworkspace.openWorkspace', "Open Works
|
||||
export const CreateWorkspaceConfirmation = localize('dataworkspace.createWorkspaceConfirmation', "A workspace will be created and opened in order to open project. Azure Data Studio will restart and if there is a folder currently open, it will be closed.");
|
||||
export const EnterWorkspaceConfirmation = localize('dataworkspace.enterWorkspaceConfirmation', "To open this workspace, Azure Data Studio will restart. If there is a workspace or folder currently open, it will be closed.");
|
||||
export const WorkspaceContainsNotAddedProjects = localize('dataworkspace.workspaceContainsNotAddedProjects', "The current workspace contains one or more projects that have not been added to the workspace. Use the 'Open existing' dialog to add projects to the projects pane.");
|
||||
export const LaunchOpenExisitingDialog = localize('dataworkspace.launchOpenExistingDialog', "Launch Open existing dialog");
|
||||
export const DoNotShowAgain = localize('dataworkspace.doNotShowAgain', "Do not show again");
|
||||
export const LaunchOpenExisitingDialog = localize('dataworkspace.launchOpenExistingDialog', "Launch 'Open Existing' Dialog");
|
||||
export const DoNotAskAgain = localize('dataworkspace.doNotAskAgain', "Don't Ask Again");
|
||||
export const ProjectsFailedToLoad = localize('dataworkspace.projectsFailedToLoad', "Some projects failed to load. To view more details, [open the developer console](command:workbench.action.toggleDevTools)");
|
||||
export const fileDoesNotExist = (name: string): string => { return localize('fileDoesNotExist', "File '{0}' doesn't exist", name); };
|
||||
export const projectNameNull = localize('projectNameNull', "Project name is null");
|
||||
|
||||
@@ -80,7 +80,7 @@ declare module 'dataworkspace' {
|
||||
/**
|
||||
* Gets the project actions to be placed on the dashboard toolbar
|
||||
*/
|
||||
readonly projectActions: (IProjectAction | IProjectActionGroup)[];
|
||||
readonly projectToolbarActions: (IProjectAction | IProjectActionGroup)[];
|
||||
|
||||
/**
|
||||
* Gets the project image to be used as background in dashboard container
|
||||
|
||||
@@ -62,7 +62,7 @@ export class ProjectDashboard {
|
||||
}
|
||||
|
||||
private createToolbarContainer(projectFilePath: string): azdata.ToolbarContainer {
|
||||
const projectActions: (IProjectAction | IProjectActionGroup)[] = this.projectProvider!.projectActions;
|
||||
const projectActions: (IProjectAction | IProjectActionGroup)[] = this.projectProvider!.projectToolbarActions;
|
||||
|
||||
// Add actions as buttons
|
||||
const buttons: azdata.ToolbarComponent[] = [];
|
||||
|
||||
@@ -206,11 +206,11 @@ export class WorkspaceService implements IWorkspaceService {
|
||||
}
|
||||
|
||||
if (containsNotAddedProject) {
|
||||
const result = await vscode.window.showInformationMessage(constants.WorkspaceContainsNotAddedProjects, constants.LaunchOpenExisitingDialog, constants.DoNotShowAgain);
|
||||
const result = await vscode.window.showInformationMessage(constants.WorkspaceContainsNotAddedProjects, constants.LaunchOpenExisitingDialog, constants.DoNotAskAgain);
|
||||
if (result === constants.LaunchOpenExisitingDialog) {
|
||||
// open settings
|
||||
await vscode.commands.executeCommand('projects.openExisting');
|
||||
} else if (result === constants.DoNotShowAgain) {
|
||||
} else if (result === constants.DoNotAskAgain) {
|
||||
await config.update(constants.showNotAddedProjectsMessageKey, false, true);
|
||||
}
|
||||
|
||||
|
||||
@@ -32,7 +32,7 @@ export function createProjectProvider(projectTypes: IProjectType[], projectActio
|
||||
createProject: (name: string, location: vscode.Uri, projectTypeId: string): Promise<vscode.Uri> => {
|
||||
return Promise.resolve(location);
|
||||
},
|
||||
projectActions: projectActions,
|
||||
projectToolbarActions: projectActions,
|
||||
getDashboardComponents: (projectFile: string): IDashboardTable[] => {
|
||||
return dashboardComponents;
|
||||
}
|
||||
|
||||
@@ -427,7 +427,7 @@ suite('WorkspaceService Tests', function (): void {
|
||||
description: '',
|
||||
icon: ''
|
||||
}]);
|
||||
const infoMessageStub = sinon.stub(vscode.window, 'showInformationMessage').resolves(<any>constants.DoNotShowAgain);
|
||||
const infoMessageStub = sinon.stub(vscode.window, 'showInformationMessage').resolves(<any>constants.DoNotAskAgain);
|
||||
const getProjectsInwWorkspaceFolderStub = sinon.stub(service, 'getAllProjectsInFolder').resolves([vscode.Uri.file('abc.sqlproj').fsPath, vscode.Uri.file('folder1/abc1.sqlproj').fsPath]);
|
||||
|
||||
await service.checkForProjectsNotAddedToWorkspace();
|
||||
|
||||
@@ -91,7 +91,7 @@ suite('workspaceTreeDataProvider Tests', function (): void {
|
||||
createProject: (name: string, location: vscode.Uri): Promise<vscode.Uri> => {
|
||||
return Promise.resolve(location);
|
||||
},
|
||||
projectActions: [{
|
||||
projectToolbarActions: [{
|
||||
id: 'Add',
|
||||
run: async (): Promise<any> => { return Promise.resolve(); }
|
||||
},
|
||||
|
||||
@@ -11,6 +11,7 @@ export const developers: string[] = [
|
||||
'alanrenmsft',
|
||||
'anjalia',
|
||||
'anthonydresser',
|
||||
'bnhoule',
|
||||
'caohai',
|
||||
'Charles-Gagnon',
|
||||
'cssuh',
|
||||
|
||||
@@ -492,8 +492,8 @@ function assertIncludeExcludeResult(result: mssql.SchemaCompareIncludeExcludeRes
|
||||
|
||||
function assertSchemaCompareResult(schemaCompareResult: mssql.SchemaCompareResult, operationId: string, expectedDifferenceCount: number, expectedIfEqual: boolean = false): void {
|
||||
assert(schemaCompareResult.areEqual === expectedIfEqual, `Expected: the schemas equivalency to be ${expectedIfEqual} Actual: ${schemaCompareResult.areEqual}`);
|
||||
assert(schemaCompareResult.errorMessage === null, `Expected: there should be no error. Actual Error message: "${schemaCompareResult.errorMessage}"`);
|
||||
assert(schemaCompareResult.success === true, `Expected: success in schema compare. Actual: Failure`);
|
||||
assert(schemaCompareResult.errorMessage === null, `Expected: there should be no error for comparison. Actual Error message: "${schemaCompareResult.errorMessage}"`);
|
||||
assert(schemaCompareResult.differences.length === expectedDifferenceCount, `Expected: ${expectedDifferenceCount} differences. Actual differences: "${schemaCompareResult.differences.length}"`);
|
||||
assert(schemaCompareResult.operationId === operationId, `Operation Id Expected to be same as passed. Expected : ${operationId}, Actual ${schemaCompareResult.operationId}`);
|
||||
}
|
||||
|
||||
@@ -787,9 +787,9 @@ jws@3.x.x:
|
||||
safe-buffer "^5.0.1"
|
||||
|
||||
lodash@^4.14.0, lodash@^4.16.4, lodash@^4.17.11, lodash@^4.17.13:
|
||||
version "4.17.19"
|
||||
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.19.tgz#e48ddedbe30b3321783c5b4301fbd353bc1e4a4b"
|
||||
integrity sha512-JNvd8XER9GQX0v2qJgsaN/mzFCNA5BRe/j8JN9d+tWyGLSodKQHKFicdwNYzWwI3wjRnaKPsGj1XkBjx/F96DQ==
|
||||
version "4.17.21"
|
||||
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c"
|
||||
integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==
|
||||
|
||||
make-dir@^2.1.0:
|
||||
version "2.1.0"
|
||||
|
||||
@@ -167,7 +167,7 @@ interface ISchemaAssociation {
 * A match succeeds when there is at least one pattern matching and last matching pattern does not start with '!'.
 */
fileMatch: string[];

/*
 * The schema for the given URI.
 * If no schema is provided, the schema will be fetched with the schema request service (if available).
@@ -203,16 +203,16 @@ The JSON language server is shipped with [Visual Studio Code](https://code.visua
If you plan to integrate the JSON language server into an editor and IDE, check out [this page](https://microsoft.github.io/language-server-protocol/implementors/tools/) if there's already an LSP client integration available.

You can also launch the language server as a command and connect to it.
For that, install the `json-language-server` npm module:
For that, install the `vscode-json-languageserver` npm module:

`npm install -g json-language-server`
`npm install -g vscode-json-languageserver`

Start the language server with the `json-language-server` command. Use a command line argument to specify the preferred communication channel:
Start the language server with the `vscode-json-languageserver` command. Use a command line argument to specify the preferred communication channel:

```
json-language-server --node-ipc
json-language-server --stdio
json-language-server --socket=<port>
vscode-json-languageserver --node-ipc
vscode-json-languageserver --stdio
vscode-json-languageserver --socket=<port>
```

To connect to the server from NodeJS, see Remy Suen's great write-up on [how to communicate with the server](https://github.com/rcjsuen/dockerfile-language-server-nodejs#communicating-with-the-server) through the available communication channels.
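As a small illustration of the renamed module in use, here is a minimal sketch of starting the server in stdio mode from a Node.js script and attaching a JSON-RPC connection to it; the use of the `vscode-jsonrpc` package here is an assumption for illustration, not something the README prescribes:

```typescript
// Minimal sketch (assumes the vscode-jsonrpc package): spawn the globally installed
// vscode-json-languageserver in stdio mode and open a JSON-RPC connection to it.
import { spawn } from 'child_process';
import { createMessageConnection, StreamMessageReader, StreamMessageWriter } from 'vscode-jsonrpc/node';

const server = spawn('vscode-json-languageserver', ['--stdio']);
const connection = createMessageConnection(
	new StreamMessageReader(server.stdout!),  // reads messages coming from the server
	new StreamMessageWriter(server.stdin!)    // writes messages going to the server
);
connection.listen();
```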
@@ -3,7 +3,7 @@
|
||||
"requiredPythonPackages": [
|
||||
{
|
||||
"name": "sqlmlutils",
|
||||
"version": "1.0.3"
|
||||
"version": "1.1.0"
|
||||
}
|
||||
],
|
||||
"requiredRPackages": [
|
||||
@@ -17,20 +17,20 @@
|
||||
},
|
||||
{
|
||||
"name": "sqlmlutils",
|
||||
"fileName": "sqlmlutils_0.7.3.zip",
|
||||
"downloadUrl": "https://github.com/microsoft/sqlmlutils/releases/download/R-0.7.3/sqlmlutils_0.7.3.zip",
|
||||
"fileName": "sqlmlutils_1.0.0.zip",
|
||||
"downloadUrl": "https://github.com/microsoft/sqlmlutils/releases/download/R-1.0.0/sqlmlutils_1.0.0.zip",
|
||||
"platform" : "win32"
|
||||
},
|
||||
{
|
||||
"name": "sqlmlutils",
|
||||
"fileName": "sqlmlutils_0.7.3.tar.gz",
|
||||
"downloadUrl": "https://github.com/microsoft/sqlmlutils/releases/download/R-0.7.3/sqlmlutils_0.7.3.tar.gz",
|
||||
"fileName": "sqlmlutils_1.0.0.tar.gz",
|
||||
"downloadUrl": "https://github.com/microsoft/sqlmlutils/releases/download/R-1.0.0/sqlmlutils_1.0.0.tar.gz",
|
||||
"platform" : "darwin"
|
||||
},
|
||||
{
|
||||
"name": "sqlmlutils",
|
||||
"fileName": "sqlmlutils_0.7.3.tar.gz",
|
||||
"downloadUrl": "https://github.com/microsoft/sqlmlutils/releases/download/R-0.7.3/sqlmlutils_0.7.3.tar.gz",
|
||||
"fileName": "sqlmlutils_1.0.0.tar.gz",
|
||||
"downloadUrl": "https://github.com/microsoft/sqlmlutils/releases/download/R-1.0.0/sqlmlutils_1.0.0.tar.gz",
|
||||
"platform" : "linux"
|
||||
}
|
||||
],
|
||||
|
||||
@@ -2,12 +2,12 @@
|
||||
"name": "machine-learning",
|
||||
"displayName": "%displayName%",
|
||||
"description": "%description%",
|
||||
"version": "0.9.0",
|
||||
"version": "0.10.0",
|
||||
"publisher": "Microsoft",
|
||||
"preview": true,
|
||||
"engines": {
|
||||
"vscode": "^1.25.0",
|
||||
"azdata": ">=1.27.0"
|
||||
"azdata": ">=1.29.0"
|
||||
},
|
||||
"activationEvents": [
|
||||
"onCommand:ml.command.managePackages",
|
||||
|
||||
@@ -8,7 +8,6 @@ import * as nls from 'vscode-nls';
|
||||
const localize = nls.loadMessageBundle();
|
||||
|
||||
export const winPlatform = 'win32';
|
||||
export const pythonBundleVersion = '0.0.1';
|
||||
export const managePackagesCommand = 'jupyter.cmd.managePackages';
|
||||
export const pythonLanguageName = 'Python';
|
||||
export const rLanguageName = 'R';
|
||||
@@ -42,7 +41,6 @@ export const pythonEnabledConfigKey = 'enablePython';
|
||||
export const rEnabledConfigKey = 'enableR';
|
||||
export const registeredModelsTableName = 'registeredModelsTableName';
|
||||
export const rPathConfigKey = 'rPath';
|
||||
export const adsPythonBundleVersion = '0.0.1';
|
||||
|
||||
// TSQL
|
||||
//
|
||||
|
||||
@@ -64,13 +64,6 @@ export function getPythonInstallationLocation(rootFolder: string) {
|
||||
return path.join(rootFolder, 'python');
|
||||
}
|
||||
|
||||
export function getPythonExePath(rootFolder: string): string {
|
||||
return path.join(
|
||||
getPythonInstallationLocation(rootFolder),
|
||||
constants.pythonBundleVersion,
|
||||
process.platform === constants.winPlatform ? 'python.exe' : 'bin/python3');
|
||||
}
|
||||
|
||||
export function getPackageFilePath(rootFolder: string, packageName: string): string {
|
||||
return path.join(
|
||||
rootFolder,
|
||||
@@ -272,7 +265,6 @@ export function getFileName(filePath: string) {
|
||||
export function getDefaultPythonLocation(): string {
|
||||
|
||||
return path.join(getUserHome() || '', 'azuredatastudio-python',
|
||||
constants.adsPythonBundleVersion,
|
||||
getPythonExeName());
|
||||
}
|
||||
|
||||
|
||||
@@ -69,7 +69,7 @@ export class SqlRPackageManageProvider extends SqlPackageManageProviderBase impl
|
||||
let connectionParts: string[] = [];
|
||||
|
||||
if (connection) {
|
||||
connectionParts.push(utils.getKeyValueString('driver', constants.supportedODBCDriver));
|
||||
connectionParts.push(utils.getKeyValueString('driver', `"${constants.supportedODBCDriver}"`));
|
||||
let server = connection.serverName.replace('\\', '\\\\');
|
||||
if (databaseName) {
|
||||
connectionParts.push(utils.getKeyValueString('database', `"${databaseName}"`));
|
||||
|
||||
@@ -114,9 +114,9 @@ lodash.throttle@^4.1.1:
|
||||
integrity sha1-wj6RtxAkKscMN/HhzaknTMOb8vQ=
|
||||
|
||||
lodash@^4.16.4:
|
||||
version "4.17.19"
|
||||
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.19.tgz#e48ddedbe30b3321783c5b4301fbd353bc1e4a4b"
|
||||
integrity sha512-JNvd8XER9GQX0v2qJgsaN/mzFCNA5BRe/j8JN9d+tWyGLSodKQHKFicdwNYzWwI3wjRnaKPsGj1XkBjx/F96DQ==
|
||||
version "4.17.21"
|
||||
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c"
|
||||
integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==
|
||||
|
||||
markdown-it-front-matter@^0.2.1:
|
||||
version "0.2.1"
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"downloadUrl": "https://github.com/Microsoft/sqltoolsservice/releases/download/v{#version#}/microsoft.sqltools.servicelayer-{#fileName#}",
|
||||
"version": "3.0.0-release.102",
|
||||
"version": "3.0.0-release.105",
|
||||
"downloadFileNames": {
|
||||
"Windows_86": "win-x86-netcoreapp3.1.zip",
|
||||
"Windows_64": "win-x64-netcoreapp3.1.zip",
|
||||
|
||||
@@ -1192,9 +1192,9 @@ locate-path@^3.0.0:
|
||||
path-exists "^3.0.0"
|
||||
|
||||
lodash@^4.16.4, lodash@^4.17.13, lodash@^4.17.15, lodash@^4.17.4:
|
||||
version "4.17.19"
|
||||
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.19.tgz#e48ddedbe30b3321783c5b4301fbd353bc1e4a4b"
|
||||
integrity sha512-JNvd8XER9GQX0v2qJgsaN/mzFCNA5BRe/j8JN9d+tWyGLSodKQHKFicdwNYzWwI3wjRnaKPsGj1XkBjx/F96DQ==
|
||||
version "4.17.21"
|
||||
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c"
|
||||
integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==
|
||||
|
||||
log-symbols@3.0.0:
|
||||
version "3.0.0"
|
||||
|
||||
@@ -38,6 +38,11 @@
|
||||
"default": false,
|
||||
"description": "%notebook.useExistingPython.description%"
|
||||
},
|
||||
"notebook.dontPromptPythonUpdate": {
|
||||
"type": "boolean",
|
||||
"default": false,
|
||||
"description": "%notebook.dontPromptPythonUpdate.description%"
|
||||
},
|
||||
"notebook.overrideEditorTheming": {
|
||||
"type": "boolean",
|
||||
"default": true,
|
||||
|
||||
@@ -4,6 +4,7 @@
|
||||
"notebook.configuration.title": "Notebook configuration",
|
||||
"notebook.pythonPath.description": "Local path to python installation used by Notebooks.",
|
||||
"notebook.useExistingPython.description": "Local path to a preexisting python installation used by Notebooks.",
|
||||
"notebook.dontPromptPythonUpdate.description": "Do not show prompt to update Python.",
|
||||
"notebook.overrideEditorTheming.description": "Override editor default settings in the Notebook editor. Settings include background color, current line color and border",
|
||||
"notebook.maxTableRows.description": "Maximum number of rows returned per table in the Notebook editor",
|
||||
"notebook.trustedBooks.description": "Notebooks contained in these books will automatically be trusted.",
|
||||
|
||||
@@ -1,11 +1,9 @@
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<g clip-path="url(#clip0)">
|
||||
<path d="M4 6H11V3H4V6ZM5 4H10V5H5V4Z" fill="white"/>
|
||||
<path d="M2.99998 0C2.74326 0.000315985 2.48981 0.0577002 2.25798 0.168C1.77794 0.392083 1.39207 0.777956 1.16798 1.258C1.05778 1.48986 1.0004 1.74328 0.999984 2V14C0.998165 14.267 1.05127 14.5314 1.15598 14.777C1.36029 15.2573 1.74272 15.6397 2.22298 15.844C2.46855 15.9487 2.73303 16.0018 2.99998 16H14V0H2.99998ZM2.99998 15C2.86566 15.0013 2.73253 14.9747 2.60898 14.922C2.37107 14.8187 2.18127 14.6289 2.07798 14.391C2.02649 14.2671 1.99998 14.1342 1.99998 14C1.99998 13.8658 2.02649 13.7329 2.07798 13.609C2.12918 13.4911 2.20212 13.3839 2.29298 13.293C2.38371 13.2019 2.49095 13.1289 2.60898 13.078C2.73253 13.0253 2.86566 12.9987 2.99998 13H13V15H2.99998ZM13 12H2.99998C2.823 11.9993 2.64684 12.0243 2.47698 12.074C2.31034 12.1238 2.15022 12.1933 1.99998 12.281V2C2.00151 1.87494 2.03233 1.75199 2.08998 1.641C2.14876 1.52225 2.22643 1.41383 2.31998 1.32C2.41408 1.22676 2.52244 1.14912 2.64098 1.09C2.75186 1.03207 2.87489 1.00123 2.99998 1H13V12Z" fill="white"/>
|
||||
</g>
|
||||
<defs>
|
||||
<clipPath id="clip0">
|
||||
<rect width="16" height="16" fill="white"/>
|
||||
</clipPath>
|
||||
</defs>
|
||||
<path d="M5.98438 5.05811H2.98438V6.05811H5.98438V5.05811Z" fill="white"/>
|
||||
<path d="M5.98438 9.05811H2.98438V10.0581H5.98438V9.05811Z" fill="white"/>
|
||||
<path d="M5.98438 7.05811H2.98438V8.0581H5.98438V7.05811Z" fill="white"/>
|
||||
<path d="M13 5.05811H10V6.05811H13V5.05811Z" fill="white"/>
|
||||
<path d="M13 9.05811H10V10.0581H13V9.05811Z" fill="white"/>
|
||||
<path d="M13 7.05811H10V8.0581H13V7.05811Z" fill="white"/>
|
||||
<path d="M10 2C9.61049 2.00104 9.22665 2.0934 8.8793 2.26968C8.53196 2.44595 8.2308 2.70123 8 3.015C7.7692 2.70123 7.46804 2.44595 7.1207 2.26968C6.77335 2.0934 6.38951 2.00104 6 2H1V13H6.5C6.76522 13 7.01957 13.1054 7.20711 13.2929C7.39464 13.4804 7.5 13.7348 7.5 14H8.5C8.5 13.7348 8.60536 13.4804 8.79289 13.2929C8.98043 13.1054 9.23478 13 9.5 13H15V2H10ZM6.5 12H2V3H6C6.39782 3 6.77936 3.15804 7.06066 3.43934C7.34196 3.72064 7.5 4.10218 7.5 4.5V12.278C7.19736 12.098 6.85214 12.002 6.5 12V12ZM14 12H9.4V12.01C9.0829 12.0261 8.77428 12.118 8.5 12.278V4.5C8.5 4.10218 8.65804 3.72064 8.93934 3.43934C9.22064 3.15804 9.60218 3 10 3H14V12Z" fill="white"/>
|
||||
</svg>
|
||||
|
||||
|
@@ -1,7 +1,9 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" viewBox="0 0 16 16">
|
||||
<title>Artboard 10</title>
|
||||
<g>
|
||||
<path d="M4,6h7V3H4ZM5,4h5V5H5Z"/>
|
||||
<path d="M3,0a1.732,1.732,0,0,0-.742.168,2.256,2.256,0,0,0-1.09,1.09A1.735,1.735,0,0,0,1,2V14a1.947,1.947,0,0,0,.156.777,2.018,2.018,0,0,0,1.067,1.067A1.947,1.947,0,0,0,3,16H14V0ZM3,15a.972.972,0,0,1-.391-.078,1.023,1.023,0,0,1-.531-.531,1.019,1.019,0,0,1,0-.782,1.024,1.024,0,0,1,.215-.316,1.012,1.012,0,0,1,.316-.215A.972.972,0,0,1,3,13H13v2Zm10-3H3a1.836,1.836,0,0,0-.523.074A2.194,2.194,0,0,0,2,12.281V2a.8.8,0,0,1,.09-.359,1.223,1.223,0,0,1,.23-.321,1.246,1.246,0,0,1,.321-.23A.792.792,0,0,1,3,1H13Z"/>
|
||||
</g>
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M5.98438 5.05811H2.98438V6.05811H5.98438V5.05811Z" fill="#323130"/>
|
||||
<path d="M5.98438 9.05811H2.98438V10.0581H5.98438V9.05811Z" fill="#323130"/>
|
||||
<path d="M5.98438 7.05811H2.98438V8.0581H5.98438V7.05811Z" fill="#323130"/>
|
||||
<path d="M13 5.05811H10V6.05811H13V5.05811Z" fill="#323130"/>
|
||||
<path d="M13 9.05811H10V10.0581H13V9.05811Z" fill="#323130"/>
|
||||
<path d="M13 7.05811H10V8.0581H13V7.05811Z" fill="#323130"/>
|
||||
<path d="M10 2C9.61049 2.00104 9.22665 2.0934 8.8793 2.26968C8.53196 2.44595 8.2308 2.70123 8 3.015C7.7692 2.70123 7.46804 2.44595 7.1207 2.26968C6.77335 2.0934 6.38951 2.00104 6 2H1V13H6.5C6.76522 13 7.01957 13.1054 7.20711 13.2929C7.39464 13.4804 7.5 13.7348 7.5 14H8.5C8.5 13.7348 8.60536 13.4804 8.79289 13.2929C8.98043 13.1054 9.23478 13 9.5 13H15V2H10ZM6.5 12H2V3H6C6.39782 3 6.77936 3.15804 7.06066 3.43934C7.34196 3.72064 7.5 4.10218 7.5 4.5V12.278C7.19736 12.098 6.85214 12.002 6.5 12V12ZM14 12H9.4V12.01C9.0829 12.0261 8.77428 12.118 8.5 12.278V4.5C8.5 4.10218 8.65804 3.72064 8.93934 3.43934C9.22064 3.15804 9.60218 3 10 3H14V12Z" fill="#323130"/>
|
||||
</svg>
|
||||
|
||||
|
@@ -13,7 +13,7 @@ import * as fs from 'fs-extra';
|
||||
import * as loc from '../common/localizedConstants';
|
||||
import { IJupyterBookToc, JupyterBookSection } from '../contracts/content';
|
||||
import { convertFrom, getContentPath, BookVersion } from './bookVersionHandler';
|
||||
import { debounce } from '../common/utils';
|
||||
import { debounce, IPinnedNotebook } from '../common/utils';
|
||||
import { Deferred } from '../common/promise';
|
||||
const fsPromises = fileServices.promises;
|
||||
const content = 'content';
|
||||
@@ -39,7 +39,7 @@ export class BookModel {
|
||||
public readonly isNotebook: boolean,
|
||||
private _extensionContext: vscode.ExtensionContext,
|
||||
private _onDidChangeTreeData: vscode.EventEmitter<BookTreeItem | undefined>,
|
||||
public readonly notebookRootPath?: string) { }
|
||||
public readonly pinnedNotebookDetails?: IPinnedNotebook) { }
|
||||
|
||||
public unwatchTOC(): void {
|
||||
fs.unwatchFile(this.tableOfContentsPath);
|
||||
@@ -141,9 +141,9 @@ export class BookModel {
|
||||
|
||||
let pathDetails = path.parse(this.bookPath);
|
||||
let notebookItem = new BookTreeItem({
|
||||
title: pathDetails.name,
|
||||
title: this.pinnedNotebookDetails?.title ?? pathDetails.name,
|
||||
contentPath: this.bookPath,
|
||||
root: this.notebookRootPath ? this.notebookRootPath : pathDetails.dir,
|
||||
root: this.pinnedNotebookDetails?.bookPath ?? pathDetails.dir,
|
||||
tableOfContents: { sections: undefined },
|
||||
page: { sections: undefined },
|
||||
type: BookTreeItemType.Notebook,
|
||||
|
||||
@@ -6,7 +6,7 @@ import * as path from 'path';
|
||||
import * as vscode from 'vscode';
|
||||
import * as constants from './../common/constants';
|
||||
import { BookTreeItem } from './bookTreeItem';
|
||||
import { getPinnedNotebooks, setPinnedBookPathsInConfig, IBookNotebook } from '../common/utils';
|
||||
import { getPinnedNotebooks, setPinnedBookPathsInConfig, IPinnedNotebook } from '../common/utils';
|
||||
|
||||
export interface IBookPinManager {
|
||||
pinNotebook(notebook: BookTreeItem): Promise<boolean>;
|
||||
@@ -51,14 +51,14 @@ export class BookPinManager implements IBookPinManager {
|
||||
let modifiedPinnedBooks = false;
|
||||
let bookPathToChange: string = notebook.book.contentPath;
|
||||
|
||||
let pinnedBooks: IBookNotebook[] = getPinnedNotebooks();
|
||||
let pinnedBooks: IPinnedNotebook[] = getPinnedNotebooks();
|
||||
let existingBookIndex = pinnedBooks.map(pinnedBookPath => path.normalize(pinnedBookPath?.notebookPath)).indexOf(path.normalize(bookPathToChange));
|
||||
|
||||
if (existingBookIndex !== -1 && operation === PinBookOperation.Unpin) {
|
||||
pinnedBooks.splice(existingBookIndex, 1);
|
||||
modifiedPinnedBooks = true;
|
||||
} else if (existingBookIndex === -1 && operation === PinBookOperation.Pin) {
|
||||
let addNotebook: IBookNotebook = { notebookPath: bookPathToChange, bookPath: notebook.book.root };
|
||||
let addNotebook: IPinnedNotebook = { notebookPath: bookPathToChange, bookPath: notebook.book.root, title: notebook.book.title };
|
||||
pinnedBooks.push(addNotebook);
|
||||
modifiedPinnedBooks = true;
|
||||
}
|
||||
|
||||
@@ -16,7 +16,7 @@ import { Deferred } from '../common/promise';
|
||||
import { IBookTrustManager, BookTrustManager } from './bookTrustManager';
|
||||
import * as loc from '../common/localizedConstants';
|
||||
import * as glob from 'fast-glob';
|
||||
import { getPinnedNotebooks, confirmMessageDialog, getNotebookType, FileExtension } from '../common/utils';
|
||||
import { getPinnedNotebooks, confirmMessageDialog, getNotebookType, FileExtension, IPinnedNotebook } from '../common/utils';
|
||||
import { IBookPinManager, BookPinManager } from './bookPinManager';
|
||||
import { BookTocManager, IBookTocManager, quickPickResults } from './bookTocManager';
|
||||
import { CreateBookDialog } from '../dialog/createBookDialog';
|
||||
@@ -68,7 +68,7 @@ export class BookTreeViewProvider implements vscode.TreeDataProvider<BookTreeIte
|
||||
book && // The notebook is part of a book in the viewlet (otherwise nothing to reveal)
|
||||
(this._openAsUntitled ? notebookPath?.scheme === 'untitled' : notebookPath?.scheme !== 'untitled')) // The notebook is of the correct type for this tree view
|
||||
{
|
||||
await this.revealDocumentInTreeView(notebookPath);
|
||||
await this.revealDocumentInTreeView(notebookPath, true, true);
|
||||
}
|
||||
});
|
||||
this._extensionContext.subscriptions.push(azdata.nb.registerNavigationProvider(this));
|
||||
@@ -78,7 +78,7 @@ export class BookTreeViewProvider implements vscode.TreeDataProvider<BookTreeIte
|
||||
if (this.viewId === constants.PINNED_BOOKS_VIEWID) {
|
||||
await Promise.all(getPinnedNotebooks().map(async (notebook) => {
|
||||
try {
|
||||
await this.createAndAddBookModel(notebook.notebookPath, true, notebook.bookPath);
|
||||
await this.createAndAddBookModel(notebook.notebookPath, true, notebook);
|
||||
} catch {
|
||||
// no-op, not all workspace folders are going to be valid books
|
||||
}
|
||||
@@ -254,8 +254,8 @@ export class BookTreeViewProvider implements vscode.TreeDataProvider<BookTreeIte
|
||||
async addNotebookToPinnedView(bookItem: BookTreeItem): Promise<void> {
|
||||
let notebookPath: string = bookItem.book.contentPath;
|
||||
if (notebookPath) {
|
||||
let rootPath: string = bookItem.book.root ? bookItem.book.root : '';
|
||||
await this.createAndAddBookModel(notebookPath, true, rootPath);
|
||||
let notebookDetails: IPinnedNotebook = bookItem.book.root ? { bookPath: bookItem.book.root, notebookPath: notebookPath, title: bookItem.book.title } : { notebookPath: notebookPath };
|
||||
await this.createAndAddBookModel(notebookPath, true, notebookDetails);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -318,9 +318,9 @@ export class BookTreeViewProvider implements vscode.TreeDataProvider<BookTreeIte
|
||||
* @param isNotebook A boolean value to know we are creating a model for a notebook or a book
|
||||
* @param notebookBookRoot For pinned notebooks we need to know if the notebook is part of a book or it's a standalone notebook
|
||||
*/
|
||||
private async createAndAddBookModel(bookPath: string, isNotebook: boolean, notebookBookRoot?: string): Promise<void> {
private async createAndAddBookModel(bookPath: string, isNotebook: boolean, notebookDetails?: IPinnedNotebook): Promise<void> {
if (!this.books.find(x => x.bookPath === bookPath)) {
const book: BookModel = new BookModel(bookPath, this._openAsUntitled, isNotebook, this._extensionContext, this._onDidChangeTreeData, notebookBookRoot);
const book: BookModel = new BookModel(bookPath, this._openAsUntitled, isNotebook, this._extensionContext, this._onDidChangeTreeData, notebookDetails);
await book.initializeContents();
this.books.push(book);
if (!this.currentBook) {

@@ -391,7 +391,13 @@ export class BookTreeViewProvider implements vscode.TreeDataProvider<BookTreeIte
}
}

async revealDocumentInTreeView(uri?: vscode.Uri, shouldReveal: boolean = true): Promise<BookTreeItem | undefined> {
/**
* Reveals the given uri in the tree view.
* @param uri The path to the notebook. If it's undefined then the current active notebook is revealed in the Tree View.
* @param shouldReveal A boolean to expand the parent node.
* @param shouldFocus A boolean to focus on the tree item.
*/
async revealDocumentInTreeView(uri: vscode.Uri | undefined, shouldReveal: boolean, shouldFocus: boolean): Promise<BookTreeItem | undefined> {
let bookItem: BookTreeItem;
let notebookPath: string;
// If no uri is passed in, try to use the current active notebook editor

@@ -405,17 +411,18 @@ export class BookTreeViewProvider implements vscode.TreeDataProvider<BookTreeIte
}

if (shouldReveal || this._bookViewer?.visible) {
bookItem = notebookPath ? await this.findAndExpandParentNode(notebookPath) : undefined;
bookItem = notebookPath ? await this.findAndExpandParentNode(notebookPath, shouldFocus) : undefined;
// Select + focus item in viewlet if books viewlet is already open, or if we pass in variable
if (bookItem?.contextValue && bookItem.contextValue !== 'pinnedNotebook') {
// Note: 3 is the maximum number of levels that the vscode APIs let you expand to
await this._bookViewer.reveal(bookItem, { select: true, focus: true, expand: true });
await this._bookViewer.reveal(bookItem, { select: true, focus: shouldFocus, expand: true });
}
}

return bookItem;
}

async findAndExpandParentNode(notebookPath: string): Promise<BookTreeItem | undefined> {
async findAndExpandParentNode(notebookPath: string, shouldFocus: boolean): Promise<BookTreeItem | undefined> {
notebookPath = notebookPath.replace(/\\/g, '/');
const parentBook = this.books.find(b => notebookPath.indexOf(b.bookPath) > -1);
if (!parentBook) {

@@ -470,7 +477,7 @@ export class BookTreeViewProvider implements vscode.TreeDataProvider<BookTreeIte
}
try {
// TO DO: Check why the reveal fails during initial load with 'TreeError [bookTreeView] Tree element not found'
await this._bookViewer.reveal(bookItemToExpand, { select: false, focus: true, expand: true });
await this._bookViewer.reveal(bookItemToExpand, { select: false, focus: shouldFocus, expand: true });
}
catch (e) {
console.error(e);

@@ -13,10 +13,10 @@ export const extensionOutputChannelName = 'Notebooks';
export const notebookCommandNew = 'notebook.command.new';

// JUPYTER CONFIG //////////////////////////////////////////////////////////
export const pythonBundleVersion = '0.0.1';
export const pythonVersion = '3.6.6';
export const pythonVersion = '3.8.10';
export const pythonPathConfigKey = 'pythonPath';
export const existingPythonConfigKey = 'useExistingPython';
export const dontPromptPythonUpdate = 'dontPromptPythonUpdate';
export const notebookConfigKey = 'notebook';
export const trustedBooksConfigKey = 'trustedBooks';
export const pinnedBooksConfigKey = 'pinnedNotebooks';

@@ -76,9 +76,9 @@ export enum NavigationProviders {
export const unsavedBooksContextKey = 'unsavedBooks';
export const showPinnedBooksContextKey = 'showPinnedbooks';

export const pythonWindowsInstallUrl = 'https://go.microsoft.com/fwlink/?linkid=2110625';
export const pythonMacInstallUrl = 'https://go.microsoft.com/fwlink/?linkid=2128152';
export const pythonLinuxInstallUrl = 'https://go.microsoft.com/fwlink/?linkid=2110524';
export const pythonWindowsInstallUrl = 'https://go.microsoft.com/fwlink/?linkid=2163338';
export const pythonMacInstallUrl = 'https://go.microsoft.com/fwlink/?linkid=2163337';
export const pythonLinuxInstallUrl = 'https://go.microsoft.com/fwlink/?linkid=2163336';

export const notebookLanguages = ['notebook', 'ipynb'];

@@ -19,17 +19,9 @@ export class NotebookUtils {

constructor() { }

public async newNotebook(connectionProfile?: azdata.IConnectionProfile): Promise<azdata.nb.NotebookEditor> {
public async newNotebook(options?: azdata.nb.NotebookShowOptions): Promise<azdata.nb.NotebookEditor> {
const title = this.findNextUntitledEditorName();
const untitledUri = vscode.Uri.parse(`untitled:${title}`);
const options: azdata.nb.NotebookShowOptions = connectionProfile ? {
viewColumn: null,
preserveFocus: true,
preview: null,
providerId: null,
connectionProfile: connectionProfile,
defaultKernel: null
} : null;
return azdata.nb.showNotebookDocument(untitledUri, options);
}
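Not part of the diff: with the new `newNotebook(options?: azdata.nb.NotebookShowOptions)` signature above, callers now build the show options themselves instead of handing over a connection profile. A minimal sketch, assuming the caller already holds an `azdata.IConnectionProfile` and that the relative import path is illustrative only:

```typescript
import * as azdata from 'azdata';
import { NotebookUtils } from './notebookUtils'; // assumed path, for illustration only

// Opens an untitled notebook attached to an existing connection. The removed overload
// built this options object internally; callers of the new signature pass it directly.
async function newNotebookForConnection(utils: NotebookUtils, profile: azdata.IConnectionProfile): Promise<azdata.nb.NotebookEditor> {
	return utils.newNotebook({
		preserveFocus: true,
		connectionProfile: profile
	});
}
```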
@@ -140,7 +140,7 @@ export function getOSPlatformId(): string {
* @param second Second version string to compare.
* @returns 1 if the first version is greater, -1 if it's less, and 0 otherwise.
*/
export function comparePackageVersions(first: string, second: string): number {
export function compareVersions(first: string, second: string): number {
let firstVersion = first.split('.');
let secondVersion = second.split('.');

@@ -179,7 +179,7 @@ export function comparePackageVersions(first: string, second: string): number {

export function sortPackageVersions(versions: string[], ascending: boolean = true): string[] {
return versions.sort((first, second) => {
let compareResult = comparePackageVersions(first, second);
let compareResult = compareVersions(first, second);
if (ascending) {
return compareResult;
} else {

@@ -230,7 +230,7 @@ export function isPackageSupported(pythonVersion: string, packageVersionConstrai
versionSpecifier = constraint.slice(0, splitIndex);
version = constraint.slice(splitIndex).trim();
}
let versionComparison = comparePackageVersions(pythonVersion, version);
let versionComparison = compareVersions(pythonVersion, version);
switch (versionSpecifier) {
case '>=':
supportedVersionFound = versionComparison !== -1;

@@ -435,7 +435,7 @@ export async function getRandomToken(size: number = 24): Promise<string> {
}

export function isBookItemPinned(notebookPath: string): boolean {
let pinnedNotebooks: IBookNotebook[] = getPinnedNotebooks();
let pinnedNotebooks: IPinnedNotebook[] = getPinnedNotebooks();
if (pinnedNotebooks?.find(x => x.notebookPath === notebookPath)) {
return true;
}

@@ -451,16 +451,16 @@ export function getNotebookType(book: BookTreeItemFormat): BookTreeItemType {
}
}

export function getPinnedNotebooks(): IBookNotebook[] {
export function getPinnedNotebooks(): IPinnedNotebook[] {
let config: vscode.WorkspaceConfiguration = vscode.workspace.getConfiguration(notebookConfigKey);
let pinnedNotebooks: [] = config.get(pinnedBooksConfigKey);
let updateFormat: boolean = false;
const pinnedBookDirectories = pinnedNotebooks.map(elem => {
if (typeof (elem) === 'string') {
updateFormat = true;
return { notebookPath: elem, bookPath: '' };
return { notebookPath: elem, bookPath: '', title: '' };
} else {
return elem as IBookNotebook;
return elem as IPinnedNotebook;
}
});
if (updateFormat) {

@@ -475,7 +475,7 @@ function hasWorkspaceFolders(): boolean {
return workspaceFolders && workspaceFolders.length > 0;
}

export async function setPinnedBookPathsInConfig(pinnedNotebookPaths: IBookNotebook[]): Promise<void> {
export async function setPinnedBookPathsInConfig(pinnedNotebookPaths: IPinnedNotebook[]): Promise<void> {
let config: vscode.WorkspaceConfiguration = vscode.workspace.getConfiguration(notebookConfigKey);
let storeInWorspace: boolean = hasWorkspaceFolders();

@@ -483,8 +483,9 @@ export async function setPinnedBookPathsInConfig(pinnedNotebookPaths: IBookNoteb
}

export interface IBookNotebook {
export interface IPinnedNotebook {
bookPath?: string;
title?: string;
notebookPath: string;
}
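Not part of the diff: the rename from `comparePackageVersions` to `compareVersions` keeps the same contract (short versions are padded, `*` segments act as wildcards), and `getPinnedNotebooks` now migrates legacy string entries into `IPinnedNotebook` objects. A minimal sketch of the expected behavior, mirroring the renamed tests further down in this diff; the import path and sample notebook path are illustrative assumptions:

```typescript
import { compareVersions, IPinnedNotebook } from '../common/utils'; // assumed path

// Expected comparison results, taken from the updated tests below.
console.log(compareVersions('1', '1.0.0.0'));    // 0  - short versions are padded before comparing
console.log(compareVersions('1.*.3', '1.5.3'));  // 0  - '*' segments match any value
console.log(compareVersions('4.5.6', '3.*'));    // 1  - first version is greater

// Settings written before this change stored plain path strings; after migration
// each pinned entry is an object shaped like this (hypothetical path):
const migrated: IPinnedNotebook = { notebookPath: '/tmp/Book/content/notebook1.ipynb', bookPath: '', title: '' };
console.log(migrated);
```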
@@ -149,7 +149,7 @@ export class ConfigurePythonWizard {
}

if (useExistingPython) {
let exePath = JupyterServerInstallation.getPythonExePath(pythonLocation, true);
let exePath = JupyterServerInstallation.getPythonExePath(pythonLocation);
let pythonExists = await utils.exists(exePath);
if (!pythonExists) {
this.showErrorMessage(this.PythonNotFoundMsg);

@@ -110,7 +110,7 @@ export class PickPackagesPage extends BasePage {

public async onPageEnter(): Promise<void> {
this.packageVersionMap.clear();
let pythonExe = JupyterServerInstallation.getPythonExePath(this.model.pythonLocation, this.model.useExistingPython);
let pythonExe = JupyterServerInstallation.getPythonExePath(this.model.pythonLocation);
this.packageVersionRetrieval = this.model.installation.getInstalledPipPackages(pythonExe)
.then(installedPackages => {
if (installedPackages) {

@@ -250,13 +250,15 @@ export class RemoteBookDialog {
}

public async fillVersionDropdown(): Promise<void> {
let filtered_assets = (await this.controller.getAssets()).filter(asset => asset.book === this.bookDropdown.value);
const assets = await this.controller.getAssets();
let filtered_assets = assets.filter(asset => asset.book === this.bookDropdown.value);
this.versionDropdown.values = ['-'].concat(filtered_assets.map(asset => asset.version));
this.checkValues();
}

public async fillLanguageDropdown(): Promise<void> {
let filtered_assets = (await this.controller.getAssets()).filter(asset => asset.book === this.bookDropdown.value &&
const assets = await this.controller.getAssets();
let filtered_assets = assets.filter(asset => asset.book === this.bookDropdown.value &&
asset.version === this.versionDropdown.value);
this.languageDropdown.values = ['-'].concat(filtered_assets.map(asset => asset.language));
this.checkValues();

@@ -9,7 +9,7 @@ import { IRelease, IAsset } from '../book/remoteBookController';
export class RemoteBookDialogModel {
private _remoteLocation: string;
private _releases: IRelease[] = [];
private _assets: IAsset[];
private _assets: IAsset[] = [];
private _book: RemoteBook;

constructor() {

@@ -74,12 +74,8 @@ export async function activate(extensionContext: vscode.ExtensionContext): Promi
dialog.createDialog();
}));

extensionContext.subscriptions.push(vscode.commands.registerCommand('_notebook.command.new', async (context?: azdata.ConnectedContext) => {
let connectionProfile: azdata.IConnectionProfile = undefined;
if (context && context.connectionProfile) {
connectionProfile = context.connectionProfile;
}
return appContext.notebookUtils.newNotebook(connectionProfile);
extensionContext.subscriptions.push(vscode.commands.registerCommand('_notebook.command.new', async (options?: azdata.nb.NotebookShowOptions) => {
return appContext.notebookUtils.newNotebook(options);
}));
extensionContext.subscriptions.push(vscode.commands.registerCommand('notebook.command.open', async () => {
await appContext.notebookUtils.openNotebook();

@@ -150,9 +146,9 @@ export async function activate(extensionContext: vscode.ExtensionContext): Promi

azdata.nb.onDidChangeActiveNotebookEditor(e => {
if (e.document.uri.scheme === 'untitled') {
providedBookTreeViewProvider.revealDocumentInTreeView(e.document.uri, false);
providedBookTreeViewProvider.revealDocumentInTreeView(e.document.uri, false, false);
} else {
bookTreeViewProvider.revealDocumentInTreeView(e.document.uri, false);
bookTreeViewProvider.revealDocumentInTreeView(e.document.uri, false, false);
}
});

@@ -6,12 +6,10 @@
import * as should from 'should';
import * as vscode from 'vscode';
import * as assert from 'assert';
import * as path from 'path';
import 'mocha';

import { JupyterController } from '../jupyter/jupyterController';
import { JupyterServerInstallation, PythonPkgDetails } from '../jupyter/jupyterServerInstallation';
import { pythonBundleVersion } from '../common/constants';
import { executeStreamedCommand, sortPackageVersions } from '../common/utils';

describe('Notebook Extension Python Installation', function () {

@@ -59,16 +57,15 @@ describe('Notebook Extension Python Installation', function () {

console.log('Uninstalling existing pip dependencies');
let install = jupyterController.jupyterInstallation;
let pythonExe = JupyterServerInstallation.getPythonExePath(pythonInstallDir, false);
let pythonExe = JupyterServerInstallation.getPythonExePath(pythonInstallDir);
let command = `"${pythonExe}" -m pip uninstall -y jupyter pandas sparkmagic`;
await executeStreamedCommand(command, { env: install.execOptions.env }, install.outputChannel);
console.log('Uninstalling existing pip dependencies is done');

console.log('Start Existing Python Installation');
let existingPythonPath = path.join(pythonInstallDir, pythonBundleVersion);
await install.startInstallProcess(false, { installPath: existingPythonPath, existingPython: true, packages: [] });
await install.startInstallProcess(false, { installPath: pythonInstallDir, existingPython: true, packages: [] });
should(JupyterServerInstallation.isPythonInstalled()).be.true();
should(JupyterServerInstallation.getPythonInstallPath()).be.equal(existingPythonPath);
should(JupyterServerInstallation.getPythonInstallPath()).be.equal(pythonInstallDir);
should(JupyterServerInstallation.getExistingPythonSetting()).be.true();

// Redo "new" install to restore original settings.
@@ -32,11 +32,18 @@ const msgInstallPkgStart = localize('msgInstallPkgStart', "Installing Notebook d
const msgInstallPkgFinish = localize('msgInstallPkgFinish', "Notebook dependencies installation is complete");
const msgPythonRunningError = localize('msgPythonRunningError', "Cannot overwrite an existing Python installation while python is running. Please close any active notebooks before proceeding.");
const msgWaitingForInstall = localize('msgWaitingForInstall', "Another Python installation is currently in progress. Waiting for it to complete.");
const msgShutdownJupyterNotebookSessions = localize('msgShutdownNotebookSessions', "Active Python notebook sessions will be shutdown in order to update. Would you like to proceed now?");
function msgPythonVersionUpdatePrompt(pythonVersion: string): string { return localize('msgPythonVersionUpdatePrompt', "Python {0} is now available in Azure Data Studio. The current Python version (3.6.6) will be out of support in December 2021. Would you like to update to Python {0} now?", pythonVersion); }
function msgPythonVersionUpdateWarning(pythonVersion: string): string { return localize('msgPythonVersionUpdateWarning', "Python {0} will be installed and will replace Python 3.6.6. Some packages may no longer be compatible with the new version or may need to be reinstalled. A notebook will be created to help you reinstall all pip packages. Would you like to continue with the update now?", pythonVersion); }
function msgDependenciesInstallationFailed(errorMessage: string): string { return localize('msgDependenciesInstallationFailed', "Installing Notebook dependencies failed with error: {0}", errorMessage); }
function msgDownloadPython(platform: string, pythonDownloadUrl: string): string { return localize('msgDownloadPython', "Downloading local python for platform: {0} to {1}", platform, pythonDownloadUrl); }
function msgPackageRetrievalFailed(errorMessage: string): string { return localize('msgPackageRetrievalFailed', "Encountered an error when trying to retrieve list of installed packages: {0}", errorMessage); }
function msgGetPythonUserDirFailed(errorMessage: string): string { return localize('msgGetPythonUserDirFailed', "Encountered an error when getting Python user path: {0}", errorMessage); }

const yes = localize('yes', "Yes");
const no = localize('no', "No");
const dontAskAgain = localize('dontAskAgain', "Don't Ask Again");

export interface PythonInstallSettings {
installPath: string;
existingPython: boolean;

@@ -110,6 +117,11 @@ export class JupyterServerInstallation implements IJupyterServerInstallation {
private _usingConda: boolean;
private _installedPythonVersion: string;

private _upgradeInProcess: boolean = false;
private _oldPythonExecutable: string | undefined;
private _oldPythonInstallationPath: string | undefined;
private _oldUserInstalledPipPackages: PythonPkgDetails[] = [];

private _installInProgress: boolean;
private _installCompletion: Deferred<void>;

@@ -165,10 +177,41 @@ export class JupyterServerInstallation implements IJupyterServerInstallation {

try {
let pythonExists = await utils.exists(this._pythonExecutable);
if (!pythonExists || forceInstall) {
let upgradePython = false;
// Warn users that some packages may need to be reinstalled after updating Python versions
if (!this._usingExistingPython && this._oldPythonExecutable && utils.compareVersions(await this.getInstalledPythonVersion(this._oldPythonExecutable), constants.pythonVersion) < 0) {
upgradePython = await vscode.window.showInformationMessage(msgPythonVersionUpdateWarning(constants.pythonVersion), yes, no) === yes;
if (upgradePython) {
this._upgradeInProcess = true;
if (await this.isPythonRunning(this._oldPythonExecutable)) {
let proceed = await vscode.window.showInformationMessage(msgShutdownJupyterNotebookSessions, yes, no) === yes;
if (!proceed) {
throw Error('Python update failed due to active Python notebook sessions.');
}
// Temporarily change the pythonExecutable to the old Python path so that the
// correct path is used to shutdown the old Python server.
let newPythonExecutable = this._pythonExecutable;
this._pythonExecutable = this._oldPythonExecutable;
await vscode.commands.executeCommand('notebook.action.stopJupyterNotebookSessions');
this._pythonExecutable = newPythonExecutable;
}

this._oldUserInstalledPipPackages = await this.getInstalledPipPackages(this._oldPythonExecutable, true);

if (await this.getInstalledPythonVersion(this._oldPythonExecutable) === '3.6.6') {
// Remove '0.0.1' from python executable path since the bundle version is removed from the path for ADS-Python 3.8.10+.
this._pythonExecutable = path.join(this._pythonInstallationPath, process.platform === constants.winPlatform ? 'python.exe' : 'bin/python3');
}
await fs.remove(this._oldPythonInstallationPath).catch(err => {
throw (err);
});
}
}

if (!pythonExists || forceInstall || upgradePython) {
await this.installPythonPackage(backgroundOperation, this._usingExistingPython, this._pythonInstallationPath, this.outputChannel);
// reinstall pip to make sure !pip command works
if (!this._usingExistingPython) {
// reinstall pip to make sure !pip command works on Windows
if (!this._usingExistingPython && process.platform === constants.winPlatform) {
let packages: PythonPkgDetails[] = await this.getInstalledPipPackages(this._pythonExecutable);
let pip: PythonPkgDetails = packages.find(x => x.name === 'pip');
let cmd = `"${this._pythonExecutable}" -m pip install --force-reinstall pip=="${pip.version}"`;

@@ -191,14 +234,13 @@ export class JupyterServerInstallation implements IJupyterServerInstallation {
return Promise.resolve();
}

let bundleVersion = constants.pythonBundleVersion;
let pythonVersion = constants.pythonVersion;
let platformId = utils.getOSPlatformId();
let packageName: string;
let pythonDownloadUrl: string;

let extension = process.platform === constants.winPlatform ? 'zip' : 'tar.gz';
packageName = `python-${pythonVersion}-${platformId}-${bundleVersion}.${extension}`;
packageName = `python-${pythonVersion}-${platformId}.${extension}`;

switch (process.platform) {
case constants.winPlatform:

@@ -257,16 +299,6 @@ export class JupyterServerInstallation implements IJupyterServerInstallation {
.on('close', async () => {
//unpack python zip/tar file
outputChannel.appendLine(msgPythonUnpackPending);
let pythonSourcePath = path.join(installPath, constants.pythonBundleVersion);
if (await utils.exists(pythonSourcePath)) {
try {
// eslint-disable-next-line no-sync
fs.removeSync(pythonSourcePath);
} catch (err) {
backgroundOperation.updateStatus(azdata.TaskStatus.InProgress, msgPythonUnpackError);
return reject(err);
}
}
if (process.platform === constants.winPlatform) {
try {
let zippedFile = new zip(pythonPackagePathLocal);

@@ -319,16 +351,11 @@ export class JupyterServerInstallation implements IJupyterServerInstallation {
delete process.env['PYTHONSTARTUP'];
delete process.env['PYTHONHOME'];

//Python source path up to bundle version
let pythonSourcePath = this._usingExistingPython
? this._pythonInstallationPath
: path.join(this._pythonInstallationPath, constants.pythonBundleVersion);

// Update python paths and properties to reference user's local python.
let pythonBinPathSuffix = process.platform === constants.winPlatform ? '' : 'bin';

this._pythonExecutable = JupyterServerInstallation.getPythonExePath(this._pythonInstallationPath, this._usingExistingPython);
this.pythonBinPath = path.join(pythonSourcePath, pythonBinPathSuffix);
this._pythonExecutable = JupyterServerInstallation.getPythonExePath(this._pythonInstallationPath);
this.pythonBinPath = path.join(this._pythonInstallationPath, pythonBinPathSuffix);

this._usingConda = this.isCondaInstalled();

@@ -338,15 +365,15 @@ export class JupyterServerInstallation implements IJupyterServerInstallation {
let delimiter = path.delimiter;
this.pythonEnvVarPath = this.pythonBinPath + delimiter + this.pythonEnvVarPath;
if (process.platform === constants.winPlatform) {
let pythonScriptsPath = path.join(pythonSourcePath, 'Scripts');
let pythonScriptsPath = path.join(this._pythonInstallationPath, 'Scripts');
this.pythonEnvVarPath = pythonScriptsPath + delimiter + this.pythonEnvVarPath;

if (this._usingConda) {
this.pythonEnvVarPath = [
path.join(pythonSourcePath, 'Library', 'mingw-w64', 'bin'),
path.join(pythonSourcePath, 'Library', 'usr', 'bin'),
path.join(pythonSourcePath, 'Library', 'bin'),
path.join(pythonSourcePath, 'condabin'),
path.join(this._pythonInstallationPath, 'Library', 'mingw-w64', 'bin'),
path.join(this._pythonInstallationPath, 'Library', 'usr', 'bin'),
path.join(this._pythonInstallationPath, 'Library', 'bin'),
path.join(this._pythonInstallationPath, 'condabin'),
this.pythonEnvVarPath
].join(delimiter);
}

@@ -405,7 +432,7 @@ export class JupyterServerInstallation implements IJupyterServerInstallation {
// This step is skipped when using an existing installation or when upgrading
// packages, since those cases wouldn't overwrite the installation.
if (!installSettings.existingPython && !installSettings.packageUpgradeOnly) {
let pythonExePath = JupyterServerInstallation.getPythonExePath(installSettings.installPath, false);
let pythonExePath = JupyterServerInstallation.getPythonExePath(installSettings.installPath);
let isPythonRunning = await this.isPythonRunning(pythonExePath);
if (isPythonRunning) {
return Promise.reject(msgPythonRunningError);

@@ -438,7 +465,21 @@ export class JupyterServerInstallation implements IJupyterServerInstallation {

this._installCompletion.resolve();
this._installInProgress = false;
await vscode.commands.executeCommand('notebook.action.restartJupyterNotebookSessions');
if (this._upgradeInProcess) {
// Pass in false for restartJupyterServer parameter since the jupyter server has already been shutdown
// when removing the old Python version on Windows.
if (process.platform === constants.winPlatform) {
await vscode.commands.executeCommand('notebook.action.restartJupyterNotebookSessions', false);
} else {
await vscode.commands.executeCommand('notebook.action.restartJupyterNotebookSessions');
}
if (this._oldUserInstalledPipPackages.length !== 0) {
await this.createInstallPipPackagesHelpNotebook(this._oldUserInstalledPipPackages);
}
this._upgradeInProcess = false;
} else {
await vscode.commands.executeCommand('notebook.action.restartJupyterNotebookSessions');
}
})
.catch(err => {
let errorMsg = msgDependenciesInstallationFailed(utils.getErrorMessage(err));

@@ -464,6 +505,12 @@ export class JupyterServerInstallation implements IJupyterServerInstallation {
}

let isPythonInstalled = JupyterServerInstallation.isPythonInstalled();

// If the latest version of ADS-Python is not installed, then prompt the user to upgrade
if (isPythonInstalled && !this._usingExistingPython && utils.compareVersions(await this.getInstalledPythonVersion(this._pythonExecutable), constants.pythonVersion) < 0) {
this.promptUserForPythonUpgrade();
}

let areRequiredPackagesInstalled = await this.areRequiredPackagesInstalled(kernelDisplayName);
if (!isPythonInstalled || !areRequiredPackagesInstalled) {
let pythonWizard = new ConfigurePythonWizard(this);

@@ -474,6 +521,22 @@ export class JupyterServerInstallation implements IJupyterServerInstallation {
}
}

private async promptUserForPythonUpgrade(): Promise<void> {
let notebookConfig: vscode.WorkspaceConfiguration = vscode.workspace.getConfiguration(constants.notebookConfigKey);
if (notebookConfig && notebookConfig[constants.dontPromptPythonUpdate]) {
return;
}

let response = await vscode.window.showInformationMessage(msgPythonVersionUpdatePrompt(constants.pythonVersion), yes, no, dontAskAgain);
if (response === yes) {
this._oldPythonInstallationPath = path.join(this._pythonInstallationPath);
this._oldPythonExecutable = this._pythonExecutable;
vscode.commands.executeCommand(constants.jupyterConfigurePython);
} else if (response === dontAskAgain) {
await notebookConfig.update(constants.dontPromptPythonUpdate, true, vscode.ConfigurationTarget.Global);
}
}

private async areRequiredPackagesInstalled(kernelDisplayName: string): Promise<boolean> {
if (this._kernelSetupCache.get(kernelDisplayName)) {
return true;

@@ -487,7 +550,7 @@ export class JupyterServerInstallation implements IJupyterServerInstallation {
let requiredPackages = this.getRequiredPackagesForKernel(kernelDisplayName);
for (let pkg of requiredPackages) {
let installedVersion = installedPackageMap.get(pkg.name);
if (!installedVersion || utils.comparePackageVersions(installedVersion, pkg.version) < 0) {
if (!installedVersion || utils.compareVersions(installedVersion, pkg.version) < 0) {
return false;
}
}

@@ -508,7 +571,7 @@ export class JupyterServerInstallation implements IJupyterServerInstallation {

packages.forEach(pkg => {
let installedPkgVersion = pipVersionMap.get(pkg.name);
if (!installedPkgVersion || utils.comparePackageVersions(installedPkgVersion, pkg.version) < 0) {
if (!installedPkgVersion || utils.compareVersions(installedPkgVersion, pkg.version) < 0) {
packagesToInstall.push(pkg);
}
});

@@ -520,7 +583,7 @@ export class JupyterServerInstallation implements IJupyterServerInstallation {
}
}

public async getInstalledPipPackages(pythonExePath?: string): Promise<PythonPkgDetails[]> {
public async getInstalledPipPackages(pythonExePath?: string, checkUserPackages: boolean = false): Promise<PythonPkgDetails[]> {
try {
if (pythonExePath) {
if (!fs.existsSync(pythonExePath)) {

@@ -531,6 +594,9 @@ export class JupyterServerInstallation implements IJupyterServerInstallation {
}

let cmd = `"${pythonExePath ?? this.pythonExecutable}" -m pip list --format=json`;
if (checkUserPackages) {
cmd = cmd.concat(' --user');
}
let packagesInfo = await this.executeBufferedCommand(cmd);
let packages: PythonPkgDetails[] = [];
if (packagesInfo) {

@@ -676,8 +742,7 @@ export class JupyterServerInstallation implements IJupyterServerInstallation {
return false;
}

let useExistingInstall = JupyterServerInstallation.getExistingPythonSetting();
let pythonExe = JupyterServerInstallation.getPythonExePath(pathSetting, useExistingInstall);
let pythonExe = JupyterServerInstallation.getPythonExePath(pathSetting);
// eslint-disable-next-line no-sync
return fs.existsSync(pythonExe);
}

@@ -713,11 +778,25 @@ export class JupyterServerInstallation implements IJupyterServerInstallation {
return path;
}

public static getPythonExePath(pythonInstallPath: string, useExistingInstall: boolean): string {
return path.join(
public static getPythonExePath(pythonInstallPath: string): string {
// The bundle version (0.0.1) is removed from the path for ADS-Python 3.8.10+.
// Only ADS-Python 3.6.6 contains the bundle version in the path.
let oldPythonPath = path.join(
pythonInstallPath,
useExistingInstall ? '' : constants.pythonBundleVersion,
'0.0.1',
process.platform === constants.winPlatform ? 'python.exe' : 'bin/python3');
let newPythonPath = path.join(
pythonInstallPath,
process.platform === constants.winPlatform ? 'python.exe' : 'bin/python3');

// Note: If Python exists in both paths (which can happen if the user chose not to remove Python 3.6 when upgrading),
// then we want to default to using the newer Python version.
if (!fs.existsSync(newPythonPath) && !fs.existsSync(oldPythonPath) || fs.existsSync(newPythonPath)) {
return newPythonPath;
}
// If Python only exists in the old path then return the old path.
// This is for users who are still using Python 3.6
return oldPythonPath;
}
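Not part of the diff: the single-argument `getPythonExePath` above now probes two layouts, the legacy ADS-Python 3.6.6 bundle folder and the flat 3.8.10+ layout, and prefers the newer layout whenever it exists. A small sketch of the two candidate paths for a hypothetical install directory (`'win32'` stands in for `constants.winPlatform`):

```typescript
import * as path from 'path';

// Hypothetical install directory, for illustration only.
const installDir = '/home/user/azuredatastudio-python';
const exeName = process.platform === 'win32' ? 'python.exe' : 'bin/python3';

const legacyExe = path.join(installDir, '0.0.1', exeName); // ADS-Python 3.6.6 keeps the bundle folder
const currentExe = path.join(installDir, exeName);         // ADS-Python 3.8.10+ drops it

// Per the implementation above, currentExe wins whenever it exists (including when both do);
// legacyExe is only returned when Python is found solely in the old location.
console.log({ legacyExe, currentExe });
```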
private async getPythonUserDir(pythonExecutable: string): Promise<string> {

@@ -780,6 +859,39 @@ export class JupyterServerInstallation implements IJupyterServerInstallation {
kernelSpec.argv = kernelSpec.argv?.map(arg => arg.replace('{ADS_PYTHONDIR}', this._pythonInstallationPath));
await fs.writeFile(kernelPath, JSON.stringify(kernelSpec, undefined, '\t'));
}

private async createInstallPipPackagesHelpNotebook(userInstalledPipPackages: PythonPkgDetails[]): Promise<void> {
let packagesList: string[] = userInstalledPipPackages.map(pkg => { return pkg.name; });
// Filter out prose-codeaccelerator since we no longer ship it and it is not on Pypi.
packagesList = packagesList.filter(pkg => pkg !== 'prose-codeaccelerator');
let installPackagesCode = `import sys\n!{sys.executable} -m pip install --user ${packagesList.join(' ')}`;
let initialContent: azdata.nb.INotebookContents = {
cells: [{
cell_type: 'markdown',
source: ['# Install Pip Packages\n\nThis notebook will help you reinstall the pip packages you were previously using so that they can be used with Python 3.8.\n\n**Note:** Some packages may have a dependency on Python 3.6 and will not work with Python 3.8.\n\nRun the following code cell after Python 3.8 installation is complete.'],
}, {
cell_type: 'code',
source: [installPackagesCode],
}],
metadata: {
kernelspec: {
name: 'python3',
language: 'python3',
display_name: 'Python 3'
},
language_info: {
name: 'python3'
}
},
nbformat: 4,
nbformat_minor: 5
};

await vscode.commands.executeCommand('_notebook.command.new', {
initialContent: JSON.stringify(initialContent),
defaultKernel: 'Python 3'
});
}
}

export interface PythonPkgDetails {

@@ -14,6 +14,7 @@ import { IQuestion, QuestionTypes } from '../prompts/question';
import CodeAdapter from '../prompts/adapter';
import { getErrorMessage, isEditorTitleFree } from '../common/utils';
import * as constants from '../common/constants';
import { readJson } from 'fs-extra';

export class NotebookUriHandler implements vscode.UriHandler {

@@ -78,11 +79,12 @@ export class NotebookUriHandler implements vscode.UriHandler {
url = decodeURI(url);
let uri = vscode.Uri.parse(url);
switch (uri.scheme) {
case 'file':
case 'http':
case 'https':
break;
default:
vscode.window.showErrorMessage(localize('unsupportedScheme', "Cannot open link {0} as only HTTP and HTTPS links are supported", url));
vscode.window.showErrorMessage(localize('unsupportedScheme', "Cannot open link {0} as only HTTP, HTTPS, and File links are supported", url));
return;
}

@@ -94,8 +96,12 @@ export class NotebookUriHandler implements vscode.UriHandler {
if (!doOpen) {
return;
}

let contents = await this.download(url);
let contents: string;
if (uri.scheme === 'file') {
contents = await readJson(uri.fsPath);
} else {
contents = await this.download(url);
}
let untitledUriPath = this.getUntitledUriPath(path.basename(uri.fsPath));
let untitledUri = uri.with({ authority: '', scheme: 'untitled', path: untitledUriPath });
if (path.extname(uri.fsPath) === '.ipynb') {
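Not part of the diff: with the `file:` case added above, the URI handler accepts local notebook deep links in addition to HTTP and HTTPS, reading the contents from disk with `readJson`. A minimal sketch of the accepted link shape, mirroring the new test further down in this diff (the notebook path is illustrative):

```typescript
import * as vscode from 'vscode';

// Local path; on Windows an extra '/' is prepended because file URLs are host/path.
const notebookPath = '/tmp/hello.ipynb';
const prefix = process.platform === 'win32' ? '/' : '';
const deepLink = vscode.Uri.parse(`azuredatastudio://microsoft.notebook/open?url=file://${prefix}${notebookPath}`);

// Handing deepLink to NotebookUriHandler.handleUri() reads the .ipynb from disk and opens
// its contents in a new untitled notebook, just like the existing http/https download path.
console.log(deepLink.toString());
```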
@@ -244,7 +244,7 @@ describe('BooksTreeViewTests', function () {

it('revealActiveDocumentInViewlet should return correct bookItem for highlight', async () => {
let notebook1Path = path.join(rootFolderPath, 'Book', 'content', 'notebook1.ipynb').replace(/\\/g, '/');
let currentSelection = await bookTreeViewProvider.findAndExpandParentNode(notebook1Path);
let currentSelection = await bookTreeViewProvider.findAndExpandParentNode(notebook1Path, true);
should(currentSelection).not.be.undefined();
equalBookItems(currentSelection, expectedNotebook1);
});

@@ -329,7 +329,7 @@ describe('BooksTreeViewTests', function () {

it('revealActiveDocumentInViewlet should return correct bookItem for highlight', async () => {
let notebook1Path = path.join(rootFolderPath, 'Book', 'content', 'notebook1.ipynb').replace(/\\/g, '/');
let currentSelection = await providedbookTreeViewProvider.findAndExpandParentNode(notebook1Path);
let currentSelection = await providedbookTreeViewProvider.findAndExpandParentNode(notebook1Path, true);
should(currentSelection).not.be.undefined();
equalBookItems(currentSelection, expectedNotebook1);
});

@@ -49,50 +49,50 @@ describe('Utils Tests', function () {
should(utils.getOSPlatformId()).not.throw();
});

describe('comparePackageVersions', () => {
describe('compareVersions', () => {
const version1 = '1.0.0.0';
const version1Revision = '1.0.0.1';
const version2 = '2.0.0.0';
const shortVersion1 = '1';

it('same id', () => {
should(utils.comparePackageVersions(version1, version1)).equal(0);
should(utils.compareVersions(version1, version1)).equal(0);
});

it('first version lower', () => {
should(utils.comparePackageVersions(version1, version2)).equal(-1);
should(utils.compareVersions(version1, version2)).equal(-1);
});

it('second version lower', () => {
should(utils.comparePackageVersions(version2, version1)).equal(1);
should(utils.compareVersions(version2, version1)).equal(1);
});

it('short first version is padded correctly', () => {
should(utils.comparePackageVersions(shortVersion1, version1)).equal(0);
should(utils.compareVersions(shortVersion1, version1)).equal(0);
});

it('short second version is padded correctly when', () => {
should(utils.comparePackageVersions(version1, shortVersion1)).equal(0);
should(utils.compareVersions(version1, shortVersion1)).equal(0);
});

it('correctly compares version with only minor version difference', () => {
should(utils.comparePackageVersions(version1Revision, version1)).equal(1);
should(utils.compareVersions(version1Revision, version1)).equal(1);
});

it('equivalent versions with wildcard characters', () => {
should(utils.comparePackageVersions('1.*.3', '1.5.3')).equal(0);
should(utils.compareVersions('1.*.3', '1.5.3')).equal(0);
});

it('lower version with wildcard characters', () => {
should(utils.comparePackageVersions('1.4.*', '1.5.3')).equal(-1);
should(utils.compareVersions('1.4.*', '1.5.3')).equal(-1);
});

it('higher version with wildcard characters', () => {
should(utils.comparePackageVersions('4.5.6', '3.*')).equal(1);
should(utils.compareVersions('4.5.6', '3.*')).equal(1);
});

it('all wildcard strings should be equal', () => {
should(utils.comparePackageVersions('*.*', '*.*.*')).equal(0);
should(utils.compareVersions('*.*', '*.*.*')).equal(0);
});
});

@@ -459,7 +459,7 @@ describe('Utils Tests', function () {

describe('getPinnedNotebooks', function (): void {
it('Should NOT have any pinned notebooks', async function (): Promise<void> {
let pinnedNotebooks: utils.IBookNotebook[] = utils.getPinnedNotebooks();
let pinnedNotebooks: utils.IPinnedNotebook[] = utils.getPinnedNotebooks();

should(pinnedNotebooks.length).equal(0, 'Should not have any pinned notebooks');
});
@@ -8,10 +8,18 @@ import * as vscode from 'vscode';
import * as sinon from 'sinon';
import * as azdata from 'azdata';
import * as nock from 'nock';
import * as os from 'os';
import * as path from 'path';
import { promises as fs } from 'fs';
import uuid = require('uuid');

import * as loc from '../../common/localizedConstants';
import * as constants from '../../common/constants';

import { NotebookUriHandler } from '../../protocol/notebookUriHandler';
import { CellTypes } from '../../contracts/content';
import { winPlatform } from '../../common/constants';

describe('Notebook URI Handler', function (): void {
let notebookUriHandler: NotebookUriHandler;

@@ -47,18 +55,20 @@ describe('Notebook URI Handler', function (): void {
});

it('should show error message when no query passed into open', async function (): Promise<void> {
let showQuickPickStub = sinon.stub(vscode.window, 'showQuickPick').resolves(Promise.resolve(loc.msgYes) as any);
await notebookUriHandler.handleUri(vscode.Uri.parse('azuredatastudio://microsoft.notebook/open'));

sinon.assert.calledOnce(showQuickPickStub);
sinon.assert.calledOnce(showErrorMessageSpy);
});

it('should show error message when file uri scheme is not https or http', async function (): Promise<void> {
await notebookUriHandler.handleUri(vscode.Uri.parse('azuredatastudio://microsoft.notebook/open?file://hello.ipynb'));
it('should show error message when uri scheme is not https, http, or file', async function (): Promise<void> {
await notebookUriHandler.handleUri(vscode.Uri.parse('azuredatastudio://microsoft.notebook/open?//url=aaa%3A%2F%2Fhello.ipynb'));
sinon.assert.calledOnce(showErrorMessageSpy);
});

it('should show error when file is not found given file uri scheme https', async function (): Promise<void> {
let showQuickPickStub = sinon.stub(vscode.window, 'showQuickPick').resolves(Promise.resolve(loc.msgYes) as any);

await notebookUriHandler.handleUri(notebookUri);

sinon.assert.calledOnce(showQuickPickStub);

@@ -76,7 +86,7 @@ describe('Notebook URI Handler', function (): void {

});

it('should open the notebook when file uri is valid', async function (): Promise<void> {
it('should open the notebook when https uri file is valid', async function (): Promise<void> {
let showQuickPickStub = sinon.stub(vscode.window, 'showQuickPick').resolves(Promise.resolve(loc.msgYes) as any);
nock('https://127.0.0.1')
.get(`/Hello.ipynb`)

@@ -107,9 +117,50 @@ describe('Notebook URI Handler', function (): void {
.reply(httpErrorCode);

await notebookUriHandler.handleUri(notebookUri);

sinon.assert.callCount(showErrorMessageSpy, 1);
sinon.assert.notCalled(showNotebookDocumentStub);
});
});

it('should open notebook when file uri is valid', async function (): Promise<void> {
let showQuickPickStub = sinon.stub(vscode.window, 'showQuickPick').resolves(Promise.resolve(loc.msgYes) as any);
let notebookDir: string = path.join(os.tmpdir(), `notebook_${uuid.v4()}`);
let notebookPath: string = path.join(notebookDir, 'hello.ipynb');

await fs.mkdir(notebookDir);
let baseUrl = 'azuredatastudio://microsoft.notebook/open?url=file://';
if (process.platform === winPlatform) {
// URI paths are formatted as "hostname/path", but since we're using a local path
// we omit the host part and just add the slash. Unix paths already start with a
// forward slash, but we have to prepend it manually when using Windows paths.
baseUrl = baseUrl + '/';
}
let fileURI = baseUrl + notebookPath;
let fileNotebookUri = vscode.Uri.parse(fileURI);
let notebookContent: azdata.nb.INotebookContents = {
cells: [{
cell_type: CellTypes.Code,
source: ['x = 1 \ny = 2'],
metadata: { language: 'python', tags: ['parameters'] },
execution_count: 1
}],
metadata: {
kernelspec: {
name: 'python3',
language: 'python',
display_name: 'Python 3'
}
},
nbformat: 4,
nbformat_minor: 5
};

await fs.writeFile(notebookPath, JSON.stringify(notebookContent));

await notebookUriHandler.handleUri(fileNotebookUri);

sinon.assert.calledOnce(showQuickPickStub);
sinon.assert.calledWith(showNotebookDocumentStub, sinon.match.any, sinon.match({ initialContent: notebookContent }));
sinon.assert.callCount(showErrorMessageSpy, 0);
});
});

@@ -1213,9 +1213,9 @@ lodash.set@^4.3.2:
integrity sha1-2HV7HagH3eJIFrDWqEvqGnYjCyM=

lodash@^4.16.4, lodash@^4.17.13, lodash@^4.17.4:
version "4.17.19"
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.19.tgz#e48ddedbe30b3321783c5b4301fbd353bc1e4a4b"
integrity sha512-JNvd8XER9GQX0v2qJgsaN/mzFCNA5BRe/j8JN9d+tWyGLSodKQHKFicdwNYzWwI3wjRnaKPsGj1XkBjx/F96DQ==
version "4.17.21"
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c"
integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==

make-dir@^2.1.0:
version "2.1.0"

@@ -1,2 +1,2 @@
client/src/**
client/tsconfig.json
src/**
tsconfig.json

@@ -14,7 +14,7 @@
"activationEvents": [
"*"
],
"main": "./client/out/main",
"main": "./out/main",
"scripts": {
"compile": "gulp compile-extension:profiler-client"
},