Mirror of https://github.com/ckaczor/azuredatastudio.git

Remove all Big Data Cluster features (#21369)
@@ -1,10 +0,0 @@
{
"argv": [
"python",
"-m",
"sparkmagic.kernels.pysparkkernel.pysparkkernel",
"-f",
"{connection_file}"
],
"display_name": "PySpark"
}
@@ -1,10 +0,0 @@
{
"argv": [
"python",
"-m",
"sparkmagic.kernels.sparkkernel.sparkkernel",
"-f",
"{connection_file}"
],
"display_name": "Spark | Scala"
}
@@ -1,10 +0,0 @@
{
"argv": [
"python",
"-m",
"sparkmagic.kernels.sparkrkernel.sparkrkernel",
"-f",
"{connection_file}"
],
"display_name": "Spark | R"
}
@@ -94,10 +94,6 @@
}
},
"commands": [
{
"command": "notebook.command.analyzeNotebook",
"title": "%notebook.analyzeJupyterNotebook%"
},
{
"command": "notebook.command.open",
"title": "%notebook.command.open%"
@@ -128,10 +124,6 @@
"title": "%notebook.command.addcell%",
"icon": "resources/dark/touchbar_add_cell.png"
},
{
"command": "jupyter.cmd.analyzeNotebook",
"title": "%title.analyzeJupyterNotebook%"
},
{
"command": "jupyter.task.newNotebook",
"title": "%title.newJupyterNotebook%",
@@ -334,10 +326,6 @@
],
"menus": {
"commandPalette": [
{
"command": "notebook.command.analyzeNotebook",
"when": "false"
},
{
"command": "notebook.command.open"
},
@@ -373,10 +361,6 @@
"command": "jupyter.cmd.newNotebook",
"when": "false"
},
{
"command": "jupyter.cmd.analyzeNotebook",
"when": "false"
},
{
"command": "jupyter.task.openNotebook",
"when": "false"
@@ -486,18 +470,6 @@
"group": "1_notebook@2"
}
],
"objectExplorer/item/context": [
{
"command": "notebook.command.analyzeNotebook",
"when": "nodeType=~/^mssqlCluster/ && nodeLabel=~/[^\\s]+(\\.(csv|tsv|txt))$/ && nodeType == mssqlCluster:file",
"group": "1notebook@1"
},
{
"command": "jupyter.cmd.analyzeNotebook",
"when": "nodeType=~/^hdfs/ && nodeLabel=~/[^\\s]+(\\.(csv|tsv|txt))$/ && nodeType == hdfs:file",
"group": "1notebook@1"
}
],
"view/item/context": [
{
"command": "notebook.command.trustBook",
@@ -670,14 +642,6 @@
"sql"
]
},
{
"magic": "lang_r",
"language": "r",
"executionTarget": null,
"kernels": [
"sql"
]
},
{
"magic": "lang_java",
"language": "java",
@@ -694,39 +658,6 @@
".ipynb"
],
"standardKernels": [
{
"name": "pysparkkernel",
"displayName": "PySpark",
"connectionProviderIds": [
"MSSQL"
],
"blockedOnSAW": true,
"supportedLanguages": [
"python"
]
},
{
"name": "sparkkernel",
"displayName": "Spark | Scala",
"connectionProviderIds": [
"MSSQL"
],
"supportedLanguages": [
"scala"
],
"blockedOnSAW": true
},
{
"name": "sparkrkernel",
"displayName": "Spark | R",
"connectionProviderIds": [
"MSSQL"
],
"supportedLanguages": [
"r"
],
"blockedOnSAW": true
},
{
"name": "python3",
"displayName": "Python 3",
@@ -36,7 +36,6 @@ export const jupyterNewNotebookTask = 'jupyter.task.newNotebook';
export const jupyterOpenNotebookTask = 'jupyter.task.openNotebook';
export const jupyterNewNotebookCommand = 'jupyter.cmd.newNotebook';
export const jupyterReinstallDependenciesCommand = 'jupyter.reinstallDependencies';
export const jupyterAnalyzeCommand = 'jupyter.cmd.analyzeNotebook';
export const jupyterManagePackages = 'jupyter.cmd.managePackages';
export const jupyterConfigurePython = 'jupyter.cmd.configurePython';
export const localhostName = 'localhost';
@@ -45,9 +44,6 @@ export const PackageNotFoundError = localize('managePackages.packageNotFound', "

export const ipykernelDisplayName = 'Python 3 (ipykernel)';
export const python3DisplayName = 'Python 3';
export const pysparkDisplayName = 'PySpark';
export const sparkScalaDisplayName = 'Spark | Scala';
export const sparkRDisplayName = 'Spark | R';
export const powershellDisplayName = 'PowerShell';
export const allKernelsName = 'All Kernels';

@@ -84,15 +80,6 @@ export const pythonWindowsInstallUrl = 'https://go.microsoft.com/fwlink/?linkid=
export const pythonMacInstallUrl = 'https://go.microsoft.com/fwlink/?linkid=2163337';
export const pythonLinuxInstallUrl = 'https://go.microsoft.com/fwlink/?linkid=2163336';

export const KNOX_ENDPOINT_SERVER = 'host';
export const KNOX_ENDPOINT_PORT = 'knoxport';
export const KNOX_ENDPOINT_GATEWAY = 'gateway';
export const CONTROLLER_ENDPOINT = 'controller';
export const SQL_PROVIDER = 'MSSQL';
export const USER = 'user';
export const AUTHTYPE = 'authenticationType';
export const INTEGRATED_AUTH = 'integrated';

// The version of the notebook file format that we support
export const NBFORMAT = 4;
export const NBFORMAT_MINOR = 2;
@@ -12,8 +12,6 @@ export const msgNo = localize('msgNo', "No");

// Jupyter Constants ///////////////////////////////////////////////////////
export const msgSampleCodeDataFrame = localize('msgSampleCodeDataFrame', "This sample code loads the file into a data frame and shows the first 10 results.");
export const noBDCConnectionError = localize('noBDCConnectionError', "Spark kernels require a connection to a SQL Server Big Data Cluster master instance.");
export const providerNotValidError = localize('providerNotValidError', "Non-MSSQL providers are not supported for spark kernels.");

// Book view-let constants
export const allFiles = localize('allFiles', "All Files");
@@ -4,15 +4,12 @@
*--------------------------------------------------------------------------------------------*/

import * as azdata from 'azdata';
import * as os from 'os';
import * as vscode from 'vscode';
import * as nls from 'vscode-nls';
import { getErrorMessage } from '../common/utils';

const localize = nls.loadMessageBundle();

const JUPYTER_NOTEBOOK_PROVIDER = 'jupyter';
const msgSampleCodeDataFrame = localize('msgSampleCodeDataFrame', "This sample code loads the file into a data frame and shows the first 10 results.");
const noNotebookVisible = localize('noNotebookVisible', "No notebook editor is active");

export class NotebookUtils {
@@ -96,31 +93,4 @@ export class NotebookUtils {
public async toggleMarkdownStyle(style: string, showUI?: boolean, value?: string): Promise<void> {
return vscode.commands.executeCommand(style, showUI, value);
}

public async analyzeNotebook(oeContext?: azdata.ObjectExplorerContext): Promise<void> {
let editor = await azdata.nb.showNotebookDocument(vscode.Uri.from({ scheme: 'untitled' }), {
connectionProfile: oeContext ? oeContext.connectionProfile : undefined,
providerId: JUPYTER_NOTEBOOK_PROVIDER,
preview: false,
defaultKernel: {
name: 'pysparkkernel',
display_name: 'PySpark',
language: 'python'
}
});
if (oeContext && oeContext.nodeInfo && oeContext.nodeInfo.nodePath) {
// Get the file path after '/HDFS'
let hdfsPath: string = oeContext.nodeInfo.nodePath.substring(oeContext.nodeInfo.nodePath.indexOf('/HDFS') + '/HDFS'.length);
if (hdfsPath.length > 0) {
let analyzeCommand = '#' + msgSampleCodeDataFrame + os.EOL + 'df = (spark.read.option("inferSchema", "true")'
+ os.EOL + '.option("header", "true")' + os.EOL + '.csv("{0}"))' + os.EOL + 'df.show(10)';
await editor.edit(editBuilder => {
editBuilder.insertCell({
cell_type: 'code',
source: analyzeCommand.replace('{0}', hdfsPath)
}, 0);
});
}
}
}
}
@@ -3,28 +3,19 @@
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/

import * as bdc from 'bdc';
import * as childProcess from 'child_process';
import * as fs from 'fs-extra';
import * as nls from 'vscode-nls';
import * as vscode from 'vscode';
import * as azdata from 'azdata';
import * as crypto from 'crypto';
import { notebookConfigKey, pinnedBooksConfigKey, AUTHTYPE, INTEGRATED_AUTH, KNOX_ENDPOINT_PORT, KNOX_ENDPOINT_SERVER } from './constants';
import { notebookConfigKey, pinnedBooksConfigKey } from './constants';
import { IPrompter, IQuestion, QuestionTypes } from '../prompts/question';
import { BookTreeItemFormat } from '../book/bookTreeItem';
import * as loc from './localizedConstants';

const localize = nls.loadMessageBundle();

export function getKnoxUrl(host: string, port: string): string {
return `https://${host}:${port}/gateway`;
}

export function getLivyUrl(serverName: string, port: string): string {
return this.getKnoxUrl(serverName, port) + '/default/livy/v1/';
}

export async function ensureDir(dirPath: string, outputChannel?: vscode.OutputChannel): Promise<void> {
outputChannel?.appendLine(localize('ensureDirOutputMsg', "... Ensuring {0} exists", dirPath));
await fs.ensureDir(dirPath);
@@ -109,15 +100,6 @@ export enum Platform {
Others
}

interface RawEndpoint {
serviceName: string;
description?: string;
endpoint?: string;
protocol?: string;
ipAddress?: string;
port?: number;
}

export function getOSPlatformId(): string {
let platformId = undefined;
switch (process.platform) {
@@ -268,61 +250,6 @@ export function isPackageSupported(pythonVersion: string, packageVersionConstrai
return supportedVersionFound;
}

export function getClusterEndpoints(serverInfo: azdata.ServerInfo): bdc.IEndpointModel[] {
let endpoints: RawEndpoint[] = serverInfo.options['clusterEndpoints'];
if (!endpoints || endpoints.length === 0) { return []; }

return endpoints.map(e => {
// If endpoint is missing, we're on CTP bits. All endpoints from the CTP serverInfo should be treated as HTTPS
let endpoint = e.endpoint ? e.endpoint : `https://${e.ipAddress}:${e.port}`;
let updatedEndpoint: bdc.IEndpointModel = {
name: e.serviceName,
description: e.description,
endpoint: endpoint,
protocol: e.protocol
};
return updatedEndpoint;
});
}

export type HostAndIp = { host: string, port: string };

export function getHostAndPortFromEndpoint(endpoint: string): HostAndIp {
let authority = vscode.Uri.parse(endpoint).authority;
let hostAndPortRegex = /^(.*)([,:](\d+))/g;
let match = hostAndPortRegex.exec(authority);
if (match) {
return {
host: match[1],
port: match[3]
};
}
return {
host: authority,
port: undefined
};
}

export function isIntegratedAuth(connection: azdata.IConnectionProfile): boolean {
return connection.options[AUTHTYPE] && connection.options[AUTHTYPE].toLowerCase() === INTEGRATED_AUTH.toLowerCase();
}

export function isSparkKernel(kernelName: string): boolean {
return kernelName && kernelName.toLowerCase().indexOf('spark') > -1;
}

export function setHostAndPort(delimeter: string, connection: azdata.IConnectionProfile): void {
let originalHost = connection.options[KNOX_ENDPOINT_SERVER];
if (!originalHost) {
return;
}
let index = originalHost.indexOf(delimeter);
if (index > -1) {
connection.options[KNOX_ENDPOINT_SERVER] = originalHost.slice(0, index);
connection.options[KNOX_ENDPOINT_PORT] = originalHost.slice(index + 1);
}
}

export async function exists(path: string): Promise<boolean> {
try {
await fs.access(path);
@@ -332,22 +259,6 @@ export async function exists(path: string): Promise<boolean> {
}
}

const bdcConfigSectionName = 'bigDataCluster';
const ignoreSslConfigName = 'ignoreSslVerification';

/**
* Retrieves the current setting for whether to ignore SSL verification errors
*/
export function getIgnoreSslVerificationConfigSetting(): boolean {
try {
const config = vscode.workspace.getConfiguration(bdcConfigSectionName);
return config.get<boolean>(ignoreSslConfigName, true);
} catch (error) {
console.error('Unexpected error retrieving ${bdcConfigSectionName}.${ignoreSslConfigName} setting : ', error);
}
return true;
}

export function debounce(delay: number): Function {
return decorate((fn, key) => {
const timerKey = `$debounce$${key}`;
@@ -7,7 +7,7 @@ import * as azdata from 'azdata';
import * as nls from 'vscode-nls';
import { BasePage } from './basePage';
import { JupyterServerInstallation } from '../../jupyter/jupyterServerInstallation';
import { python3DisplayName, pysparkDisplayName, sparkScalaDisplayName, sparkRDisplayName, powershellDisplayName, allKernelsName } from '../../common/constants';
import { python3DisplayName, powershellDisplayName, allKernelsName } from '../../common/constants';
import { getDropdownValue } from '../../common/utils';

const localize = nls.loadMessageBundle();
@@ -39,7 +39,7 @@ export class PickPackagesPage extends BasePage {
value: this.model.kernelName
}).component();
} else {
let dropdownValues = [python3DisplayName, pysparkDisplayName, sparkScalaDisplayName, sparkRDisplayName, powershellDisplayName, allKernelsName];
let dropdownValues = [python3DisplayName, powershellDisplayName, allKernelsName];
this.kernelDropdown = this.view.modelBuilder.dropDown().withProps({
value: dropdownValues[0],
values: dropdownValues,
@@ -128,9 +128,6 @@ export async function activate(extensionContext: vscode.ExtensionContext): Promi
extensionContext.subscriptions.push(vscode.commands.registerCommand('notebook.command.addtext', async () => {
await appContext.notebookUtils.addCell('markdown');
}));
extensionContext.subscriptions.push(vscode.commands.registerCommand('notebook.command.analyzeNotebook', async (explorerContext: azdata.ObjectExplorerContext) => {
await appContext.notebookUtils.analyzeNotebook(explorerContext);
}));
extensionContext.subscriptions.push(vscode.window.registerUriHandler(new NotebookUriHandler()));

extensionContext.subscriptions.push(vscode.commands.registerCommand('books.command.openLocalizedBooks', async () => {
@@ -58,7 +58,7 @@ describe('Notebook Extension Python Installation', function () {
console.log('Uninstalling existing pip dependencies');
let install = jupyterController.jupyterInstallation;
let pythonExe = JupyterServerInstallation.getPythonExePath(pythonInstallDir);
let command = `"${pythonExe}" -m pip uninstall -y jupyter pandas sparkmagic`;
let command = `"${pythonExe}" -m pip uninstall -y jupyter`;
await executeStreamedCommand(command, { env: install.execOptions.env }, install.outputChannel);
console.log('Uninstalling existing pip dependencies is done');

@@ -84,15 +84,15 @@ describe('Notebook Extension Python Installation', function () {
let testPkgVersion = '0.24.2';
let expectedPkg: PythonPkgDetails = { name: testPkg, version: testPkgVersion };

await install.installPipPackages([{ name: testPkg, version: testPkgVersion}], false);
await install.installPipPackages([{ name: testPkg, version: testPkgVersion }], false);
let packages = await install.getInstalledPipPackages();
should(packages).containEql(expectedPkg);

await install.uninstallPipPackages([{ name: testPkg, version: testPkgVersion}]);
await install.uninstallPipPackages([{ name: testPkg, version: testPkgVersion }]);
packages = await install.getInstalledPipPackages();
should(packages).not.containEql(expectedPkg);

await install.installPipPackages([{ name: testPkg, version: testPkgVersion}], false);
await install.installPipPackages([{ name: testPkg, version: testPkgVersion }], false);
packages = await install.getInstalledPipPackages();
should(packages).containEql(expectedPkg);
});
@@ -6,12 +6,10 @@
import * as path from 'path';
import * as azdata from 'azdata';
import * as vscode from 'vscode';
import * as os from 'os';
import * as nls from 'vscode-nls';
const localize = nls.loadMessageBundle();

import * as constants from '../common/constants';
import * as localizedConstants from '../common/localizedConstants';
import { JupyterServerInstallation } from './jupyterServerInstallation';
import * as utils from '../common/utils';
import { IPrompter, IQuestion, QuestionTypes } from '../prompts/question';
@@ -66,9 +64,6 @@ export class JupyterController {
vscode.commands.registerCommand(constants.jupyterNewNotebookCommand, (explorerContext: azdata.ObjectExplorerContext) => {
return this.saveProfileAndCreateNotebook(explorerContext ? explorerContext.connectionProfile : undefined);
});
vscode.commands.registerCommand(constants.jupyterAnalyzeCommand, (explorerContext: azdata.ObjectExplorerContext) => {
return this.saveProfileAndAnalyzeNotebook(explorerContext);
});

vscode.commands.registerCommand(constants.jupyterReinstallDependenciesCommand, () => { return this.handleDependenciesReinstallation(); });
vscode.commands.registerCommand(constants.jupyterManagePackages, async (args) => { return this.doManagePackages(args); });
@@ -93,11 +88,7 @@ export class JupyterController {
}

private saveProfileAndCreateNotebook(profile: azdata.IConnectionProfile): Promise<void> {
return this.handleNewNotebookTask(undefined, profile);
}

private saveProfileAndAnalyzeNotebook(oeContext: azdata.ObjectExplorerContext): Promise<void> {
return this.handleNewNotebookTask(oeContext, oeContext.connectionProfile);
return this.handleNewNotebookTask(profile);
}

// EVENT HANDLERS //////////////////////////////////////////////////////
@@ -130,34 +121,17 @@ export class JupyterController {
}
}

private async handleNewNotebookTask(oeContext?: azdata.ObjectExplorerContext, profile?: azdata.IConnectionProfile): Promise<void> {
let editor = await azdata.nb.showNotebookDocument(vscode.Uri.from({ scheme: 'untitled' }), {
private async handleNewNotebookTask(profile?: azdata.IConnectionProfile): Promise<void> {
await azdata.nb.showNotebookDocument(vscode.Uri.from({ scheme: 'untitled' }), {
connectionProfile: profile,
providerId: constants.jupyterNotebookProviderId,
preview: false,
defaultKernel: {
name: 'pysparkkernel',
display_name: 'PySpark',
name: 'python3',
display_name: 'Python 3',
language: 'python'
}
});
if (oeContext && oeContext.nodeInfo && oeContext.nodeInfo.nodePath) {
// Get the file path after '/HDFS'
let hdfsPath: string = oeContext.nodeInfo.nodePath.substring(oeContext.nodeInfo.nodePath.indexOf('/HDFS') + '/HDFS'.length);
if (hdfsPath.length > 0) {
let analyzeCommand = '#' + localizedConstants.msgSampleCodeDataFrame + os.EOL + 'df = (spark.read.option(\"inferSchema\", \"true\")'
+ os.EOL + '.option(\"header\", \"true\")' + os.EOL + '.csv(\'{0}\'))' + os.EOL + 'df.show(10)';
// TODO re-enable insert into document once APIs are finalized.
// editor.document.cells[0].source = [analyzeCommand.replace('{0}', hdfsPath)];
await editor.edit(editBuilder => {
editBuilder.replace(0, {
cell_type: 'code',
source: analyzeCommand.replace('{0}', hdfsPath)
});
});

}
}
}

private async handleDependenciesReinstallation(): Promise<void> {
@@ -60,12 +60,6 @@ export class JupyterKernel implements nb.IKernel {
return true;
}

public get requiresConnection(): boolean {
// TODO would be good to have a smarter way to do this.
// for now only Spark kernels need a connection
return !!(this.kernelImpl.name && this.kernelImpl.name.toLowerCase().indexOf('spark') > -1);
}

public get isReady(): boolean {
return this.kernelImpl.isReady;
}
@@ -88,22 +88,6 @@ export const requiredPowershellPkg: PythonPkgDetails = {
version: '0.1.4'
};

export const requiredSparkPackages: PythonPkgDetails[] = [
requiredJupyterPkg,
{
name: 'cryptography',
version: '3.2.1',
installExactVersion: true
},
{
name: 'sparkmagic',
version: '0.12.9'
}, {
name: 'pandas',
version: '0.24.2'
}
];

export class JupyterServerInstallation implements IJupyterServerInstallation {
public extensionPath: string;
public pythonBinPath: string;
@@ -162,11 +146,8 @@ export class JupyterServerInstallation implements IJupyterServerInstallation {
this._requiredKernelPackages.set(constants.ipykernelDisplayName, [requiredJupyterPkg]);
this._requiredKernelPackages.set(constants.python3DisplayName, [requiredJupyterPkg]);
this._requiredKernelPackages.set(constants.powershellDisplayName, [requiredJupyterPkg, requiredPowershellPkg]);
this._requiredKernelPackages.set(constants.pysparkDisplayName, requiredSparkPackages);
this._requiredKernelPackages.set(constants.sparkScalaDisplayName, requiredSparkPackages);
this._requiredKernelPackages.set(constants.sparkRDisplayName, requiredSparkPackages);

let allPackages = requiredSparkPackages.concat(requiredPowershellPkg);
let allPackages = [requiredJupyterPkg, requiredPowershellPkg];
this._requiredKernelPackages.set(constants.allKernelsName, allPackages);

this._requiredPackagesSet = new Set<string>();
@@ -3,61 +3,16 @@
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/

import { nb, ServerInfo, connection, IConnectionProfile, credentials } from 'azdata';
import { nb, IConnectionProfile } from 'azdata';
import { Session, Kernel } from '@jupyterlab/services';
import * as fs from 'fs-extra';
import * as nls from 'vscode-nls';
import * as vscode from 'vscode';
import * as path from 'path';
import * as utils from '../common/utils';
const localize = nls.loadMessageBundle();

import { JupyterKernel } from './jupyterKernel';
import { Deferred } from '../common/promise';
import { JupyterServerInstallation } from './jupyterServerInstallation';
import * as bdc from 'bdc';
import { noBDCConnectionError, providerNotValidError } from '../common/localizedConstants';
import { SQL_PROVIDER, CONTROLLER_ENDPOINT, KNOX_ENDPOINT_GATEWAY, KNOX_ENDPOINT_SERVER, KNOX_ENDPOINT_PORT } from '../common/constants';
import CodeAdapter from '../prompts/adapter';
import { IQuestion, QuestionTypes } from '../prompts/question';
import { ExtensionContextHelper } from '../common/extensionContextHelper';
import Logger from '../common/logger';

const configBase = {
'kernel_python_credentials': {
'url': ''
},
'kernel_scala_credentials': {
'url': ''
},
'kernel_r_credentials': {
'url': ''
},
'livy_session_startup_timeout_seconds': 100,
'logging_config': {
'version': 1,
'formatters': {
'magicsFormatter': {
'format': '%(asctime)s\t%(levelname)s\t%(message)s',
'datefmt': ''
}
},
'handlers': {
'magicsHandler': {
'class': 'hdijupyterutils.filehandler.MagicsFileHandler',
'formatter': 'magicsFormatter',
'home_path': ''
}
},
'loggers': {
'magicsLogger': {
'handlers': ['magicsHandler'],
'level': 'DEBUG',
'propagate': 0
}
}
}
};

export class JupyterSessionManager implements nb.SessionManager, vscode.Disposable {
private _ready: Deferred<void>;
@@ -114,10 +69,6 @@ export class JupyterSessionManager implements nb.SessionManager, vscode.Disposab
return kernel;
});

// For now, need to remove PySpark3, as it's been deprecated
// May want to have a formalized deprecated kernels mechanism in the future
kernels = kernels.filter(k => k.name !== 'pyspark3kernel');

let allKernels: nb.IAllKernels = {
defaultKernel: specs.default,
kernels: kernels
@@ -263,117 +214,12 @@ export class JupyterSession implements nb.ISession {
});
}

public async configureKernel(): Promise<void> {
let sparkmagicConfDir = path.join(utils.getUserHome(), '.sparkmagic');
await utils.ensureDir(sparkmagicConfDir);

// Default to localhost in config file.
let creds: ICredentials = {
'url': 'http://localhost:8088'
};

let config: ISparkMagicConfig = Object.assign({}, configBase);
this.updateConfig(config, creds, sparkmagicConfDir);

let configFilePath = path.join(sparkmagicConfDir, 'config.json');
await fs.writeFile(configFilePath, JSON.stringify(config));
configureKernel(kernelInfo: nb.IKernelSpec): Thenable<void> {
return Promise.resolve();
}

public async configureConnection(connectionProfile: IConnectionProfile): Promise<void> {
if (connectionProfile && connectionProfile.providerName && utils.isSparkKernel(this.sessionImpl.kernel.name)) {
Logger.log(`Configuring Spark connection`);
// %_do_not_call_change_endpoint is a SparkMagic command that lets users change endpoint options,
// such as user/profile/host name/auth type

let knoxUsername = connectionProfile.userName || 'root';
let knoxPassword: string = '';

//Update server info with bigdata endpoint - Unified Connection
if (connectionProfile.providerName === SQL_PROVIDER) {
const serverInfo: ServerInfo = await connection.getServerInfo(connectionProfile.id);
if (!serverInfo?.options['isBigDataCluster']) {
throw new Error(noBDCConnectionError);
}
const endpoints = utils.getClusterEndpoints(serverInfo);
const controllerEndpoint = endpoints.find(ep => ep.name.toLowerCase() === CONTROLLER_ENDPOINT);

Logger.log(`Found controller endpoint ${controllerEndpoint.endpoint}`);
// root is the default username for pre-CU5 instances, so while we prefer to use the connection username
// as a default now we'll still fall back to root if it's empty for some reason. (but the calls below should
// get the actual correct value regardless)
let clusterController: bdc.IClusterController | undefined = undefined;
if (!utils.isIntegratedAuth(connectionProfile)) {
// See if the controller creds have been saved already, otherwise fall back to using
// SQL creds as a default
const credentialProvider = await credentials.getProvider('notebook.bdc.password');
const usernameKey = `notebook.bdc.username::${connectionProfile.id}`;
const savedUsername = ExtensionContextHelper.extensionContext.globalState.get<string>(usernameKey) || connectionProfile.userName;
const connectionCreds = await connection.getCredentials(connectionProfile.id);
const savedPassword = (await credentialProvider.readCredential(connectionProfile.id)).password || connectionCreds.password;
clusterController = await getClusterController(controllerEndpoint.endpoint, 'basic', savedUsername, savedPassword);
// Now that we know that the username/password are valid store them for use later on with the same connection
await credentialProvider.saveCredential(connectionProfile.id, clusterController.password);
await ExtensionContextHelper.extensionContext.globalState.update(usernameKey, clusterController.username);
knoxPassword = clusterController.password;
try {
knoxUsername = await clusterController.getKnoxUsername(clusterController.username);
} catch (err) {
knoxUsername = clusterController.username;
console.log(`Unexpected error getting Knox username for Spark kernel: ${err}`);
}
} else {
clusterController = await getClusterController(controllerEndpoint.endpoint, 'integrated');

}

let gatewayEndpoint: bdc.IEndpointModel = endpoints?.find(ep => ep.name.toLowerCase() === KNOX_ENDPOINT_GATEWAY);
if (!gatewayEndpoint) {
Logger.log(`Querying controller for knox gateway endpoint`);
// User doesn't have permission to see the gateway endpoint from the DMV so we need to query the controller instead
const allEndpoints = (await clusterController.getEndPoints()).endPoints;
gatewayEndpoint = allEndpoints?.find(ep => ep.name.toLowerCase() === KNOX_ENDPOINT_GATEWAY);
if (!gatewayEndpoint) {
throw new Error(localize('notebook.couldNotFindKnoxGateway', "Could not find Knox gateway endpoint"));
}
}
Logger.log(`Got Knox gateway ${gatewayEndpoint.endpoint}`);
let gatewayHostAndPort = utils.getHostAndPortFromEndpoint(gatewayEndpoint.endpoint);
Logger.log(`Parsed knox host and port ${JSON.stringify(gatewayHostAndPort)}`);
connectionProfile.options[KNOX_ENDPOINT_SERVER] = gatewayHostAndPort.host;
connectionProfile.options[KNOX_ENDPOINT_PORT] = gatewayHostAndPort.port;

}
else {
throw new Error(providerNotValidError);
}
utils.setHostAndPort(':', connectionProfile);
utils.setHostAndPort(',', connectionProfile);

let server = vscode.Uri.parse(utils.getLivyUrl(connectionProfile.options[KNOX_ENDPOINT_SERVER], connectionProfile.options[KNOX_ENDPOINT_PORT])).toString();
let doNotCallChangeEndpointParams: string;
let doNotCallChangeEndpointLogMessage: string;
if (utils.isIntegratedAuth(connectionProfile)) {
doNotCallChangeEndpointParams = `%_do_not_call_change_endpoint --server=${server} --auth=Kerberos`;
doNotCallChangeEndpointLogMessage = doNotCallChangeEndpointParams;
} else {
doNotCallChangeEndpointParams = `%_do_not_call_change_endpoint --username=${knoxUsername} --server=${server} --auth=Basic_Access`;
doNotCallChangeEndpointLogMessage = doNotCallChangeEndpointParams + ` --password=${'*'.repeat(knoxPassword.length)}`;
doNotCallChangeEndpointParams += ` --password=${knoxPassword}`;
}
Logger.log(`Change endpoint command '${doNotCallChangeEndpointLogMessage}'`);
let future = this.sessionImpl.kernel.requestExecute({
code: doNotCallChangeEndpointParams
}, true);
await future.done;
}
}

private updateConfig(config: ISparkMagicConfig, creds: ICredentials, homePath: string): void {
config.kernel_python_credentials = creds;
config.kernel_scala_credentials = creds;
config.kernel_r_credentials = creds;
config.logging_config.handlers.magicsHandler.home_path = homePath;
config.ignore_ssl_errors = utils.getIgnoreSslVerificationConfigSetting();
configureConnection(connection: IConnectionProfile): Thenable<void> {
return Promise.resolve();
}

private async setEnvironmentVars(skip: boolean = false): Promise<void> {
@@ -404,76 +250,3 @@ export class JupyterSession implements nb.ISession {
this._messagesComplete.resolve();
}
}

async function getClusterController(controllerEndpoint: string, authType: bdc.AuthType, username?: string, password?: string): Promise<bdc.IClusterController | undefined> {
Logger.log(`Getting cluster controller ${controllerEndpoint}. Auth=${authType} Username=${username} password=${'*'.repeat(password?.length ?? 0)}`);
const bdcApi = <bdc.IExtension>await vscode.extensions.getExtension(bdc.constants.extensionName).activate();
const controller = bdcApi.getClusterController(
controllerEndpoint,
authType,
username,
password);
try {
Logger.log(`Fetching endpoints for ${controllerEndpoint} to test connection...`);
// We just want to test the connection - so using getEndpoints since that is available to all users (not just admin)
await controller.getEndPoints();
return controller;
} catch (err) {
// Initial username/password failed so prompt user for username password until either user
// cancels out or we successfully connect
console.log(`Error connecting to cluster controller: ${err}`);
let errorMessage = '';
const prompter = new CodeAdapter();
while (true) {
const newUsername = await prompter.promptSingle<string>(<IQuestion>{
type: QuestionTypes.input,
name: 'inputPrompt',
message: localize('promptBDCUsername', "{0}Please provide the username to connect to the BDC Controller:", errorMessage),
default: username
});
if (!username) {
console.log(`User cancelled out of username prompt for BDC Controller`);
break;
}
const newPassword = await prompter.promptSingle<string>(<IQuestion>{
type: QuestionTypes.password,
name: 'passwordPrompt',
message: localize('promptBDCPassword', "Please provide the password to connect to the BDC Controller"),
default: ''
});
if (!password) {
console.log(`User cancelled out of password prompt for BDC Controller`);
break;
}
const controller = bdcApi.getClusterController(controllerEndpoint, authType, newUsername, newPassword);
try {
// We just want to test the connection - so using getEndpoints since that is available to all users (not just admin)
await controller.getEndPoints();
return controller;
} catch (err) {
errorMessage = localize('bdcConnectError', "Error: {0}. ", err.message ?? err);
}
}
throw new Error(localize('clusterControllerConnectionRequired', "A connection to the cluster controller is required to run Spark jobs"));
}
}

interface ICredentials {
'url': string;
}

interface ISparkMagicConfig {
kernel_python_credentials: ICredentials;
kernel_scala_credentials: ICredentials;
kernel_r_credentials: ICredentials;
ignore_ssl_errors?: boolean;
logging_config: {
handlers: {
magicsHandler: {
home_path: string;
class?: string;
formatter?: string
}
}
};
}
@@ -203,39 +203,4 @@ describe('notebookUtils Tests', function (): void {
should(notebookEditor.document.cells[0].contents.cell_type).equal(CellTypes.Markdown);
});
});

describe('analyzeNotebook', function () {
it('creates cell when oeContext exists', async function (): Promise<void> {
await azdata.nb.showNotebookDocument(vscode.Uri.from({ scheme: 'untitled' }));
const notebookEditor = azdata.nb.activeNotebookEditor;
sinon.replaceGetter(azdata.nb, 'activeNotebookEditor', () => notebookEditor);
sinon.stub(azdata.nb, 'showNotebookDocument').returns(Promise.resolve(notebookEditor));
const oeContext: azdata.ObjectExplorerContext = {
connectionProfile: undefined,
isConnectionNode: true,
nodeInfo: {
nodePath: 'path/HDFS/path2',
errorMessage: undefined,
isLeaf: false,
label: 'fakeLabel',
metadata: undefined,
nodeStatus: undefined,
nodeSubType: undefined,
nodeType: undefined
}
};
await notebookUtils.analyzeNotebook(oeContext);
should(notebookEditor.document.cells.length).equal(1, 'One cell should exist');
should(notebookEditor.document.cells[0].contents.cell_type).equal(CellTypes.Code, 'Cell was created with incorrect type');
});

it('does not create new cell when oeContext does not exist', async function (): Promise<void> {
await azdata.nb.showNotebookDocument(vscode.Uri.from({ scheme: 'untitled' }));
const notebookEditor = azdata.nb.activeNotebookEditor;
sinon.replaceGetter(azdata.nb, 'activeNotebookEditor', () => notebookEditor);
sinon.stub(azdata.nb, 'showNotebookDocument').returns(Promise.resolve(notebookEditor));
await notebookUtils.analyzeNotebook();
should(notebookEditor.document.cells.length).equal(0, 'No cells should exist');
});
});
});
@@ -10,23 +10,9 @@ import * as os from 'os';
import * as path from 'path';
import * as utils from '../../common/utils';
import { MockOutputChannel } from './stubs';
import * as azdata from 'azdata';
import { sleep } from './testUtils';

describe('Utils Tests', function () {

it('getKnoxUrl', () => {
const host = '127.0.0.1';
const port = '8080';
should(utils.getKnoxUrl(host, port)).endWith('/gateway');
});

it('getLivyUrl', () => {
const host = '127.0.0.1';
const port = '8080';
should(utils.getLivyUrl(host, port)).endWith('/gateway/default/livy/v1/');
});

it('ensureDir', async () => {
const dirPath = path.join(os.tmpdir(), uuid.v4());
await should(fs.stat(dirPath)).be.rejected();
@@ -273,63 +259,6 @@ describe('Utils Tests', function () {
});
});

describe('getClusterEndpoints', () => {
const baseServerInfo: azdata.ServerInfo = {
serverMajorVersion: -1,
serverMinorVersion: -1,
serverReleaseVersion: -1,
engineEditionId: -1,
serverVersion: '',
serverLevel: '',
serverEdition: '',
isCloud: false,
azureVersion: -1,
osVersion: '',
options: {},
cpuCount: -1,
physicalMemoryInMb: -1
};
it('empty endpoints does not error', () => {
const serverInfo = Object.assign({}, baseServerInfo);
serverInfo.options['clusterEndpoints'] = [];
should(utils.getClusterEndpoints(serverInfo).length).equal(0);
});

it('endpoints without endpoint field are created successfully', () => {
const serverInfo = Object.assign({}, baseServerInfo);
const ipAddress = 'localhost';
const port = '123';
serverInfo.options['clusterEndpoints'] = [{ ipAddress: ipAddress, port: port }];
const endpoints = utils.getClusterEndpoints(serverInfo);
should(endpoints.length).equal(1);
should(endpoints[0].endpoint).equal('https://localhost:123');
});

it('endpoints with endpoint field are created successfully', () => {
const endpoint = 'https://myActualEndpoint:8080';
const serverInfo = Object.assign({}, baseServerInfo);
serverInfo.options['clusterEndpoints'] = [{ endpoint: endpoint, ipAddress: 'localhost', port: '123' }];
const endpoints = utils.getClusterEndpoints(serverInfo);
should(endpoints.length).equal(1);
should(endpoints[0].endpoint).equal(endpoint);
});
});

describe('getHostAndPortFromEndpoint', () => {
it('valid endpoint is parsed correctly', () => {
const host = 'localhost';
const port = '123';
const hostAndIp = utils.getHostAndPortFromEndpoint(`https://${host}:${port}`);
should(hostAndIp).deepEqual({ host: host, port: port });
});

it('invalid endpoint is returned as is', () => {
const host = 'localhost';
const hostAndIp = utils.getHostAndPortFromEndpoint(`https://${host}`);
should(hostAndIp).deepEqual({ host: host, port: undefined });
});
});

describe('exists', () => {
it('runs as expected', async () => {
const filename = path.join(os.tmpdir(), `NotebookUtilsTest_${uuid.v4()}`);
@@ -345,12 +274,6 @@ describe('Utils Tests', function () {
});
});

describe('getIgnoreSslVerificationConfigSetting', () => {
it('runs as expected', async () => {
should(utils.getIgnoreSslVerificationConfigSetting()).be.true();
});
});

describe('debounce', () => {
class DebounceTest {
public fnCalled = 0;
@@ -5,42 +5,18 @@

import * as should from 'should';
import * as TypeMoq from 'typemoq';
import * as utils from '../../common/utils';
import * as sinon from 'sinon';
import * as os from 'os';
import * as fs from 'fs';
import * as path from 'path';
import * as bdc from 'bdc';
import * as vscode from 'vscode';
import { nb, IConnectionProfile, connection, ConnectionOptionSpecialType, ServerInfo } from 'azdata';
import { nb } from 'azdata';
import { SessionManager, Session, Kernel } from '@jupyterlab/services';
import 'mocha';
import { JupyterSessionManager, JupyterSession } from '../../jupyter/jupyterSessionManager';
import { Deferred } from '../../common/promise';
import { SessionStub, KernelStub, FutureStub } from '../common';
import { noBDCConnectionError, providerNotValidError } from '../../common/localizedConstants';
import { ExtensionContextHelper } from '../../common/extensionContextHelper';
import { AppContext } from '../../common/appContext';
import uuid = require('uuid');

class TestClusterController implements bdc.IClusterController {
getClusterConfig(): Promise<any> {
return Promise.resolve({});
}
getKnoxUsername(clusterUsername: string): Promise<string> {
return Promise.resolve('knoxUsername');
}
getEndPoints(promptConnect?: boolean): Promise<bdc.IEndPointsResponse> {
return Promise.resolve( {
response: undefined,
endPoints: []
});
}
username: string;
password: string;
}

before(async function(): Promise<void> {
before(async function (): Promise<void> {
// We have to reset the extension context here since the test runner unloads the files before running the tests
// so the static state is lost
const api = await vscode.extensions.getExtension('Microsoft.notebook').activate();
@@ -207,181 +183,6 @@ describe('Jupyter Session', function (): void {
should(options.name).equal('python');
});

it('should write configuration to config.json file', async function (): Promise<void> {
let tempDir = os.tmpdir();
let configPath = path.join(tempDir, '.sparkmagic', 'config.json');
const expectedResult = {
'kernel_python_credentials': {
'url': 'http://localhost:8088'
},
'kernel_scala_credentials': {
'url': 'http://localhost:8088'
},
'kernel_r_credentials': {
'url': 'http://localhost:8088'
},
'livy_session_startup_timeout_seconds': 100,
'logging_config': {
'version': 1,
'formatters': {
'magicsFormatter': {
'format': '%(asctime)s\t%(levelname)s\t%(message)s',
'datefmt': ''
}
},
'handlers': {
'magicsHandler': {
'class': 'hdijupyterutils.filehandler.MagicsFileHandler',
'formatter': 'magicsFormatter',
'home_path': ''
}
},
'loggers': {
'magicsLogger': {
'handlers': ['magicsHandler'],
'level': 'DEBUG',
'propagate': 0
}
}
},
'ignore_ssl_errors': true,
};
expectedResult.logging_config.handlers.magicsHandler.home_path = path.join(tempDir, '.sparkmagic');
sinon.stub(utils, 'getUserHome').returns(tempDir);
await session.configureKernel();
let result = await fs.promises.readFile(configPath, 'utf-8');
should(JSON.parse(result) === expectedResult);
});

it('should configure connection correctly for MSSQL and SqlLogin auth type', async function (): Promise<void> {
const isLinux = os.platform() === 'linux';
if (!isLinux) {
let connectionProfile: IConnectionProfile = {
authenticationType: '',
connectionName: '',
databaseName: '',
id: 'id',
providerName: 'MSSQL',
options: {
authenticationType: connection.AuthenticationType.SqlLogin,
},
password: '',
savePassword: false,
saveProfile: false,
serverName: '',
userName: ''
};
let futureMock = TypeMoq.Mock.ofType(FutureStub);
let kernelMock = TypeMoq.Mock.ofType(KernelStub);
kernelMock.setup(k => k.name).returns(() => 'spark');
kernelMock.setup(m => m.requestExecute(TypeMoq.It.isAny(), TypeMoq.It.isAny())).returns(() => futureMock.object);
mockJupyterSession.setup(s => s.kernel).returns(() => kernelMock.object);
let creds = { [ConnectionOptionSpecialType.password]: 'password' };
sinon.stub(connection, 'getCredentials').returns(Promise.resolve(creds));

// Set up connection info to big data cluster
const mockServerInfo: ServerInfo = {
serverMajorVersion: 0,
serverMinorVersion: 0,
serverReleaseVersion: 0,
engineEditionId: 0,
serverVersion: '',
serverLevel: '',
serverEdition: '',
isCloud: false,
azureVersion: 0,
osVersion: '',
cpuCount: 0,
physicalMemoryInMb: -1,
options: {
isBigDataCluster: true
}
};
const mockGatewayEndpoint: bdc.IEndpointModel = {
name: 'gateway',
description: '',
endpoint: '',
protocol: '',
};
const mockControllerEndpoint: bdc.IEndpointModel = {
name: 'controller',
description: '',
endpoint: '',
protocol: '',
};
const mockHostAndIp: utils.HostAndIp = {
host: '127.0.0.1',
port: '1337'
};
const mockClustercontroller = new TestClusterController();
mockClustercontroller.username = 'admin';
mockClustercontroller.password = uuid.v4();
let mockBdcExtension: TypeMoq.IMock<bdc.IExtension> = TypeMoq.Mock.ofType<bdc.IExtension>();
let mockExtension: TypeMoq.IMock<vscode.Extension<any>> = TypeMoq.Mock.ofType<vscode.Extension<any>>();
mockBdcExtension.setup(m => m.getClusterController(TypeMoq.It.isAny(), TypeMoq.It.isAny(), TypeMoq.It.isAny(), TypeMoq.It.isAny())).returns(() => mockClustercontroller);
mockBdcExtension.setup((m: any) => m.then).returns(() => mockBdcExtension);
mockExtension.setup(m => m.activate()).returns(() => Promise.resolve(mockBdcExtension.object));
mockExtension.setup((m: any) => m.then).returns(() => mockExtension);
sinon.stub(vscode.extensions, 'getExtension').returns(mockExtension.object);
sinon.stub(connection, 'getServerInfo').returns(Promise.resolve(mockServerInfo));
sinon.stub(utils, 'getClusterEndpoints').returns([mockGatewayEndpoint, mockControllerEndpoint]);
sinon.stub(utils, 'getHostAndPortFromEndpoint').returns(mockHostAndIp);
await session.configureConnection(connectionProfile);
should(connectionProfile.options['host']).equal(mockHostAndIp.host);
should(connectionProfile.options['knoxport']).equal(mockHostAndIp.port);
}
});

it('configure connection should throw error if there is no connection to big data cluster', async function (): Promise<void> {
let connectionProfile: IConnectionProfile = {
authenticationType: '',
connectionName: '',
databaseName: '',
id: 'id',
providerName: 'MSSQL',
options: {
authenticationType: connection.AuthenticationType.SqlLogin,
},
password: '',
savePassword: false,
saveProfile: false,
serverName: '',
userName: ''
};
let futureMock = TypeMoq.Mock.ofType(FutureStub);
let kernelMock = TypeMoq.Mock.ofType(KernelStub);
kernelMock.setup(k => k.name).returns(() => 'spark');
kernelMock.setup(m => m.requestExecute(TypeMoq.It.isAny(), TypeMoq.It.isAny())).returns(() => futureMock.object);
mockJupyterSession.setup(s => s.kernel).returns(() => kernelMock.object);
let credentials = { [ConnectionOptionSpecialType.password]: 'password' };
sinon.stub(connection, 'getCredentials').returns(Promise.resolve(credentials));
await should(session.configureConnection(connectionProfile)).be.rejectedWith(noBDCConnectionError);
});

it('configure connection should throw error if provider is not MSSQL for spark kernel', async function (): Promise<void> {
let connectionProfile: IConnectionProfile = {
authenticationType: '',
connectionName: '',
databaseName: '',
id: 'id',
providerName: 'provider',
options: {
authenticationType: connection.AuthenticationType.SqlLogin,
},
password: '',
savePassword: false,
saveProfile: false,
serverName: '',
userName: ''
};
let futureMock = TypeMoq.Mock.ofType(FutureStub);
let kernelMock = TypeMoq.Mock.ofType(KernelStub);
kernelMock.setup(k => k.name).returns(() => 'spark');
kernelMock.setup(m => m.requestExecute(TypeMoq.It.isAny(), TypeMoq.It.isAny())).returns(() => futureMock.object);
mockJupyterSession.setup(s => s.kernel).returns(() => kernelMock.object);
await should(session.configureConnection(connectionProfile)).be.rejectedWith(providerNotValidError);
});

it('should set environment variables correctly', function (): void {
let futureMock = TypeMoq.Mock.ofType(FutureStub);
let kernelMock = TypeMoq.Mock.ofType(KernelStub);
@@ -11,8 +11,8 @@ import * as uuid from 'uuid';
import * as fs from 'fs-extra';
import * as request from 'request';
import * as utils from '../../common/utils';
import { requiredJupyterPkg, JupyterServerInstallation, requiredPowershellPkg, PythonInstallSettings, PythonPkgDetails, requiredSparkPackages } from '../../jupyter/jupyterServerInstallation';
import { powershellDisplayName, pysparkDisplayName, python3DisplayName, sparkRDisplayName, sparkScalaDisplayName, winPlatform } from '../../common/constants';
import { requiredJupyterPkg, JupyterServerInstallation, requiredPowershellPkg, PythonInstallSettings, PythonPkgDetails } from '../../jupyter/jupyterServerInstallation';
import { powershellDisplayName, python3DisplayName, winPlatform } from '../../common/constants';

describe('Jupyter Server Installation', function () {
let outputChannelStub: TypeMoq.IMock<vscode.OutputChannel>;
@@ -234,17 +234,6 @@ describe('Jupyter Server Installation', function () {
should(packages).be.deepEqual([requiredJupyterPkg, requiredPowershellPkg]);
});

it('Get required packages test - Spark kernels', async function () {
let packages = installation.getRequiredPackagesForKernel(pysparkDisplayName);
should(packages).be.deepEqual(requiredSparkPackages, 'Unexpected packages for PySpark kernel.');

packages = installation.getRequiredPackagesForKernel(sparkScalaDisplayName);
should(packages).be.deepEqual(requiredSparkPackages, 'Unexpected packages for Spark Scala kernel.');

packages = installation.getRequiredPackagesForKernel(sparkRDisplayName);
should(packages).be.deepEqual(requiredSparkPackages, 'Unexpected packages for Spark R kernel.');
});

it('Install python test - Run install while Python is already running', async function () {
// Should reject overwriting an existing python install if running on Windows and python is currently running.
if (process.platform === winPlatform) {
extensions/notebook/src/typings/refs.d.ts (vendored)
@@ -6,5 +6,4 @@
/// <reference path='../../../../src/sql/azdata.d.ts'/>
/// <reference path='../../../../src/sql/azdata.proposed.d.ts'/>
/// <reference path='../../../../src/vscode-dts/vscode.d.ts'/>
/// <reference path='../../../big-data-cluster/src/bdc.d.ts'/>
/// <reference types='@types/node'/>