Remove all Big Data Cluster features (#21369)
@@ -36,7 +36,6 @@ export const jupyterNewNotebookTask = 'jupyter.task.newNotebook';
export const jupyterOpenNotebookTask = 'jupyter.task.openNotebook';
export const jupyterNewNotebookCommand = 'jupyter.cmd.newNotebook';
export const jupyterReinstallDependenciesCommand = 'jupyter.reinstallDependencies';
export const jupyterAnalyzeCommand = 'jupyter.cmd.analyzeNotebook';
export const jupyterManagePackages = 'jupyter.cmd.managePackages';
export const jupyterConfigurePython = 'jupyter.cmd.configurePython';
export const localhostName = 'localhost';
@@ -45,9 +44,6 @@ export const PackageNotFoundError = localize('managePackages.packageNotFound', "

export const ipykernelDisplayName = 'Python 3 (ipykernel)';
export const python3DisplayName = 'Python 3';
export const pysparkDisplayName = 'PySpark';
export const sparkScalaDisplayName = 'Spark | Scala';
export const sparkRDisplayName = 'Spark | R';
export const powershellDisplayName = 'PowerShell';
export const allKernelsName = 'All Kernels';

@@ -84,15 +80,6 @@ export const pythonWindowsInstallUrl = 'https://go.microsoft.com/fwlink/?linkid=
export const pythonMacInstallUrl = 'https://go.microsoft.com/fwlink/?linkid=2163337';
export const pythonLinuxInstallUrl = 'https://go.microsoft.com/fwlink/?linkid=2163336';

export const KNOX_ENDPOINT_SERVER = 'host';
export const KNOX_ENDPOINT_PORT = 'knoxport';
export const KNOX_ENDPOINT_GATEWAY = 'gateway';
export const CONTROLLER_ENDPOINT = 'controller';
export const SQL_PROVIDER = 'MSSQL';
export const USER = 'user';
export const AUTHTYPE = 'authenticationType';
export const INTEGRATED_AUTH = 'integrated';

// The version of the notebook file format that we support
export const NBFORMAT = 4;
export const NBFORMAT_MINOR = 2;

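For reference, NBFORMAT and NBFORMAT_MINOR pin the supported Jupyter file format version; a minimal notebook envelope at that version would look like the following sketch (top-level field names per the Jupyter nbformat spec, not code from this repo):

const emptyNotebook = {
	nbformat: NBFORMAT,             // 4
	nbformat_minor: NBFORMAT_MINOR, // 2
	metadata: {},
	cells: []
};
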
@@ -12,8 +12,6 @@ export const msgNo = localize('msgNo', "No");

// Jupyter Constants ///////////////////////////////////////////////////////
export const msgSampleCodeDataFrame = localize('msgSampleCodeDataFrame', "This sample code loads the file into a data frame and shows the first 10 results.");
export const noBDCConnectionError = localize('noBDCConnectionError', "Spark kernels require a connection to a SQL Server Big Data Cluster master instance.");
export const providerNotValidError = localize('providerNotValidError', "Non-MSSQL providers are not supported for spark kernels.");

// Book view-let constants
export const allFiles = localize('allFiles', "All Files");

@@ -4,15 +4,12 @@
 *--------------------------------------------------------------------------------------------*/

import * as azdata from 'azdata';
import * as os from 'os';
import * as vscode from 'vscode';
import * as nls from 'vscode-nls';
import { getErrorMessage } from '../common/utils';

const localize = nls.loadMessageBundle();

const JUPYTER_NOTEBOOK_PROVIDER = 'jupyter';
const msgSampleCodeDataFrame = localize('msgSampleCodeDataFrame', "This sample code loads the file into a data frame and shows the first 10 results.");
const noNotebookVisible = localize('noNotebookVisible', "No notebook editor is active");

export class NotebookUtils {
@@ -96,31 +93,4 @@ export class NotebookUtils {
	public async toggleMarkdownStyle(style: string, showUI?: boolean, value?: string): Promise<void> {
		return vscode.commands.executeCommand(style, showUI, value);
	}

	public async analyzeNotebook(oeContext?: azdata.ObjectExplorerContext): Promise<void> {
		let editor = await azdata.nb.showNotebookDocument(vscode.Uri.from({ scheme: 'untitled' }), {
			connectionProfile: oeContext ? oeContext.connectionProfile : undefined,
			providerId: JUPYTER_NOTEBOOK_PROVIDER,
			preview: false,
			defaultKernel: {
				name: 'pysparkkernel',
				display_name: 'PySpark',
				language: 'python'
			}
		});
		if (oeContext && oeContext.nodeInfo && oeContext.nodeInfo.nodePath) {
			// Get the file path after '/HDFS'
			let hdfsPath: string = oeContext.nodeInfo.nodePath.substring(oeContext.nodeInfo.nodePath.indexOf('/HDFS') + '/HDFS'.length);
			if (hdfsPath.length > 0) {
				let analyzeCommand = '#' + msgSampleCodeDataFrame + os.EOL + 'df = (spark.read.option("inferSchema", "true")'
					+ os.EOL + '.option("header", "true")' + os.EOL + '.csv("{0}"))' + os.EOL + 'df.show(10)';
				await editor.edit(editBuilder => {
					editBuilder.insertCell({
						cell_type: 'code',
						source: analyzeCommand.replace('{0}', hdfsPath)
					}, 0);
				});
			}
		}
	}
}

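For a hypothetical node path such as '/HDFS/tmp/sales.csv', substituting into analyzeCommand above means the inserted cell's source would read:

# This sample code loads the file into a data frame and shows the first 10 results.
df = (spark.read.option("inferSchema", "true")
.option("header", "true")
.csv("/tmp/sales.csv"))
df.show(10)
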
@@ -3,28 +3,19 @@
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as bdc from 'bdc';
import * as childProcess from 'child_process';
import * as fs from 'fs-extra';
import * as nls from 'vscode-nls';
import * as vscode from 'vscode';
import * as azdata from 'azdata';
import * as crypto from 'crypto';
-import { notebookConfigKey, pinnedBooksConfigKey, AUTHTYPE, INTEGRATED_AUTH, KNOX_ENDPOINT_PORT, KNOX_ENDPOINT_SERVER } from './constants';
+import { notebookConfigKey, pinnedBooksConfigKey } from './constants';
import { IPrompter, IQuestion, QuestionTypes } from '../prompts/question';
import { BookTreeItemFormat } from '../book/bookTreeItem';
import * as loc from './localizedConstants';

const localize = nls.loadMessageBundle();

export function getKnoxUrl(host: string, port: string): string {
	return `https://${host}:${port}/gateway`;
}

export function getLivyUrl(serverName: string, port: string): string {
	return getKnoxUrl(serverName, port) + '/default/livy/v1/';
}

export async function ensureDir(dirPath: string, outputChannel?: vscode.OutputChannel): Promise<void> {
	outputChannel?.appendLine(localize('ensureDirOutputMsg', "... Ensuring {0} exists", dirPath));
	await fs.ensureDir(dirPath);
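
As a quick illustration with a hypothetical host and port, the two URL helpers compose as follows:

getKnoxUrl('mycluster', '30443');   // 'https://mycluster:30443/gateway'
getLivyUrl('mycluster', '30443');   // 'https://mycluster:30443/gateway/default/livy/v1/'
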
@@ -109,15 +100,6 @@ export enum Platform {
	Others
}

interface RawEndpoint {
	serviceName: string;
	description?: string;
	endpoint?: string;
	protocol?: string;
	ipAddress?: string;
	port?: number;
}

export function getOSPlatformId(): string {
	let platformId = undefined;
	switch (process.platform) {
@@ -268,61 +250,6 @@ export function isPackageSupported(pythonVersion: string, packageVersionConstrai
	return supportedVersionFound;
}

export function getClusterEndpoints(serverInfo: azdata.ServerInfo): bdc.IEndpointModel[] {
	let endpoints: RawEndpoint[] = serverInfo.options['clusterEndpoints'];
	if (!endpoints || endpoints.length === 0) { return []; }

	return endpoints.map(e => {
		// If endpoint is missing, we're on CTP bits. All endpoints from the CTP serverInfo should be treated as HTTPS
		let endpoint = e.endpoint ? e.endpoint : `https://${e.ipAddress}:${e.port}`;
		let updatedEndpoint: bdc.IEndpointModel = {
			name: e.serviceName,
			description: e.description,
			endpoint: endpoint,
			protocol: e.protocol
		};
		return updatedEndpoint;
	});
}

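A sketch with hypothetical values: a CTP-style entry that lacks an explicit endpoint falls back to https://<ipAddress>:<port>.

// serverInfo.options['clusterEndpoints'] entry (hypothetical):
//   { serviceName: 'gateway', ipAddress: '10.0.0.4', port: 30443 }
// getClusterEndpoints maps it to:
//   { name: 'gateway', description: undefined, endpoint: 'https://10.0.0.4:30443', protocol: undefined }
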
export type HostAndIp = { host: string, port: string };

export function getHostAndPortFromEndpoint(endpoint: string): HostAndIp {
	let authority = vscode.Uri.parse(endpoint).authority;
	let hostAndPortRegex = /^(.*)([,:](\d+))/g;
	let match = hostAndPortRegex.exec(authority);
	if (match) {
		return {
			host: match[1],
			port: match[3]
		};
	}
	return {
		host: authority,
		port: undefined
	};
}

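With hypothetical endpoints, the regex splits the URI authority into host and port, leaving port undefined when none is present:

getHostAndPortFromEndpoint('https://mycluster:30443'); // { host: 'mycluster', port: '30443' }
getHostAndPortFromEndpoint('https://mycluster');       // { host: 'mycluster', port: undefined }
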
export function isIntegratedAuth(connection: azdata.IConnectionProfile): boolean {
	return connection.options[AUTHTYPE] && connection.options[AUTHTYPE].toLowerCase() === INTEGRATED_AUTH.toLowerCase();
}

export function isSparkKernel(kernelName: string): boolean {
	return kernelName && kernelName.toLowerCase().indexOf('spark') > -1;
}

export function setHostAndPort(delimiter: string, connection: azdata.IConnectionProfile): void {
	let originalHost = connection.options[KNOX_ENDPOINT_SERVER];
	if (!originalHost) {
		return;
	}
	let index = originalHost.indexOf(delimiter);
	if (index > -1) {
		connection.options[KNOX_ENDPOINT_SERVER] = originalHost.slice(0, index);
		connection.options[KNOX_ENDPOINT_PORT] = originalHost.slice(index + 1);
	}
}

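Using the 'host' and 'knoxport' option keys defined in constants above, a hypothetical 'host,port' value is split in place:

// Before: connection.options['host'] === 'mycluster,30443'
setHostAndPort(',', connection);
// After:  connection.options['host'] === 'mycluster'
//         connection.options['knoxport'] === '30443'
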
export async function exists(path: string): Promise<boolean> {
	try {
		await fs.access(path);
@@ -332,22 +259,6 @@ export async function exists(path: string): Promise<boolean> {
	}
}

const bdcConfigSectionName = 'bigDataCluster';
const ignoreSslConfigName = 'ignoreSslVerification';

/**
 * Retrieves the current setting for whether to ignore SSL verification errors
 */
export function getIgnoreSslVerificationConfigSetting(): boolean {
	try {
		const config = vscode.workspace.getConfiguration(bdcConfigSectionName);
		return config.get<boolean>(ignoreSslConfigName, true);
	} catch (error) {
		console.error(`Unexpected error retrieving ${bdcConfigSectionName}.${ignoreSslConfigName} setting: `, error);
	}
	return true;
}
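
The two names above combine into the bigDataCluster.ignoreSslVerification user setting; a minimal usage sketch:

// In settings.json (illustrative): "bigDataCluster.ignoreSslVerification": false
const ignoreSsl = getIgnoreSslVerificationConfigSetting(); // false here; defaults to true when unset or on error
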
export function debounce(delay: number): Function {
	return decorate((fn, key) => {
		const timerKey = `$debounce$${key}`;