Mirror of https://github.com/ckaczor/azuredatastudio.git, synced 2026-02-16 10:58:30 -05:00
Spark features with dashboard are enabled (#3883)
* Spark features are enabled
* Fixed as per PR comments
* Minor change
* PR comments fixed
* Minor fix
* Changed constant name to avoid conflicts with the sqlops extension
* Renamed sqlContext to context
* Changed tab name to SQL Server Big Data Cluster
* Added isCluster to ContextProvider to control display of the big data cluster dashboard tab
* Ported New/Open Notebook code to the mssql extension and enabled it in the dashboard
* Fixed tslint errors
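The isCluster flag mentioned above is what gates the new dashboard tab. As a minimal sketch of that kind of gating: the `setContext` command is real VS Code API, but the key name `mssql:iscluster` and the helper below are assumptions for illustration only.

```typescript
import * as vscode from 'vscode';

// Hypothetical helper: a context key like this could back the dashboard tab's
// visibility clause (e.g. "mssql:iscluster == true"); the key name is assumed.
function setIsClusterContext(isCluster: boolean): void {
	vscode.commands.executeCommand('setContext', 'mssql:iscluster', isCluster);
}
```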
@@ -7,6 +7,10 @@
import * as vscode from 'vscode';
import * as sqlops from 'sqlops';
import * as path from 'path';
import * as os from 'os';
import * as nls from 'vscode-nls';
const localize = nls.loadMessageBundle();

import { SqlOpsDataClient, ClientOptions } from 'dataprotocol-client';
import { IConfig, ServerProvider, Events } from 'service-downloader';
import { ServerOptions, TransportKind } from 'vscode-languageclient';
@@ -20,15 +24,21 @@ import { Telemetry, LanguageClientErrorHandler } from './telemetry';
import { TelemetryFeature, AgentServicesFeature, DacFxServicesFeature } from './features';
import { AppContext } from './appContext';
import { ApiWrapper } from './apiWrapper';
-import { MssqlObjectExplorerNodeProvider } from './objectExplorerNodeProvider/objectExplorerNodeProvider';
import { UploadFilesCommand, MkDirCommand, SaveFileCommand, PreviewFileCommand, CopyPathCommand, DeleteFilesCommand } from './objectExplorerNodeProvider/hdfsCommands';
import { IPrompter } from './prompts/question';
import CodeAdapter from './prompts/adapter';
import { MssqlExtensionApi, MssqlObjectExplorerBrowser } from './api/mssqlapis';
import { OpenSparkJobSubmissionDialogCommand, OpenSparkJobSubmissionDialogFromFileCommand, OpenSparkJobSubmissionDialogTask } from './sparkFeature/dialog/dialogCommands';
import { OpenSparkYarnHistoryTask } from './sparkFeature/historyTask';
+import { MssqlObjectExplorerNodeProvider, mssqlOutputChannel } from './objectExplorerNodeProvider/objectExplorerNodeProvider';

const baseConfig = require('./config.json');
const outputChannel = vscode.window.createOutputChannel(Constants.serviceName);
const statusView = vscode.window.createStatusBarItem(vscode.StatusBarAlignment.Left);
const jupyterNotebookProviderId = 'jupyter';
const msgSampleCodeDataFrame = localize('msgSampleCodeDataFrame', 'This sample code loads the file into a data frame and shows the first 10 results.');

let untitledCounter = 0;

export async function activate(context: vscode.ExtensionContext): Promise<MssqlExtensionApi> {
	// lets make sure we support this platform first
@@ -96,8 +106,11 @@ export async function activate(context: vscode.ExtensionContext): Promise<MssqlE
		languageClient.start();
		credentialsStore.start();
		resourceProvider.start();

		let nodeProvider = new MssqlObjectExplorerNodeProvider(appContext);
		sqlops.dataprotocol.registerObjectExplorerNodeProvider(nodeProvider);
		activateSparkFeatures(appContext);
		activateNotebookTask(appContext);
	}, e => {
		Telemetry.sendTelemetryEvent('ServiceInitializingFailed');
		vscode.window.showErrorMessage('Failed to start Sql tools service');
@@ -120,7 +133,7 @@ export async function activate(context: vscode.ExtensionContext): Promise<MssqlE
	return {
		getNode: (context: sqlops.ObjectExplorerContext) => {
			let oeProvider = appContext.getService<MssqlObjectExplorerNodeProvider>(Constants.ObjectExplorerService);
-			return <any>oeProvider.findNodeForContext(context);
+			return <any>oeProvider.findSqlClusterNodeByContext(context);
		}
	};
}
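The one-line change above swaps the generic node lookup for `findSqlClusterNodeByContext`, so API consumers asking for the node behind a dashboard or Object Explorer context now get the SQL cluster node. A minimal consumer sketch, where the extension ID `Microsoft.mssql` and the accessor name `getMssqlObjectExplorerBrowser` are both assumptions:

```typescript
import * as vscode from 'vscode';
import * as sqlops from 'sqlops';
import { MssqlExtensionApi } from './api/mssqlapis';

// Hypothetical consumer: resolve the Object Explorer node behind a context.
async function getClusterNode(context: sqlops.ObjectExplorerContext): Promise<any> {
	// 'Microsoft.mssql' is an assumed extension ID for illustration.
	const ext = vscode.extensions.getExtension<MssqlExtensionApi>('Microsoft.mssql');
	if (!ext) {
		throw new Error('mssql extension not found');
	}
	const api: MssqlExtensionApi = ext.exports; // requires the extension to be activated
	return api.getMssqlObjectExplorerBrowser().getNode(context);
}
```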
@@ -128,6 +141,93 @@ export async function activate(context: vscode.ExtensionContext): Promise<MssqlE
	return api;
}

function activateSparkFeatures(appContext: AppContext): void {
	let extensionContext = appContext.extensionContext;
	let apiWrapper = appContext.apiWrapper;
	let outputChannel: vscode.OutputChannel = mssqlOutputChannel;
	extensionContext.subscriptions.push(new OpenSparkJobSubmissionDialogCommand(appContext, outputChannel));
	extensionContext.subscriptions.push(new OpenSparkJobSubmissionDialogFromFileCommand(appContext, outputChannel));
	apiWrapper.registerTaskHandler(Constants.mssqlClusterLivySubmitSparkJobTask, (profile: sqlops.IConnectionProfile) => {
		new OpenSparkJobSubmissionDialogTask(appContext, outputChannel).execute(profile);
	});
	apiWrapper.registerTaskHandler(Constants.mssqlClusterLivyOpenSparkHistory, (profile: sqlops.IConnectionProfile) => {
		new OpenSparkYarnHistoryTask(appContext).execute(profile, true);
	});
	apiWrapper.registerTaskHandler(Constants.mssqlClusterLivyOpenYarnHistory, (profile: sqlops.IConnectionProfile) => {
		new OpenSparkYarnHistoryTask(appContext).execute(profile, false);
	});
}

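The `ApiWrapper` used above presumably forwards to `sqlops.tasks.registerTask`, which is how dashboard task tiles invoke extension code. A minimal sketch of such a wrapper, assuming that forwarding is all it does:

```typescript
import * as sqlops from 'sqlops';

// Sketch of an ApiWrapper-style indirection layer (assumed behavior):
// wrapping the sqlops API keeps call sites mockable in unit tests.
export class ApiWrapper {
	public registerTaskHandler(taskId: string, handler: (profile: sqlops.IConnectionProfile) => void): void {
		// Wires the handler to the dashboard task tile registered under this ID.
		sqlops.tasks.registerTask(taskId, handler);
	}
}
```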
function activateNotebookTask(appContext: AppContext): void {
	let apiWrapper = appContext.apiWrapper;
	apiWrapper.registerTaskHandler(Constants.mssqlClusterNewNotebookTask, (profile: sqlops.IConnectionProfile) => {
		return saveProfileAndCreateNotebook(profile);
	});
	apiWrapper.registerTaskHandler(Constants.mssqlClusterOpenNotebookTask, (profile: sqlops.IConnectionProfile) => {
		return handleOpenNotebookTask(profile);
	});
}

function saveProfileAndCreateNotebook(profile: sqlops.IConnectionProfile): Promise<void> {
	return handleNewNotebookTask(undefined, profile);
}

async function handleNewNotebookTask(oeContext?: sqlops.ObjectExplorerContext, profile?: sqlops.IConnectionProfile): Promise<void> {
	// Ensure we get a unique ID for the notebook. For now we're using a different prefix to the built-in untitled files
	// to handle this. We should look into improving this in the future
	let untitledUri = vscode.Uri.parse(`untitled:Notebook-${untitledCounter++}`);
	let editor = await sqlops.nb.showNotebookDocument(untitledUri, {
		connectionId: profile.id,
		providerId: jupyterNotebookProviderId,
		preview: false,
		defaultKernel: {
			name: 'pyspark3kernel',
			display_name: 'PySpark3',
			language: 'python'
		}
	});
	if (oeContext && oeContext.nodeInfo && oeContext.nodeInfo.nodePath) {
		// Get the file path after '/HDFS'
		let hdfsPath: string = oeContext.nodeInfo.nodePath.substring(oeContext.nodeInfo.nodePath.indexOf('/HDFS') + '/HDFS'.length);
		if (hdfsPath.length > 0) {
			let analyzeCommand = "#" + msgSampleCodeDataFrame + os.EOL + "df = (spark.read.option(\"inferSchema\", \"true\")"
				+ os.EOL + ".option(\"header\", \"true\")" + os.EOL + ".csv('{0}'))" + os.EOL + "df.show(10)";
			editor.edit(editBuilder => {
				editBuilder.replace(0, {
					cell_type: 'code',
					source: analyzeCommand.replace('{0}', hdfsPath)
				});
			});
		}
	}
}

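To make the string templating above concrete, here is its expansion for a hypothetical HDFS path; `msg` mirrors the English default of `msgSampleCodeDataFrame`, and `/data/sales.csv` is made up for illustration:

```typescript
import * as os from 'os';

const hdfsPath = '/data/sales.csv'; // hypothetical path for illustration
const msg = 'This sample code loads the file into a data frame and shows the first 10 results.';
const analyzeCommand = '#' + msg + os.EOL + 'df = (spark.read.option("inferSchema", "true")'
	+ os.EOL + '.option("header", "true")' + os.EOL + ".csv('{0}'))" + os.EOL + 'df.show(10)';
// Yields a PySpark cell that reads the CSV into a DataFrame and shows the first 10 rows.
console.log(analyzeCommand.replace('{0}', hdfsPath));
```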
async function handleOpenNotebookTask(profile: sqlops.IConnectionProfile): Promise<void> {
	let notebookFileTypeName = localize('notebookFileType', 'Notebooks');
	let filter = {};
	filter[notebookFileTypeName] = 'ipynb';
	let uris = await vscode.window.showOpenDialog({
		filters: filter,
		canSelectFiles: true,
		canSelectMany: false
	});
	if (uris && uris.length > 0) {
		let fileUri = uris[0];
		// Verify this is a .ipynb file since this isn't actually filtered on Mac/Linux
		if (path.extname(fileUri.fsPath) !== '.ipynb') {
			// in the future might want additional supported types
			vscode.window.showErrorMessage(localize('unsupportedFileType', 'Only .ipynb Notebooks are supported'));
		} else {
			await sqlops.nb.showNotebookDocument(fileUri, {
				connectionId: profile.id,
				providerId: jupyterNotebookProviderId,
				preview: false
			});
		}
	}
}

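One thing worth flagging in the block above: VS Code's `showOpenDialog` declares `filters` as `{ [name: string]: string[] }`, i.e. each display name maps to an array of extensions without dots. A hedged sketch of the dialog call in that shape:

```typescript
import * as vscode from 'vscode';

// Sketch: filters keyed by display name map to arrays of extensions (no dots).
async function pickNotebookFile(): Promise<vscode.Uri | undefined> {
	const uris = await vscode.window.showOpenDialog({
		filters: { Notebooks: ['ipynb'] },
		canSelectFiles: true,
		canSelectMany: false
	});
	return uris && uris.length > 0 ? uris[0] : undefined;
}
```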
function generateServerOptions(executablePath: string): ServerOptions {
	let launchArgs = Utils.getCommonLaunchArgsAndCleanupOldLogFiles('sqltools', executablePath);
	return { command: executablePath, args: launchArgs, transport: TransportKind.stdio };
}
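For context, a `ServerOptions` value like the one returned above is what the language client consumes at startup. A sketch under the assumption that `SqlOpsDataClient` mirrors vscode-languageclient's `LanguageClient(id, name, serverOptions, clientOptions)` constructor; the executable path below is hypothetical:

```typescript
import { SqlOpsDataClient, ClientOptions } from 'dataprotocol-client';
import { ServerOptions, TransportKind } from 'vscode-languageclient';

declare const clientOptions: ClientOptions; // provided elsewhere in activate()

const serverOptions: ServerOptions = {
	command: '/path/to/sqltoolsservice', // hypothetical executable path
	args: [],
	transport: TransportKind.stdio // matches generateServerOptions above
};
// Constructor shape is an assumption; see the actual activate() wiring in this file.
const client = new SqlOpsDataClient('mssql', 'SQL Tools Service', serverOptions, clientOptions);
client.start();
```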