diff --git a/extensions/mssql/package.json b/extensions/mssql/package.json
index 0ab5d47961..e61875578e 100644
--- a/extensions/mssql/package.json
+++ b/extensions/mssql/package.json
@@ -29,7 +29,8 @@
"uri-js": "^4.2.2",
"vscode-extension-telemetry": "^0.0.15",
"vscode-nls": "^4.0.0",
- "webhdfs": "^1.1.1"
+ "webhdfs": "^1.1.1",
+ "request-promise": "^4.2.2"
},
"devDependencies": {},
"contributes": {
@@ -68,6 +69,54 @@
{
"command": "mssqlCluster.copyPath",
"title": "%mssqlCluster.copyPath%"
+ },
+ {
+ "command": "mssqlCluster.task.newNotebook",
+ "title": "%notebook.command.new%",
+ "icon": {
+ "dark": "resources/dark/new_notebook_inverse.svg",
+ "light": "resources/light/new_notebook.svg"
+ }
+ },
+ {
+ "command": "mssqlCluster.task.openNotebook",
+ "title": "%notebook.command.open%",
+ "icon": {
+ "dark": "resources/dark/open_notebook_inverse.svg",
+ "light": "resources/light/open_notebook.svg"
+ }
+ },
+ {
+ "command": "mssqlCluster.livy.cmd.submitSparkJob",
+ "title": "%title.submitSparkJob%"
+ },
+ {
+ "command": "mssqlCluster.livy.task.submitSparkJob",
+ "title": "%title.newSparkJob%",
+ "icon": {
+ "dark": "resources/dark/new_spark_job_inverse.svg",
+ "light": "resources/light/new_spark_job.svg"
+ }
+ },
+ {
+ "command": "mssqlCluster.livy.task.openSparkHistory",
+ "title": "%title.openSparkHistory%",
+ "icon": {
+ "dark": "resources/dark/new_spark_job_inverse.svg",
+ "light": "resources/light/new_spark_job.svg"
+ }
+ },
+ {
+ "command": "mssqlCluster.livy.task.openYarnHistory",
+ "title": "%title.openYarnHistory%",
+ "icon": {
+ "dark": "resources/light/hadoop.svg",
+ "light": "resources/light/hadoop.svg"
+ }
+ },
+ {
+ "command": "mssqlCluster.livy.cmd.submitFileToSparkJob",
+ "title": "%title.submitSparkJob%"
}
],
"outputChannels": [
@@ -184,6 +233,22 @@
{
"command": "mssqlCluster.copyPath",
"when": "false"
+ },
+ {
+ "command": "mssqlCluster.task.newNotebook",
+ "when": "false"
+ },
+ {
+ "command": "mssqlCluster.task.openNotebook",
+ "when": "false"
+ },
+ {
+ "command": "mssqlCluster.livy.cmd.submitFileToSparkJob",
+ "when": "false"
+ },
+ {
+ "command": "mssqlCluster.livy.task.submitSparkJob",
+ "when": "false"
}
],
"objectExplorer/item/context": [
@@ -216,6 +281,11 @@
"command": "mssqlCluster.deleteFiles",
"when": "nodeType=~/^mssqlCluster/ && viewItem != mssqlCluster:connection && nodeType != mssqlCluster:message",
"group": "1mssqlCluster@4"
+ },
+ {
+ "command": "mssqlCluster.livy.cmd.submitFileToSparkJob",
+ "when": "nodeType == mssqlCluster:file && nodeSubType == mssqlCluster:spark",
+ "group": "1mssqlCluster@6"
}
]
},
@@ -314,6 +384,34 @@
}
]
},
+ "dashboard.tabs": [
+ {
+ "id": "mssql-big-data-cluster",
+ "description": "tab.bigDataClusterDescription",
+ "provider": "MSSQL",
+ "title": "%title.bigDataCluster%",
+ "when": "connectionProvider == 'MSSQL' && mssql:iscluster",
+ "container": {
+ "grid-container": [
+ {
+ "name": "%title.tasks%",
+ "row": 0,
+ "col": 0,
+ "colspan": 2,
+ "widget": {
+ "tasks-widget": [
+ "mssqlCluster.task.newNotebook",
+ "mssqlCluster.task.openNotebook",
+ "mssqlCluster.livy.task.submitSparkJob",
+ "mssqlCluster.livy.task.openSparkHistory",
+ "mssqlCluster.livy.task.openYarnHistory"
+ ]
+ }
+ }
+ ]
+ }
+ }
+ ],
"connectionProvider": {
"providerId": "MSSQL",
"displayName": "Microsoft SQL Server",
@@ -800,4 +898,4 @@
]
}
}
-}
+}
\ No newline at end of file
diff --git a/extensions/mssql/package.nls.json b/extensions/mssql/package.nls.json
index dd30e3362e..b7e77c4cb1 100644
--- a/extensions/mssql/package.nls.json
+++ b/extensions/mssql/package.nls.json
@@ -5,10 +5,24 @@
"json.schemas.fileMatch.item.desc": "A file pattern that can contain '*' to match against when resolving JSON files to schemas.",
"json.schemas.schema.desc": "The schema definition for the given URL. The schema only needs to be provided to avoid accesses to the schema URL.",
"json.format.enable.desc": "Enable/disable default JSON formatter (requires restart)",
+
"mssqlCluster.uploadFiles": "Upload files",
"mssqlCluster.mkdir": "New directory",
"mssqlCluster.deleteFiles": "Delete",
"mssqlCluster.previewFile": "Preview",
"mssqlCluster.saveFile": "Save",
- "mssqlCluster.copyPath": "Copy Path"
+ "mssqlCluster.copyPath": "Copy Path",
+
+ "notebook.command.new": "New Notebook",
+ "notebook.command.open": "Open Notebook",
+
+ "tab.bigDataClusterDescription": "Tasks and information about your SQL Server Big Data Cluster",
+ "title.bigDataCluster": "SQL Server Big Data Cluster",
+ "title.submitSparkJob": "Submit Spark Job",
+ "title.newSparkJob": "New Spark Job",
+ "title.openSparkHistory": "View Spark History",
+ "title.openYarnHistory": "View Yarn History",
+ "title.tasks": "Tasks",
+ "title.installPackages": "Install Packages",
+ "title.configurePython": "Configure Python for Notebooks"
}
\ No newline at end of file
diff --git a/extensions/mssql/resources/dark/cluster_inverse.svg b/extensions/mssql/resources/dark/cluster_inverse.svg
new file mode 100644
index 0000000000..b35c0c5d36
--- /dev/null
+++ b/extensions/mssql/resources/dark/cluster_inverse.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/extensions/mssql/resources/dark/new_notebook_inverse.svg b/extensions/mssql/resources/dark/new_notebook_inverse.svg
new file mode 100644
index 0000000000..e0072afee1
--- /dev/null
+++ b/extensions/mssql/resources/dark/new_notebook_inverse.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/extensions/mssql/resources/dark/new_spark_job_inverse.svg b/extensions/mssql/resources/dark/new_spark_job_inverse.svg
new file mode 100644
index 0000000000..e5ed4b3190
--- /dev/null
+++ b/extensions/mssql/resources/dark/new_spark_job_inverse.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/extensions/mssql/resources/dark/open_notebook_inverse.svg b/extensions/mssql/resources/dark/open_notebook_inverse.svg
new file mode 100644
index 0000000000..a95750c49f
--- /dev/null
+++ b/extensions/mssql/resources/dark/open_notebook_inverse.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/extensions/mssql/resources/light/cluster.svg b/extensions/mssql/resources/light/cluster.svg
new file mode 100644
index 0000000000..e0e8e68f41
--- /dev/null
+++ b/extensions/mssql/resources/light/cluster.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/extensions/mssql/resources/light/hadoop.svg b/extensions/mssql/resources/light/hadoop.svg
new file mode 100644
index 0000000000..0757489a17
--- /dev/null
+++ b/extensions/mssql/resources/light/hadoop.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/extensions/mssql/resources/light/new_notebook.svg b/extensions/mssql/resources/light/new_notebook.svg
new file mode 100644
index 0000000000..9618487568
--- /dev/null
+++ b/extensions/mssql/resources/light/new_notebook.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/extensions/mssql/resources/light/new_spark_job.svg b/extensions/mssql/resources/light/new_spark_job.svg
new file mode 100644
index 0000000000..3775bf4da3
--- /dev/null
+++ b/extensions/mssql/resources/light/new_spark_job.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/extensions/mssql/resources/light/open_notebook.svg b/extensions/mssql/resources/light/open_notebook.svg
new file mode 100644
index 0000000000..0041ae9b21
--- /dev/null
+++ b/extensions/mssql/resources/light/open_notebook.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/extensions/mssql/src/apiWrapper.ts b/extensions/mssql/src/apiWrapper.ts
index 1e91915904..5847305073 100644
--- a/extensions/mssql/src/apiWrapper.ts
+++ b/extensions/mssql/src/apiWrapper.ts
@@ -33,12 +33,31 @@ export class ApiWrapper {
return sqlops.dataprotocol.registerFileBrowserProvider(provider);
}
+ public createDialog(title: string): sqlops.window.modelviewdialog.Dialog {
+ return sqlops.window.modelviewdialog.createDialog(title);
+ }
+
+ public openDialog(dialog: sqlops.window.modelviewdialog.Dialog): void {
+ return sqlops.window.modelviewdialog.openDialog(dialog);
+ }
+
+ public closeDialog(dialog: sqlops.window.modelviewdialog.Dialog): void {
+ return sqlops.window.modelviewdialog.closeDialog(dialog);
+ }
+
public registerTaskHandler(taskId: string, handler: (profile: sqlops.IConnectionProfile) => void): void {
sqlops.tasks.registerTask(taskId, handler);
}
- // VSCode APIs
+ public startBackgroundOperation(operationInfo: sqlops.BackgroundOperationInfo): void {
+ sqlops.tasks.startBackgroundOperation(operationInfo);
+ }
+ public getActiveConnections(): Thenable<sqlops.connection.Connection[]> {
+ return sqlops.connection.getActiveConnections();
+ }
+
+ // VSCode APIs
public executeCommand<T>(command: string, ...rest: any[]): Thenable<T | undefined> {
return vscode.commands.executeCommand(command, ...rest);
}
@@ -47,6 +66,18 @@ export class ApiWrapper {
return vscode.commands.registerCommand(command, callback, thisArg);
}
+ public showErrorMessage(message: string, ...items: string[]): Thenable<string | undefined> {
+ return vscode.window.showErrorMessage(message, ...items);
+ }
+
+ public showWarningMessage(message: string, ...items: string[]): Thenable<string | undefined> {
+ return vscode.window.showWarningMessage(message, ...items);
+ }
+
+ public showInformationMessage(message: string, ...items: string[]): Thenable<string | undefined> {
+ return vscode.window.showInformationMessage(message, ...items);
+ }
+
public showOpenDialog(options: vscode.OpenDialogOptions): Thenable<vscode.Uri[] | undefined> {
return vscode.window.showOpenDialog(options);
}
@@ -70,24 +101,19 @@ export class ApiWrapper {
return vscode.window.showTextDocument(document, options);
}
- public showErrorMessage(message: string, ...items: string[]): Thenable<string | undefined> {
- return vscode.window.showErrorMessage(message, ...items);
- }
-
- public showWarningMessage(message: string, ...items: string[]): Thenable<string | undefined> {
- return vscode.window.showWarningMessage(message, ...items);
- }
-
- public showInformationMessage(message: string, ...items: string[]): Thenable<string | undefined> {
- return vscode.window.showInformationMessage(message, ...items);
+ public get workspaceFolders(): vscode.WorkspaceFolder[] {
+ return vscode.workspace.workspaceFolders;
}
public createStatusBarItem(alignment?: vscode.StatusBarAlignment, priority?: number): vscode.StatusBarItem {
return vscode.window.createStatusBarItem(alignment, priority);
}
- public get workspaceFolders(): vscode.WorkspaceFolder[] {
- return vscode.workspace.workspaceFolders;
+ public createOutputChannel(name: string): vscode.OutputChannel {
+ return vscode.window.createOutputChannel(name);
}
+ public createTab(title: string): sqlops.window.modelviewdialog.DialogTab {
+ return sqlops.window.modelviewdialog.createTab(title);
+ }
}
diff --git a/extensions/mssql/src/appContext.ts b/extensions/mssql/src/appContext.ts
index eaedd182ed..4abec27439 100644
--- a/extensions/mssql/src/appContext.ts
+++ b/extensions/mssql/src/appContext.ts
@@ -13,16 +13,16 @@ import { ApiWrapper } from './apiWrapper';
*/
export class AppContext {
- private serviceMap: Map<string, any> = new Map<string, any>();
- constructor(public readonly extensionContext: vscode.ExtensionContext, public readonly apiWrapper: ApiWrapper) {
- this.apiWrapper = apiWrapper || new ApiWrapper();
- }
+ private serviceMap: Map<string, any> = new Map<string, any>();
+ constructor(public readonly extensionContext: vscode.ExtensionContext, public readonly apiWrapper: ApiWrapper) {
+ this.apiWrapper = apiWrapper || new ApiWrapper();
+ }
- public getService<T>(serviceName: string): T {
- return this.serviceMap.get(serviceName) as T;
- }
+ public getService<T>(serviceName: string): T {
+ return this.serviceMap.get(serviceName) as T;
+ }
- public registerService<T>(serviceName: string, service: T): void {
- this.serviceMap.set(serviceName, service);
- }
+ public registerService<T>(serviceName: string, service: T): void {
+ this.serviceMap.set(serviceName, service);
+ }
}
diff --git a/extensions/mssql/src/constants.ts b/extensions/mssql/src/constants.ts
index 7bbd5deb0c..f21deef992 100644
--- a/extensions/mssql/src/constants.ts
+++ b/extensions/mssql/src/constants.ts
@@ -57,4 +57,18 @@ export enum MssqlClusterItems {
export enum MssqlClusterItemsSubType {
Spark = 'mssqlCluster:spark'
-}
\ No newline at end of file
+}
+
+// SPARK JOB SUBMISSION //////////////////////////////////////////////////////////
+export const mssqlClusterNewNotebookTask = 'mssqlCluster.task.newNotebook';
+export const mssqlClusterOpenNotebookTask = 'mssqlCluster.task.openNotebook';
+export const mssqlClusterLivySubmitSparkJobCommand = 'mssqlCluster.livy.cmd.submitSparkJob';
+export const mssqlClusterLivySubmitSparkJobFromFileCommand = 'mssqlCluster.livy.cmd.submitFileToSparkJob';
+export const mssqlClusterLivySubmitSparkJobTask = 'mssqlCluster.livy.task.submitSparkJob';
+export const mssqlClusterLivyOpenSparkHistory = 'mssqlCluster.livy.task.openSparkHistory';
+export const mssqlClusterLivyOpenYarnHistory = 'mssqlCluster.livy.task.openYarnHistory';
+export const mssqlClusterLivySubmitPath = '/gateway/default/livy/v1/batches';
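+// Polling cadence after a Livy batch submission: check the YARN application state every second, up to 20 retries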
+export const mssqlClusterLivyTimeInMSForCheckYarnApp = 1000;
+export const mssqlClusterLivyRetryTimesForCheckYarnApp = 20;
+export const mssqlClusterSparkJobFileSelectorButtonWidth = '30px';
+export const mssqlClusterSparkJobFileSelectorButtonHeight = '30px';
diff --git a/extensions/mssql/src/contextProvider.ts b/extensions/mssql/src/contextProvider.ts
index 3faffbe832..8a21f2c376 100644
--- a/extensions/mssql/src/contextProvider.ts
+++ b/extensions/mssql/src/contextProvider.ts
@@ -7,6 +7,7 @@ import * as vscode from 'vscode';
import * as sqlops from 'sqlops';
import * as types from './types';
+import * as Constants from './constants';
export enum BuiltInCommands {
SetContext = 'setContext',
@@ -14,7 +15,8 @@ export enum BuiltInCommands {
export enum ContextKeys {
ISCLOUD = 'mssql:iscloud',
- EDITIONID = 'mssql:engineedition'
+ EDITIONID = 'mssql:engineedition',
+ ISCLUSTER = 'mssql:iscluster'
}
const isCloudEditions = [
@@ -37,6 +39,7 @@ export default class ContextProvider {
public onDashboardOpen(e: sqlops.DashboardDocument): void {
let iscloud: boolean;
let edition: number;
+ let isCluster: boolean = false;
if (e.profile.providerName.toLowerCase() === 'mssql' && !types.isUndefinedOrNull(e.serverInfo) && !types.isUndefinedOrNull(e.serverInfo.engineEditionId)) {
if (isCloudEditions.some(i => i === e.serverInfo.engineEditionId)) {
iscloud = true;
@@ -45,6 +48,13 @@ export default class ContextProvider {
}
edition = e.serverInfo.engineEditionId;
+
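+ // The big data cluster flag arrives on serverInfo.options and feeds the mssql:iscluster context key set below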
+ if (!types.isUndefinedOrNull(e.serverInfo.options)) {
+ let isBigDataCluster = e.serverInfo.options[Constants.isBigDataClusterProperty];
+ if (isBigDataCluster) {
+ isCluster = isBigDataCluster;
+ }
+ }
}
if (iscloud === true || iscloud === false) {
@@ -54,6 +64,10 @@ export default class ContextProvider {
if (!types.isUndefinedOrNull(edition)) {
setCommandContext(ContextKeys.EDITIONID, edition);
}
+
+ if (!types.isUndefinedOrNull(isCluster)) {
+ setCommandContext(ContextKeys.ISCLUSTER, isCluster);
+ }
}
dispose(): void {
diff --git a/extensions/mssql/src/localizedConstants.ts b/extensions/mssql/src/localizedConstants.ts
index 48ac14d1d3..01d7fcb1a0 100644
--- a/extensions/mssql/src/localizedConstants.ts
+++ b/extensions/mssql/src/localizedConstants.ts
@@ -10,4 +10,18 @@ const localize = nls.loadMessageBundle();
// HDFS Constants //////////////////////////////////////////////////////////
export const msgMissingNodeContext = localize('msgMissingNodeContext', 'Node Command called without any node passed');
-export const msgTimeout = localize('connectionTimeout', 'connection timed out. Host name or port may be incorrect');
+
+// Spark Job Submission Constants //////////////////////////////////////////
+export const sparkLocalFileDestinationHint = localize('sparkJobSubmission_LocalFileDestinationHint', 'Local file will be uploaded to HDFS. ');
+export const sparkJobSubmissionEndMessage = localize('sparkJobSubmission_SubmissionEndMessage', '.......................... Submit Spark Job End ............................');
+export function sparkJobSubmissionPrepareUploadingFile(localPath: string, clusterFolder: string): string { return localize('sparkJobSubmission_PrepareUploadingFile', 'Uploading file from local {0} to HDFS folder: {1}', localPath, clusterFolder); }
+export const sparkJobSubmissionUploadingFileSucceeded = localize('sparkJobSubmission_UploadingFileSucceeded', 'Upload file to cluster succeeded!');
+export function sparkJobSubmissionUploadingFileFailed(err: string): string { return localize('sparkJobSubmission_UploadingFileFailed', 'Upload file to cluster failed. {0}', err); }
+export function sparkJobSubmissionPrepareSubmitJob(jobName: string): string { return localize('sparkJobSubmission_PrepareSubmitJob', 'Submitting job {0} ... ', jobName); }
+export const sparkJobSubmissionSparkJobHasBeenSubmitted = localize('sparkJobSubmission_SubmitJobFinished', 'The Spark Job has been submitted.');
+export function sparkJobSubmissionSubmitJobFailed(err: string): string { return localize('sparkJobSubmission_SubmitJobFailed', 'Spark job submission failed. {0} ', err); }
+export function sparkJobSubmissionYarnUIMessage(yarnUIURL: string): string { return localize('sparkJobSubmission_YarnUIMessage', 'Yarn UI URL: {0} ', yarnUIURL); }
+export function sparkJobSubmissionSparkHistoryLinkMessage(sparkHistoryLink: string): string { return localize('sparkJobSubmission_SparkHistoryLinkMessage', 'Spark History URL: {0} ', sparkHistoryLink); }
+export function sparkJobSubmissionGetApplicationIdFailed(err: string): string { return localize('sparkJobSubmission_GetApplicationIdFailed', 'Get application ID failed. {0}', err); }
+export function sparkJobSubmissionLocalFileNotExisted(path: string): string { return localize('sparkJobSubmission_LocalFileNotExisted', 'Local file {0} does not exist. ', path); }
+export const sparkJobSubmissionNoSqlBigDataClusterFound = localize('sparkJobSubmission_NoSqlBigDataClusterFound', 'No SQL Server Big Data Cluster found.');
diff --git a/extensions/mssql/src/main.ts b/extensions/mssql/src/main.ts
index 42674f6409..2986b6b2a8 100644
--- a/extensions/mssql/src/main.ts
+++ b/extensions/mssql/src/main.ts
@@ -7,6 +7,10 @@
import * as vscode from 'vscode';
import * as sqlops from 'sqlops';
import * as path from 'path';
+import * as os from 'os';
+import * as nls from 'vscode-nls';
+const localize = nls.loadMessageBundle();
+
import { SqlOpsDataClient, ClientOptions } from 'dataprotocol-client';
import { IConfig, ServerProvider, Events } from 'service-downloader';
import { ServerOptions, TransportKind } from 'vscode-languageclient';
@@ -20,15 +24,21 @@ import { Telemetry, LanguageClientErrorHandler } from './telemetry';
import { TelemetryFeature, AgentServicesFeature, DacFxServicesFeature } from './features';
import { AppContext } from './appContext';
import { ApiWrapper } from './apiWrapper';
-import { MssqlObjectExplorerNodeProvider } from './objectExplorerNodeProvider/objectExplorerNodeProvider';
import { UploadFilesCommand, MkDirCommand, SaveFileCommand, PreviewFileCommand, CopyPathCommand, DeleteFilesCommand } from './objectExplorerNodeProvider/hdfsCommands';
import { IPrompter } from './prompts/question';
import CodeAdapter from './prompts/adapter';
import { MssqlExtensionApi, MssqlObjectExplorerBrowser } from './api/mssqlapis';
+import { OpenSparkJobSubmissionDialogCommand, OpenSparkJobSubmissionDialogFromFileCommand, OpenSparkJobSubmissionDialogTask } from './sparkFeature/dialog/dialogCommands';
+import { OpenSparkYarnHistoryTask } from './sparkFeature/historyTask';
+import { MssqlObjectExplorerNodeProvider, mssqlOutputChannel } from './objectExplorerNodeProvider/objectExplorerNodeProvider';
const baseConfig = require('./config.json');
const outputChannel = vscode.window.createOutputChannel(Constants.serviceName);
const statusView = vscode.window.createStatusBarItem(vscode.StatusBarAlignment.Left);
+const jupyterNotebookProviderId = 'jupyter';
+const msgSampleCodeDataFrame = localize('msgSampleCodeDataFrame', 'This sample code loads the file into a data frame and shows the first 10 results.');
+
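+// Monotonically increasing suffix used to give each new untitled notebook a unique URI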
+let untitledCounter = 0;
export async function activate(context: vscode.ExtensionContext): Promise<MssqlExtensionApi> {
// lets make sure we support this platform first
@@ -96,8 +106,11 @@ export async function activate(context: vscode.ExtensionContext): Promise<MssqlExtensionApi> {
Telemetry.sendTelemetryEvent('ServiceInitializingFailed');
vscode.window.showErrorMessage('Failed to start Sql tools service');
@@ -120,7 +133,7 @@ export async function activate(context: vscode.ExtensionContext): Promise<MssqlExtensionApi> {
let oeProvider = appContext.getService<MssqlObjectExplorerNodeProvider>(Constants.ObjectExplorerService);
- return oeProvider.findNodeForContext(context);
+ return oeProvider.findSqlClusterNodeByContext(context);
}
};
}
@@ -128,6 +141,93 @@ export async function activate(context: vscode.ExtensionContext): Promise<MssqlExtensionApi> {
+ new OpenSparkJobSubmissionDialogTask(appContext, outputChannel).execute(profile);
+ });
+ apiWrapper.registerTaskHandler(Constants.mssqlClusterLivyOpenSparkHistory, (profile: sqlops.IConnectionProfile) => {
+ new OpenSparkYarnHistoryTask(appContext).execute(profile, true);
+ });
+ apiWrapper.registerTaskHandler(Constants.mssqlClusterLivyOpenYarnHistory, (profile: sqlops.IConnectionProfile) => {
+ new OpenSparkYarnHistoryTask(appContext).execute(profile, false);
+ });
+}
+
+function activateNotebookTask(appContext: AppContext): void {
+ let apiWrapper = appContext.apiWrapper;
+ apiWrapper.registerTaskHandler(Constants.mssqlClusterNewNotebookTask, (profile: sqlops.IConnectionProfile) => {
+ return saveProfileAndCreateNotebook(profile);
+ });
+ apiWrapper.registerTaskHandler(Constants.mssqlClusterOpenNotebookTask, (profile: sqlops.IConnectionProfile) => {
+ return handleOpenNotebookTask(profile);
+ });
+}
+
+function saveProfileAndCreateNotebook(profile: sqlops.IConnectionProfile): Promise<void> {
+ return handleNewNotebookTask(undefined, profile);
+}
+
+async function handleNewNotebookTask(oeContext?: sqlops.ObjectExplorerContext, profile?: sqlops.IConnectionProfile): Promise<void> {
+ // Ensure we get a unique ID for the notebook. For now we're using a different prefix from the built-in untitled files
+ // to handle this. We should look into improving this in the future.
+ let untitledUri = vscode.Uri.parse(`untitled:Notebook-${untitledCounter++}`);
+ let editor = await sqlops.nb.showNotebookDocument(untitledUri, {
+ connectionId: profile.id,
+ providerId: jupyterNotebookProviderId,
+ preview: false,
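+ // Open new cluster notebooks with the PySpark3 kernel by default so cells run against the cluster's Spark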
+ defaultKernel: {
+ name: 'pyspark3kernel',
+ display_name: 'PySpark3',
+ language: 'python'
+ }
+ });
+ if (oeContext && oeContext.nodeInfo && oeContext.nodeInfo.nodePath) {
+ // Get the file path after '/HDFS'
+ let hdfsPath: string = oeContext.nodeInfo.nodePath.substring(oeContext.nodeInfo.nodePath.indexOf('/HDFS') + '/HDFS'.length);
+ if (hdfsPath.length > 0) {
+ let analyzeCommand = "#" + msgSampleCodeDataFrame + os.EOL + "df = (spark.read.option(\"inferSchema\", \"true\")"
+ + os.EOL + ".option(\"header\", \"true\")" + os.EOL + ".csv('{0}'))" + os.EOL + "df.show(10)";
+ editor.edit(editBuilder => {
+ editBuilder.replace(0, {
+ cell_type: 'code',
+ source: analyzeCommand.replace('{0}', hdfsPath)
+ });
+ });
+
+ }
+ }
+}
+
+async function handleOpenNotebookTask(profile: sqlops.IConnectionProfile): Promise<void> {
+ let notebookFileTypeName = localize('notebookFileType', 'Notebooks');
+ let filter = {};
+ filter[notebookFileTypeName] = ['ipynb'];
+ let uris = await vscode.window.showOpenDialog({
+ filters: filter,
+ canSelectFiles: true,
+ canSelectMany: false
+ });
+ if (uris && uris.length > 0) {
+ let fileUri = uris[0];
+ // Verify this is a .ipynb file since this isn't actually filtered on Mac/Linux
+ if (path.extname(fileUri.fsPath) !== '.ipynb') {
+ // in the future might want additional supported types
+ vscode.window.showErrorMessage(localize('unsupportedFileType', 'Only .ipynb Notebooks are supported'));
+ } else {
+ await sqlops.nb.showNotebookDocument(fileUri, {
+ connectionId: profile.id,
+ providerId: jupyterNotebookProviderId,
+ preview: false
+ });
+ }
+ }
+}
+
function generateServerOptions(executablePath: string): ServerOptions {
let launchArgs = Utils.getCommonLaunchArgsAndCleanupOldLogFiles('sqltools', executablePath);
return { command: executablePath, args: launchArgs, transport: TransportKind.stdio };
diff --git a/extensions/mssql/src/objectExplorerNodeProvider/connection.ts b/extensions/mssql/src/objectExplorerNodeProvider/connection.ts
index 4b2748b913..84ce0589bc 100644
--- a/extensions/mssql/src/objectExplorerNodeProvider/connection.ts
+++ b/extensions/mssql/src/objectExplorerNodeProvider/connection.ts
@@ -6,214 +6,57 @@
'use strict';
import * as sqlops from 'sqlops';
-import * as UUID from 'vscode-languageclient/lib/utils/uuid';
import * as nls from 'vscode-nls';
const localize = nls.loadMessageBundle();
import * as constants from '../constants';
-import * as LocalizedConstants from '../localizedConstants';
-import * as utils from '../utils';
import { IFileSource, IHdfsOptions, IRequestParams, FileSourceFactory } from './fileSources';
-import { IEndpoint } from './objectExplorerNodeProvider';
-function appendIfExists(uri: string, propName: string, propValue: string): string {
- if (propValue) {
- uri = `${uri};${propName}=${propValue}`;
- }
- return uri;
-}
-
-interface IValidationResult {
- isValid: boolean;
- errors: string;
-}
-
-export class Connection {
+export class SqlClusterConnection {
+ private _connection: sqlops.connection.Connection;
+ private _profile: sqlops.IConnectionProfile;
private _host: string;
- private _knoxPort: string;
+ private _port: string;
+ private _user: string;
+ private _password: string;
- constructor(private connectionInfo: sqlops.ConnectionInfo, private connectionUri?: string, private _connectionId?: string) {
- if (!this.connectionInfo) {
- throw new Error(localize('connectionInfoMissing', 'connectionInfo is required'));
- }
-
- if (!this._connectionId) {
- this._connectionId = UUID.generateUuid();
+ constructor(connectionInfo: sqlops.connection.Connection | sqlops.IConnectionProfile) {
+ this.validate(connectionInfo);
+
+ if ('id' in connectionInfo) {
+ this._profile = connectionInfo;
+ this._connection = this.toConnection(this._profile);
+ } else {
+ this._connection = connectionInfo;
+ this._profile = this.toConnectionProfile(this._connection);
}
+ this._host = this._connection.options[constants.hostPropName];
+ this._port = this._connection.options[constants.knoxPortPropName];
+ this._user = this._connection.options[constants.userPropName];
+ this._password = this._connection.options[constants.passwordPropName];
}
- public get uri(): string {
- return this.connectionUri;
+ public get connection(): sqlops.connection.Connection { return this._connection; }
+ public get profile(): sqlops.IConnectionProfile { return this._profile; }
+ public get host(): string { return this._host; }
+ public get port(): string { return this._port || constants.defaultKnoxPort; }
+ public get user(): string { return this._user; }
+ public get password(): string { return this._password; }
+
+ public isMatch(connection: SqlClusterConnection | sqlops.ConnectionInfo): boolean {
+ if (!connection) { return false; }
+ let options1 = connection instanceof SqlClusterConnection ?
+ connection._connection.options : connection.options;
+ let options2 = this._connection.options;
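+ // Two connections refer to the same cluster when host, Knox port and user all match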
+ return [constants.hostPropName, constants.knoxPortPropName, constants.userPropName]
+ .every(e => options1[e] === options2[e]);
}
- public saveUriWithPrefix(prefix: string): string {
- let uri = `${prefix}${this.host}`;
- uri = appendIfExists(uri, constants.knoxPortPropName, this.knoxport);
- uri = appendIfExists(uri, constants.userPropName, this.user);
- uri = appendIfExists(uri, constants.groupIdPropName, this.connectionInfo.options[constants.groupIdPropName]);
- this.connectionUri = uri;
- return this.connectionUri;
- }
-
- public async tryConnect(factory?: FileSourceFactory): Promise<sqlops.ConnectionInfoSummary> {
- let fileSource = this.createHdfsFileSource(factory, {
- timeout: this.connecttimeout
- });
- let summary: sqlops.ConnectionInfoSummary = undefined;
- try {
- await fileSource.enumerateFiles(constants.hdfsRootPath);
- summary = {
- ownerUri: this.connectionUri,
- connectionId: this.connectionId,
- connectionSummary: {
- serverName: this.host,
- databaseName: undefined,
- userName: this.user
- },
- errorMessage: undefined,
- errorNumber: undefined,
- messages: undefined,
- serverInfo: this.getEmptyServerInfo()
- };
- } catch (error) {
- summary = {
- ownerUri: this.connectionUri,
- connectionId: undefined,
- connectionSummary: undefined,
- errorMessage: this.getConnectError(error),
- errorNumber: undefined,
- messages: undefined,
- serverInfo: undefined
- };
- }
- return summary;
- }
-
- private getConnectError(error: string | Error): string {
- let errorMsg = utils.getErrorMessage(error);
- if (errorMsg.indexOf('ETIMEDOUT') > -1) {
- errorMsg = LocalizedConstants.msgTimeout;
- } else if (errorMsg.indexOf('ENOTFOUND') > -1) {
- errorMsg = LocalizedConstants.msgTimeout;
- }
- return localize('connectError', 'Connection failed with error: {0}', errorMsg);
- }
-
- private getEmptyServerInfo(): sqlops.ServerInfo {
- let info: sqlops.ServerInfo = {
- serverMajorVersion: 0,
- serverMinorVersion: 0,
- serverReleaseVersion: 0,
- engineEditionId: 0,
- serverVersion: '',
- serverLevel: '',
- serverEdition: '',
- isCloud: false,
- azureVersion: 0,
- osVersion: '',
- options: {}
- };
- return info;
- }
-
- public get connectionId(): string {
- return this._connectionId;
- }
-
- public get host(): string {
- if (!this._host) {
- this.ensureHostAndPort();
- }
- return this._host;
- }
-
- /**
- * Sets host and port values, using any ',' or ':' delimited port in the hostname in
- * preference to the built in port.
- */
- private ensureHostAndPort(): void {
- this._host = this.connectionInfo.options[constants.hostPropName];
- this._knoxPort = Connection.getKnoxPortOrDefault(this.connectionInfo);
- // determine whether the host has either a ',' or ':' in it
- this.setHostAndPort(',');
- this.setHostAndPort(':');
- }
-
- // set port and host correctly after we've identified that a delimiter exists in the host name
- private setHostAndPort(delimeter: string): void {
- let originalHost = this._host;
- let index = originalHost.indexOf(delimeter);
- if (index > -1) {
- this._host = originalHost.slice(0, index);
- this._knoxPort = originalHost.slice(index + 1);
- }
- }
-
- public get user(): string {
- return this.connectionInfo.options[constants.userPropName];
- }
-
- public get password(): string {
- return this.connectionInfo.options[constants.passwordPropName];
- }
-
- public get knoxport(): string {
- if (!this._knoxPort) {
- this.ensureHostAndPort();
- }
- return this._knoxPort;
- }
-
- private static getKnoxPortOrDefault(connInfo: sqlops.ConnectionInfo): string {
- let port = connInfo.options[constants.knoxPortPropName];
- if (!port) {
- port = constants.defaultKnoxPort;
- }
- return port;
- }
-
- public get connecttimeout(): number {
- let timeoutSeconds: number = this.connectionInfo.options['connecttimeout'];
- if (!timeoutSeconds) {
- timeoutSeconds = constants.hadoopConnectionTimeoutSeconds;
- }
- // connect timeout is in milliseconds
- return timeoutSeconds * 1000;
- }
-
- public get sslverification(): string {
- return this.connectionInfo.options['sslverification'];
- }
-
- public get groupId(): string {
- return this.connectionInfo.options[constants.groupIdName];
- }
-
- public async isMatch(connectionInfo: sqlops.ConnectionInfo): Promise<boolean> {
- if (!connectionInfo) {
- return false;
- }
- let profile = connectionInfo as sqlops.IConnectionProfile;
- if (profile) {
- let result: IEndpoint = await utils.getClusterEndpoint(profile.id, constants.hadoopKnoxEndpointName);
- if (result === undefined || !result.ipAddress || !result.port) {
- return false;
- }
- return connectionInfo.options.groupId === this.groupId
- && result.ipAddress === this.host
- && String(result.port).startsWith(this.knoxport)
- && String(result.port).endsWith(this.knoxport);
- // TODO: enable the user check when the unified user is used
- //&& connectionInfo.options.user === this.user;
- }
- }
-
- public createHdfsFileSource(factory?: FileSourceFactory, additionalRequestParams?: IRequestParams): IFileSource {
- factory = factory || FileSourceFactory.instance;
+ public createHdfsFileSource(): IFileSource {
let options: IHdfsOptions = {
protocol: 'https',
host: this.host,
- port: this.knoxport,
+ port: this.port,
user: this.user,
path: 'gateway/default/webhdfs/v1',
requestParams: {
@@ -223,9 +66,49 @@ export class Connection {
}
}
};
- if (additionalRequestParams) {
- options.requestParams = Object.assign(options.requestParams, additionalRequestParams);
+ return FileSourceFactory.instance.createHdfsFileSource(options);
+ }
+
+ private validate(connectionInfo: sqlops.ConnectionInfo): void {
+ if (!connectionInfo) {
+ throw new Error(localize('connectionInfoUndefined', 'ConnectionInfo is undefined.'));
}
- return factory.createHdfsFileSource(options);
+ if (!connectionInfo.options) {
+ throw new Error(localize('connectionInfoOptionsUndefined', 'ConnectionInfo.options is undefined.'));
+ }
+ let missingProperties: string[] = this.getMissingProperties(connectionInfo);
+ if (missingProperties && missingProperties.length > 0) {
+ throw new Error(localize('connectionInfoOptionsMissingProperties',
+ 'The following properties are missing from connectionInfo.options: {0}',
+ missingProperties.join(', ')));
+ }
+ }
+
+ private getMissingProperties(connectionInfo: sqlops.ConnectionInfo): string[] {
+ if (!connectionInfo || !connectionInfo.options) { return undefined; }
+ return [
+ constants.hostPropName, constants.knoxPortPropName,
+ constants.userPropName, constants.passwordPropName
+ ].filter(e => connectionInfo.options[e] === undefined);
+ }
+
+ private toConnection(connProfile: sqlops.IConnectionProfile): sqlops.connection.Connection {
+ let connection: sqlops.connection.Connection = Object.assign(connProfile,
+ { connectionId: this._profile.id });
+ return connection;
+ }
+
+ private toConnectionProfile(connectionInfo: sqlops.connection.Connection): sqlops.IConnectionProfile {
+ let options = connectionInfo.options;
+ let connProfile: sqlops.IConnectionProfile = Object.assign({},
+ connectionInfo,
+ {
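+ // serverName follows the SQL Server 'host,port' convention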
+ serverName: `${options[constants.hostPropName]},${options[constants.knoxPortPropName]}`,
+ userName: options[constants.userPropName],
+ password: options[constants.passwordPropName],
+ id: connectionInfo.connectionId,
+ }
+ );
+ return connProfile;
}
}
diff --git a/extensions/mssql/src/objectExplorerNodeProvider/hdfsCommands.ts b/extensions/mssql/src/objectExplorerNodeProvider/hdfsCommands.ts
index 07437ff10d..9c32ff9e61 100644
--- a/extensions/mssql/src/objectExplorerNodeProvider/hdfsCommands.ts
+++ b/extensions/mssql/src/objectExplorerNodeProvider/hdfsCommands.ts
@@ -21,7 +21,7 @@ import { IPrompter, IQuestion, QuestionTypes } from '../prompts/question';
import * as constants from '../constants';
import * as LocalizedConstants from '../localizedConstants';
import * as utils from '../utils';
-import { Connection } from './connection';
+import { SqlClusterConnection } from './connection';
import { AppContext } from '../appContext';
import { TreeNode } from './treeNodes';
import { MssqlObjectExplorerNodeProvider } from './objectExplorerNodeProvider';
@@ -45,14 +45,14 @@ function getSaveableUri(apiWrapper: ApiWrapper, fileName: string, isPreview?: bo
return vscode.Uri.file(fspath.join(root, fileName));
}
-export async function getNode<T extends TreeNode>(context: ICommandViewContext |ICommandObjectExplorerContext, appContext: AppContext): Promise<T> {
+export async function getNode<T extends TreeNode>(context: ICommandViewContext | ICommandObjectExplorerContext, appContext: AppContext): Promise<T> {
let node: T = undefined;
if (context && context.type === constants.ViewType && context.node) {
node = context.node as T;
} else if (context && context.type === constants.ObjectExplorerService) {
- let oeProvider = appContext.getService<MssqlObjectExplorerNodeProvider>(constants.ObjectExplorerService);
- if (oeProvider) {
- node = await oeProvider.findNodeForContext<T>(context.explorerContext);
+ let oeNodeProvider = appContext.getService<MssqlObjectExplorerNodeProvider>(constants.ObjectExplorerService);
+ if (oeNodeProvider) {
+ node = await oeNodeProvider.findSqlClusterNodeByContext<T>(context);
}
} else {
throw new Error(LocalizedConstants.msgMissingNodeContext);
@@ -73,7 +73,7 @@ export class UploadFilesCommand extends ProgressCommand {
async execute(context: ICommandViewContext | ICommandObjectExplorerContext, ...args: any[]): Promise<void> {
try {
let folderNode = await getNode<FolderNode>(context, this.appContext);
- const allFilesFilter = localize('allFiles', 'All Files');
+ const allFilesFilter = localize('allFiles', 'All Files');
let filter = {};
filter[allFilesFilter] = '*';
if (folderNode) {
@@ -180,11 +180,11 @@ export class DeleteFilesCommand extends Command {
super('mssqlCluster.deleteFiles', appContext);
}
- protected async preExecute(context: ICommandViewContext |ICommandObjectExplorerContext, args: object = {}): Promise<void> {
+ protected async preExecute(context: ICommandViewContext | ICommandObjectExplorerContext, args: object = {}): Promise<void> {
return this.execute(context, args);
}
- async execute(context: ICommandViewContext |ICommandObjectExplorerContext, ...args: any[]): Promise<void> {
+ async execute(context: ICommandViewContext | ICommandObjectExplorerContext, ...args: any[]): Promise<void> {
try {
let node = await getNode(context, this.appContext);
if (node) {
@@ -282,6 +282,7 @@ export class SaveFileCommand extends ProgressCommand {
await this.apiWrapper.executeCommand('vscode.open', fileUri);
}
}
+
export class PreviewFileCommand extends ProgressCommand {
public static readonly DefaultMaxSize = 30 * 1024 * 1024;
@@ -334,6 +335,7 @@ export class PreviewFileCommand extends ProgressCommand {
}
}
}
+
export class CopyPathCommand extends Command {
public static readonly DefaultMaxSize = 30 * 1024 * 1024;
@@ -359,79 +361,3 @@ export class CopyPathCommand extends Command {
}
}
}
-
-/**
- * The connect task is only expected to work in the file-tree based APIs, not Object Explorer
- */
-export class ConnectTask {
- constructor(private hdfsProvider: HdfsProvider, private prompter: IPrompter, private apiWrapper: ApiWrapper) {
-
- }
-
- async execute(profile: sqlops.IConnectionProfile, ...args: any[]): Promise<void> {
- if (profile) {
- return this.createFromProfile(profile);
- }
- return this.createHdfsConnection();
- }
-
- private createFromProfile(profile: sqlops.IConnectionProfile): Promise<void> {
- let connection = new Connection(profile);
- if (profile.providerName === constants.mssqlClusterProviderName && connection.host) {
- // TODO need to get the actual port and auth to be used since this will be non-default
- // in future versions
- this.hdfsProvider.addHdfsConnection( {
- protocol: 'https',
- host: connection.host,
- port: connection.knoxport,
- user: connection.user,
- path: 'gateway/default/webhdfs/v1',
- requestParams: {
- auth: {
- user: connection.user,
- pass: connection.password
- }
- }
- });
- }
- return Promise.resolve(undefined);
- }
-
- private addConnection(options: IHdfsOptions): void {
- let display: string = `${options.user}@${options.host}:${options.port}`;
- this.hdfsProvider.addConnection(display, FileSourceFactory.instance.createHdfsFileSource(options));
- }
-
- private async createHdfsConnection(profile?: sqlops.IConnectionProfile): Promise<void> {
- let questions: IQuestion[] = [
- {
- type: QuestionTypes.input,
- name: constants.hdfsHost,
- message: localize('msgSetWebHdfsHost', 'HDFS URL and port'),
- default: 'localhost:50070'
- },
- {
- type: QuestionTypes.input,
- name: constants.hdfsUser,
- message: localize('msgSetWebHdfsUser', 'User Name'),
- default: 'root'
- }];
-
- let answers = await this.prompter.prompt(questions);
- if (answers) {
- let hostAndPort: string = answers[constants.hdfsHost];
- let parts = hostAndPort.split(':');
- let host: string = parts[0];
- let port: string = parts.length > 1 ? parts[1] : undefined;
- let user: string = answers[constants.hdfsUser];
-
-
- let options: IHdfsOptions = {
- host: host,
- port: port,
- user: user
- };
- this.addConnection(options);
- }
- }
-}
diff --git a/extensions/mssql/src/objectExplorerNodeProvider/hdfsProvider.ts b/extensions/mssql/src/objectExplorerNodeProvider/hdfsProvider.ts
index 778e8de7fc..5aceb69673 100644
--- a/extensions/mssql/src/objectExplorerNodeProvider/hdfsProvider.ts
+++ b/extensions/mssql/src/objectExplorerNodeProvider/hdfsProvider.ts
@@ -130,7 +130,7 @@ export class FolderNode extends HdfsFileSourceNode {
// Note: for now, assuming HDFS-provided sorting is sufficient
this.children = files.map((file) => {
let node: TreeNode = file.isDirectory ? new FolderNode(this.context, file.path, this.fileSource)
- : new FileNode(this.context, file.path, this.fileSource);
+ : new FileNode(this.context, file.path, this.fileSource);
node.parent = this;
return node;
});
diff --git a/extensions/mssql/src/objectExplorerNodeProvider/objectExplorerNodeProvider.ts b/extensions/mssql/src/objectExplorerNodeProvider/objectExplorerNodeProvider.ts
index 3223ba7803..ac8f4ad73c 100644
--- a/extensions/mssql/src/objectExplorerNodeProvider/objectExplorerNodeProvider.ts
+++ b/extensions/mssql/src/objectExplorerNodeProvider/objectExplorerNodeProvider.ts
@@ -10,32 +10,27 @@ import * as vscode from 'vscode';
import * as nls from 'vscode-nls';
const localize = nls.loadMessageBundle();
-import * as UUID from 'vscode-languageclient/lib/utils/uuid';
import { ProviderBase } from './providerBase';
-import { Connection } from './connection';
+import { SqlClusterConnection } from './connection';
import * as utils from '../utils';
import { TreeNode } from './treeNodes';
import { ConnectionNode, TreeDataContext, ITreeChangeHandler } from './hdfsProvider';
import { IFileSource } from './fileSources';
import { AppContext } from '../appContext';
import * as constants from '../constants';
+import * as SqlClusterLookUp from '../sqlClusterLookUp';
+import { ICommandObjectExplorerContext } from './command';
-const outputChannel = vscode.window.createOutputChannel(constants.providerId);
-export interface IEndpoint {
- serviceName: string;
- ipAddress: string;
- port: number;
-}
+export const mssqlOutputChannel = vscode.window.createOutputChannel(constants.providerId);
export class MssqlObjectExplorerNodeProvider extends ProviderBase implements sqlops.ObjectExplorerNodeProvider, ITreeChangeHandler {
public readonly supportedProviderId: string = constants.providerId;
- private sessionMap: Map<string, Session>;
+ private sessionMap: Map<string, SqlClusterSession>;
private expandCompleteEmitter = new vscode.EventEmitter<sqlops.ObjectExplorerExpandInfo>();
constructor(private appContext: AppContext) {
super();
-
- this.sessionMap = new Map<string, Session>();
+ this.sessionMap = new Map<string, SqlClusterSession>();
this.appContext.registerService(constants.ObjectExplorerService, this);
}
@@ -49,44 +44,19 @@ export class MssqlObjectExplorerNodeProvider extends ProviderBase implements sql
});
}
- private async doSessionOpen(sessionInfo: sqlops.ObjectExplorerSession): Promise<boolean> {
- let connectionProfile = await sqlops.objectexplorer.getSessionConnectionProfile(sessionInfo.sessionId);
- if (!connectionProfile) {
- return false;
- } else {
- let credentials = await sqlops.connection.getCredentials(connectionProfile.id);
- let serverInfo = await sqlops.connection.getServerInfo(connectionProfile.id);
- if (!serverInfo || !credentials || !serverInfo.options) {
- return false;
- }
- let endpoints: IEndpoint[] = serverInfo.options[constants.clusterEndpointsProperty];
- if (!endpoints || endpoints.length === 0) {
- return false;
- }
- let index = endpoints.findIndex(ep => ep.serviceName === constants.hadoopKnoxEndpointName);
- if (index === -1) {
- return false;
- }
+ private async doSessionOpen(session: sqlops.ObjectExplorerSession): Promise<boolean> {
+ if (!session || !session.sessionId) { return false; }
- let connInfo: sqlops.connection.Connection = {
- options: {
- 'host': endpoints[index].ipAddress,
- 'groupId': connectionProfile.options.groupId,
- 'knoxport': endpoints[index].port,
- 'user': 'root', //connectionProfile.options.userName cluster setup has to have the same user for master and big data cluster
- 'password': credentials.password,
- },
- providerName: constants.mssqlClusterProviderName,
- connectionId: UUID.generateUuid()
- };
+ let sqlConnProfile = await sqlops.objectexplorer.getSessionConnectionProfile(session.sessionId);
+ if (!sqlConnProfile) { return false; }
- let connection = new Connection(connInfo);
- connection.saveUriWithPrefix(constants.objectExplorerPrefix);
- let session = new Session(connection, sessionInfo.sessionId);
- session.root = new RootNode(session, new TreeDataContext(this.appContext.extensionContext, this), sessionInfo.rootNode.nodePath);
- this.sessionMap.set(sessionInfo.sessionId, session);
- return true;
- }
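+ // Resolve the SQL connection to its big data cluster (Knox gateway) connection before opening a session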
+ let clusterConnInfo = await SqlClusterLookUp.getSqlClusterConnection(sqlConnProfile);
+ if (!clusterConnInfo) { return false; }
+
+ let clusterConnection = new SqlClusterConnection(clusterConnInfo);
+ let clusterSession = new SqlClusterSession(clusterConnection, session, sqlConnProfile, this.appContext, this);
+ this.sessionMap.set(session.sessionId, clusterSession);
+ return true;
}
expandNode(nodeInfo: sqlops.ExpandNodeInfo, isRefresh: boolean = false): Thenable<boolean> {
@@ -125,15 +95,15 @@ export class MssqlObjectExplorerNodeProvider extends ProviderBase implements sql
return true;
}
- private async startExpansion(session: Session, nodeInfo: sqlops.ExpandNodeInfo, isRefresh: boolean = false): Promise<void> {
+ private async startExpansion(session: SqlClusterSession, nodeInfo: sqlops.ExpandNodeInfo, isRefresh: boolean = false): Promise<void> {
let expandResult: sqlops.ObjectExplorerExpandInfo = {
- sessionId: session.uri,
+ sessionId: session.sessionId,
nodePath: nodeInfo.nodePath,
errorMessage: undefined,
nodes: []
};
try {
- let node = await session.root.findNodeByPath(nodeInfo.nodePath, true);
+ let node = await session.rootNode.findNodeByPath(nodeInfo.nodePath, true);
if (node) {
expandResult.errorMessage = node.getNodeInfo().errorMessage;
let children = await node.getChildren(true);
@@ -182,57 +152,55 @@ export class MssqlObjectExplorerNodeProvider extends ProviderBase implements sql
private async notifyNodeChangesAsync(node: TreeNode): Promise<void> {
try {
- let session = this.getSessionForNode(node);
+ let session = this.getSqlClusterSessionForNode(node);
if (!session) {
this.appContext.apiWrapper.showErrorMessage(localize('sessionNotFound', 'Session for node {0} does not exist', node.nodePathValue));
} else {
let nodeInfo = node.getNodeInfo();
let expandInfo: sqlops.ExpandNodeInfo = {
nodePath: nodeInfo.nodePath,
- sessionId: session.uri
+ sessionId: session.sessionId
};
await this.refreshNode(expandInfo);
}
} catch (err) {
- outputChannel.appendLine(localize('notifyError', 'Error notifying of node change: {0}', err));
+ mssqlOutputChannel.appendLine(localize('notifyError', 'Error notifying of node change: {0}', err));
}
}
- private getSessionForNode(node: TreeNode): Session {
- let rootNode: DataServicesNode = undefined;
- while (rootNode === undefined && node !== undefined) {
+ private getSqlClusterSessionForNode(node: TreeNode): SqlClusterSession {
+ let sqlClusterSession: SqlClusterSession = undefined;
+ while (node !== undefined) {
if (node instanceof DataServicesNode) {
- rootNode = node;
+ sqlClusterSession = node.session;
break;
} else {
node = node.parent;
}
}
- if (rootNode) {
- return rootNode.session;
- }
- // Not found
- return undefined;
+ return sqlClusterSession;
}
- async findNodeForContext<T extends TreeNode>(explorerContext: sqlops.ObjectExplorerContext): Promise<T> {
+ async findSqlClusterNodeByContext<T extends TreeNode>(context: ICommandObjectExplorerContext | sqlops.ObjectExplorerContext): Promise<T> {
let node: T = undefined;
- let session = await this.findSessionForConnection(explorerContext.connectionProfile);
+ let explorerContext = 'explorerContext' in context ? context.explorerContext : context;
+ let sqlConnProfile = explorerContext.connectionProfile;
+ let session = this.findSqlClusterSessionBySqlConnProfile(sqlConnProfile);
if (session) {
if (explorerContext.isConnectionNode) {
// Note: ideally fix so we verify T matches RootNode and go from there
- node = session.root;
+ node = session.rootNode;
} else {
// Find the node under the session
- node = await session.root.findNodeByPath(explorerContext.nodeInfo.nodePath, true);
+ node = await session.rootNode.findNodeByPath(explorerContext.nodeInfo.nodePath, true);
}
}
return node;
}
- private async findSessionForConnection(connectionProfile: sqlops.IConnectionProfile): Promise<Session> {
+ public findSqlClusterSessionBySqlConnProfile(connectionProfile: sqlops.IConnectionProfile): SqlClusterSession {
for (let session of this.sessionMap.values()) {
- if (session.connection && await session.connection.isMatch(connectionProfile)) {
+ if (session.isMatchedSqlConnection(connectionProfile)) {
return session;
}
}
@@ -240,50 +208,58 @@ export class MssqlObjectExplorerNodeProvider extends ProviderBase implements sql
}
}
-export class Session {
- private _root: RootNode;
- constructor(private _connection: Connection, private sessionId?: string) {
+export class SqlClusterSession {
+ private _rootNode: SqlClusterRootNode;
+
+ constructor(
+ private _sqlClusterConnection: SqlClusterConnection,
+ private _sqlSession: sqlops.ObjectExplorerSession,
+ private _sqlConnectionProfile: sqlops.IConnectionProfile,
+ private _appContext: AppContext,
+ private _changeHandler: ITreeChangeHandler
+ ) {
+ this._rootNode = new SqlClusterRootNode(this,
+ new TreeDataContext(this._appContext.extensionContext, this._changeHandler),
+ this._sqlSession.rootNode.nodePath);
}
- public get uri(): string {
- return this.sessionId || this._connection.uri;
- }
+ public get sqlClusterConnection(): SqlClusterConnection { return this._sqlClusterConnection; }
+ public get sqlSession(): sqlops.ObjectExplorerSession { return this._sqlSession; }
+ public get sqlConnectionProfile(): sqlops.IConnectionProfile { return this._sqlConnectionProfile; }
+ public get sessionId(): string { return this._sqlSession.sessionId; }
+ public get rootNode(): SqlClusterRootNode { return this._rootNode; }
- public get connection(): Connection {
- return this._connection;
- }
-
- public set root(node: RootNode) {
- this._root = node;
- }
-
- public get root(): RootNode {
- return this._root;
+ public isMatchedSqlConnection(sqlConnProfile: sqlops.IConnectionProfile): boolean {
+ return this._sqlConnectionProfile.id === sqlConnProfile.id;
}
}
-class RootNode extends TreeNode {
- private children: TreeNode[];
- constructor(private _session: Session, private context: TreeDataContext, private nodePath: string) {
+class SqlClusterRootNode extends TreeNode {
+ private _children: TreeNode[];
+ constructor(
+ private _session: SqlClusterSession,
+ private _treeDataContext: TreeDataContext,
+ private _nodePathValue: string
+ ) {
super();
}
- public get session(): Session {
+ public get session(): SqlClusterSession {
return this._session;
}
public get nodePathValue(): string {
- return this.nodePath;
+ return this._nodePathValue;
}
public getChildren(refreshChildren: boolean): TreeNode[] | Promise<TreeNode[]> {
- if (refreshChildren || !this.children) {
- this.children = [];
- let dataServicesNode = new DataServicesNode(this._session, this.context, this.nodePath);
+ if (refreshChildren || !this._children) {
+ this._children = [];
+ let dataServicesNode = new DataServicesNode(this._session, this._treeDataContext, this._nodePathValue);
dataServicesNode.parent = this;
- this.children.push(dataServicesNode);
+ this._children.push(dataServicesNode);
}
- return this.children;
+ return this._children;
}
getTreeItem(): vscode.TreeItem | Promise<vscode.TreeItem> {
@@ -307,31 +283,28 @@ class RootNode extends TreeNode {
}
class DataServicesNode extends TreeNode {
- private children: TreeNode[];
- constructor(private _session: Session, private context: TreeDataContext, private nodePath: string) {
+ private _children: TreeNode[];
+ constructor(private _session: SqlClusterSession, private _context: TreeDataContext, private _nodePath: string) {
super();
}
- public get session(): Session {
+ public get session(): SqlClusterSession {
return this._session;
}
public get nodePathValue(): string {
- return this.nodePath;
+ return this._nodePath;
}
public getChildren(refreshChildren: boolean): TreeNode[] | Promise<TreeNode[]> {
- if (refreshChildren || !this.children) {
- this.children = [];
- let hdfsNode = new ConnectionNode(this.context, localize('hdfsFolder', 'HDFS'), this.createHdfsFileSource());
+ if (refreshChildren || !this._children) {
+ this._children = [];
+ let fileSource: IFileSource = this.session.sqlClusterConnection.createHdfsFileSource();
+ let hdfsNode = new ConnectionNode(this._context, localize('hdfsFolder', 'HDFS'), fileSource);
hdfsNode.parent = this;
- this.children.push(hdfsNode);
+ this._children.push(hdfsNode);
}
- return this.children;
- }
-
- private createHdfsFileSource(): IFileSource {
- return this.session.connection.createHdfsFileSource();
+ return this._children;
}
getTreeItem(): vscode.TreeItem | Promise<vscode.TreeItem> {
diff --git a/extensions/mssql/src/prompts/password.ts b/extensions/mssql/src/prompts/password.ts
index 8a19e8c643..ef6de55886 100644
--- a/extensions/mssql/src/prompts/password.ts
+++ b/extensions/mssql/src/prompts/password.ts
@@ -7,9 +7,9 @@ import InputPrompt from './input';
export default class PasswordPrompt extends InputPrompt {
- constructor(question: any, ignoreFocusOut?: boolean) {
- super(question, ignoreFocusOut);
+ constructor(question: any, ignoreFocusOut?: boolean) {
+ super(question, ignoreFocusOut);
- this._options.password = true;
- }
+ this._options.password = true;
+ }
}
diff --git a/extensions/mssql/src/prompts/question.ts b/extensions/mssql/src/prompts/question.ts
index 3b8f600589..e570d89568 100644
--- a/extensions/mssql/src/prompts/question.ts
+++ b/extensions/mssql/src/prompts/question.ts
@@ -4,65 +4,65 @@
import vscode = require('vscode');
export class QuestionTypes {
- public static get input(): string { return 'input'; }
- public static get password(): string { return 'password'; }
- public static get list(): string { return 'list'; }
- public static get confirm(): string { return 'confirm'; }
- public static get checkbox(): string { return 'checkbox'; }
- public static get expand(): string { return 'expand'; }
+ public static get input(): string { return 'input'; }
+ public static get password(): string { return 'password'; }
+ public static get list(): string { return 'list'; }
+ public static get confirm(): string { return 'confirm'; }
+ public static get checkbox(): string { return 'checkbox'; }
+ public static get expand(): string { return 'expand'; }
}
// Question interface to clarify how to use the prompt feature
// based on Bower Question format: https://github.com/bower/bower/blob/89069784bb46bfd6639b4a75e98a0d7399a8c2cb/packages/bower-logger/README.md
export interface IQuestion {
- // Type of question (see QuestionTypes)
- type: string;
- // Name of the question for disambiguation
- name: string;
- // Message to display to the user
- message: string;
- // Optional placeHolder to give more detailed information to the user
- placeHolder?: any;
- // Optional default value - this will be used instead of placeHolder
- default?: any;
- // optional set of choices to be used. Can be QuickPickItems or a simple name-value pair
- choices?: Array<vscode.QuickPickItem | INameValueChoice>;
- // Optional validation function that returns an error string if validation fails
- validate?: (value: any) => string;
- // Optional pre-prompt function. Takes in set of answers so far, and returns true if prompt should occur
- shouldPrompt?: (answers: {[id: string]: any}) => boolean;
- // Optional action to take on the question being answered
- onAnswered?: (value: any) => void;
- // Optional set of options to support matching choices.
- matchOptions?: vscode.QuickPickOptions;
+ // Type of question (see QuestionTypes)
+ type: string;
+ // Name of the question for disambiguation
+ name: string;
+ // Message to display to the user
+ message: string;
+ // Optional placeHolder to give more detailed information to the user
+ placeHolder?: any;
+ // Optional default value - this will be used instead of placeHolder
+ default?: any;
+ // optional set of choices to be used. Can be QuickPickItems or a simple name-value pair
+ choices?: Array<vscode.QuickPickItem | INameValueChoice>;
+ // Optional validation function that returns an error string if validation fails
+ validate?: (value: any) => string;
+ // Optional pre-prompt function. Takes in set of answers so far, and returns true if prompt should occur
+ shouldPrompt?: (answers: { [id: string]: any }) => boolean;
+ // Optional action to take on the question being answered
+ onAnswered?: (value: any) => void;
+ // Optional set of options to support matching choices.
+ matchOptions?: vscode.QuickPickOptions;
}
// Pair used to display simple choices to the user
export interface INameValueChoice {
- name: string;
- value: any;
+ name: string;
+ value: any;
}
// Generic object that can be used to define a set of questions and handle the result
export interface IQuestionHandler {
- // Set of questions to be answered
- questions: IQuestion[];
- // Optional callback, since questions may handle themselves
- callback?: IPromptCallback;
+ // Set of questions to be answered
+ questions: IQuestion[];
+ // Optional callback, since questions may handle themselves
+ callback?: IPromptCallback;
}
export interface IPrompter {
- promptSingle(question: IQuestion, ignoreFocusOut?: boolean): Promise<any>;
- /**
- * Prompts for multiple questions
- *
- * @returns {[questionId: string]: T} Map of question IDs to results, or undefined if
- * the user canceled the question session
- */
- prompt(questions: IQuestion[], ignoreFocusOut?: boolean): Promise<{[questionId: string]: any}>;
- promptCallback(questions: IQuestion[], callback: IPromptCallback): void;
+ promptSingle(question: IQuestion, ignoreFocusOut?: boolean): Promise<any>;
+ /**
+ * Prompts for multiple questions
+ *
+ * @returns {[questionId: string]: T} Map of question IDs to results, or undefined if
+ * the user canceled the question session
+ */
+ prompt(questions: IQuestion[], ignoreFocusOut?: boolean): Promise<{ [questionId: string]: any }>;
+ promptCallback(questions: IQuestion[], callback: IPromptCallback): void;
}
export interface IPromptCallback {
- (answers: {[id: string]: any}): void;
+ (answers: { [id: string]: any }): void;
}
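
Note: the prompt feature above is consumed roughly as follows. This is a minimal sketch, assuming some IPrompter implementation is at hand; the `prompter` variable, question names, and messages are illustrative only.

    // Sketch: `prompter` is assumed to be any IPrompter implementation.
    let questions: IQuestion[] = [
        {
            type: QuestionTypes.input,
            name: 'jobName',
            message: 'Enter a job name',
            validate: (value: any) => value ? undefined : 'Job name is required'
        },
        {
            type: QuestionTypes.password,
            name: 'password',
            message: 'Enter your password',
            // Skip the password prompt when no job name was provided
            shouldPrompt: (answers: { [id: string]: any }) => !!answers['jobName']
        }
    ];

    prompter.prompt(questions).then(answers => {
        if (answers) {
            console.log(`Submitting ${answers['jobName']}`);
        }
    });
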
diff --git a/extensions/mssql/src/resourceProvider/resourceProvider.ts b/extensions/mssql/src/resourceProvider/resourceProvider.ts
index 5cb23770a5..99d723589f 100644
--- a/extensions/mssql/src/resourceProvider/resourceProvider.ts
+++ b/extensions/mssql/src/resourceProvider/resourceProvider.ts
@@ -4,15 +4,12 @@
*--------------------------------------------------------------------------------------------*/
'use strict';
-import * as path from 'path';
+import * as sqlops from 'sqlops';
import { IConfig, ServerProvider } from 'service-downloader';
import { SqlOpsDataClient, SqlOpsFeature, ClientOptions } from 'dataprotocol-client';
import { ServerCapabilities, ClientCapabilities, RPCMessageType, ServerOptions, TransportKind } from 'vscode-languageclient';
import * as UUID from 'vscode-languageclient/lib/utils/uuid';
-
-import * as sqlops from 'sqlops';
import { Disposable } from 'vscode';
-
import { CreateFirewallRuleRequest, HandleFirewallRuleRequest, CreateFirewallRuleParams, HandleFirewallRuleParams } from './contracts';
import * as Constants from './constants';
import * as Utils from '../utils';
diff --git a/extensions/mssql/src/sparkFeature/dialog/dialogCommands.ts b/extensions/mssql/src/sparkFeature/dialog/dialogCommands.ts
new file mode 100644
index 0000000000..e48f9b5ee8
--- /dev/null
+++ b/extensions/mssql/src/sparkFeature/dialog/dialogCommands.ts
@@ -0,0 +1,140 @@
+/*---------------------------------------------------------------------------------------------
+ * Copyright (c) Microsoft Corporation. All rights reserved.
+ * Licensed under the Source EULA. See License.txt in the project root for license information.
+ *--------------------------------------------------------------------------------------------*/
+
+'use strict';
+
+import * as sqlops from 'sqlops';
+import * as nls from 'vscode-nls';
+import * as vscode from 'vscode';
+const localize = nls.loadMessageBundle();
+
+import { ICommandViewContext, Command, ICommandObjectExplorerContext, ICommandUnknownContext } from '../../objectExplorerNodeProvider/command';
+import { SparkJobSubmissionDialog } from './sparkJobSubmission/sparkJobSubmissionDialog';
+import { AppContext } from '../../appContext';
+import { getErrorMessage } from '../../utils';
+import * as constants from '../../constants';
+import { HdfsFileSourceNode } from '../../objectExplorerNodeProvider/hdfsProvider';
+import { getNode } from '../../objectExplorerNodeProvider/hdfsCommands';
+import * as LocalizedConstants from '../../localizedConstants';
+import * as SqlClusterLookUp from '../../sqlClusterLookUp';
+import { SqlClusterConnection } from '../../objectExplorerNodeProvider/connection';
+
+export class OpenSparkJobSubmissionDialogCommand extends Command {
+ constructor(appContext: AppContext, private outputChannel: vscode.OutputChannel) {
+ super(constants.mssqlClusterLivySubmitSparkJobCommand, appContext);
+ }
+
+ protected async preExecute(context: ICommandUnknownContext | ICommandObjectExplorerContext, args: object = {}): Promise<void> {
+ return this.execute(context, args);
+ }
+
+ async execute(context: ICommandUnknownContext | ICommandObjectExplorerContext, ...args: any[]): Promise<void> {
+ try {
+ let sqlClusterConnection: SqlClusterConnection = undefined;
+ if (context.type === constants.ObjectExplorerService) {
+ sqlClusterConnection = SqlClusterLookUp.findSqlClusterConnection(context, this.appContext);
+ }
+ if (!sqlClusterConnection) {
+ sqlClusterConnection = await this.selectConnection();
+ }
+
+ let dialog = new SparkJobSubmissionDialog(sqlClusterConnection, this.appContext, this.outputChannel);
+ await dialog.openDialog();
+ } catch (error) {
+ this.appContext.apiWrapper.showErrorMessage(getErrorMessage(error));
+ }
+ }
+
+ private async selectConnection(): Promise<SqlClusterConnection> {
+ let connectionList: sqlops.connection.Connection[] = await this.apiWrapper.getActiveConnections();
+ let displayList: string[] = new Array();
+ let connectionMap: Map<string, sqlops.connection.Connection> = new Map();
+ if (connectionList && connectionList.length > 0) {
+ connectionList.forEach(conn => {
+ if (conn.providerName === constants.sqlProviderName) {
+ displayList.push(conn.options.host);
+ connectionMap.set(conn.options.host, conn);
+ }
+ });
+ }
+
+ let selectedHost: string = await vscode.window.showQuickPick(displayList, {
+ placeHolder:
+ localize('sparkJobSubmission_PleaseSelectSqlWithCluster',
+ 'Please select a SQL Server with a Big Data Cluster.')
+ });
+ let errorMsg = localize('sparkJobSubmission_NoSqlSelected', 'No SQL Server is selected.');
+ if (!selectedHost) { throw new Error(errorMsg); }
+
+ let sqlConnection = connectionMap.get(selectedHost);
+ if (!sqlConnection) { throw new Error(errorMsg); }
+
+ let sqlClusterConnection = await SqlClusterLookUp.getSqlClusterConnection(sqlConnection);
+ if (!sqlClusterConnection) {
+ throw new Error(LocalizedConstants.sparkJobSubmissionNoSqlBigDataClusterFound);
+ }
+
+ return new SqlClusterConnection(sqlClusterConnection);
+ }
+}
+
+// Open the submission dialog for a specific file path.
+export class OpenSparkJobSubmissionDialogFromFileCommand extends Command {
+ constructor(appContext: AppContext, private outputChannel: vscode.OutputChannel) {
+ super(constants.mssqlClusterLivySubmitSparkJobFromFileCommand, appContext);
+ }
+
+ protected async preExecute(context: ICommandViewContext | ICommandObjectExplorerContext, args: object = {}): Promise<void> {
+ return this.execute(context, args);
+ }
+
+ async execute(context: ICommandViewContext | ICommandObjectExplorerContext, ...args: any[]): Promise<void> {
+ let path: string = undefined;
+ try {
+ let node = await getNode(context, this.appContext);
+ if (node && node.hdfsPath) {
+ path = node.hdfsPath;
+ } else {
+ this.apiWrapper.showErrorMessage(LocalizedConstants.msgMissingNodeContext);
+ return;
+ }
+ } catch (err) {
+ this.apiWrapper.showErrorMessage(localize('sparkJobSubmission_GetFilePathFromSelectedNodeFailed', 'Error getting the file path: {0}', err));
+ return;
+ }
+
+ try {
+ let sqlClusterConnection: SqlClusterConnection = undefined;
+ if (context.type === constants.ObjectExplorerService) {
+ sqlClusterConnection = await SqlClusterLookUp.findSqlClusterConnection(context, this.appContext);
+ }
+ if (!sqlClusterConnection) {
+ throw new Error(LocalizedConstants.sparkJobSubmissionNoSqlBigDataClusterFound);
+ }
+ let dialog = new SparkJobSubmissionDialog(sqlClusterConnection, this.appContext, this.outputChannel);
+ await dialog.openDialog(path);
+ } catch (error) {
+ this.appContext.apiWrapper.showErrorMessage(getErrorMessage(error));
+ }
+ }
+}
+
+export class OpenSparkJobSubmissionDialogTask {
+ constructor(private appContext: AppContext, private outputChannel: vscode.OutputChannel) {
+ }
+
+ async execute(profile: sqlops.IConnectionProfile, ...args: any[]): Promise<void> {
+ try {
+ let sqlClusterConnection = SqlClusterLookUp.findSqlClusterConnection(profile, this.appContext);
+ if (!sqlClusterConnection) {
+ throw new Error(LocalizedConstants.sparkJobSubmissionNoSqlBigDataClusterFound);
+ }
+ let dialog = new SparkJobSubmissionDialog(sqlClusterConnection, this.appContext, this.outputChannel);
+ await dialog.openDialog();
+ } catch (error) {
+ this.appContext.apiWrapper.showErrorMessage(getErrorMessage(error));
+ }
+ }
+}
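
Note: this diff does not show where these three entry points are registered. The sketch below is one plausible activation-time wiring; the output channel name and the task id placeholder are assumptions, and the Command subclasses may already self-register through appContext.

    // Hypothetical activation-time wiring (illustrative only).
    const outputChannel = vscode.window.createOutputChannel('SparkJobSubmission');
    new OpenSparkJobSubmissionDialogCommand(appContext, outputChannel);
    new OpenSparkJobSubmissionDialogFromFileCommand(appContext, outputChannel);

    // Dashboard tasks receive a connection profile rather than an OE context:
    sqlops.tasks.registerTask('<contributed task id>', (profile: sqlops.IConnectionProfile) =>
        new OpenSparkJobSubmissionDialogTask(appContext, outputChannel).execute(profile));
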
diff --git a/extensions/mssql/src/sparkFeature/dialog/sparkJobSubmission/sparkAdvancedTab.ts b/extensions/mssql/src/sparkFeature/dialog/sparkJobSubmission/sparkAdvancedTab.ts
new file mode 100644
index 0000000000..9884f9e030
--- /dev/null
+++ b/extensions/mssql/src/sparkFeature/dialog/sparkJobSubmission/sparkAdvancedTab.ts
@@ -0,0 +1,81 @@
+/*---------------------------------------------------------------------------------------------
+ * Copyright (c) Microsoft Corporation. All rights reserved.
+ * Licensed under the Source EULA. See License.txt in the project root for license information.
+ *--------------------------------------------------------------------------------------------*/
+
+'use strict';
+
+import * as sqlops from 'sqlops';
+import * as nls from 'vscode-nls';
+const localize = nls.loadMessageBundle();
+
+import { SparkJobSubmissionModel } from './sparkJobSubmissionModel';
+import { AppContext } from '../../../appContext';
+import { ApiWrapper } from '../../../apiWrapper';
+
+export class SparkAdvancedTab {
+ private _tab: sqlops.window.modelviewdialog.DialogTab;
+ public get tab(): sqlops.window.modelviewdialog.DialogTab { return this._tab; }
+
+ private _referenceFilesInputBox: sqlops.InputBoxComponent;
+ private _referenceJARFilesInputBox: sqlops.InputBoxComponent;
+ private _referencePyFilesInputBox: sqlops.InputBoxComponent;
+
+ private get apiWrapper(): ApiWrapper {
+ return this.appContext.apiWrapper;
+ }
+
+ constructor(private appContext: AppContext) {
+ this._tab = this.apiWrapper.createTab(localize('sparkJobSubmission_AdvancedTabName', 'ADVANCED'));
+
+ this._tab.registerContent(async (modelView) => {
+ let builder = modelView.modelBuilder;
+ let parentLayout: sqlops.FormItemLayout = {
+ horizontal: false,
+ componentWidth: '400px'
+ };
+
+ let formContainer = builder.formContainer();
+
+ this._referenceJARFilesInputBox = builder.inputBox().component();
+ formContainer.addFormItem({
+ component: this._referenceJARFilesInputBox,
+ title: localize('sparkJobSubmission_ReferenceJarList', 'Reference Jars')
+ },
+ Object.assign(
+ {
+ info: localize('sparkJobSubmission_ReferenceJarListToolTip',
+ 'Jars to be placed in executor working directory. The Jar path needs to be an HDFS Path. Multiple paths should be split by semicolon (;)')
+ },
+ parentLayout));
+
+ this._referencePyFilesInputBox = builder.inputBox().component();
+ formContainer.addFormItem({
+ component: this._referencePyFilesInputBox,
+ title: localize('sparkJobSubmission_ReferencePyList', 'Reference py Files')
+ },
+ Object.assign(
+ {
+ info: localize('sparkJobSubmission_ReferencePyListTooltip',
'Py Files to be placed in executor working directory. The file path needs to be an HDFS Path. Multiple paths should be split by semicolon (;)')
+ },
+ parentLayout));
+
+ this._referenceFilesInputBox = builder.inputBox().component();
+ formContainer.addFormItem({
+ component: this._referenceFilesInputBox,
+ title: localize('sparkJobSubmission_ReferenceFilesList', 'Reference Files')
+ },
+ Object.assign({
+ info: localize('sparkJobSubmission_ReferenceFilesListTooltip',
'Files to be placed in executor working directory. The file path needs to be an HDFS Path. Multiple paths should be split by semicolon (;)')
+ }, parentLayout));
+
+ await modelView.initializeModel(formContainer.component());
+ });
+ }
+
+ public getInputValues(): string[] {
+ return [this._referenceJARFilesInputBox.value, this._referencePyFilesInputBox.value, this._referenceFilesInputBox.value];
+ }
+}
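
Note: the three advanced inputs are free-form, semicolon-separated lists (see the tooltips above). A consumer of getInputValues() might normalize them as below before building the submission input; splitReferenceList is an illustrative helper, not part of the extension.

    // Illustrative helper: trim entries and drop empties from a ';'-separated list.
    function splitReferenceList(raw: string): string[] {
        return (raw || '').split(';')
            .map(p => p.trim())
            .filter(p => p.length > 0);
    }

    let [jars, pyFiles, files] = advancedTab.getInputValues();
    let jarList = splitReferenceList(jars); // e.g. ['/jar/a.jar', '/jar/b.jar']
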
diff --git a/extensions/mssql/src/sparkFeature/dialog/sparkJobSubmission/sparkConfigurationTab.ts b/extensions/mssql/src/sparkFeature/dialog/sparkJobSubmission/sparkConfigurationTab.ts
new file mode 100644
index 0000000000..b5f94d62cf
--- /dev/null
+++ b/extensions/mssql/src/sparkFeature/dialog/sparkJobSubmission/sparkConfigurationTab.ts
@@ -0,0 +1,280 @@
+/*---------------------------------------------------------------------------------------------
+ * Copyright (c) Microsoft Corporation. All rights reserved.
+ * Licensed under the Source EULA. See License.txt in the project root for license information.
+ *--------------------------------------------------------------------------------------------*/
+
+'use strict';
+
+import * as sqlops from 'sqlops';
+import * as nls from 'vscode-nls';
+import * as fspath from 'path';
+import * as fs from 'fs';
+import * as vscode from 'vscode';
+import * as utils from '../../../utils';
+import * as LocalizedConstants from '../../../localizedConstants';
+import * as constants from '../../../constants';
+
+import { AppContext } from '../../../appContext';
+import { ApiWrapper } from '../../../apiWrapper';
+import { SparkJobSubmissionModel } from './sparkJobSubmissionModel';
+import { SparkFileSource } from './sparkJobSubmissionService';
+
+const localize = nls.loadMessageBundle();
+
+export class SparkConfigurationTab {
+ private _tab: sqlops.window.modelviewdialog.DialogTab;
+ public get tab(): sqlops.window.modelviewdialog.DialogTab { return this._tab; }
+
+ private _jobNameInputBox: sqlops.InputBoxComponent;
+ private _sparkContextLabel: sqlops.TextComponent;
+ private _fileSourceDropDown: sqlops.DropDownComponent;
+ private _sparkSourceFileInputBox: sqlops.InputBoxComponent;
+ private _filePickerButton: sqlops.ButtonComponent;
+ private _sourceFlexContainer: sqlops.FlexContainer;
+ private _sourceFlexContainerWithHint: sqlops.FlexContainer;
+ private _localUploadDestinationLabel: sqlops.TextComponent;
+ private _mainClassInputBox: sqlops.InputBoxComponent;
+ private _argumentsInputBox: sqlops.InputBoxComponent;
+
+ private get apiWrapper(): ApiWrapper {
+ return this.appContext.apiWrapper;
+ }
+
+ // If a path is specified, the default source for this tab is an HDFS file; otherwise it is a local file.
+ constructor(private _dataModel: SparkJobSubmissionModel, private appContext: AppContext, private _path?: string) {
+ this._tab = this.apiWrapper.createTab(localize('sparkJobSubmission_GeneralTabName', 'GENERAL'));
+
+ this._tab.registerContent(async (modelView) => {
+ let builder = modelView.modelBuilder;
+ let parentLayout: sqlops.FormItemLayout = {
+ horizontal: false,
+ componentWidth: '400px'
+ };
+
+ let formContainer = builder.formContainer();
+
+ this._jobNameInputBox = builder.inputBox().withProperties({
+ placeHolder: localize('sparkJobSubmission_JobNamePlaceHolder', 'Enter a name ...'),
+ value: (this._path) ? fspath.basename(this._path) : ''
+ }).component();
+
+ formContainer.addFormItem({
+ component: this._jobNameInputBox,
+ title: localize('sparkJobSubmission_JobName', 'Job Name'),
+ required: true
+ }, parentLayout);
+
+ this._sparkContextLabel = builder.text().withProperties({
+ value: this._dataModel.getSparkClusterUrl()
+ }).component();
+ formContainer.addFormItem({
+ component: this._sparkContextLabel,
+ title: localize('sparkJobSubmission_SparkCluster', 'Spark Cluster')
+ }, parentLayout);
+
+ this._fileSourceDropDown = builder.dropDown().withProperties({
+ values: [SparkFileSource.Local.toString(), SparkFileSource.HDFS.toString()],
+ value: (this._path) ? SparkFileSource.HDFS.toString() : SparkFileSource.Local.toString()
+ }).component();
+
+ this._fileSourceDropDown.onValueChanged(selection => {
+ let isLocal = selection.selected === SparkFileSource.Local.toString();
+ // Disable the file picker button when the source is HDFS.
+ if (this._filePickerButton) {
+ this._filePickerButton.updateProperties({
+ enabled: isLocal,
+ required: isLocal
+ });
+ }
+
+ // Clear the path when switching sources.
+ if (this._sparkSourceFileInputBox) {
+ this._sparkSourceFileInputBox.value = '';
+ }
+
+ if (this._localUploadDestinationLabel) {
+ if (isLocal) {
+ this._localUploadDestinationLabel.value = LocalizedConstants.sparkLocalFileDestinationHint;
+ } else {
+ this._localUploadDestinationLabel.value = '';
+ }
+ }
+ });
+
+ this._sparkSourceFileInputBox = builder.inputBox().withProperties({
+ required: true,
+ placeHolder: localize('sparkJobSubmission_FilePathPlaceHolder', 'Path to a .jar or .py file'),
+ value: (this._path) ? this._path : ''
+ }).component();
+ this._sparkSourceFileInputBox.onTextChanged(text => {
+ if (this._fileSourceDropDown.value === SparkFileSource.Local.toString()) {
+ this._dataModel.updateModelByLocalPath(text);
+ if (this._localUploadDestinationLabel) {
+ if (text) {
+ this._localUploadDestinationLabel.value = localize('sparkJobSubmission_LocalFileDestinationHintWithPath',
+ 'The selected local file will be uploaded to HDFS: {0}', this._dataModel.hdfsSubmitFilePath);
+ } else {
+ this._localUploadDestinationLabel.value = LocalizedConstants.sparkLocalFileDestinationHint;
+ }
+ }
+ } else {
+ this._dataModel.hdfsSubmitFilePath = text;
+ }
+
+ // Main Class is enabled and required only for jar files.
+ let isJarFile = this._dataModel.isJarFile();
+ this._mainClassInputBox.updateProperties({ enabled: isJarFile, required: isJarFile });
+ if (!isJarFile) {
+ // Clear main class for py file.
+ this._mainClassInputBox.value = '';
+ }
+ });
+
+ this._filePickerButton = builder.button().withProperties({
+ required: (this._path) ? false : true,
+ enabled: (this._path) ? false : true,
+ label: '•••',
+ width: constants.mssqlClusterSparkJobFileSelectorButtonWidth,
+ height: constants.mssqlClusterSparkJobFileSelectorButtonHeight
+ }).component();
+ this._filePickerButton.onDidClick(() => this.onSelectFile());
+
+ this._sourceFlexContainer = builder.flexContainer().component();
+ this._sourceFlexContainer.addItem(this._fileSourceDropDown, { flex: '0 0 auto', CSSStyles: { 'minWidth': '75px', 'marginBottom': '5px', 'paddingRight': '3px' } });
+ this._sourceFlexContainer.addItem(this._sparkSourceFileInputBox, { flex: '1 1 auto', CSSStyles: { 'marginBottom': '5px', 'paddingRight': '3px' } });
+ // Do not add margin for file picker button as the label forces it to have 5px margin
+ this._sourceFlexContainer.addItem(this._filePickerButton, { flex: '0 0 auto' });
+ this._sourceFlexContainer.setLayout({
+ flexFlow: 'row',
+ height: '100%',
+ justifyContent: 'center',
+ alignItems: 'center',
+ alignContent: 'stretch'
+ });
+
+ this._localUploadDestinationLabel = builder.text().withProperties({
+ value: (this._path) ? '' : LocalizedConstants.sparkLocalFileDestinationHint
+ }).component();
+ this._sourceFlexContainerWithHint = builder.flexContainer().component();
+ this._sourceFlexContainerWithHint.addItem(this._sourceFlexContainer, { flex: '0 0 auto' });
+ this._sourceFlexContainerWithHint.addItem(this._localUploadDestinationLabel, { flex: '1 1 auto' });
+ this._sourceFlexContainerWithHint.setLayout({
+ flexFlow: 'column',
+ width: '100%',
+ justifyContent: 'center',
+ alignItems: 'stretch',
+ alignContent: 'stretch'
+ });
+
+ formContainer.addFormItem({
+ component: this._sourceFlexContainerWithHint,
+ title: localize('sparkJobSubmission_MainFilePath', 'JAR/py File'),
+ required: true
+ }, parentLayout);
+
+ this._mainClassInputBox = builder.inputBox().component();
+ formContainer.addFormItem({
+ component: this._mainClassInputBox,
+ title: localize('sparkJobSubmission_MainClass', 'Main Class'),
+ required: true
+ }, parentLayout);
+
+ this._argumentsInputBox = builder.inputBox().component();
+ formContainer.addFormItem({
+ component: this._argumentsInputBox,
+ title: localize('sparkJobSubmission_Arguments', 'Arguments')
+ },
+ Object.assign(
+ { info: localize('sparkJobSubmission_ArgumentsTooltip', 'Command line arguments used in your main class, multiple arguments should be split by space.') },
+ parentLayout));
+
+ await modelView.initializeModel(formContainer.component());
+ });
+ }
+
+ public async validate(): Promise<boolean> {
+ if (!this._jobNameInputBox.value) {
+ this._dataModel.showDialogError(localize('sparkJobSubmission_NotSpecifyJobName', 'Property Job Name is not specified.'));
+ return false;
+ }
+
+ if (this._fileSourceDropDown.value === SparkFileSource.Local.toString()) {
+ if (this._sparkSourceFileInputBox.value) {
+ this._dataModel.isMainSourceFromLocal = true;
+ this._dataModel.updateModelByLocalPath(this._sparkSourceFileInputBox.value);
+ } else {
+ this._dataModel.showDialogError(localize('sparkJobSubmission_NotSpecifyJARPYPath', 'Property JAR/py File is not specified.'));
+ return false;
+ }
+ } else {
+ if (this._sparkSourceFileInputBox.value) {
+ this._dataModel.isMainSourceFromLocal = false;
+ this._dataModel.hdfsSubmitFilePath = this._sparkSourceFileInputBox.value;
+ } else {
+ this._dataModel.showDialogError(localize('sparkJobSubmission_NotSpecifyJARPYPath', 'Property JAR/py File is not specified.'));
+ return false;
+ }
+ }
+
+ if (this._dataModel.isJarFile() && !this._mainClassInputBox.value) {
+ this._dataModel.showDialogError(localize('sparkJobSubmission_NotSpecifyMainClass', 'Property Main Class is not specified.'));
+ return false;
+ }
+
+ // 1. For a local file source, check that the file exists.
+ if (this._dataModel.isMainSourceFromLocal) {
+ if (!fs.existsSync(this._dataModel.localFileSourcePath)) {
+ this._dataModel.showDialogError(LocalizedConstants.sparkJobSubmissionLocalFileNotExisted(this._dataModel.localFileSourcePath));
+ return false;
+ }
+ } else {
+ // 2. For an HDFS source, check that the file exists in the cluster.
+ try {
+ let isFileExisted = await this._dataModel.isClusterFileExisted(this._dataModel.hdfsSubmitFilePath);
+ if (!isFileExisted) {
+ this._dataModel.showDialogError(localize('sparkJobSubmission_HDFSFileNotExistedWithPath', '{0} does not exist in the cluster or an exception was thrown.', this._dataModel.hdfsSubmitFilePath));
+ return false;
+ }
+ } catch (error) {
+ this._dataModel.showDialogError(localize('sparkJobSubmission_HDFSFileNotExisted', 'The specified HDFS file does not exist.'));
+ return false;
+ }
+ }
+
+ return true;
+ }
+
+ private async onSelectFile(): Promise<void> {
+ let filePath = await this.pickFile();
+ if (filePath) {
+ this._sparkSourceFileInputBox.value = filePath;
+ }
+ }
+
+ public getInputValues(): string[] {
+ return [this._jobNameInputBox.value, this._mainClassInputBox.value, this._argumentsInputBox.value];
+ }
+
+ public async pickFile(): Promise<string> {
+ try {
+ let filter = { 'JAR/py files': ['jar', 'py'] };
+ let options: vscode.OpenDialogOptions = {
+ canSelectFiles: true,
+ canSelectFolders: false,
+ canSelectMany: false,
+ openLabel: localize('sparkSelectLocalFile', 'Select'),
+ filters: filter
+ };
+
+ let fileUris: vscode.Uri[] = await this.apiWrapper.showOpenDialog(options);
+ if (fileUris && fileUris[0]) {
+ return fileUris[0].fsPath;
+ }
+
+ return undefined;
+ } catch (err) {
+ this.apiWrapper.showErrorMessage(localize('sparkJobSubmission_SelectFileError', 'Error locating the file: {0}', utils.getErrorMessage(err)));
+ return undefined;
+ }
+ }
+}
diff --git a/extensions/mssql/src/sparkFeature/dialog/sparkJobSubmission/sparkJobSubmissionDialog.ts b/extensions/mssql/src/sparkFeature/dialog/sparkJobSubmission/sparkJobSubmissionDialog.ts
new file mode 100644
index 0000000000..144c1ef9a7
--- /dev/null
+++ b/extensions/mssql/src/sparkFeature/dialog/sparkJobSubmission/sparkJobSubmissionDialog.ts
@@ -0,0 +1,168 @@
+/*---------------------------------------------------------------------------------------------
+ * Copyright (c) Microsoft Corporation. All rights reserved.
+ * Licensed under the Source EULA. See License.txt in the project root for license information.
+ *--------------------------------------------------------------------------------------------*/
+
+'use strict';
+
+import * as sqlops from 'sqlops';
+import * as vscode from 'vscode';
+import * as nls from 'vscode-nls';
+import * as utils from '../../../utils';
+import * as LocalizedConstants from '../../../localizedConstants';
+
+import { AppContext } from '../../../appContext';
+import { ApiWrapper } from '../../../apiWrapper';
+import { SparkJobSubmissionModel } from './sparkJobSubmissionModel';
+import { SparkConfigurationTab } from './sparkConfigurationTab';
+import { SparkJobSubmissionInput } from './sparkJobSubmissionService';
+import { SparkAdvancedTab } from './sparkAdvancedTab';
+import { SqlClusterConnection } from '../../../objectExplorerNodeProvider/connection';
+
+const localize = nls.loadMessageBundle();
+
+export class SparkJobSubmissionDialog {
+ private _dialog: sqlops.window.modelviewdialog.Dialog;
+ private _dataModel: SparkJobSubmissionModel;
+ private _sparkConfigTab: SparkConfigurationTab;
+ private _sparkAdvancedTab: SparkAdvancedTab;
+ private get apiWrapper(): ApiWrapper {
+ return this.appContext.apiWrapper;
+ }
+
+ constructor(
+ private sqlClusterConnection: SqlClusterConnection,
+ private appContext: AppContext,
+ private outputChannel: vscode.OutputChannel) {
+ if (!this.sqlClusterConnection || !this.appContext || !this.outputChannel) {
+ throw new Error(localize('sparkJobSubmission_SparkJobSubmissionDialogInitializeError',
+ 'Parameters for SparkJobSubmissionDialog are invalid'));
+ }
+ }
+
+ public async openDialog(path?: string): Promise<void> {
+ this._dialog = this.apiWrapper.createDialog(localize('sparkJobSubmission_DialogTitleNewJob', 'New Job'));
+
+ this._dataModel = new SparkJobSubmissionModel(this.sqlClusterConnection, this._dialog, this.appContext);
+
+ this._sparkConfigTab = new SparkConfigurationTab(this._dataModel, this.appContext, path);
+ this._sparkAdvancedTab = new SparkAdvancedTab(this.appContext);
+
+ this._dialog.content = [this._sparkConfigTab.tab, this._sparkAdvancedTab.tab];
+
+ this._dialog.cancelButton.label = localize('sparkJobSubmission_DialogCancelButton', 'Cancel');
+
+ this._dialog.okButton.label = localize('sparkJobSubmission_DialogSubmitButton', 'Submit');
+ this._dialog.okButton.onClick(() => this.onClickOk());
+
+ this._dialog.registerCloseValidator(() => this.handleValidate());
+
+ await this.apiWrapper.openDialog(this._dialog);
+ }
+
+ private onClickOk(): void {
+ let jobName = localize('sparkJobSubmission_SubmitSparkJob', '{0} Spark Job Submission:',
+ this._sparkConfigTab.getInputValues()[0]);
+ this.apiWrapper.startBackgroundOperation(
+ {
+ connection: this.sqlClusterConnection.connection,
+ displayName: jobName,
+ description: jobName,
+ isCancelable: false,
+ operation: op => {
+ this.onSubmit(op);
+ }
+ }
+ );
+ }
+
+ private async onSubmit(op: sqlops.BackgroundOperation): Promise<void> {
+ try {
+ this.outputChannel.show();
+ let msg = localize('sparkJobSubmission_SubmissionStartMessage',
+ '.......................... Submit Spark Job Start ..........................');
+ this.outputChannel.appendLine(msg);
+ // 1. Upload local file to HDFS for local source.
+ if (this._dataModel.isMainSourceFromLocal) {
+ try {
+ this.outputChannel.appendLine(this.addInfoTag(LocalizedConstants.sparkJobSubmissionPrepareUploadingFile(this._dataModel.localFileSourcePath, this._dataModel.hdfsFolderDestinationPath)));
+ op.updateStatus(sqlops.TaskStatus.InProgress, LocalizedConstants.sparkJobSubmissionPrepareUploadingFile(this._dataModel.localFileSourcePath, this._dataModel.hdfsFolderDestinationPath));
+ await this._dataModel.uploadFile(this._dataModel.localFileSourcePath, this._dataModel.hdfsFolderDestinationPath);
+ vscode.window.showInformationMessage(LocalizedConstants.sparkJobSubmissionUploadingFileSucceeded);
+ this.outputChannel.appendLine(this.addInfoTag(LocalizedConstants.sparkJobSubmissionUploadingFileSucceeded));
+ op.updateStatus(sqlops.TaskStatus.InProgress, LocalizedConstants.sparkJobSubmissionUploadingFileSucceeded);
+ } catch (error) {
+ vscode.window.showErrorMessage(LocalizedConstants.sparkJobSubmissionUploadingFileFailed(utils.getErrorMessage(error)));
+ this.outputChannel.appendLine(this.addErrorTag(LocalizedConstants.sparkJobSubmissionUploadingFileFailed(utils.getErrorMessage(error))));
+ op.updateStatus(sqlops.TaskStatus.Failed, LocalizedConstants.sparkJobSubmissionUploadingFileFailed(utils.getErrorMessage(error)));
+ this.outputChannel.appendLine(LocalizedConstants.sparkJobSubmissionEndMessage);
+ return;
+ }
+ }
+
+ // 2. Submit job to cluster.
+ let submissionSettings: SparkJobSubmissionInput = this.getSubmissionInput();
+ this.outputChannel.appendLine(this.addInfoTag(LocalizedConstants.sparkJobSubmissionPrepareSubmitJob(submissionSettings.jobName)));
+ op.updateStatus(sqlops.TaskStatus.InProgress, LocalizedConstants.sparkJobSubmissionPrepareSubmitJob(submissionSettings.jobName));
+ let livyBatchId = await this._dataModel.submitBatchJobByLivy(submissionSettings);
+ vscode.window.showInformationMessage(LocalizedConstants.sparkJobSubmissionSparkJobHasBeenSubmitted);
+ this.outputChannel.appendLine(this.addInfoTag(LocalizedConstants.sparkJobSubmissionSparkJobHasBeenSubmitted));
+ op.updateStatus(sqlops.TaskStatus.InProgress, LocalizedConstants.sparkJobSubmissionSparkJobHasBeenSubmitted);
+
+ // 3. Get SparkHistory/YarnUI Url.
+ try {
+ let appId = await this._dataModel.getApplicationID(submissionSettings, livyBatchId);
+
+ let sparkHistoryUrl = this._dataModel.generateSparkHistoryUIUrl(submissionSettings, appId);
+ vscode.window.showInformationMessage(LocalizedConstants.sparkJobSubmissionSparkHistoryLinkMessage(sparkHistoryUrl));
+ this.outputChannel.appendLine(this.addInfoTag(LocalizedConstants.sparkJobSubmissionSparkHistoryLinkMessage(sparkHistoryUrl)));
+ op.updateStatus(sqlops.TaskStatus.Succeeded, LocalizedConstants.sparkJobSubmissionSparkHistoryLinkMessage(sparkHistoryUrl));
+
+ /*
+ // Spark Tracking URL is not working now.
+ let sparkTrackingUrl = this._dataModel.generateSparkTrackingUIUrl(submissionSettings, appId);
+ vscode.window.showInformationMessage(LocalizedConstants.sparkJobSubmissionTrackingLinkMessage(sparkTrackingUrl));
+ this.outputChannel.appendLine(this.addInfoTag(LocalizedConstants.sparkJobSubmissionTrackingLinkMessage(sparkTrackingUrl)));
+ op.updateStatus(sqlops.TaskStatus.Succeeded, LocalizedConstants.sparkJobSubmissionTrackingLinkMessage(sparkTrackingUrl));
+ */
+
+ let yarnUIUrl = this._dataModel.generateYarnUIUrl(submissionSettings, appId);
+ vscode.window.showInformationMessage(LocalizedConstants.sparkJobSubmissionYarnUIMessage(yarnUIUrl));
+ this.outputChannel.appendLine(this.addInfoTag(LocalizedConstants.sparkJobSubmissionYarnUIMessage(yarnUIUrl)));
+ op.updateStatus(sqlops.TaskStatus.Succeeded, LocalizedConstants.sparkJobSubmissionYarnUIMessage(yarnUIUrl));
+ } catch (error) {
+ vscode.window.showErrorMessage(LocalizedConstants.sparkJobSubmissionGetApplicationIdFailed(utils.getErrorMessage(error)));
+ this.outputChannel.appendLine(this.addErrorTag(LocalizedConstants.sparkJobSubmissionGetApplicationIdFailed(utils.getErrorMessage(error))));
+ op.updateStatus(sqlops.TaskStatus.Failed, LocalizedConstants.sparkJobSubmissionGetApplicationIdFailed(utils.getErrorMessage(error)));
+ this.outputChannel.appendLine(LocalizedConstants.sparkJobSubmissionEndMessage);
+ return;
+ }
+
+ this.outputChannel.appendLine(LocalizedConstants.sparkJobSubmissionEndMessage);
+ } catch (error) {
+ vscode.window.showErrorMessage(LocalizedConstants.sparkJobSubmissionSubmitJobFailed(utils.getErrorMessage(error)));
+ this.outputChannel.appendLine(this.addErrorTag(LocalizedConstants.sparkJobSubmissionSubmitJobFailed(utils.getErrorMessage(error))));
+ op.updateStatus(sqlops.TaskStatus.Failed, LocalizedConstants.sparkJobSubmissionSubmitJobFailed(utils.getErrorMessage(error)));
+ this.outputChannel.appendLine(LocalizedConstants.sparkJobSubmissionEndMessage);
+ }
+ }
+
+ private async handleValidate(): Promise<boolean> {
+ return this._sparkConfigTab.validate();
+ }
+
+ private getSubmissionInput(): SparkJobSubmissionInput {
+ let generalConfig = this._sparkConfigTab.getInputValues();
+ let advancedConfig = this._sparkAdvancedTab.getInputValues();
+ return new SparkJobSubmissionInput(generalConfig[0], this._dataModel.hdfsSubmitFilePath, generalConfig[1], generalConfig[2],
+ advancedConfig[0], advancedConfig[1], advancedConfig[2]);
+ }
+
+ private addInfoTag(info: string): string {
+ return `[Info] ${info}`;
+ }
+
+ private addErrorTag(error: string): string {
+ return `[Error] ${error}`;
+ }
+}
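
Note: both command classes shown earlier reduce to the same two calls. In an async context the flow is (sketch; the HDFS path is illustrative):

    let dialog = new SparkJobSubmissionDialog(sqlClusterConnection, appContext, outputChannel);
    await dialog.openDialog();                     // user picks a local or HDFS file in the dialog
    // or, from the HDFS file context menu, with the source path pre-filled:
    await dialog.openDialog('/data/jobs/app.jar');
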
diff --git a/extensions/mssql/src/sparkFeature/dialog/sparkJobSubmission/sparkJobSubmissionModel.ts b/extensions/mssql/src/sparkFeature/dialog/sparkJobSubmission/sparkJobSubmissionModel.ts
new file mode 100644
index 0000000000..233f3a14ac
--- /dev/null
+++ b/extensions/mssql/src/sparkFeature/dialog/sparkJobSubmission/sparkJobSubmissionModel.ts
@@ -0,0 +1,206 @@
+/*---------------------------------------------------------------------------------------------
+ * Copyright (c) Microsoft Corporation. All rights reserved.
+ * Licensed under the Source EULA. See License.txt in the project root for license information.
+ *--------------------------------------------------------------------------------------------*/
+
+'use strict';
+
+import * as sqlops from 'sqlops';
+import * as nls from 'vscode-nls';
+const localize = nls.loadMessageBundle();
+import * as fs from 'fs';
+import * as fspath from 'path';
+import * as os from 'os';
+
+import * as constants from '../../../constants';
+import { SqlClusterConnection } from '../../../objectExplorerNodeProvider/connection';
+import * as LocalizedConstants from '../../../localizedConstants';
+import * as utils from '../../../utils';
+import { SparkJobSubmissionService, SparkJobSubmissionInput, LivyLogResponse } from './sparkJobSubmissionService';
+import { AppContext } from '../../../appContext';
+import { IFileSource, File, joinHdfsPath } from '../../../objectExplorerNodeProvider/fileSources';
+
+
+// Stores important state and service methods used by the Spark Job Submission Dialog.
+export class SparkJobSubmissionModel {
+ private _dialogService: SparkJobSubmissionService;
+ private _guidForClusterFolder: string;
+ public get guidForClusterFolder(): string { return this._guidForClusterFolder; }
+
+ // Whether the file is from local or HDFS
+ public isMainSourceFromLocal: boolean;
+
+ // The final HDFS path that will be submitted
+ public hdfsSubmitFilePath: string;
+
+ // Paths for local file upload: source path and HDFS destination folder
+ public localFileSourcePath: string;
+ public hdfsFolderDestinationPath: string;
+
+ constructor(
+ private readonly _sqlClusterConnection: SqlClusterConnection,
+ private readonly _dialog: sqlops.window.modelviewdialog.Dialog,
+ private readonly _appContext: AppContext,
+ requestService?: (args: any) => any) {
+
+ if (!this._sqlClusterConnection || !this._dialog || !this._appContext) {
+ throw new Error(localize('sparkJobSubmission_SparkJobSubmissionModelInitializeError',
+ 'Parameters for SparkJobSubmissionModel are invalid'));
+ }
+
+ this._dialogService = new SparkJobSubmissionService(requestService);
+ this._guidForClusterFolder = utils.generateGuid();
+ }
+
+ public get connection(): SqlClusterConnection { return this._sqlClusterConnection; }
+ public get dialogService(): SparkJobSubmissionService { return this._dialogService; }
+ public get dialog(): sqlops.window.modelviewdialog.Dialog { return this._dialog; }
+
+ public isJarFile(): boolean {
+ if (this.hdfsSubmitFilePath) {
+ return this.hdfsSubmitFilePath.toLowerCase().endsWith('jar');
+ }
+
+ return false;
+ }
+
+ public showDialogError(message: string): void {
+ let errorLevel = sqlops.window.modelviewdialog.MessageLevel ? sqlops.window.modelviewdialog.MessageLevel : 0;
+ this._dialog.message = {
+ text: message,
+ level: errorLevel
+ };
+ }
+
+ public showDialogInfo(message: string): void {
+ let infoLevel = sqlops.window.modelviewdialog.MessageLevel ? sqlops.window.modelviewdialog.MessageLevel.Information : 2;
+ this._dialog.message = {
+ text: message,
+ level: infoLevel
+ };
+ }
+
+ public getSparkClusterUrl(): string {
+ if (this._sqlClusterConnection && this._sqlClusterConnection.host && this._sqlClusterConnection.port) {
+ return `https://${this._sqlClusterConnection.host}:${this._sqlClusterConnection.port}`;
+ }
+
+ // Safety check only; this won't happen when the model is initialized correctly.
+ return '';
+ }
+
+ public async submitBatchJobByLivy(submissionArgs: SparkJobSubmissionInput): Promise<string> {
+ try {
+ if (!submissionArgs) {
+ return Promise.reject(localize('sparkJobSubmission_submissionArgsIsInvalid', 'submissionArgs is invalid. '));
+ }
+
+ submissionArgs.setSparkClusterInfo(this._sqlClusterConnection);
+ let livyBatchId = await this._dialogService.submitBatchJob(submissionArgs);
+ return livyBatchId;
+ } catch (error) {
+ return Promise.reject(error);
+ }
+ }
+
+ public async getApplicationID(submissionArgs: SparkJobSubmissionInput, livyBatchId: string, retryTime?: number): Promise<string> {
+ // TODO: decide whether to set the timeout to 15000 ms
+ try {
+ if (!submissionArgs) {
+ return Promise.reject(localize('sparkJobSubmission_submissionArgsIsInvalid', 'submissionArgs is invalid. '));
+ }
+
+ if (!utils.isValidNumber(livyBatchId)) {
+ return Promise.reject(new Error(localize('sparkJobSubmission_LivyBatchIdIsInvalid', 'livyBatchId is invalid. ')));
+ }
+
+ if (!retryTime) {
+ retryTime = constants.mssqlClusterLivyRetryTimesForCheckYarnApp;
+ }
+
+ submissionArgs.setSparkClusterInfo(this._sqlClusterConnection);
+ let response: LivyLogResponse = undefined;
+ let timeOutCount: number = 0;
+ do {
+ timeOutCount++;
+ await this.sleep(constants.mssqlClusterLivyTimeInMSForCheckYarnApp);
+ response = await this._dialogService.getYarnAppId(submissionArgs, livyBatchId);
+ } while (response.appId === '' && timeOutCount < retryTime);
+
+ if (response.appId === '') {
+ return Promise.reject(localize('sparkJobSubmission_GetApplicationIdTimeOut', 'Getting the application id timed out. {0}[Log] {1}', os.EOL, response.log));
+ } else {
+ return response.appId;
+ }
+ } catch (error) {
+ return Promise.reject(error);
+ }
+ }
+
+ public async uploadFile(localFilePath: string, hdfsFolderPath: string): Promise<void> {
+ try {
+ if (!localFilePath || !hdfsFolderPath) {
+ return Promise.reject(localize('sparkJobSubmission_localFileOrFolderNotSpecified.', 'Property localFilePath or hdfsFolderPath is not specified. '));
+ }
+
+ if (!fs.existsSync(localFilePath)) {
+ return Promise.reject(LocalizedConstants.sparkJobSubmissionLocalFileNotExisted(localFilePath));
+ }
+
+ let fileSource: IFileSource = this._sqlClusterConnection.createHdfsFileSource();
+ await fileSource.writeFile(new File(localFilePath, false), hdfsFolderPath);
+ } catch (error) {
+ return Promise.reject(error);
+ }
+ }
+
+ public async isClusterFileExisted(path: string): Promise<boolean> {
+ try {
+ if (!path) {
+ return Promise.reject(localize('sparkJobSubmission_PathNotSpecified.', 'Property Path is not specified. '));
+ }
+
+ let fileSource: IFileSource = this._sqlClusterConnection.createHdfsFileSource();
+ return await fileSource.exists(path);
+ } catch (error) {
+ return Promise.reject(error);
+ }
+ }
+
+ public updateModelByLocalPath(localPath: string): void {
+ if (localPath) {
+ this.localFileSourcePath = localPath;
+ this.hdfsFolderDestinationPath = this.generateDestinationFolder();
+ let fileName = fspath.basename(localPath);
+ this.hdfsSubmitFilePath = joinHdfsPath(this.hdfsFolderDestinationPath, fileName);
+ } else {
+ this.hdfsSubmitFilePath = '';
+ }
+ }
+
+ // Example path: /SparkSubmission/2018/08/21/b682a6c4-1954-401e-8542-9c573d69d9c0/default_artifact.jar
+ private generateDestinationFolder(): string {
+ let day = new Date();
+ return `/SparkSubmission/${day.getUTCFullYear()}/${day.getUTCMonth() + 1}/${day.getUTCDate()}/${this._guidForClusterFolder}`;
+ }
+
+ // Example: https://host:30443/gateway/default/yarn/cluster/app/application_1532646201938_0057
+ public generateYarnUIUrl(submissionArgs: SparkJobSubmissionInput, appId: string): string {
+ return `https://${submissionArgs.host}:${submissionArgs.port}/gateway/default/yarn/cluster/app/${appId}`;
+ }
+
+ // Example: https://host:30443/gateway/default/yarn/proxy/application_1532646201938_0411
+ public generateSparkTrackingUIUrl(submissionArgs: SparkJobSubmissionInput, appId: string): string {
+ return `https://${submissionArgs.host}:${submissionArgs.port}/gateway/default/yarn/proxy/${appId}`;
+ }
+
+ // Example: https://host:30443/gateway/default/sparkhistory/history/application_1532646201938_0057/1
+ public generateSparkHistoryUIUrl(submissionArgs: SparkJobSubmissionInput, appId: string): string {
+ return `https://${submissionArgs.host}:${submissionArgs.port}/gateway/default/sparkhistory/history/${appId}/1`;
+ }
+
+ private async sleep(ms: number): Promise<{}> {
+ // tslint:disable-next-line no-string-based-set-timeout
+ return new Promise(resolve => setTimeout(resolve, ms));
+ }
+}
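
Note: getApplicationID above is a bounded polling loop: sleep, query the Livy log, and stop once an application id appears or the retry budget is spent. The same shape extracted as a generic helper, for clarity (sketch only; the model inlines it, with constants.mssqlClusterLivyTimeInMSForCheckYarnApp between attempts):

    // Generic poll-until-done with a fixed retry budget (sketch).
    async function pollUntil<T>(
        fetch: () => Promise<T>,
        isDone: (value: T) => boolean,
        retries: number,
        intervalMs: number): Promise<T> {
        let value: T;
        for (let attempt = 0; attempt < retries; attempt++) {
            await new Promise(resolve => setTimeout(resolve, intervalMs));
            value = await fetch();
            if (isDone(value)) { return value; }
        }
        return value; // the caller decides whether a not-done value is an error
    }

    // Equivalent to the loop in getApplicationID:
    // let response = await pollUntil(
    //     () => this._dialogService.getYarnAppId(submissionArgs, livyBatchId),
    //     r => r.appId !== '', retryTime, constants.mssqlClusterLivyTimeInMSForCheckYarnApp);
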
diff --git a/extensions/mssql/src/sparkFeature/dialog/sparkJobSubmission/sparkJobSubmissionService.ts b/extensions/mssql/src/sparkFeature/dialog/sparkJobSubmission/sparkJobSubmissionService.ts
new file mode 100644
index 0000000000..1e4980903e
--- /dev/null
+++ b/extensions/mssql/src/sparkFeature/dialog/sparkJobSubmission/sparkJobSubmissionService.ts
@@ -0,0 +1,187 @@
+/*---------------------------------------------------------------------------------------------
+ * Copyright (c) Microsoft Corporation. All rights reserved.
+ * Licensed under the Source EULA. See License.txt in the project root for license information.
+ *--------------------------------------------------------------------------------------------*/
+
+'use strict';
+
+import * as os from 'os';
+import * as nls from 'vscode-nls';
+const localize = nls.loadMessageBundle();
+import * as constants from '../../../constants';
+import { SqlClusterConnection } from '../../../objectExplorerNodeProvider/connection';
+import * as utils from '../../../utils';
+
+export class SparkJobSubmissionService {
+ private _requestPromise: (args: any) => any;
+
+ constructor(
+ requestService?: (args: any) => any) {
+ if (requestService) {
+ // Allows tests to inject a fake request service.
+ this._requestPromise = requestService;
+ } else {
+ this._requestPromise = require('request-promise');
+ }
+ }
+
+ public async submitBatchJob(submissionArgs: SparkJobSubmissionInput): Promise<string> {
+ try {
+ let livyUrl: string = `https://${submissionArgs.host}:${submissionArgs.port}${submissionArgs.livyPath}/`;
+ let options = {
+ uri: livyUrl,
+ method: 'POST',
+ json: true,
+ // TODO: re-enable certificate validation once the service's authentication changes.
+ rejectUnauthorized: false,
+ body: {
+ file: submissionArgs.sparkFile,
+ proxyUser: submissionArgs.user,
+ className: submissionArgs.mainClass,
+ name: submissionArgs.jobName
+ },
+ // authentication headers
+ headers: {
+ 'Authorization': 'Basic ' + new Buffer(submissionArgs.user + ':' + submissionArgs.password).toString('base64')
+ }
+ };
+
+ // Set arguments
+ if (submissionArgs.jobArguments && submissionArgs.jobArguments.trim()) {
+ let argsList = submissionArgs.jobArguments.split(' ');
+ if (argsList.length > 0) {
+ options.body['args'] = argsList;
+ }
+ }
+
+ // Set jars files
+ if (submissionArgs.jarFileList && submissionArgs.jarFileList.trim()) {
+ let jarList = submissionArgs.jarFileList.split(';');
+ if (jarList.length > 0) {
+ options.body['jars'] = jarList;
+ }
+ }
+
+ // Set py files
+ if (submissionArgs.pyFileList && submissionArgs.pyFileList.trim()) {
+ let pyList = submissionArgs.pyFileList.split(';');
+ if (pyList.length > 0) {
+ options.body['pyFiles'] = pyList;
+ }
+ }
+
+ // Set other files
+ if (submissionArgs.otherFileList && submissionArgs.otherFileList.trim()) {
+ let otherList = submissionArgs.otherFileList.split(';');
+ if (otherList.length > 0) {
+ options.body['files'] = otherList;
+ }
+ }
+
+ const response = await this._requestPromise(options);
+ if (response && utils.isValidNumber(response.id)) {
+ return response.id;
+ }
+
+ return Promise.reject(new Error(localize('sparkJobSubmission_LivyNoBatchIdReturned',
+ 'No Spark job batch id is returned from response.{0}[Error] {1}', os.EOL, JSON.stringify(response))));
+ } catch (error) {
+ return Promise.reject(error);
+ }
+ }
+
+ public async getYarnAppId(submissionArgs: SparkJobSubmissionInput, livyBatchId: string): Promise<LivyLogResponse> {
+ try {
+ let livyUrl = `https://${submissionArgs.host}:${submissionArgs.port}${submissionArgs.livyPath}/${livyBatchId}/log`;
+ let options = {
+ uri: livyUrl,
+ method: 'GET',
+ json: true,
+ rejectUnauthorized: false,
+ // authentication headers
+ headers: {
+ 'Authorization': 'Basic ' + new Buffer(submissionArgs.user + ':' + submissionArgs.password).toString('base64')
+ }
+ };
+
+ const response = await this._requestPromise(options);
+ if (response && response.log) {
+ return this.extractYarnAppIdFromLog(response.log);
+ }
+
+ return Promise.reject(localize('sparkJobSubmission_LivyNoLogReturned',
+ 'No log is returned within response.{0}[Error] {1}', os.EOL, JSON.stringify(response)));
+ } catch (error) {
+ return Promise.reject(error);
+ }
+ }
+
+
+ private extractYarnAppIdFromLog(log: any): LivyLogResponse {
+ let logForPrint = log;
+ if (Array.isArray(log)) {
+ logForPrint = log.join(os.EOL);
+ }
+
+ // eg: '18/08/23 11:02:50 INFO yarn.Client: Application report for application_1532646201938_0182 (state: ACCEPTED)'
+ for (let entry of log) {
+ if (entry.indexOf('Application report for') >= 0 && entry.indexOf('(state: ACCEPTED)') >= 0) {
+ let tokens = entry.split(' ');
+ for (let token of tokens) {
+ if (token.startsWith('application_')) {
+ return new LivyLogResponse(logForPrint, token);
+ }
+ }
+ }
+ }
+
+ return new LivyLogResponse(logForPrint, '');
+ }
+}
+
+export class SparkJobSubmissionInput {
+ public setSparkClusterInfo(sqlClusterConnection: SqlClusterConnection): void {
+ this._host = sqlClusterConnection.host;
+ this._port = sqlClusterConnection.port;
+ this._livyPath = constants.mssqlClusterLivySubmitPath;
+ this._user = sqlClusterConnection.user;
+ this._passWord = sqlClusterConnection.password;
+ }
+
+ constructor(
+ private readonly _jobName: string,
+ private readonly _sparkFile: string,
+ private readonly _mainClass: string,
+ private readonly _arguments: string,
+ private readonly _jarFileList: string,
+ private readonly _pyFileList: string,
+ private readonly _otherFileList: string,
+ private _host?: string,
+ private _port?: string,
+ private _livyPath?: string,
+ private _user?: string,
+ private _passWord?: string) {
+ }
+
+ public get jobName(): string { return this._jobName; }
+ public get sparkFile(): string { return this._sparkFile; }
+ public get mainClass(): string { return this._mainClass; }
+ public get jobArguments(): string { return this._arguments; }
+ public get jarFileList(): string { return this._jarFileList; }
+ public get otherFileList(): string { return this._otherFileList; }
+ public get pyFileList(): string { return this._pyFileList; }
+ public get host(): string { return this._host; }
+ public get port(): string { return this._port; }
+ public get livyPath(): string { return this._livyPath; }
+ public get user(): string { return this._user; }
+ public get password(): string { return this._passWord; }
+}
+
+export enum SparkFileSource {
+ HDFS = 'HDFS',
+ Local = 'Local'
+}
+
+export class LivyLogResponse {
+ constructor(public log: string, public appId: string) { }
+}
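
Note: because both SparkJobSubmissionService and SparkJobSubmissionModel accept a requestService override, the Livy calls can be exercised without a cluster. A test-style sketch; the host, port, Livy path, and credentials are illustrative values, not the extension's constants:

    // Fake transport: POST returns a batch id, GET returns a Livy log entry
    // containing a yarn application id in ACCEPTED state.
    let fakeRequest = async (options: any) =>
        options.method === 'POST'
            ? { id: 42 }
            : { log: ['18/08/23 11:02:50 INFO yarn.Client: Application report for application_1532646201938_0182 (state: ACCEPTED)'] };

    let service = new SparkJobSubmissionService(fakeRequest);
    let input = new SparkJobSubmissionInput('myJob', '/tmp/app.jar', 'org.example.Main', '',
        '', '', '', 'myhost', '30443', '/gateway/default/livy/v1/batches', 'admin', 'secret');

    service.submitBatchJob(input).then(id => console.log(id));         // 42
    service.getYarnAppId(input, '42').then(r => console.log(r.appId)); // application_1532646201938_0182
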
diff --git a/extensions/mssql/src/sparkFeature/historyTask.ts b/extensions/mssql/src/sparkFeature/historyTask.ts
new file mode 100644
index 0000000000..b1c0b817a8
--- /dev/null
+++ b/extensions/mssql/src/sparkFeature/historyTask.ts
@@ -0,0 +1,45 @@
+/*---------------------------------------------------------------------------------------------
+ * Copyright (c) Microsoft Corporation. All rights reserved.
+ * Licensed under the Source EULA. See License.txt in the project root for license information.
+ *--------------------------------------------------------------------------------------------*/
+
+'use strict';
+
+import * as sqlops from 'sqlops';
+import * as vscode from 'vscode';
+import { AppContext } from '../appContext';
+import { getErrorMessage } from '../utils';
+import * as SqlClusterLookUp from '../sqlClusterLookUp';
+
+export class OpenSparkYarnHistoryTask {
+ constructor(private appContext: AppContext) {
+ }
+
+ async execute(sqlConnProfile: sqlops.IConnectionProfile, isSpark: boolean): Promise<void> {
+ try {
+ let sqlClusterConnection = SqlClusterLookUp.findSqlClusterConnection(sqlConnProfile, this.appContext);
+ if (!sqlClusterConnection) {
+ let name = isSpark ? 'Spark' : 'Yarn';
+ this.appContext.apiWrapper.showErrorMessage(
+ `Please connect to the Spark cluster before viewing ${name} history.`);
+ return;
+ }
+ if (isSpark) {
+ vscode.commands.executeCommand('vscode.open', vscode.Uri.parse(this.generateSparkHistoryUrl(sqlClusterConnection.host, sqlClusterConnection.port)));
+ }
+ else {
+ vscode.commands.executeCommand('vscode.open', vscode.Uri.parse(this.generateYarnHistoryUrl(sqlClusterConnection.host, sqlClusterConnection.port)));
+ }
+ } catch (error) {
+ this.appContext.apiWrapper.showErrorMessage(getErrorMessage(error));
+ }
+ }
+
+ private generateSparkHistoryUrl(host: string, port: string): string {
+ return `https://${host}:${port}/gateway/default/sparkhistory/`;
+ }
+
+ private generateYarnHistoryUrl(host: string, port: string): string {
+ return `https://${host}:${port}/gateway/default/yarn/cluster/apps`;
+ }
+}
diff --git a/extensions/mssql/src/sparkFeature/sparkUtils.ts b/extensions/mssql/src/sparkFeature/sparkUtils.ts
new file mode 100644
index 0000000000..f7c2519633
--- /dev/null
+++ b/extensions/mssql/src/sparkFeature/sparkUtils.ts
@@ -0,0 +1,220 @@
+/*---------------------------------------------------------------------------------------------
+ * Copyright (c) Microsoft Corporation. All rights reserved.
+ * Licensed under the Source EULA. See License.txt in the project root for license information.
+ *--------------------------------------------------------------------------------------------*/
+
+'use strict';
+
+import * as childProcess from 'child_process';
+import * as fs from 'fs-extra';
+import * as nls from 'vscode-nls';
+import * as path from 'path';
+import * as sqlops from 'sqlops';
+import * as vscode from 'vscode';
+import * as which from 'which';
+import * as Constants from '../constants';
+
+const localize = nls.loadMessageBundle();
+
+export function getDropdownValue(dropdownValue: string | sqlops.CategoryValue): string {
+ if (typeof(dropdownValue) === 'string') {
+ return dropdownValue;
+ } else {
+ return dropdownValue ? (<sqlops.CategoryValue>dropdownValue).name : undefined;
+ }
+}
+
+export function getServerAddressFromName(connection: sqlops.ConnectionInfo | string): string {
+ // Strip TDS port number from the server URI
+ if ((<sqlops.ConnectionInfo>connection).options && (<sqlops.ConnectionInfo>connection).options['host']) {
+ return (<sqlops.ConnectionInfo>connection).options['host'].split(',')[0].split(':')[0];
+ } else if ((<sqlops.ConnectionInfo>connection).options && (<sqlops.ConnectionInfo>connection).options['server']) {
+ return (<sqlops.ConnectionInfo>connection).options['server'].split(',')[0].split(':')[0];
+ } else {
+ return (<string>connection).split(',')[0].split(':')[0];
+ }
+}
+
+export function getKnoxUrl(host: string, port: string): string {
+ return `https://${host}:${port}/gateway`;
+}
+
+export function getLivyUrl(serverName: string, port: string): string {
+ return getKnoxUrl(serverName, port) + '/default/livy/v1/';
+}
+
+export function getTemplatePath(extensionPath: string, templateName: string): string {
+ return path.join(extensionPath, 'resources', templateName);
+}
+export function shellWhichResolving(cmd: string): Promise<string> {
+ return new Promise(resolve => {
+ which(cmd, (err, foundPath) => {
+ if (err) {
+ resolve(undefined);
+ } else {
+ // NOTE: Using realpath b/c some system installs are symlinked from */bin
+ resolve(fs.realpathSync(foundPath));
+ }
+ });
+ });
+}
+
+export async function mkDir(dirPath: string, outputChannel?: vscode.OutputChannel): Promise<void> {
+ if (!await fs.exists(dirPath)) {
+ if (outputChannel) {
+ outputChannel.appendLine(localize('mkdirOutputMsg', '... Creating {0}', dirPath));
+ }
+ await fs.ensureDir(dirPath);
+ }
+}
+
+export function getErrorMessage(error: Error | string): string {
+ return (error instanceof Error) ? error.message : error;
+}
+
+// COMMAND EXECUTION HELPERS ///////////////////////////////////////////////
+export function executeBufferedCommand(cmd: string, options: childProcess.ExecOptions, outputChannel?: vscode.OutputChannel): Thenable<string> {
+ return new Promise<string>((resolve, reject) => {
+ if (outputChannel) {
+ outputChannel.appendLine(` > ${cmd}`);
+ }
+
+ let child = childProcess.exec(cmd, options, (err, stdout) => {
+ if (err) {
+ reject(err);
+ } else {
+ resolve(stdout);
+ }
+ });
+
+ // Add listeners to print stdout and stderr if an output channel was provided
+ if (outputChannel) {
+ child.stdout.on('data', data => { outputDataChunk(data, outputChannel, ' stdout: '); });
+ child.stderr.on('data', data => { outputDataChunk(data, outputChannel, ' stderr: '); });
+ }
+ });
+}
+
+export function executeExitCodeCommand(cmd: string, outputChannel?: vscode.OutputChannel): Thenable<number> {
+ return new Promise<number>((resolve, reject) => {
+ if (outputChannel) {
+ outputChannel.appendLine(` > ${cmd}`);
+ }
+
+ let child = childProcess.spawn(cmd, [], { shell: true, detached: false });
+
+ // Add listeners for the process to exit
+ child.on('error', reject);
+ child.on('exit', (code: number) => { resolve(code); });
+
+ // Add listeners to print stdout and stderr if an output channel was provided
+ if (outputChannel) {
+ child.stdout.on('data', data => { outputDataChunk(data, outputChannel, ' stdout: '); });
+ child.stderr.on('data', data => { outputDataChunk(data, outputChannel, ' stderr: '); });
+ }
+ });
+}
+
+export function executeStreamedCommand(cmd: string, outputChannel?: vscode.OutputChannel): Thenable<void> {
+ return new Promise<void>((resolve, reject) => {
+ // Start the command
+ if (outputChannel) {
+ outputChannel.appendLine(` > ${cmd}`);
+ }
+ let child = childProcess.spawn(cmd, [], { shell: true, detached: false });
+
+ // Add listeners to resolve/reject the promise on exit
+ child.on('error', reject);
+ child.on('exit', (code: number) => {
+ if (code === 0) {
+ resolve();
+ } else {
+ reject(localize('executeCommandProcessExited', 'Process exited with code {0}', code));
+ }
+ });
+
+ // Add listeners to print stdout and stderr if an output channel was provided
+ if (outputChannel) {
+ child.stdout.on('data', data => { outputDataChunk(data, outputChannel, ' stdout: '); });
+ child.stderr.on('data', data => { outputDataChunk(data, outputChannel, ' stderr: '); });
+ }
+ });
+}
+
+export function isObjectExplorerContext(object: any): object is sqlops.ObjectExplorerContext {
+ return 'connectionProfile' in object && 'isConnectionNode' in object;
+}
+
+export function getUserHome(): string {
+ return process.env.HOME || process.env.USERPROFILE;
+}
+
+export enum Platform {
+ Mac,
+ Linux,
+ Windows,
+ Others
+}
+
+export function getOSPlatform(): Platform {
+ switch (process.platform) {
+ case 'win32':
+ return Platform.Windows;
+ case 'darwin':
+ return Platform.Mac;
+ case 'linux':
+ return Platform.Linux;
+ default:
+ return Platform.Others;
+ }
+}
+
+export function getOSPlatformId(): string {
+ let platformId: string = undefined;
+ switch (process.platform) {
+ case 'win32':
+ platformId = 'win-x64';
+ break;
+ case 'darwin':
+ platformId = 'osx';
+ break;
+ default:
+ platformId = 'linux-x64';
+ break;
+ }
+ return platformId;
+}
+
+// PRIVATE HELPERS /////////////////////////////////////////////////////////
+function outputDataChunk(data: string | Buffer, outputChannel: vscode.OutputChannel, header: string): void {
+ data.toString().split(/\r?\n/)
+ .forEach(line => {
+ outputChannel.appendLine(header + line);
+ });
+}
+
+export function clone<T>(obj: T): T {
+ if (!obj || typeof obj !== 'object') {
+ return obj;
+ }
+ if (obj instanceof RegExp) {
+ // See https://github.com/Microsoft/TypeScript/issues/10990
+ return obj as any;
+ }
+ const result: any = (Array.isArray(obj)) ? [] : {};
+ Object.keys(obj).forEach(key => {
+ if (obj[key] && typeof obj[key] === 'object') {
+ result[key] = clone(obj[key]);
+ } else {
+ result[key] = obj[key];
+ }
+ });
+ return result;
+}
+
+export function isValidNumber(maybeNumber: any): boolean {
+ return maybeNumber !== undefined
+ && maybeNumber !== null
+ && maybeNumber !== ''
+ && !isNaN(Number(maybeNumber.toString()));
+}
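
Note: a quick usage sketch for the buffered and exit-code command helpers (the commands themselves are illustrative; output is echoed to the provided channel):

    let channel = vscode.window.createOutputChannel('spark-utils-demo');

    executeBufferedCommand('echo hello', {}, channel)
        .then(stdout => channel.appendLine(`buffered: ${stdout.trim()}`));

    executeExitCodeCommand('exit 3', channel)
        .then(code => channel.appendLine(`exit code: ${code}`)); // 3
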
diff --git a/extensions/mssql/src/sqlClusterLookUp.ts b/extensions/mssql/src/sqlClusterLookUp.ts
new file mode 100644
index 0000000000..57736c0398
--- /dev/null
+++ b/extensions/mssql/src/sqlClusterLookUp.ts
@@ -0,0 +1,146 @@
+/*---------------------------------------------------------------------------------------------
+ * Copyright (c) Microsoft Corporation. All rights reserved.
+ * Licensed under the Source EULA. See License.txt in the project root for license information.
+ *--------------------------------------------------------------------------------------------*/
+
+'use strict';
+
+import * as sqlops from 'sqlops';
+import * as constants from './constants';
+import * as UUID from 'vscode-languageclient/lib/utils/uuid';
+import { AppContext } from './appContext';
+import { SqlClusterConnection } from './objectExplorerNodeProvider/connection';
+import { ICommandObjectExplorerContext } from './objectExplorerNodeProvider/command';
+import { MssqlObjectExplorerNodeProvider } from './objectExplorerNodeProvider/objectExplorerNodeProvider';
+
+
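+/**
+ * Finds the SqlClusterConnection for an Object Explorer context or a SQL
+ * connection profile by looking the profile up in the registered Object
+ * Explorer sessions; returns undefined when no matching session exists.
+ */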
+export function findSqlClusterConnection(
+ obj: ICommandObjectExplorerContext | sqlops.IConnectionProfile,
+ appContext: AppContext): SqlClusterConnection {
+
+ if (!obj || !appContext) { return undefined; }
+
+ let sqlConnProfile: sqlops.IConnectionProfile;
+ if ('type' in obj && obj.type === constants.ObjectExplorerService
+ && 'explorerContext' in obj && obj.explorerContext && obj.explorerContext.connectionProfile) {
+ sqlConnProfile = obj.explorerContext.connectionProfile;
+ } else if ('options' in obj) {
+ sqlConnProfile = obj;
+ }
+
+ let sqlClusterConnection: SqlClusterConnection = undefined;
+ if (sqlConnProfile) {
+ sqlClusterConnection = findSqlClusterConnectionBySqlConnProfile(sqlConnProfile, appContext);
+ }
+ return sqlClusterConnection;
+}
+
+function findSqlClusterConnectionBySqlConnProfile(sqlConnProfile: sqlops.IConnectionProfile, appContext: AppContext): SqlClusterConnection {
+ if (!sqlConnProfile || !appContext) { return undefined; }
+
+ let sqlOeNodeProvider = appContext.getService<MssqlObjectExplorerNodeProvider>(constants.ObjectExplorerService);
+ if (!sqlOeNodeProvider) { return undefined; }
+
+ let sqlClusterSession = sqlOeNodeProvider.findSqlClusterSessionBySqlConnProfile(sqlConnProfile);
+ if (!sqlClusterSession) { return undefined; }
+
+ return sqlClusterSession.sqlClusterConnection;
+}
+
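+/**
+ * Resolves the connection parameters for the cluster endpoint. Connections
+ * that already use the cluster provider are converted directly; SQL
+ * connections are translated by reading the cluster endpoints from the
+ * server info.
+ */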
+export async function getSqlClusterConnection(
+ obj: sqlops.IConnectionProfile | sqlops.connection.Connection | ICommandObjectExplorerContext): Promise<ConnectionParam> {
+
+ if (!obj) { return undefined; }
+
+ let sqlClusterConnInfo: ConnectionParam = undefined;
+ if ('providerName' in obj) {
+ if (obj.providerName === constants.mssqlClusterProviderName) {
+ sqlClusterConnInfo = 'id' in obj ? connProfileToConnectionParam(obj) : connToConnectionParam(obj);
+ } else {
+ sqlClusterConnInfo = await createSqlClusterConnInfo(obj);
+ }
+ } else {
+ sqlClusterConnInfo = await createSqlClusterConnInfo(obj.explorerContext.connectionProfile);
+ }
+
+ return sqlClusterConnInfo;
+}
+
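+/**
+ * Builds cluster connection info for a SQL connection by querying the server
+ * for its Knox gateway endpoint and reusing the SQL connection's credentials.
+ * Returns undefined if any required piece (connection id, server info,
+ * endpoint, credentials) is missing.
+ */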
+async function createSqlClusterConnInfo(sqlConnInfo: sqlops.IConnectionProfile | sqlops.connection.Connection): Promise<ConnectionParam> {
+ if (!sqlConnInfo) { return undefined; }
+
+ let connectionId: string = 'id' in sqlConnInfo ? sqlConnInfo.id : sqlConnInfo.connectionId;
+ if (!connectionId) { return undefined; }
+
+ let serverInfo = await sqlops.connection.getServerInfo(connectionId);
+ if (!serverInfo || !serverInfo.options) { return undefined; }
+
+ let endpoints: IEndpoint[] = serverInfo.options[constants.clusterEndpointsProperty];
+ if (!endpoints || endpoints.length === 0) { return undefined; }
+
+ let index = endpoints.findIndex(ep => ep.serviceName === constants.hadoopKnoxEndpointName);
+ if (index < 0) { return undefined; }
+
+ let credentials = await sqlops.connection.getCredentials(connectionId);
+ if (!credentials) { return undefined; }
+
+ let clusterConnInfo = {
+ providerName: constants.mssqlClusterProviderName,
+ connectionId: UUID.generateUuid(),
+ options: {}
+ };
+
+ clusterConnInfo.options[constants.hostPropName] = endpoints[index].ipAddress;
+ clusterConnInfo.options[constants.knoxPortPropName] = endpoints[index].port;
+ clusterConnInfo.options[constants.userPropName] = 'root'; // should be the same user as sql master
+ clusterConnInfo.options[constants.passwordPropName] = credentials.password;
+ clusterConnInfo = connToConnectionParam(clusterConnInfo);
+
+ return clusterConnInfo;
+}
+
+function connProfileToConnectionParam(connectionProfile: sqlops.IConnectionProfile): ConnectionParam {
+ let result = Object.assign(connectionProfile, { connectionId: connectionProfile.id });
+ return result;
+}
+
+function connToConnectionParam(connection: sqlops.connection.Connection): ConnectionParam {
+ let connectionId = connection.connectionId;
+ let options = connection.options;
+ let result = Object.assign(connection,
+ {
+ serverName: `${options[constants.hostPropName]},${options[constants.knoxPortPropName]}`,
+ userName: options[constants.userPropName],
+ password: options[constants.passwordPropName],
+ id: connectionId,
+ }
+ );
+ return result;
+}
+
+interface IEndpoint {
+ serviceName: string;
+ ipAddress: string;
+ port: number;
+}
+
+class ConnectionParam implements sqlops.connection.Connection, sqlops.IConnectionProfile, sqlops.ConnectionInfo {
+ public connectionName: string;
+ public serverName: string;
+ public databaseName: string;
+ public userName: string;
+ public password: string;
+ public authenticationType: string;
+ public savePassword: boolean;
+ public groupFullName: string;
+ public groupId: string;
+ public saveProfile: boolean;
+ public id: string;
+ public azureTenantId?: string;
+
+ public providerName: string;
+ public connectionId: string;
+
+ public options: { [name: string]: any; };
+}
\ No newline at end of file
diff --git a/extensions/mssql/src/typings/refs.d.ts b/extensions/mssql/src/typings/refs.d.ts
index 0bbbbb3b5e..3a7c7e8150 100644
--- a/extensions/mssql/src/typings/refs.d.ts
+++ b/extensions/mssql/src/typings/refs.d.ts
@@ -5,4 +5,4 @@
///
///
-///
\ No newline at end of file
+///
diff --git a/extensions/mssql/src/utils.ts b/extensions/mssql/src/utils.ts
index bf4f613744..e47b11eca1 100644
--- a/extensions/mssql/src/utils.ts
+++ b/extensions/mssql/src/utils.ts
@@ -5,19 +5,18 @@
'use strict';
import * as sqlops from 'sqlops';
-
+import * as vscode from 'vscode';
import * as path from 'path';
import * as crypto from 'crypto';
import * as os from 'os';
-import { workspace, WorkspaceConfiguration } from 'vscode';
import * as findRemoveSync from 'find-remove';
-import { IEndpoint } from './objectExplorerNodeProvider/objectExplorerNodeProvider';
import * as constants from './constants';
const configTracingLevel = 'tracingLevel';
const configLogRetentionMinutes = 'logRetentionMinutes';
const configLogFilesRemovalLimit = 'logFilesRemovalLimit';
const extensionConfigSectionName = 'mssql';
+const configLogDebugInfo = 'logDebugInfo';
// The function is a duplicate of \src\paths.js. It would be better to import paths.js, but it doesn't
// work for now because the extension is running in a different process.
@@ -35,8 +34,8 @@ export function removeOldLogFiles(prefix: string): JSON {
return findRemoveSync(getDefaultLogDir(), { prefix: `${prefix}_`, age: { seconds: getConfigLogRetentionSeconds() }, limit: getConfigLogFilesRemovalLimit() });
}
-export function getConfiguration(config: string = extensionConfigSectionName): WorkspaceConfiguration {
- return workspace.getConfiguration(extensionConfigSectionName);
+export function getConfiguration(config: string = extensionConfigSectionName): vscode.WorkspaceConfiguration {
+ return vscode.workspace.getConfiguration(extensionConfigSectionName);
}
export function getConfigLogFilesRemovalLimit(): number {
@@ -203,4 +202,31 @@ export async function getClusterEndpoint(profileId: string, serviceName: string)
port: endpoints[index].port
};
return clusterEndpoint;
-}
\ No newline at end of file
+}
+
+interface IEndpoint {
+ serviceName: string;
+ ipAddress: string;
+ port: number;
+}
+
+export function isValidNumber(maybeNumber: any): boolean {
+ return maybeNumber !== undefined
+ && maybeNumber !== null
+ && maybeNumber !== ''
+ && !isNaN(Number(maybeNumber.toString()));
+}
+
+/**
+ * Helper to log messages to the developer console if enabled
+ * @param msg Message to log to the console
+ */
+export function logDebug(msg: any): void {
+ let config = vscode.workspace.getConfiguration(extensionConfigSectionName);
+ let logDebugInfo = config[configLogDebugInfo];
+ if (logDebugInfo === true) {
+ let currentTime = new Date().toLocaleTimeString();
+ let outputMsg = '[' + currentTime + ']: ' + (msg ? msg.toString() : '');
+ console.log(outputMsg);
+ }
+}
diff --git a/extensions/mssql/yarn.lock b/extensions/mssql/yarn.lock
index b81b4addb9..c6c9844466 100644
--- a/extensions/mssql/yarn.lock
+++ b/extensions/mssql/yarn.lock
@@ -72,6 +72,11 @@ bl@^1.0.0:
readable-stream "^2.3.5"
safe-buffer "^5.1.1"
+bluebird@^3.5.0:
+ version "3.5.3"
+ resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.5.3.tgz#7d01c6f9616c9a51ab0f8c549a79dfe6ec33efa7"
+ integrity sha512-/qKPUQlaW1OyR51WeCPBvRnAlnZFUJkCSG5HzGnuIqhgyJtF+T94lFnn33eiazjRm2LAHVy2guNnaq48X9SJuw==
+
buffer-alloc-unsafe@^1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/buffer-alloc-unsafe/-/buffer-alloc-unsafe-1.1.0.tgz#bd7dc26ae2972d0eda253be061dba992349c19f0"
@@ -436,6 +441,11 @@ inherits@~2.0.3:
resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de"
integrity sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=
+ip-regex@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/ip-regex/-/ip-regex-3.0.0.tgz#0a934694b4066558c46294244a23cc33116bf732"
+ integrity sha512-T8wDtjy+Qf2TAPDQmBp0eGKJ8GavlWlUnamr3wRn6vvdZlKVuJXXMlSncYFRYgVHOM3If5NR1H4+OvVQU9Idvg==
+
is-arrayish@^0.2.1:
version "0.2.1"
resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d"
@@ -492,6 +502,11 @@ jsprim@^1.2.2:
json-schema "0.2.3"
verror "1.10.0"
+lodash@^4.13.1:
+ version "4.17.11"
+ resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.11.tgz#b39ea6229ef607ecd89e2c8df12536891cac9b8d"
+ integrity sha512-cQKh8igo5QUhZ7lg38DYWAxMvjSAKG0A8wGSVimP07SIUEK2UO+arSRKbRZWtelMtN5V0Hkwh5ryOto/SshYIg==
+
lru-cache@^4.0.1:
version "4.1.5"
resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.1.5.tgz#8bbe50ea85bed59bc9e33dcab8235ee9bcf443cd"
@@ -618,7 +633,7 @@ pseudomap@^1.0.2:
version "1.0.2"
resolved "https://registry.yarnpkg.com/pseudomap/-/pseudomap-1.0.2.tgz#f052a28da70e618917ef0a8ac34c1ae5a68286b3"
-psl@^1.1.24:
+psl@^1.1.24, psl@^1.1.28:
version "1.1.31"
resolved "https://registry.yarnpkg.com/psl/-/psl-1.1.31.tgz#e9aa86d0101b5b105cbe93ac6b784cd547276184"
@@ -626,7 +641,7 @@ punycode@^1.4.1:
version "1.4.1"
resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.4.1.tgz#c0d5a63b2718800ad8e1eb0fa5269c84dd41845e"
-punycode@^2.1.0:
+punycode@^2.1.0, punycode@^2.1.1:
version "2.1.1"
resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec"
@@ -647,6 +662,23 @@ readable-stream@^2.1.4, readable-stream@^2.3.0, readable-stream@^2.3.5:
string_decoder "~1.1.1"
util-deprecate "~1.0.1"
+request-promise-core@1.1.1:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/request-promise-core/-/request-promise-core-1.1.1.tgz#3eee00b2c5aa83239cfb04c5700da36f81cd08b6"
+ integrity sha1-Pu4AssWqgyOc+wTFcA2jb4HNCLY=
+ dependencies:
+ lodash "^4.13.1"
+
+request-promise@^4.2.2:
+ version "4.2.2"
+ resolved "https://registry.yarnpkg.com/request-promise/-/request-promise-4.2.2.tgz#d1ea46d654a6ee4f8ee6a4fea1018c22911904b4"
+ integrity sha1-0epG1lSm7k+O5qT+oQGMIpEZBLQ=
+ dependencies:
+ bluebird "^3.5.0"
+ request-promise-core "1.1.1"
+ stealthy-require "^1.1.0"
+ tough-cookie ">=2.3.3"
+
request@^2.74.0:
version "2.88.0"
resolved "https://registry.yarnpkg.com/request/-/request-2.88.0.tgz#9c2fca4f7d35b592efe57c7f0a55e81052124fef"
@@ -732,6 +764,11 @@ sshpk@^1.7.0:
safer-buffer "^2.0.2"
tweetnacl "~0.14.0"
+stealthy-require@^1.1.0:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/stealthy-require/-/stealthy-require-1.1.1.tgz#35b09875b4ff49f26a777e509b3090a3226bf24b"
+ integrity sha1-NbCYdbT/SfJqd35QmzCQoyJr8ks=
+
stream-meter@^1.0.4:
version "1.0.4"
resolved "https://registry.yarnpkg.com/stream-meter/-/stream-meter-1.0.4.tgz#52af95aa5ea760a2491716704dbff90f73afdd1d"
@@ -786,6 +823,15 @@ to-buffer@^1.1.1:
resolved "https://registry.yarnpkg.com/to-buffer/-/to-buffer-1.1.1.tgz#493bd48f62d7c43fcded313a03dcadb2e1213a80"
integrity sha512-lx9B5iv7msuFYE3dytT+KE5tap+rNYw+K4jVkb9R/asAb+pbBSM17jtunHplhBe6RRJdZx3Pn2Jph24O32mOVg==
+tough-cookie@>=2.3.3:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-3.0.0.tgz#d2bceddebde633153ff20a52fa844a0dc71dacef"
+ integrity sha512-LHMvg+RBP/mAVNqVbOX8t+iJ+tqhBA/t49DuI7+IDAWHrASnesqSu1vWbKB7UrE2yk+HMFUBMadRGMkB4VCfog==
+ dependencies:
+ ip-regex "^3.0.0"
+ psl "^1.1.28"
+ punycode "^2.1.1"
+
tough-cookie@~2.4.3:
version "2.4.3"
resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.4.3.tgz#53f36da3f47783b0925afa06ff9f3b165280f781"