Add more advanced properties for spark job submission dialog (#12732)
* Add more advanced properties for spark job submission dialog
* Add queue
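For context: the fields this change adds map onto the request body of Livy's batch submission endpoint (POST to /batches). A hypothetical payload with every dialog field filled in, matching the keys set in submitBatchJob further down (values are illustrative only, not from the commit):

    // Hypothetical Livy batch body; field names follow
    // https://livy.incubator.apache.org/docs/latest/rest-api.html
    const exampleLivyBatchBody = {
        file: 'hdfs://jobs/job.jar',          // JAR/py File (general tab)
        className: 'com.example.Main',        // Main Class (general tab)
        name: 'My Spark Job',                 // Job Name (general tab)
        args: ['--input', 'data.csv'],        // Arguments, split on spaces
        jars: ['hdfs://libs/a.jar'],          // Reference Jars, split on ';'
        pyFiles: ['hdfs://libs/util.py'],     // Reference py Files, split on ';'
        files: ['hdfs://conf/app.conf'],      // Reference Files, split on ';'
        driverMemory: '2G',                   // Driver Memory
        driverCores: 2,                       // Driver Cores
        executorMemory: '512M',               // Executor Memory
        executorCores: 4,                     // Executor Cores
        numExecutors: 3,                      // Executor Count
        queue: 'default',                     // Queue Name
        conf: '{"spark.speculation":"true"}'  // Configuration Values (JSON string)
    };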
@@ -7,6 +7,28 @@ import * as azdata from 'azdata';
 import * as nls from 'vscode-nls';
 const localize = nls.loadMessageBundle();
 
+/**
+ * Configuration values for the advanced tab of the spark job submission dialog.
+ * See https://livy.incubator.apache.org/docs/latest/rest-api.html for more information
+ * on the specific values.
+ */
+export interface SparkAdvancedConfigModel {
+    jarFiles?: string,
+    pyFiles?: string,
+    otherFiles?: string,
+    driverMemory?: string,
+    driverCores?: number,
+    executorMemory?: string,
+    executorCores?: number,
+    executorCount?: number,
+    queueName?: string,
+    configValues?: string
+}
+
+const baseFormItemLayout: azdata.FormItemLayout = {
+    horizontal: false,
+    componentWidth: '400px'
+};
+
 export class SparkAdvancedTab {
     private _tab: azdata.window.DialogTab;
     public get tab(): azdata.window.DialogTab { return this._tab; }
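The commit also replaces the per-tab `parentLayout` locals and `Object.assign` calls with the shared `baseFormItemLayout` constant and object spread. The two build equivalent layout objects here, since the key sets do not overlap; a minimal standalone sketch of the difference (not from the commit):

    // Sketch only: both expressions produce the same merged layout object.
    const baseFormItemLayout = { horizontal: false, componentWidth: '400px' };

    // Before: merge via Object.assign (mutates its first argument)
    const oldStyle = Object.assign({ info: 'tooltip text' }, baseFormItemLayout);

    // After: merge via spread (no mutation; later properties win on conflicts)
    const newStyle = { ...baseFormItemLayout, info: 'tooltip text' };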
@@ -14,58 +36,157 @@ export class SparkAdvancedTab {
     private _referenceFilesInputBox: azdata.InputBoxComponent;
     private _referenceJARFilesInputBox: azdata.InputBoxComponent;
     private _referencePyFilesInputBox: azdata.InputBoxComponent;
+    private _driverMemoryInputBox: azdata.InputBoxComponent;
+    private _driverCoresInputBox: azdata.InputBoxComponent;
+    private _executorMemoryInputBox: azdata.InputBoxComponent;
+    private _executorCoresInputBox: azdata.InputBoxComponent;
+    private _executorCountInputBox: azdata.InputBoxComponent;
+    private _queueInputBox: azdata.InputBoxComponent;
+    private _configValuesInputBox: azdata.InputBoxComponent;
 
     constructor() {
         this._tab = azdata.window.createTab(localize('sparkJobSubmission.AdvancedTabName', "ADVANCED"));
 
         this._tab.registerContent(async (modelView) => {
             let builder = modelView.modelBuilder;
-            let parentLayout: azdata.FormItemLayout = {
-                horizontal: false,
-                componentWidth: '400px'
-            };
 
             let formContainer = builder.formContainer();
 
             this._referenceJARFilesInputBox = builder.inputBox().component();
-            formContainer.addFormItem({
-                component: this._referenceJARFilesInputBox,
-                title: localize('sparkJobSubmission.ReferenceJarList', "Reference Jars")
-            },
-                Object.assign(
-                    {
-                        info: localize('sparkJobSubmission.ReferenceJarListToolTip',
-                            "Jars to be placed in executor working directory. The Jar path needs to be an HDFS Path. Multiple paths should be split by semicolon (;)")
-                    },
-                    parentLayout));
+            formContainer.addFormItem(
+                {
+                    component: this._referenceJARFilesInputBox,
+                    title: localize('sparkJobSubmission.ReferenceJarList', "Reference Jars")
+                },
+                {
+                    ...baseFormItemLayout,
+                    info: localize('sparkJobSubmission.ReferenceJarListToolTip',
+                        "Jars to be placed in executor working directory. The Jar path needs to be an HDFS Path. Multiple paths should be split by semicolon (;)")
+                });
 
             this._referencePyFilesInputBox = builder.inputBox().component();
-            formContainer.addFormItem({
-                component: this._referencePyFilesInputBox,
-                title: localize('sparkJobSubmission.ReferencePyList', "Reference py Files")
-            },
-                Object.assign(
-                    {
-                        info: localize('sparkJobSubmission.ReferencePyListTooltip',
-                            "Py Files to be placed in executor working directory. The file path needs to be an HDFS Path. Multiple paths should be split by semicolon(;)")
-                    },
-                    parentLayout));
+            formContainer.addFormItem(
+                {
+                    component: this._referencePyFilesInputBox,
+                    title: localize('sparkJobSubmission.ReferencePyList', "Reference py Files")
+                },
+                {
+                    ...baseFormItemLayout,
+                    info: localize('sparkJobSubmission.ReferencePyListTooltip',
+                        "Py Files to be placed in executor working directory. The file path needs to be an HDFS Path. Multiple paths should be split by semicolon(;)")
+                });
 
             this._referenceFilesInputBox = builder.inputBox().component();
-            formContainer.addFormItem({
-                component: this._referenceFilesInputBox,
-                title: localize('sparkJobSubmission.ReferenceFilesList', "Reference Files")
-            },
-                Object.assign({
-                    info: localize('sparkJobSubmission.ReferenceFilesListTooltip',
-                        "Files to be placed in executor working directory. The file path needs to be an HDFS Path. Multiple paths should be split by semicolon(;)")
-                }, parentLayout));
+            formContainer.addFormItem(
+                {
+                    component: this._referenceFilesInputBox,
+                    title: localize('sparkJobSubmission.ReferenceFilesList', "Reference Files")
+                },
+                {
+                    ...baseFormItemLayout,
+                    info: localize('sparkJobSubmission.ReferenceFilesListTooltip',
+                        "Files to be placed in executor working directory. The file path needs to be an HDFS Path. Multiple paths should be split by semicolon(;)")
+                });
 
+            this._driverMemoryInputBox = builder.inputBox().component();
+            formContainer.addFormItem(
+                {
+                    component: this._driverMemoryInputBox,
+                    title: localize('sparkJobSubmission.driverMemory', "Driver Memory")
+                },
+                {
+                    ...baseFormItemLayout,
+                    info: localize('sparkJobSubmission.driverMemoryTooltip', "Amount of memory to allocate to the driver. Specify units as part of value. Example 512M or 2G.")
+                });
+
+            this._driverCoresInputBox = builder.inputBox()
+                .withProperties<azdata.InputBoxProperties>({ inputType: 'number', min: 1 })
+                .component();
+            formContainer.addFormItem(
+                {
+                    component: this._driverCoresInputBox,
+                    title: localize('sparkJobSubmission.driverCores', "Driver Cores")
+                },
+                {
+                    ...baseFormItemLayout,
+                    info: localize('sparkJobSubmission.driverCoresTooltip', "Amount of CPU cores to allocate to the driver.")
+                });
+
+            this._executorMemoryInputBox = builder.inputBox().component();
+            formContainer.addFormItem(
+                {
+                    component: this._executorMemoryInputBox,
+                    title: localize('sparkJobSubmission.executorMemory', "Executor Memory")
+                },
+                {
+                    ...baseFormItemLayout,
+                    info: localize('sparkJobSubmission.executorMemoryTooltip', "Amount of memory to allocate to the executor. Specify units as part of value. Example 512M or 2G.")
+                });
+
+            this._executorCoresInputBox = builder.inputBox()
+                .withProperties<azdata.InputBoxProperties>({ inputType: 'number', min: 1 })
+                .component();
+            formContainer.addFormItem(
+                {
+                    component: this._executorCoresInputBox,
+                    title: localize('sparkJobSubmission.executorCores', "Executor Cores")
+                },
+                {
+                    ...baseFormItemLayout,
+                    info: localize('sparkJobSubmission.executorCoresTooltip', "Amount of CPU cores to allocate to the executor.")
+                });
+
+            this._executorCountInputBox = builder.inputBox()
+                .withProperties<azdata.InputBoxProperties>({ inputType: 'number', min: 1 })
+                .component();
+            formContainer.addFormItem(
+                {
+                    component: this._executorCountInputBox,
+                    title: localize('sparkJobSubmission.executorCount', "Executor Count")
+                },
+                {
+                    ...baseFormItemLayout,
+                    info: localize('sparkJobSubmission.executorCountTooltip', "Number of instances of the executor to run.")
+                });
+
+            this._queueInputBox = builder.inputBox().component();
+            formContainer.addFormItem(
+                {
+                    component: this._queueInputBox,
+                    title: localize('sparkJobSubmission.queueName', "Queue Name")
+                },
+                {
+                    ...baseFormItemLayout,
+                    info: localize('sparkJobSubmission.queueNameTooltip', "Name of the Spark queue to execute the session in.")
+                });
+
+            this._configValuesInputBox = builder.inputBox().component();
+            formContainer.addFormItem(
+                {
+                    component: this._configValuesInputBox,
+                    title: localize('sparkJobSubmission.configValues', "Configuration Values")
+                },
+                {
+                    ...baseFormItemLayout,
+                    info: localize('sparkJobSubmission.configValuesTooltip', "List of name value pairs containing Spark configuration values. Encoded as JSON dictionary. Example: '{\"name\":\"value\", \"name2\":\"value2\"}'.")
+                });
+
             await modelView.initializeModel(formContainer.component());
         });
     }
 
-    public getInputValues(): string[] {
-        return [this._referenceJARFilesInputBox.value, this._referencePyFilesInputBox.value, this._referenceFilesInputBox.value];
+    public getAdvancedConfigValues(): SparkAdvancedConfigModel {
+        return {
+            jarFiles: this._referenceJARFilesInputBox.value,
+            pyFiles: this._referencePyFilesInputBox.value,
+            otherFiles: this._referenceFilesInputBox.value,
+            driverMemory: this._driverMemoryInputBox.value,
+            driverCores: +this._driverCoresInputBox.value,
+            executorMemory: this._executorMemoryInputBox.value,
+            executorCores: +this._executorCoresInputBox.value,
+            executorCount: +this._executorCountInputBox.value,
+            queueName: this._queueInputBox.value,
+            configValues: this._configValuesInputBox.value
+        };
     }
 }

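Worth noting how the numeric fields behave: getAdvancedConfigValues converts the number input boxes with a unary `+`, and an empty input coerces to 0, which is falsy, so the service layer below simply omits unset numeric fields instead of sending 0 to Livy. A standalone sketch of the coercion (not part of the commit):

    // Coercion behavior the numeric fields rely on:
    console.log(+'');     // 0   -> falsy, field is omitted from the request
    console.log(+'4');    // 4   -> sent as driverCores/executorCores/numExecutors
    console.log(+'abc');  // NaN -> also falsy, field is omitted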
@@ -16,6 +16,22 @@ import { SparkFileSource } from './sparkJobSubmissionService';
 
 const localize = nls.loadMessageBundle();
 
+/**
+ * Configuration values for the general tab of the spark job submission dialog.
+ * See https://livy.incubator.apache.org/docs/latest/rest-api.html for more information
+ * on the specific values.
+ */
+export interface SparkConfigModel {
+    jobName: string,
+    mainClass: string,
+    arguments: string
+}
+
+const baseFormItemLayout: azdata.FormItemLayout = {
+    horizontal: false,
+    componentWidth: '400px'
+};
+
 export class SparkConfigurationTab {
     private _tab: azdata.window.DialogTab;
     public get tab(): azdata.window.DialogTab { return this._tab; }
@@ -37,10 +53,6 @@ export class SparkConfigurationTab {
 
         this._tab.registerContent(async (modelView) => {
             let builder = modelView.modelBuilder;
-            let parentLayout: azdata.FormItemLayout = {
-                horizontal: false,
-                componentWidth: '400px'
-            };
 
             let formContainer = builder.formContainer();
 
@@ -53,7 +65,7 @@ export class SparkConfigurationTab {
                 component: this._jobNameInputBox,
                 title: localize('sparkJobSubmission.JobName', "Job Name"),
                 required: true
-            }, parentLayout);
+            }, baseFormItemLayout);
 
             this._sparkContextLabel = builder.text().withProperties({
                 value: this._dataModel.getSparkClusterUrl()
@@ -61,7 +73,7 @@ export class SparkConfigurationTab {
             formContainer.addFormItem({
                 component: this._sparkContextLabel,
                 title: localize('sparkJobSubmission.SparkCluster', "Spark Cluster")
-            }, parentLayout);
+            }, baseFormItemLayout);
 
             this._fileSourceDropDown = builder.dropDown().withProperties<azdata.DropDownProperties>({
                 values: [SparkFileSource.Local.toString(), SparkFileSource.HDFS.toString()],
@@ -161,23 +173,24 @@ export class SparkConfigurationTab {
                 component: this._sourceFlexContainerWithHint,
                 title: localize('sparkJobSubmission.MainFilePath', "JAR/py File"),
                 required: true
-            }, parentLayout);
+            }, baseFormItemLayout);
 
             this._mainClassInputBox = builder.inputBox().component();
             formContainer.addFormItem({
                 component: this._mainClassInputBox,
                 title: localize('sparkJobSubmission.MainClass', "Main Class"),
                 required: true
-            }, parentLayout);
+            }, baseFormItemLayout);
 
             this._argumentsInputBox = builder.inputBox().component();
             formContainer.addFormItem({
                 component: this._argumentsInputBox,
                 title: localize('sparkJobSubmission.Arguments', "Arguments")
             },
-                Object.assign(
-                    { info: localize('sparkJobSubmission.ArgumentsTooltip', "Command line arguments used in your main class, multiple arguments should be split by space.") },
-                    parentLayout));
+                {
+                    ...baseFormItemLayout,
+                    info: localize('sparkJobSubmission.ArgumentsTooltip', "Command line arguments used in your main class, multiple arguments should be split by space.")
+                });
 
             await modelView.initializeModel(formContainer.component());
         });
@@ -242,8 +255,12 @@ export class SparkConfigurationTab {
         }
     }
 
-    public getInputValues(): string[] {
-        return [this._jobNameInputBox.value, this._mainClassInputBox.value, this._argumentsInputBox.value];
+    public getSparkConfigValues(): SparkConfigModel {
+        return {
+            jobName: this._jobNameInputBox.value ?? '',
+            mainClass: this._mainClassInputBox.value ?? '',
+            arguments: this._argumentsInputBox.value ?? ''
+        };
     }
 
     public async pickFile(): Promise<string> {
@@ -56,7 +56,7 @@ export class SparkJobSubmissionDialog {
 
     private onClickOk(): void {
         let jobName = localize('sparkJobSubmission.SubmitSparkJob', "{0} Spark Job Submission:",
-            this._sparkConfigTab.getInputValues()[0]);
+            this._sparkConfigTab.getSparkConfigValues().jobName);
         azdata.tasks.startBackgroundOperation(
             {
                 connection: this.sqlClusterConnection.connection,
@@ -96,8 +96,8 @@ export class SparkJobSubmissionDialog {
 
             // 2. Submit job to cluster.
             let submissionSettings: SparkJobSubmissionInput = this.getSubmissionInput();
-            this.outputChannel.appendLine(this.addInfoTag(LocalizedConstants.sparkJobSubmissionPrepareSubmitJob(submissionSettings.jobName)));
-            op.updateStatus(azdata.TaskStatus.InProgress, LocalizedConstants.sparkJobSubmissionPrepareSubmitJob(submissionSettings.jobName));
+            this.outputChannel.appendLine(this.addInfoTag(LocalizedConstants.sparkJobSubmissionPrepareSubmitJob(submissionSettings.config.jobName)));
+            op.updateStatus(azdata.TaskStatus.InProgress, LocalizedConstants.sparkJobSubmissionPrepareSubmitJob(submissionSettings.config.jobName));
             let livyBatchId = await this._dataModel.submitBatchJobByLivy(submissionSettings);
             vscode.window.showInformationMessage(LocalizedConstants.sparkJobSubmissionSparkJobHasBeenSubmitted);
             this.outputChannel.appendLine(this.addInfoTag(LocalizedConstants.sparkJobSubmissionSparkJobHasBeenSubmitted));
@@ -146,10 +146,14 @@ export class SparkJobSubmissionDialog {
     }
 
     private getSubmissionInput(): SparkJobSubmissionInput {
-        let generalConfig = this._sparkConfigTab.getInputValues();
-        let advancedConfig = this._sparkAdvancedTab.getInputValues();
-        return new SparkJobSubmissionInput(generalConfig[0], this._dataModel.hdfsSubmitFilePath, generalConfig[1], generalConfig[2],
-            advancedConfig[0], advancedConfig[1], advancedConfig[2]);
+        const generalConfig = this._sparkConfigTab.getSparkConfigValues();
+        const advancedConfig = this._sparkAdvancedTab.getAdvancedConfigValues();
+        return new SparkJobSubmissionInput(
+            {
+                sparkFile: this._dataModel.hdfsSubmitFilePath,
+                ...generalConfig,
+                ...advancedConfig
+            });
     }
 
     private addInfoTag(info: string): string {
@@ -14,74 +14,112 @@ import * as request from 'request-light';
 
 export class SparkJobSubmissionService {
     public async submitBatchJob(submissionArgs: SparkJobSubmissionInput): Promise<string> {
-        try {
-            let livyUrl: string = `https://${submissionArgs.host}:${submissionArgs.port}${submissionArgs.livyPath}/`;
+        let livyUrl: string = `https://${submissionArgs.host}:${submissionArgs.port}${submissionArgs.livyPath}/`;
 
-            // Get correct authentication headers
-            let headers = await this.getAuthenticationHeaders(submissionArgs);
+        // Get correct authentication headers
+        let headers = await this.getAuthenticationHeaders(submissionArgs);
 
-            let options: request.XHROptions = {
-                url: livyUrl,
-                type: 'POST',
-                strictSSL: !auth.getIgnoreSslVerificationConfigSetting(),
-                data: {
-                    file: submissionArgs.sparkFile,
-                    proxyUser: submissionArgs.user,
-                    className: submissionArgs.mainClass,
-                    name: submissionArgs.jobName
-                },
-                // authentication headers
-                headers: headers
-            };
+        let options: request.XHROptions = {
+            url: livyUrl,
+            type: 'POST',
+            strictSSL: !auth.getIgnoreSslVerificationConfigSetting(),
+            data: {
+                file: submissionArgs.config.sparkFile,
+                proxyUser: submissionArgs.user,
+                className: submissionArgs.config.mainClass,
+                name: submissionArgs.config.jobName
+            },
+            // authentication headers
+            headers: headers
+        };
 
-            // Set arguments
-            if (submissionArgs.jobArguments && submissionArgs.jobArguments.trim()) {
-                let argsList = submissionArgs.jobArguments.split(' ');
-                if (argsList.length > 0) {
-                    options.data['args'] = argsList;
-                }
-            }
+        // Now set the other parameters based on the user configuration - see
+        // https://livy.incubator.apache.org/docs/latest/rest-api.html for more detailed information
+
+        // Set arguments
+        const args = submissionArgs.config.arguments?.trim();
+        if (args) {
+            const argsList = args.split(' ');
+            if (argsList.length > 0) {
+                options.data['args'] = argsList;
+            }
+        }
-            // Set jars files
-            if (submissionArgs.jarFileList && submissionArgs.jarFileList.trim()) {
-                let jarList = submissionArgs.jarFileList.split(';');
-                if (jarList.length > 0) {
-                    options.data['jars'] = jarList;
-                }
-            }
-
-            // Set py files
-            if (submissionArgs.pyFileList && submissionArgs.pyFileList.trim()) {
-                let pyList = submissionArgs.pyFileList.split(';');
-                if (pyList.length > 0) {
-                    options.data['pyFiles'] = pyList;
-                }
-            }
-
-            // Set other files
-            if (submissionArgs.otherFileList && submissionArgs.otherFileList.trim()) {
-                let otherList = submissionArgs.otherFileList.split(';');
-                if (otherList.length > 0) {
-                    options.data['files'] = otherList;
-                }
-            }
-
-            options.data = JSON.stringify(options.data);
-
-            // Note this is currently required to be called each time since request-light is overwriting
-            // the setting passed in through the options. If/when that gets fixed this can be removed
-            request.configure(null, !auth.getIgnoreSslVerificationConfigSetting());
-
-            const response = JSON.parse((await request.xhr(options)).responseText);
-            if (response && utils.isValidNumber(response.id)) {
-                return response.id;
-            }
-
-            return Promise.reject(new Error(localize('sparkJobSubmission.LivyNoBatchIdReturned',
-                "No Spark job batch id is returned from response.{0}[Error] {1}", os.EOL, JSON.stringify(response))));
-        } catch (error) {
-            return Promise.reject(error);
-        }
+        // Set jars files
+        const jarFiles = submissionArgs.config.jarFiles?.trim();
+        if (jarFiles) {
+            const jarList = jarFiles.split(';');
+            if (jarList.length > 0) {
+                options.data['jars'] = jarList;
+            }
+        }
+
+        // Set py files
+        if (submissionArgs.config.pyFiles?.trim()) {
+            const pyList = submissionArgs.config.pyFiles.split(';');
+            if (pyList.length > 0) {
+                options.data['pyFiles'] = pyList;
+            }
+        }
+
+        // Set other files
+        const otherFiles = submissionArgs.config.otherFiles?.trim();
+        if (otherFiles) {
+            const otherList = otherFiles.split(';');
+            if (otherList.length > 0) {
+                options.data['files'] = otherList;
+            }
+        }
+
+        // Set driver memory
+        const driverMemory = submissionArgs.config.driverMemory?.trim();
+        if (driverMemory) {
+            options.data['driverMemory'] = driverMemory;
+        }
+
+        // Set driver cores
+        if (submissionArgs.config.driverCores) {
+            options.data['driverCores'] = submissionArgs.config.driverCores;
+        }
+
+        // Set executor memory
+        const executorMemory = submissionArgs.config.executorMemory?.trim();
+        if (executorMemory) {
+            options.data['executorMemory'] = executorMemory;
+        }
+
+        // Set executor cores
+        if (submissionArgs.config.executorCores) {
+            options.data['executorCores'] = submissionArgs.config.executorCores;
+        }
+
+        // Set executor count
+        if (submissionArgs.config.executorCount) {
+            options.data['numExecutors'] = submissionArgs.config.executorCount;
+        }
+
+        // Set queue name
+        if (submissionArgs.config.queueName) {
+            options.data['queue'] = submissionArgs.config.queueName;
+        }
+
+        // Set config values
+        const configurationValues = submissionArgs.config.configValues?.trim();
+        if (configurationValues) {
+            options.data['conf'] = configurationValues;
+        }
+
+        options.data = JSON.stringify(options.data);
+
+        // Note this is currently required to be called each time since request-light is overwriting
+        // the setting passed in through the options. If/when that gets fixed this can be removed
+        request.configure(null, !auth.getIgnoreSslVerificationConfigSetting());
+
+        const response = JSON.parse((await request.xhr(options)).responseText);
+        if (response && utils.isValidNumber(response.id)) {
+            return response.id;
+        }
+
+        throw new Error(localize('sparkJobSubmission.LivyNoBatchIdReturned',
+            "No Spark job batch id is returned from response.{0}[Error] {1}", os.EOL, JSON.stringify(response)));
     }
 
     private async getAuthenticationHeaders(submissionArgs: SparkJobSubmissionInput) {
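All of the new optional parameters follow the same guard pattern: trim string values and only attach a key to the request body when the result is truthy, with list-valued fields additionally split on ';'. A condensed standalone sketch of that pattern (hypothetical helper, not in the commit):

    // Hypothetical helper illustrating the guard pattern used above.
    function setIfPresent(data: Record<string, unknown>, key: string, value?: string, splitOn?: string): void {
        const trimmed = value?.trim();
        if (trimmed) {
            data[key] = splitOn ? trimmed.split(splitOn) : trimmed;
        }
    }

    const data: Record<string, unknown> = {};
    setIfPresent(data, 'jars', 'a.jar;b.jar', ';');  // data.jars = ['a.jar', 'b.jar']
    setIfPresent(data, 'driverMemory', '   ');       // whitespace-only: key is omitted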
@@ -97,32 +135,28 @@ export class SparkJobSubmissionService {
     }
 
     public async getYarnAppId(submissionArgs: SparkJobSubmissionInput, livyBatchId: string): Promise<LivyLogResponse> {
-        try {
-            let livyUrl = `https://${submissionArgs.host}:${submissionArgs.port}${submissionArgs.livyPath}/${livyBatchId}/log`;
-            let headers = await this.getAuthenticationHeaders(submissionArgs);
+        let livyUrl = `https://${submissionArgs.host}:${submissionArgs.port}${submissionArgs.livyPath}/${livyBatchId}/log`;
+        let headers = await this.getAuthenticationHeaders(submissionArgs);
 
-            let options: request.XHROptions = {
-                url: livyUrl,
-                type: 'GET',
-                strictSSL: !auth.getIgnoreSslVerificationConfigSetting(),
-                // authentication headers
-                headers: headers
-            };
+        let options: request.XHROptions = {
+            url: livyUrl,
+            type: 'GET',
+            strictSSL: !auth.getIgnoreSslVerificationConfigSetting(),
+            // authentication headers
+            headers: headers
+        };
 
-            // Note this is currently required to be called each time since request-light is overwriting
-            // the setting passed in through the options. If/when that gets fixed this can be removed
-            request.configure(null, !auth.getIgnoreSslVerificationConfigSetting());
+        // Note this is currently required to be called each time since request-light is overwriting
+        // the setting passed in through the options. If/when that gets fixed this can be removed
+        request.configure(null, !auth.getIgnoreSslVerificationConfigSetting());
 
-            const response = JSON.parse((await request.xhr(options)).responseText);
-            if (response && response.log) {
-                return this.extractYarnAppIdFromLog(response.log);
-            }
+        const response = JSON.parse((await request.xhr(options)).responseText);
+        if (response && response.log) {
+            return this.extractYarnAppIdFromLog(response.log);
+        }
 
-            return Promise.reject(localize('sparkJobSubmission.LivyNoLogReturned',
-                "No log is returned within response.{0}[Error] {1}", os.EOL, JSON.stringify(response)));
-        } catch (error) {
-            return Promise.reject(error);
-        }
+        throw new Error(localize('sparkJobSubmission.LivyNoLogReturned',
+            "No log is returned within response.{0}[Error] {1}", os.EOL, JSON.stringify(response)));
     }
 
@@ -148,6 +182,27 @@ export class SparkJobSubmissionService {
     }
 }
 
+/**
+ * The configuration values for the spark job submission. See https://livy.incubator.apache.org/docs/latest/rest-api.html
+ * for more detailed information.
+ */
+export interface SparkJobSubmissionConfig {
+    readonly jobName: string,
+    readonly sparkFile: string,
+    readonly mainClass: string,
+    readonly arguments?: string,
+    readonly jarFiles?: string,
+    readonly pyFiles?: string,
+    readonly otherFiles?: string,
+    readonly driverMemory?: string,
+    readonly driverCores?: number,
+    readonly executorMemory?: string,
+    readonly executorCores?: number,
+    readonly executorCount?: number,
+    readonly queueName?: string,
+    readonly configValues?: string
+}
+
 export class SparkJobSubmissionInput {
     public setSparkClusterInfo(sqlClusterConnection: SqlClusterConnection): void {
         this._host = sqlClusterConnection.host;
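With the constructor collapsed to a single config parameter in the next hunk, a call site builds the input roughly like this (hypothetical values; only jobName, sparkFile and mainClass are required):

    // Hypothetical call site for the reworked constructor shown below.
    const input = new SparkJobSubmissionInput({
        jobName: 'My Spark Job',
        sparkFile: 'hdfs://jobs/job.jar',
        mainClass: 'com.example.Main',
        executorCount: 3,
        queueName: 'default'
    });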
@@ -159,28 +214,14 @@ export class SparkJobSubmissionInput {
     }
 
     constructor(
-        private readonly _jobName: string,
-        private readonly _sparkFile: string,
-        private readonly _mainClass: string,
-        private readonly _arguments: string,
-        private readonly _jarFileList: string,
-        private readonly _pyFileList: string,
-        private readonly _otherFileList: string,
+        public readonly config: SparkJobSubmissionConfig,
         private _host?: string,
        private _port?: number,
         private _livyPath?: string,
         private _user?: string,
         private _password?: string,
-        private _isIntegratedAuth?: boolean) {
-    }
+        private _isIntegratedAuth?: boolean) { }
 
-    public get jobName(): string { return this._jobName; }
-    public get sparkFile(): string { return this._sparkFile; }
-    public get mainClass(): string { return this._mainClass; }
-    public get jobArguments(): string { return this._arguments; }
-    public get jarFileList(): string { return this._jarFileList; }
-    public get otherFileList(): string { return this._otherFileList; }
-    public get pyFileList(): string { return this._pyFileList; }
     public get host(): string { return this._host; }
     public get port(): number { return this._port; }
     public get livyPath(): string { return this._livyPath; }
@@ -190,8 +231,8 @@ export class SparkJobSubmissionInput {
 }
 
 export enum SparkFileSource {
-    HDFS = <any>'HDFS',
-    Local = <any>'Local'
+    HDFS = 'HDFS',
+    Local = 'Local'
 }
 
 export class LivyLogResponse {
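The dropped `<any>` casts were presumably a workaround from before TypeScript 2.4 introduced native string enums; with string enums the members are plain string values, so calls like the `SparkFileSource.Local.toString()` in the `_fileSourceDropDown` values keep working unchanged. A quick standalone check (sketch):

    enum SparkFileSource {
        HDFS = 'HDFS',
        Local = 'Local'
    }

    console.log(SparkFileSource.HDFS.toString()); // 'HDFS'
    console.log(SparkFileSource.Local);           // 'Local'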