Hygiene linting for extensions + new rule (#7843)
* linting for extensions + new rule
* Remove unneeded array
* Fix spelling mistake
* Fix bad merge
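The convention enforced throughout the hunks below is that localization keys stay single-quoted while user-visible message strings become double-quoted. The rule itself lives in the build's hygiene/lint tooling and is not part of this diff; purely as an illustration (the helper name and file-scanning approach are hypothetical, not the repo's implementation), a check for the old style could look roughly like this:

// Hypothetical sketch only -- not the hygiene rule added by this commit.
// Flags localize() calls whose second (user-visible) argument is still single-quoted.
import * as fs from 'fs';

const singleQuotedMessage = /localize\(\s*'[^']*'\s*,\s*'/;

export function findQuoteStyleViolations(filePath: string): number[] {
	const violations: number[] = [];
	fs.readFileSync(filePath, 'utf8')
		.split(/\r?\n/)
		.forEach((line, index) => {
			if (singleQuotedMessage.test(line)) {
				violations.push(index + 1); // 1-based line number for reporting
			}
		});
	return violations;
}

Run over the first hunk below, for example, such a check would report the '-' line and pass the '+' line.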
@@ -7,7 +7,7 @@ import * as nls from 'vscode-nls';
 const localize = nls.loadMessageBundle();
 
 // HDFS Constants //////////////////////////////////////////////////////////
-export const msgMissingNodeContext = localize('msgMissingNodeContext', 'Node Command called without any node passed');
+export const msgMissingNodeContext = localize('msgMissingNodeContext', "Node Command called without any node passed");
 
 // HDFS Manage Access Dialog Constants ////////////////////////////////////
 
@@ -37,16 +37,16 @@ export const applyRecursivelyText = localize('mssql.applyRecursively', "Apply Re
 export function errorApplyingAclChanges(errMsg: string): string { return localize('mssql.errorApplyingAclChanges', "Unexpected error occurred while applying changes : {0}", errMsg); }
 
 // Spark Job Submission Constants //////////////////////////////////////////
-export const sparkLocalFileDestinationHint = localize('sparkJobSubmission.LocalFileDestinationHint', 'Local file will be uploaded to HDFS. ');
-export const sparkJobSubmissionEndMessage = localize('sparkJobSubmission.SubmissionEndMessage', '.......................... Submit Spark Job End ............................');
-export function sparkJobSubmissionPrepareUploadingFile(localPath: string, clusterFolder: string): string { return localize('sparkJobSubmission.PrepareUploadingFile', 'Uploading file from local {0} to HDFS folder: {1}', localPath, clusterFolder); }
-export const sparkJobSubmissionUploadingFileSucceeded = localize('sparkJobSubmission.UploadingFileSucceeded', 'Upload file to cluster Succeeded!');
-export function sparkJobSubmissionUploadingFileFailed(err: string): string { return localize('sparkJobSubmission.UploadingFileFailed', 'Upload file to cluster Failed. {0}', err); }
-export function sparkJobSubmissionPrepareSubmitJob(jobName: string): string { return localize('sparkJobSubmission.PrepareSubmitJob', 'Submitting job {0} ... ', jobName); }
-export const sparkJobSubmissionSparkJobHasBeenSubmitted = localize('sparkJobSubmission.SubmitJobFinished', 'The Spark Job has been submitted.');
-export function sparkJobSubmissionSubmitJobFailed(err: string): string { return localize('sparkJobSubmission.SubmitJobFailed', 'Spark Job Submission Failed. {0} ', err); }
-export function sparkJobSubmissionYarnUIMessage(yarnUIURL: string): string { return localize('sparkJobSubmission.YarnUIMessage', 'YarnUI Url: {0} ', yarnUIURL); }
-export function sparkJobSubmissionSparkHistoryLinkMessage(sparkHistoryLink: string): string { return localize('sparkJobSubmission.SparkHistoryLinkMessage', 'Spark History Url: {0} ', sparkHistoryLink); }
-export function sparkJobSubmissionGetApplicationIdFailed(err: string): string { return localize('sparkJobSubmission.GetApplicationIdFailed', 'Get Application Id Failed. {0}', err); }
-export function sparkJobSubmissionLocalFileNotExisted(path: string): string { return localize('sparkJobSubmission.LocalFileNotExisted', 'Local file {0} does not existed. ', path); }
-export const sparkJobSubmissionNoSqlBigDataClusterFound = localize('sparkJobSubmission.NoSqlBigDataClusterFound', 'No SQL Server Big Data Cluster found.');
+export const sparkLocalFileDestinationHint = localize('sparkJobSubmission.LocalFileDestinationHint', "Local file will be uploaded to HDFS. ");
+export const sparkJobSubmissionEndMessage = localize('sparkJobSubmission.SubmissionEndMessage', ".......................... Submit Spark Job End ............................");
+export function sparkJobSubmissionPrepareUploadingFile(localPath: string, clusterFolder: string): string { return localize('sparkJobSubmission.PrepareUploadingFile', "Uploading file from local {0} to HDFS folder: {1}", localPath, clusterFolder); }
+export const sparkJobSubmissionUploadingFileSucceeded = localize('sparkJobSubmission.UploadingFileSucceeded', "Upload file to cluster Succeeded!");
+export function sparkJobSubmissionUploadingFileFailed(err: string): string { return localize('sparkJobSubmission.UploadingFileFailed', "Upload file to cluster Failed. {0}", err); }
+export function sparkJobSubmissionPrepareSubmitJob(jobName: string): string { return localize('sparkJobSubmission.PrepareSubmitJob', "Submitting job {0} ... ", jobName); }
+export const sparkJobSubmissionSparkJobHasBeenSubmitted = localize('sparkJobSubmission.SubmitJobFinished', "The Spark Job has been submitted.");
+export function sparkJobSubmissionSubmitJobFailed(err: string): string { return localize('sparkJobSubmission.SubmitJobFailed', "Spark Job Submission Failed. {0} ", err); }
+export function sparkJobSubmissionYarnUIMessage(yarnUIURL: string): string { return localize('sparkJobSubmission.YarnUIMessage', "YarnUI Url: {0} ", yarnUIURL); }
+export function sparkJobSubmissionSparkHistoryLinkMessage(sparkHistoryLink: string): string { return localize('sparkJobSubmission.SparkHistoryLinkMessage', "Spark History Url: {0} ", sparkHistoryLink); }
+export function sparkJobSubmissionGetApplicationIdFailed(err: string): string { return localize('sparkJobSubmission.GetApplicationIdFailed', "Get Application Id Failed. {0}", err); }
+export function sparkJobSubmissionLocalFileNotExisted(path: string): string { return localize('sparkJobSubmission.LocalFileNotExisted', "Local file {0} does not existed. ", path); }
+export const sparkJobSubmissionNoSqlBigDataClusterFound = localize('sparkJobSubmission.NoSqlBigDataClusterFound', "No SQL Server Big Data Cluster found.");
@@ -33,7 +33,7 @@ import { IconPathHelper } from './iconHelper';
 import * as nls from 'vscode-nls';
 
 const localize = nls.loadMessageBundle();
-const msgSampleCodeDataFrame = localize('msgSampleCodeDataFrame', 'This sample code loads the file into a data frame and shows the first 10 results.');
+const msgSampleCodeDataFrame = localize('msgSampleCodeDataFrame', "This sample code loads the file into a data frame and shows the first 10 results.");
 
 export async function activate(context: vscode.ExtensionContext): Promise<IExtension> {
 // lets make sure we support this platform first
@@ -180,7 +180,7 @@ async function handleNewNotebookTask(oeContext?: azdata.ObjectExplorerContext, p
 }
 
 async function handleOpenNotebookTask(profile: azdata.IConnectionProfile): Promise<void> {
-let notebookFileTypeName = localize('notebookFileType', 'Notebooks');
+let notebookFileTypeName = localize('notebookFileType', "Notebooks");
 let filter = {};
 filter[notebookFileTypeName] = 'ipynb';
 let uris = await vscode.window.showOpenDialog({
@@ -193,7 +193,7 @@ async function handleOpenNotebookTask(profile: azdata.IConnectionProfile): Promi
 // Verify this is a .ipynb file since this isn't actually filtered on Mac/Linux
 if (path.extname(fileUri.fsPath) !== '.ipynb') {
 // in the future might want additional supported types
-vscode.window.showErrorMessage(localize('unsupportedFileType', 'Only .ipynb Notebooks are supported'));
+vscode.window.showErrorMessage(localize('unsupportedFileType', "Only .ipynb Notebooks are supported"));
 } else {
 await azdata.nb.showNotebookDocument(fileUri, {
 connectionProfile: profile,
@@ -18,7 +18,7 @@ export class CancelableStream extends Transform {
 
 public _transform(chunk: any, encoding: string, callback: Function): void {
 if (this.cancelationToken && this.cancelationToken.token.isCancellationRequested) {
-callback(new Error(localize('streamCanceled', 'Stream operation canceled by the user')));
+callback(new Error(localize('streamCanceled', "Stream operation canceled by the user")));
 } else {
 this.push(chunk);
 callback();
@@ -140,7 +140,7 @@ export abstract class ProgressCommand extends Command {
 const tokenSource = new vscode.CancellationTokenSource();
 const statusBarItem = this.apiWrapper.createStatusBarItem(vscode.StatusBarAlignment.Left);
 disposables.push(vscode.Disposable.from(statusBarItem));
-statusBarItem.text = localize('progress', '$(sync~spin) {0}...', label);
+statusBarItem.text = localize('progress', "$(sync~spin) {0}...", label);
 if (isCancelable) {
 const cancelCommandId = `cancelProgress${ProgressCommand.progressId++}`;
 disposables.push(this.apiWrapper.registerCommand(cancelCommandId, async () => {
@@ -148,7 +148,7 @@ export abstract class ProgressCommand extends Command {
 tokenSource.cancel();
 }
 }));
-statusBarItem.tooltip = localize('cancelTooltip', 'Cancel');
+statusBarItem.tooltip = localize('cancelTooltip', "Cancel");
 statusBarItem.command = cancelCommandId;
 }
 statusBarItem.show();
@@ -170,7 +170,7 @@ export abstract class ProgressCommand extends Command {
 private async confirmCancel(): Promise<boolean> {
 return await this.prompter.promptSingle<boolean>(<IQuestion>{
 type: QuestionTypes.confirm,
-message: localize('cancel', 'Cancel operation?'),
+message: localize('cancel', "Cancel operation?"),
 default: true
 });
 }
@@ -179,7 +179,7 @@ export abstract class ProgressCommand extends Command {
 export function registerSearchServerCommand(appContext: AppContext): void {
 appContext.apiWrapper.registerCommand('mssql.searchServers', () => {
 vscode.window.showInputBox({
-placeHolder: localize('mssql.searchServers', 'Search Server Names')
+placeHolder: localize('mssql.searchServers', "Search Server Names")
 }).then((stringSearch) => {
 if (stringSearch) {
 vscode.commands.executeCommand('registeredServers.searchServer', (stringSearch));
@@ -84,15 +84,15 @@ export class SqlClusterConnection {
 
 private validate(connectionInfo: azdata.ConnectionInfo): void {
 if (!connectionInfo) {
-throw new Error(localize('connectionInfoUndefined', 'ConnectionInfo is undefined.'));
+throw new Error(localize('connectionInfoUndefined', "ConnectionInfo is undefined."));
 }
 if (!connectionInfo.options) {
-throw new Error(localize('connectionInfoOptionsUndefined', 'ConnectionInfo.options is undefined.'));
+throw new Error(localize('connectionInfoOptionsUndefined', "ConnectionInfo.options is undefined."));
 }
 let missingProperties: string[] = this.getMissingProperties(connectionInfo);
 if (missingProperties && missingProperties.length > 0) {
 throw new Error(localize('connectionInfoOptionsMissingProperties',
-'Some missing properties in connectionInfo.options: {0}',
+"Some missing properties in connectionInfo.options: {0}",
 missingProperties.join(', ')));
 }
 }
@@ -78,7 +78,7 @@ export class UploadFilesCommand extends ProgressCommand {
 canSelectFiles: true,
 canSelectFolders: false,
 canSelectMany: true,
-openLabel: localize('lblUploadFiles', 'Upload'),
+openLabel: localize('lblUploadFiles', "Upload"),
 filters: filter
 };
 let fileUris: vscode.Uri[] = await this.apiWrapper.showOpenDialog(options);
@@ -86,8 +86,8 @@ export class UploadFilesCommand extends ProgressCommand {
 let files: IFile[] = await Promise.all(fileUris.map(uri => uri.fsPath).map(this.mapPathsToFiles()));
 await this.executeWithProgress(
 (cancelToken: vscode.CancellationTokenSource) => this.writeFiles(files, folderNode, cancelToken),
-localize('uploading', 'Uploading files to HDFS'), true,
-() => this.apiWrapper.showInformationMessage(localize('uploadCanceled', 'Upload operation was canceled')));
+localize('uploading', "Uploading files to HDFS"), true,
+() => this.apiWrapper.showInformationMessage(localize('uploadCanceled', "Upload operation was canceled")));
 if (context.type === constants.ObjectExplorerService) {
 let objectExplorerNode = await azdata.objectexplorer.getNode(context.explorerContext.connectionProfile.id, folderNode.getNodeInfo().nodePath);
 await objectExplorerNode.refresh();
@@ -96,7 +96,7 @@ export class UploadFilesCommand extends ProgressCommand {
 }
 } catch (err) {
 this.apiWrapper.showErrorMessage(
-localize('uploadError', 'Error uploading files: {0}', utils.getErrorMessage(err, true)));
+localize('uploadError', "Error uploading files: {0}", utils.getErrorMessage(err, true)));
 }
 }
 
@@ -152,8 +152,8 @@ export class MkDirCommand extends ProgressCommand {
 if (fileName && fileName.length > 0) {
 await this.executeWithProgress(
 async (cancelToken: vscode.CancellationTokenSource) => this.mkDir(fileName, folderNode, cancelToken),
-localize('makingDir', 'Creating directory'), true,
-() => this.apiWrapper.showInformationMessage(localize('mkdirCanceled', 'Operation was canceled')));
+localize('makingDir', "Creating directory"), true,
+() => this.apiWrapper.showInformationMessage(localize('mkdirCanceled', "Operation was canceled")));
 if (context.type === constants.ObjectExplorerService) {
 let objectExplorerNode = await azdata.objectexplorer.getNode(context.explorerContext.connectionProfile.id, folderNode.getNodeInfo().nodePath);
 await objectExplorerNode.refresh();
@@ -162,7 +162,7 @@ export class MkDirCommand extends ProgressCommand {
 }
 } catch (err) {
 this.apiWrapper.showErrorMessage(
-localize('mkDirError', 'Error on making directory: {0}', utils.getErrorMessage(err, true)));
+localize('mkDirError', "Error on making directory: {0}", utils.getErrorMessage(err, true)));
 }
 }
 
@@ -170,7 +170,7 @@ export class MkDirCommand extends ProgressCommand {
 return await this.prompter.promptSingle(<IQuestion>{
 type: QuestionTypes.input,
 name: 'enterDirName',
-message: localize('enterDirName', 'Enter directory name'),
+message: localize('enterDirName', "Enter directory name"),
 default: ''
 }).then(confirmed => <string>confirmed);
 }
@@ -220,7 +220,7 @@ export class DeleteFilesCommand extends Command {
 }
 } catch (err) {
 this.apiWrapper.showErrorMessage(
-localize('deleteError', 'Error on deleting files: {0}', utils.getErrorMessage(err, true)));
+localize('deleteError', "Error on deleting files: {0}", utils.getErrorMessage(err, true)));
 }
 }
 
@@ -234,7 +234,7 @@ export class DeleteFilesCommand extends Command {
 
 private async deleteFolder(node: FolderNode): Promise<void> {
 if (node) {
-let confirmed = await this.confirmDelete(localize('msgDeleteFolder', 'Are you sure you want to delete this folder and its contents?'));
+let confirmed = await this.confirmDelete(localize('msgDeleteFolder', "Are you sure you want to delete this folder and its contents?"));
 if (confirmed) {
 // TODO prompt for recursive delete if non-empty?
 await node.delete(true);
@@ -244,7 +244,7 @@ export class DeleteFilesCommand extends Command {
 
 private async deleteFile(node: FileNode): Promise<void> {
 if (node) {
-let confirmed = await this.confirmDelete(localize('msgDeleteFile', 'Are you sure you want to delete this file?'));
+let confirmed = await this.confirmDelete(localize('msgDeleteFile', "Are you sure you want to delete this file?"));
 if (confirmed) {
 await node.delete();
 }
@@ -273,15 +273,15 @@ export class SaveFileCommand extends ProgressCommand {
 if (fileUri) {
 await this.executeWithProgress(
 (cancelToken: vscode.CancellationTokenSource) => this.doSaveAndOpen(fileUri, fileNode, cancelToken),
-localize('saving', 'Saving HDFS Files'), true,
-() => this.apiWrapper.showInformationMessage(localize('saveCanceled', 'Save operation was canceled')));
+localize('saving', "Saving HDFS Files"), true,
+() => this.apiWrapper.showInformationMessage(localize('saveCanceled', "Save operation was canceled")));
 }
 } else {
 this.apiWrapper.showErrorMessage(LocalizedConstants.msgMissingNodeContext);
 }
 } catch (err) {
 this.apiWrapper.showErrorMessage(
-localize('saveError', 'Error on saving file: {0}', utils.getErrorMessage(err, true)));
+localize('saveError', "Error on saving file: {0}", utils.getErrorMessage(err, true)));
 }
 }
 
@@ -324,14 +324,14 @@ export class PreviewFileCommand extends ProgressCommand {
 await this.showNotebookDocument(fileName, connectionProfile, contents);
 }
 },
-localize('previewing', 'Generating preview'),
+localize('previewing', "Generating preview"),
 false);
 } else {
 this.apiWrapper.showErrorMessage(LocalizedConstants.msgMissingNodeContext);
 }
 } catch (err) {
 this.apiWrapper.showErrorMessage(
-localize('previewError', 'Error on previewing file: {0}', utils.getErrorMessage(err, true)));
+localize('previewError', "Error on previewing file: {0}", utils.getErrorMessage(err, true)));
 }
 }
 
@@ -388,7 +388,7 @@ export class CopyPathCommand extends Command {
 }
 } catch (err) {
 this.apiWrapper.showErrorMessage(
-localize('copyPathError', 'Error on copying path: {0}', utils.getErrorMessage(err, true)));
+localize('copyPathError', "Error on copying path: {0}", utils.getErrorMessage(err, true)));
 }
 }
 }
@@ -140,7 +140,7 @@ export class FolderNode extends HdfsFileSourceNode {
 });
 }
 } catch (error) {
-this.children = [ErrorNode.create(localize('errorExpanding', 'Error: {0}', utils.getErrorMessage(error)), this, error.statusCode)];
+this.children = [ErrorNode.create(localize('errorExpanding', "Error: {0}", utils.getErrorMessage(error)), this, error.statusCode)];
 }
 }
 return this.children;
@@ -242,7 +242,7 @@ export class ConnectionNode extends FolderNode {
 }
 
 public async delete(): Promise<void> {
-throw new Error(localize('errDeleteConnectionNode', 'Cannot delete a connection. Only subfolders and files can be deleted.'));
+throw new Error(localize('errDeleteConnectionNode', "Cannot delete a connection. Only subfolders and files can be deleted."));
 }
 
 async getTreeItem(): Promise<vscode.TreeItem> {
@@ -99,7 +99,7 @@ export class OpenSparkJobSubmissionDialogCommand extends Command {
 }
 }
 
-let errorMsg = localize('sparkJobSubmission.NoSqlSelected', 'No SQL Server is selected.');
+let errorMsg = localize('sparkJobSubmission.NoSqlSelected', "No SQL Server is selected.");
 if (!selectedHost) { throw new Error(errorMsg); }
 
 let sqlConnection = connectionMap.get(selectedHost);
@@ -135,7 +135,7 @@ export class OpenSparkJobSubmissionDialogFromFileCommand extends Command {
 return;
 }
 } catch (err) {
-this.apiWrapper.showErrorMessage(localize('sparkJobSubmission.GetFilePathFromSelectedNodeFailed', 'Error Get File Path: {0}', err));
+this.apiWrapper.showErrorMessage(localize('sparkJobSubmission.GetFilePathFromSelectedNodeFailed', "Error Get File Path: {0}", err));
 return;
 }
 
@@ -23,7 +23,7 @@ export class SparkAdvancedTab {
 }
 
 constructor(private appContext: AppContext) {
-this._tab = this.apiWrapper.createTab(localize('sparkJobSubmission.AdvancedTabName', 'ADVANCED'));
+this._tab = this.apiWrapper.createTab(localize('sparkJobSubmission.AdvancedTabName', "ADVANCED"));
 
 this._tab.registerContent(async (modelView) => {
 let builder = modelView.modelBuilder;
@@ -37,35 +37,35 @@ export class SparkAdvancedTab {
 this._referenceJARFilesInputBox = builder.inputBox().component();
 formContainer.addFormItem({
 component: this._referenceJARFilesInputBox,
-title: localize('sparkJobSubmission.ReferenceJarList', 'Reference Jars')
+title: localize('sparkJobSubmission.ReferenceJarList', "Reference Jars")
 },
 Object.assign(
 {
 info: localize('sparkJobSubmission.ReferenceJarListToolTip',
-'Jars to be placed in executor working directory. The Jar path needs to be an HDFS Path. Multiple paths should be split by semicolon (;)')
+"Jars to be placed in executor working directory. The Jar path needs to be an HDFS Path. Multiple paths should be split by semicolon (;)")
 },
 parentLayout));
 
 this._referencePyFilesInputBox = builder.inputBox().component();
 formContainer.addFormItem({
 component: this._referencePyFilesInputBox,
-title: localize('sparkJobSubmission.ReferencePyList', 'Reference py Files')
+title: localize('sparkJobSubmission.ReferencePyList', "Reference py Files")
 },
 Object.assign(
 {
 info: localize('sparkJobSubmission.ReferencePyListTooltip',
-'Py Files to be placed in executor working directory. The file path needs to be an HDFS Path. Multiple paths should be split by semicolon(;)')
+"Py Files to be placed in executor working directory. The file path needs to be an HDFS Path. Multiple paths should be split by semicolon(;)")
 },
 parentLayout));
 
 this._referenceFilesInputBox = builder.inputBox().component();
 formContainer.addFormItem({
 component: this._referenceFilesInputBox,
-title: localize('sparkJobSubmission.ReferenceFilesList', 'Reference Files')
+title: localize('sparkJobSubmission.ReferenceFilesList', "Reference Files")
 },
 Object.assign({
 info: localize('sparkJobSubmission.ReferenceFilesListTooltip',
-'Files to be placed in executor working directory. The file path needs to be an HDFS Path. Multiple paths should be split by semicolon(;)')
+"Files to be placed in executor working directory. The file path needs to be an HDFS Path. Multiple paths should be split by semicolon(;)")
 }, parentLayout));
 
 await modelView.initializeModel(formContainer.component());
@@ -41,7 +41,7 @@ export class SparkConfigurationTab {
 
 // If path is specified, means the default source setting for this tab is HDFS file, otherwise, it would be local file.
 constructor(private _dataModel: SparkJobSubmissionModel, private appContext: AppContext, private _path?: string) {
-this._tab = this.apiWrapper.createTab(localize('sparkJobSubmission.GeneralTabName', 'GENERAL'));
+this._tab = this.apiWrapper.createTab(localize('sparkJobSubmission.GeneralTabName', "GENERAL"));
 
 this._tab.registerContent(async (modelView) => {
 let builder = modelView.modelBuilder;
@@ -53,13 +53,13 @@ export class SparkConfigurationTab {
 let formContainer = builder.formContainer();
 
 this._jobNameInputBox = builder.inputBox().withProperties({
-placeHolder: localize('sparkJobSubmission.JobNamePlaceHolder', 'Enter a name ...'),
+placeHolder: localize('sparkJobSubmission.JobNamePlaceHolder', "Enter a name ..."),
 value: (this._path) ? fspath.basename(this._path) : ''
 }).component();
 
 formContainer.addFormItem({
 component: this._jobNameInputBox,
-title: localize('sparkJobSubmission.JobName', 'Job Name'),
+title: localize('sparkJobSubmission.JobName', "Job Name"),
 required: true
 }, parentLayout);
 
@@ -68,7 +68,7 @@ export class SparkConfigurationTab {
 }).component();
 formContainer.addFormItem({
 component: this._sparkContextLabel,
-title: localize('sparkJobSubmission.SparkCluster', 'Spark Cluster')
+title: localize('sparkJobSubmission.SparkCluster', "Spark Cluster")
 }, parentLayout);
 
 this._fileSourceDropDown = builder.dropDown().withProperties<azdata.DropDownProperties>({
@@ -102,7 +102,7 @@ export class SparkConfigurationTab {
 
 this._sparkSourceFileInputBox = builder.inputBox().withProperties({
 required: true,
-placeHolder: localize('sparkJobSubmission.FilePathPlaceHolder', 'Path to a .jar or .py file'),
+placeHolder: localize('sparkJobSubmission.FilePathPlaceHolder', "Path to a .jar or .py file"),
 value: (this._path) ? this._path : ''
 }).component();
 this._sparkSourceFileInputBox.onTextChanged(text => {
@@ -111,7 +111,7 @@ export class SparkConfigurationTab {
 if (this._localUploadDestinationLabel) {
 if (text) {
 this._localUploadDestinationLabel.value = localize('sparkJobSubmission.LocalFileDestinationHintWithPath',
-'The selected local file will be uploaded to HDFS: {0}', this._dataModel.hdfsSubmitFilePath);
+"The selected local file will be uploaded to HDFS: {0}", this._dataModel.hdfsSubmitFilePath);
 } else {
 this._localUploadDestinationLabel.value = LocalizedConstants.sparkLocalFileDestinationHint;
 }
@@ -167,24 +167,24 @@ export class SparkConfigurationTab {
 
 formContainer.addFormItem({
 component: this._sourceFlexContainerWithHint,
-title: localize('sparkJobSubmission.MainFilePath', 'JAR/py File'),
+title: localize('sparkJobSubmission.MainFilePath', "JAR/py File"),
 required: true
 }, parentLayout);
 
 this._mainClassInputBox = builder.inputBox().component();
 formContainer.addFormItem({
 component: this._mainClassInputBox,
-title: localize('sparkJobSubmission.MainClass', 'Main Class'),
+title: localize('sparkJobSubmission.MainClass', "Main Class"),
 required: true
 }, parentLayout);
 
 this._argumentsInputBox = builder.inputBox().component();
 formContainer.addFormItem({
 component: this._argumentsInputBox,
-title: localize('sparkJobSubmission.Arguments', 'Arguments')
+title: localize('sparkJobSubmission.Arguments', "Arguments")
 },
 Object.assign(
-{ info: localize('sparkJobSubmission.ArgumentsTooltip', 'Command line arguments used in your main class, multiple arguments should be split by space.') },
+{ info: localize('sparkJobSubmission.ArgumentsTooltip', "Command line arguments used in your main class, multiple arguments should be split by space.") },
 parentLayout));
 
 await modelView.initializeModel(formContainer.component());
@@ -193,7 +193,7 @@ export class SparkConfigurationTab {
 
 public async validate(): Promise<boolean> {
 if (!this._jobNameInputBox.value) {
-this._dataModel.showDialogError(localize('sparkJobSubmission.NotSpecifyJobName', 'Property Job Name is not specified.'));
+this._dataModel.showDialogError(localize('sparkJobSubmission.NotSpecifyJobName', "Property Job Name is not specified."));
 return false;
 }
 
@@ -202,7 +202,7 @@ export class SparkConfigurationTab {
 this._dataModel.isMainSourceFromLocal = true;
 this._dataModel.updateModelByLocalPath(this._sparkSourceFileInputBox.value);
 } else {
-this._dataModel.showDialogError(localize('sparkJobSubmission.NotSpecifyJARPYPath', 'Property JAR/py File is not specified.'));
+this._dataModel.showDialogError(localize('sparkJobSubmission.NotSpecifyJARPYPath', "Property JAR/py File is not specified."));
 return false;
 }
 } else {
@@ -210,13 +210,13 @@ export class SparkConfigurationTab {
 this._dataModel.isMainSourceFromLocal = false;
 this._dataModel.hdfsSubmitFilePath = this._sparkSourceFileInputBox.value;
 } else {
-this._dataModel.showDialogError(localize('sparkJobSubmission.NotSpecifyJARPYPath', 'Property JAR/py File is not specified.'));
+this._dataModel.showDialogError(localize('sparkJobSubmission.NotSpecifyJARPYPath', "Property JAR/py File is not specified."));
 return false;
 }
 }
 
 if (this._dataModel.isJarFile() && !this._mainClassInputBox.value) {
-this._dataModel.showDialogError(localize('sparkJobSubmission.NotSpecifyMainClass', 'Property Main Class is not specified.'));
+this._dataModel.showDialogError(localize('sparkJobSubmission.NotSpecifyMainClass', "Property Main Class is not specified."));
 return false;
 }
 
@@ -231,11 +231,11 @@ export class SparkConfigurationTab {
 try {
 let isFileExisted = await this._dataModel.isClusterFileExisted(this._dataModel.hdfsSubmitFilePath);
 if (!isFileExisted) {
-this._dataModel.showDialogError(localize('sparkJobSubmission.HDFSFileNotExistedWithPath', '{0} does not exist in Cluster or exception thrown. ', this._dataModel.hdfsSubmitFilePath));
+this._dataModel.showDialogError(localize('sparkJobSubmission.HDFSFileNotExistedWithPath', "{0} does not exist in Cluster or exception thrown. ", this._dataModel.hdfsSubmitFilePath));
 return false;
 }
 } catch (error) {
-this._dataModel.showDialogError(localize('sparkJobSubmission.HDFSFileNotExisted', 'The specified HDFS file does not exist. '));
+this._dataModel.showDialogError(localize('sparkJobSubmission.HDFSFileNotExisted', "The specified HDFS file does not exist. "));
 return false;
 }
 }
@@ -261,7 +261,7 @@ export class SparkConfigurationTab {
 canSelectFiles: true,
 canSelectFolders: false,
 canSelectMany: false,
-openLabel: localize('sparkSelectLocalFile', 'Select'),
+openLabel: localize('sparkSelectLocalFile', "Select"),
 filters: filter
 };
 
@@ -272,7 +272,7 @@ export class SparkConfigurationTab {
 
 return undefined;
 } catch (err) {
-this.apiWrapper.showErrorMessage(localize('sparkJobSubmission.SelectFileError', 'Error in locating the file due to Error: {0}', utils.getErrorMessage(err)));
+this.apiWrapper.showErrorMessage(localize('sparkJobSubmission.SelectFileError', "Error in locating the file due to Error: {0}", utils.getErrorMessage(err)));
 return undefined;
 }
 }
@@ -36,12 +36,12 @@ export class SparkJobSubmissionDialog {
 private outputChannel: vscode.OutputChannel) {
 if (!this.sqlClusterConnection || !this.appContext || !this.outputChannel) {
 throw new Error(localize('sparkJobSubmission.SparkJobSubmissionDialogInitializeError',
-'Parameters for SparkJobSubmissionDialog is illegal'));
+"Parameters for SparkJobSubmissionDialog is illegal"));
 }
 }
 
 public async openDialog(path?: string): Promise<void> {
-this._dialog = this.apiWrapper.createDialog(localize('sparkJobSubmission.DialogTitleNewJob', 'New Job'));
+this._dialog = this.apiWrapper.createDialog(localize('sparkJobSubmission.DialogTitleNewJob', "New Job"));
 
 this._dataModel = new SparkJobSubmissionModel(this.sqlClusterConnection, this._dialog, this.appContext);
 
@@ -50,9 +50,9 @@ export class SparkJobSubmissionDialog {
 
 this._dialog.content = [this._sparkConfigTab.tab, this._sparkAdvancedTab.tab];
 
-this._dialog.cancelButton.label = localize('sparkJobSubmission.DialogCancelButton', 'Cancel');
+this._dialog.cancelButton.label = localize('sparkJobSubmission.DialogCancelButton', "Cancel");
 
-this._dialog.okButton.label = localize('sparkJobSubmission.DialogSubmitButton', 'Submit');
+this._dialog.okButton.label = localize('sparkJobSubmission.DialogSubmitButton', "Submit");
 this._dialog.okButton.onClick(() => this.onClickOk());
 
 this._dialog.registerCloseValidator(() => this.handleValidate());
@@ -61,7 +61,7 @@ export class SparkJobSubmissionDialog {
 }
 
 private onClickOk(): void {
-let jobName = localize('sparkJobSubmission.SubmitSparkJob', '{0} Spark Job Submission:',
+let jobName = localize('sparkJobSubmission.SubmitSparkJob', "{0} Spark Job Submission:",
 this._sparkConfigTab.getInputValues()[0]);
 this.apiWrapper.startBackgroundOperation(
 {
@@ -80,7 +80,7 @@ export class SparkJobSubmissionDialog {
 try {
 this.outputChannel.show();
 let msg = localize('sparkJobSubmission.SubmissionStartMessage',
-'.......................... Submit Spark Job Start ..........................');
+".......................... Submit Spark Job Start ..........................");
 this.outputChannel.appendLine(msg);
 // 1. Upload local file to HDFS for local source.
 if (this._dataModel.isMainSourceFromLocal) {
@@ -42,7 +42,7 @@ export class SparkJobSubmissionModel {
 
 if (!this._sqlClusterConnection || !this._dialog || !this._appContext) {
 throw new Error(localize('sparkJobSubmission.SparkJobSubmissionModelInitializeError',
-'Parameters for SparkJobSubmissionModel is illegal'));
+"Parameters for SparkJobSubmissionModel is illegal"));
 }
 
 this._dialogService = new SparkJobSubmissionService(requestService);
@@ -89,7 +89,7 @@ export class SparkJobSubmissionModel {
 public async submitBatchJobByLivy(submissionArgs: SparkJobSubmissionInput): Promise<string> {
 try {
 if (!submissionArgs) {
-return Promise.reject(localize('sparkJobSubmission.submissionArgsIsInvalid', 'submissionArgs is invalid. '));
+return Promise.reject(localize('sparkJobSubmission.submissionArgsIsInvalid', "submissionArgs is invalid. "));
 }
 
 submissionArgs.setSparkClusterInfo(this._sqlClusterConnection);
@@ -104,11 +104,11 @@ export class SparkJobSubmissionModel {
 // TODO: whether set timeout as 15000ms
 try {
 if (!submissionArgs) {
-return Promise.reject(localize('sparkJobSubmission.submissionArgsIsInvalid', 'submissionArgs is invalid. '));
+return Promise.reject(localize('sparkJobSubmission.submissionArgsIsInvalid', "submissionArgs is invalid. "));
 }
 
 if (!utils.isValidNumber(livyBatchId)) {
-return Promise.reject(new Error(localize('sparkJobSubmission.LivyBatchIdIsInvalid', 'livyBatchId is invalid. ')));
+return Promise.reject(new Error(localize('sparkJobSubmission.LivyBatchIdIsInvalid', "livyBatchId is invalid. ")));
 }
 
 if (!retryTime) {
@@ -125,7 +125,7 @@ export class SparkJobSubmissionModel {
 } while (response.appId === '' && timeOutCount < retryTime);
 
 if (response.appId === '') {
-return Promise.reject(localize('sparkJobSubmission.GetApplicationIdTimeOut', 'Get Application Id time out. {0}[Log] {1}', os.EOL, response.log));
+return Promise.reject(localize('sparkJobSubmission.GetApplicationIdTimeOut', "Get Application Id time out. {0}[Log] {1}", os.EOL, response.log));
 } else {
 return response.appId;
 }
@@ -137,7 +137,7 @@ export class SparkJobSubmissionModel {
 public async uploadFile(localFilePath: string, hdfsFolderPath: string): Promise<void> {
 try {
 if (!localFilePath || !hdfsFolderPath) {
-return Promise.reject(localize('sparkJobSubmission.localFileOrFolderNotSpecified.', 'Property localFilePath or hdfsFolderPath is not specified. '));
+return Promise.reject(localize('sparkJobSubmission.localFileOrFolderNotSpecified.', "Property localFilePath or hdfsFolderPath is not specified. "));
 }
 
 if (!(await utils.exists(localFilePath))) {
@@ -154,7 +154,7 @@ export class SparkJobSubmissionModel {
 public async isClusterFileExisted(path: string): Promise<boolean> {
 try {
 if (!path) {
-return Promise.reject(localize('sparkJobSubmission.PathNotSpecified.', 'Property Path is not specified. '));
+return Promise.reject(localize('sparkJobSubmission.PathNotSpecified.', "Property Path is not specified. "));
 }
 
 let fileSource: IFileSource = await this._sqlClusterConnection.createHdfsFileSource();
@@ -87,7 +87,7 @@ export class SparkJobSubmissionService {
 }
 
 return Promise.reject(new Error(localize('sparkJobSubmission.LivyNoBatchIdReturned',
-'No Spark job batch id is returned from response.{0}[Error] {1}', os.EOL, JSON.stringify(response))));
+"No Spark job batch id is returned from response.{0}[Error] {1}", os.EOL, JSON.stringify(response))));
 } catch (error) {
 return Promise.reject(error);
 }
@@ -125,7 +125,7 @@ export class SparkJobSubmissionService {
 }
 
 return Promise.reject(localize('sparkJobSubmission.LivyNoLogReturned',
-'No log is returned within response.{0}[Error] {1}', os.EOL, JSON.stringify(response)));
+"No log is returned within response.{0}[Error] {1}", os.EOL, JSON.stringify(response)));
 } catch (error) {
 return Promise.reject(error);
 }
@@ -59,7 +59,7 @@ export function shellWhichResolving(cmd: string): Promise<string> {
 export async function mkDir(dirPath: string, outputChannel?: vscode.OutputChannel): Promise<void> {
 if (!await fs.exists(dirPath)) {
 if (outputChannel) {
-outputChannel.appendLine(localize('mkdirOutputMsg', '... Creating {0}', dirPath));
+outputChannel.appendLine(localize('mkdirOutputMsg', "... Creating {0}", dirPath));
 }
 await fs.ensureDir(dirPath);
 }
@@ -126,7 +126,7 @@ export function executeStreamedCommand(cmd: string, outputChannel?: vscode.Outpu
 if (code === 0) {
 resolve();
 } else {
-reject(localize('executeCommandProcessExited', 'Process exited with code {0}', code));
+reject(localize('executeCommandProcessExited', "Process exited with code {0}", code));
 }
 });
 