Add no-floating-promises for mssql extension (#16956)

extensions/mssql/.eslintrc.json (new file, 13 lines added)
@@ -0,0 +1,13 @@
+{
+  "parserOptions": {
+    "project": "./extensions/mssql/tsconfig.json"
+  },
+  "rules": {
+    "@typescript-eslint/no-floating-promises": [
+      "error",
+      {
+        "ignoreVoid": true
+      }
+    ]
+  }
+}
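
Note: the snippet below is an illustrative sketch, not part of this commit. It shows what @typescript-eslint/no-floating-promises reports and how the "ignoreVoid": true option enabled above interacts with the void operator used throughout this diff; the function name is invented.

  async function saveSettings(): Promise<void> {
    // pretend this persists something
  }

  function onClick(): void {
    saveSettings();                                                  // flagged: the returned promise is dropped
    void saveSettings();                                             // allowed with "ignoreVoid": true, explicitly fire-and-forget
    saveSettings().catch(err => console.error('save failed', err));  // always allowed, the rejection is handled
  }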
@@ -62,19 +62,19 @@ export default class ContextProvider {
   }

   if (iscloud === true || iscloud === false) {
-    setCommandContext(ContextKeys.ISCLOUD, iscloud);
+    void setCommandContext(ContextKeys.ISCLOUD, iscloud);
   }

   if (!types.isUndefinedOrNull(edition)) {
-    setCommandContext(ContextKeys.EDITIONID, edition);
+    void setCommandContext(ContextKeys.EDITIONID, edition);
   }

   if (!types.isUndefinedOrNull(isCluster)) {
-    setCommandContext(ContextKeys.ISCLUSTER, isCluster);
+    void setCommandContext(ContextKeys.ISCLUSTER, isCluster);
   }

   if (!types.isUndefinedOrNull(serverMajorVersion)) {
-    setCommandContext(ContextKeys.SERVERMAJORVERSION, serverMajorVersion);
+    void setCommandContext(ContextKeys.SERVERMAJORVERSION, serverMajorVersion);
   }
  }
@@ -88,9 +88,9 @@ export class CredentialStore {
   this._client.start();
  }

- dispose() {
+ async dispose(): Promise<void> {
   if (this._client) {
-   this._client.stop();
+   await this._client.stop();
   }
  }
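
Note: the dispose() change above is the pattern this commit applies to the other language-client holders as well: instead of dropping the promise returned by stop(), dispose becomes async and awaits it. A minimal sketch of that shape, with an invented MyClient stand-in type:

  interface MyClient { stop(): Promise<void>; }

  class ServiceHost {
    constructor(private client?: MyClient) { }

    // Before: dispose() { this.client.stop(); } dropped the returned promise.
    async dispose(): Promise<void> {
      if (this.client) {
        await this.client.stop(); // failures now surface to whoever awaits dispose()
      }
    }
  }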
@@ -69,5 +69,5 @@ export function registerBooksWidget(bookContributionProvider: BookContributionPr
 }

 function openBookViewlet(folderUri: vscode.Uri): void {
- vscode.commands.executeCommand('bookTreeView.openBook', folderUri.fsPath, true, undefined);
+ void vscode.commands.executeCommand('bookTreeView.openBook', folderUri.fsPath, true, undefined);
 }
@@ -95,7 +95,7 @@ export function registerServiceEndpoints(context: vscode.ExtensionContext): void
    const copyValueCell = view.modelBuilder.button().component();
    copyValueCell.iconPath = { light: context.asAbsolutePath('resources/light/copy.png'), dark: context.asAbsolutePath('resources/dark/copy_inverse.png') };
    copyValueCell.onDidClick(() => {
-    vscode.env.clipboard.writeText(endpointInfo.endpoint);
+    void vscode.env.clipboard.writeText(endpointInfo.endpoint);
    });
    copyValueCell.title = localize("copyText", "Copy");
    copyValueCell.iconHeight = '14px';
@@ -53,7 +53,7 @@ export class AccountFeature implements StaticFeature {

  if (accountList.length < 1) {
   // TODO: Prompt user to add account
-  window.showErrorMessage(localize('mssql.missingLinkedAzureAccount', "Azure Data Studio needs to contact Azure Key Vault to access a column master key for Always Encrypted, but no linked Azure account is available. Please add a linked Azure account and retry the query."));
+  void window.showErrorMessage(localize('mssql.missingLinkedAzureAccount', "Azure Data Studio needs to contact Azure Key Vault to access a column master key for Always Encrypted, but no linked Azure account is available. Please add a linked Azure account and retry the query."));
   return undefined;
  } else if (accountList.length > 1) {
   let options: QuickPickOptions = {
@@ -63,7 +63,7 @@ export class AccountFeature implements StaticFeature {
   let items = accountList.map(a => new AccountFeature.AccountQuickPickItem(a));
   let selectedItem = await window.showQuickPick(items, options);
   if (!selectedItem) { // The user canceled the selection.
-   window.showErrorMessage(localize('mssql.canceledLinkedAzureAccountSelection', "Azure Data Studio needs to contact Azure Key Vault to access a column master key for Always Encrypted, but no linked Azure account was selected. Please retry the query and select a linked Azure account when prompted."));
+   void window.showErrorMessage(localize('mssql.canceledLinkedAzureAccountSelection', "Azure Data Studio needs to contact Azure Key Vault to access a column master key for Always Encrypted, but no linked Azure account was selected. Please retry the query and select a linked Azure account when prompted."));
   return undefined;
   }
   account = selectedItem.account;
@@ -74,13 +74,13 @@ export class AccountFeature implements StaticFeature {
  const tenant = account.properties.tenants.find(tenant => request.authority.includes(tenant.id));
  const unauthorizedMessage = localize('mssql.insufficientlyPrivelagedAzureAccount', "The configured Azure account for {0} does not have sufficient permissions for Azure Key Vault to access a column master key for Always Encrypted.", account.key.accountId);
  if (!tenant) {
-  window.showErrorMessage(unauthorizedMessage);
+  void window.showErrorMessage(unauthorizedMessage);
   return undefined;
  }
  const securityToken = await azdata.accounts.getAccountSecurityToken(account, tenant.id, azdata.AzureResource.AzureKeyVault);

  if (!securityToken?.token) {
-  window.showErrorMessage(unauthorizedMessage);
+  void window.showErrorMessage(unauthorizedMessage);
   return undefined;
  }
@@ -34,7 +34,7 @@ export class HdfsModel {
 public fileStatus: FileStatus;

 constructor(private readonly fileSource: IFileSource, private readonly path: string) {
- this.refresh();
+ this.refresh().catch(err => console.error('Error refreshing HDFS Model ', err));
 }

 /**
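
Note: constructors cannot be async, so the HdfsModel change above attaches a .catch handler instead of awaiting the initial refresh. A small illustrative sketch of that pattern (class and method names invented):

  class Model {
    constructor() {
      // Kick off the initial load; log failures rather than leaving the promise floating.
      this.refresh().catch(err => console.error('Error refreshing model', err));
    }

    private async refresh(): Promise<void> {
      // ...load data here...
    }
  }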
@@ -117,7 +117,7 @@ export class HdfsModel {
   );
  } catch (error) {
   const errMsg = localize('mssql.recursivePermissionOpError', "Error applying permission changes: {0}", (error instanceof Error ? error.message : error));
-  vscode.window.showErrorMessage(errMsg);
+  void vscode.window.showErrorMessage(errMsg);
   op.updateStatus(azdata.TaskStatus.Failed, errMsg);
  }
 }
@@ -67,7 +67,7 @@ export class ManageAccessDialog {
   azdata.window.closeDialog(this.dialog);
   await this.hdfsModel.apply(true);
  } catch (err) {
-  vscode.window.showErrorMessage(loc.errorApplyingAclChanges(err instanceof HdfsError ? err.message : err));
+  void vscode.window.showErrorMessage(loc.errorApplyingAclChanges(err instanceof HdfsError ? err.message : err));
  }
 });
 this.dialog.customButtons = [this.applyRecursivelyButton];
@@ -76,7 +76,7 @@ export class ManageAccessDialog {
   await this.hdfsModel.apply();
   return true;
  } catch (err) {
-  vscode.window.showErrorMessage(loc.errorApplyingAclChanges(err instanceof HdfsError ? err.message : err));
+  void vscode.window.showErrorMessage(loc.errorApplyingAclChanges(err instanceof HdfsError ? err.message : err));
  }
  return false;
 });
@@ -331,7 +331,7 @@ export class ManageAccessDialog {

  this.rootLoadingComponent.loading = false;

- this.addUserOrGroupInput.focus();
+ void this.addUserOrGroupInput.focus();
 });
 }
@@ -40,7 +40,7 @@ export async function activate(context: vscode.ExtensionContext): Promise<IExten
 let supported = await Utils.verifyPlatform();

 if (!supported) {
- vscode.window.showErrorMessage('Unsupported platform');
+ void vscode.window.showErrorMessage('Unsupported platform');
  return undefined;
 }
@@ -87,7 +87,7 @@ export async function activate(context: vscode.ExtensionContext): Promise<IExten
   const untitledUri = vscode.Uri.parse(`untitled:${title}`);
   await azdata.nb.showNotebookDocument(untitledUri, { initialContent: result.content });
  } catch (err) {
-  vscode.window.showErrorMessage(localize('mssql.errorConvertingToNotebook', "An error occurred converting the SQL document to a Notebook. Error : {0}", err.toString()));
+  void vscode.window.showErrorMessage(localize('mssql.errorConvertingToNotebook', "An error occurred converting the SQL document to a Notebook. Error : {0}", err.toString()));
  }
 });
@@ -100,7 +100,7 @@ export async function activate(context: vscode.ExtensionContext): Promise<IExten
   const result = await appContext.getService<INotebookConvertService>(Constants.NotebookConvertService).convertNotebookToSql(doc.getText());
   await azdata.queryeditor.openQueryDocument({ content: result.content });
  } catch (err) {
-  vscode.window.showErrorMessage(localize('mssql.errorConvertingToSQL', "An error occurred converting the Notebook document to SQL. Error : {0}", err.toString()));
+  void vscode.window.showErrorMessage(localize('mssql.errorConvertingToSQL', "An error occurred converting the Notebook document to SQL. Error : {0}", err.toString()));
  }
 });
@@ -114,7 +114,7 @@ function registerLogCommand(context: vscode.ExtensionContext) {
  if (choice) {
   const document = await vscode.workspace.openTextDocument(vscode.Uri.file(path.join(context.logPath, choice)));
   if (document) {
-   vscode.window.showTextDocument(document);
+   void vscode.window.showTextDocument(document);
   }
  }
 }));
@@ -190,7 +190,7 @@ async function handleNewNotebookTask(oeContext?: azdata.ObjectExplorerContext, p
 if (hdfsPath.length > 0) {
  let analyzeCommand = '#' + msgSampleCodeDataFrame + os.EOL + 'df = (spark.read.option("inferSchema", "true")'
   + os.EOL + '.option("header", "true")' + os.EOL + '.csv("{0}"))' + os.EOL + 'df.show(10)';
- editor.edit(editBuilder => {
+ await editor.edit(editBuilder => {
   editBuilder.replace(0, {
    cell_type: 'code',
    source: analyzeCommand.replace('{0}', hdfsPath)
@@ -215,7 +215,7 @@ async function handleOpenNotebookTask(profile: azdata.IConnectionProfile): Promi
 // Verify this is a .ipynb file since this isn't actually filtered on Mac/Linux
 if (path.extname(fileUri.fsPath) !== '.ipynb') {
  // in the future might want additional supported types
- vscode.window.showErrorMessage(localize('unsupportedFileType', "Only .ipynb Notebooks are supported"));
+ void vscode.window.showErrorMessage(localize('unsupportedFileType', "Only .ipynb Notebooks are supported"));
 } else {
  await azdata.nb.showNotebookDocument(fileUri, {
   connectionProfile: profile,
@@ -229,11 +229,11 @@ async function handleOpenClusterDashboardTask(profile: azdata.IConnectionProfile
 const serverInfo = await azdata.connection.getServerInfo(profile.id);
 const controller = Utils.getClusterEndpoints(serverInfo).find(e => e.name === Endpoint.controller);
 if (!controller) {
- vscode.window.showErrorMessage(localize('noController', "Could not find the controller endpoint for this instance"));
+ void vscode.window.showErrorMessage(localize('noController', "Could not find the controller endpoint for this instance"));
  return;
 }

-vscode.commands.executeCommand('bigDataClusters.command.manageController',
+void vscode.commands.executeCommand('bigDataClusters.command.manageController',
  {
   url: controller.endpoint,
   auth: profile.authenticationType === 'Integrated' ? AuthType.Integrated : AuthType.Basic,
@@ -173,15 +173,15 @@ export abstract class ProgressCommand extends Command {

 export function registerSearchServerCommand(appContext: AppContext): void {
  vscode.commands.registerCommand('mssql.searchServers', () => {
-  vscode.window.showInputBox({
+  void vscode.window.showInputBox({
    placeHolder: localize('mssql.searchServers', "Search Server Names")
   }).then((stringSearch) => {
    if (stringSearch) {
-    vscode.commands.executeCommand('registeredServers.searchServer', (stringSearch));
+    void vscode.commands.executeCommand('registeredServers.searchServer', (stringSearch));
    }
   });
  });
  vscode.commands.registerCommand('mssql.clearSearchServerResult', () => {
-  vscode.commands.executeCommand('registeredServers.clearSearchServerResult');
+  void vscode.commands.executeCommand('registeredServers.clearSearchServerResult');
  });
 }
@@ -246,7 +246,7 @@ class HdfsFileSource implements IFileSource {
    '########################### ' + localize('maxSizeNotice', "NOTICE: This file has been truncated at {0} for preview. ", bytes(maxBytes)) + '############################### \r\n' +
    '#################################################################################################################### \r\n';
   data.splice(0, 0, Buffer.from(previewNote, 'utf-8'));
-  vscode.window.showWarningMessage(localize('maxSizeReached', "The file has been truncated at {0} for preview.", bytes(maxBytes)));
+  void vscode.window.showWarningMessage(localize('maxSizeReached', "The file has been truncated at {0} for preview.", bytes(maxBytes)));
   resolve(Buffer.concat(data));
  } else {
   reject(error);
@@ -94,7 +94,7 @@ export class UploadFilesCommand extends ProgressCommand {
    }
   }
  } catch (err) {
-  vscode.window.showErrorMessage(
+  void vscode.window.showErrorMessage(
    localize('uploadError', "Error uploading files: {0}", utils.getErrorMessage(err, true)));
  }
 }
@@ -125,7 +125,7 @@ export class UploadFilesCommand extends ProgressCommand {
   let children: IFile[] = await Promise.all((await fs.readdir(file.path))
    .map(childFileName => joinHdfsPath(file.path, childFileName))
    .map(this.mapPathsToFiles()));
-  this.writeFiles(children, subFolder, cancelToken);
+  await this.writeFiles(children, subFolder, cancelToken);
  } else {
   await folderNode.writeFile(file);
  }
@@ -160,7 +160,7 @@ export class MkDirCommand extends ProgressCommand {
   }
  }
 } catch (err) {
- vscode.window.showErrorMessage(
+ void vscode.window.showErrorMessage(
   localize('mkDirError', "Error on making directory: {0}", utils.getErrorMessage(err, true)));
 }
 }
@@ -215,10 +215,10 @@ export class DeleteFilesCommand extends Command {
    await oeNodeToRefresh.refresh();
   }
  } else {
-  vscode.window.showErrorMessage(LocalizedConstants.msgMissingNodeContext);
+  void vscode.window.showErrorMessage(LocalizedConstants.msgMissingNodeContext);
  }
 } catch (err) {
- vscode.window.showErrorMessage(
+ void vscode.window.showErrorMessage(
   localize('deleteError', "Error on deleting files: {0}", utils.getErrorMessage(err, true)));
 }
 }
@@ -276,10 +276,10 @@ export class SaveFileCommand extends ProgressCommand {
    () => vscode.window.showInformationMessage(localize('saveCanceled', "Save operation was canceled")));
  }
  } else {
-  vscode.window.showErrorMessage(LocalizedConstants.msgMissingNodeContext);
+  void vscode.window.showErrorMessage(LocalizedConstants.msgMissingNodeContext);
  }
 } catch (err) {
- vscode.window.showErrorMessage(
+ void vscode.window.showErrorMessage(
   localize('saveError', "Error on saving file: {0}", utils.getErrorMessage(err, true)));
 }
 }
@@ -330,10 +330,10 @@ export class PreviewFileCommand extends ProgressCommand {
    localize('previewing', "Generating preview"),
    false);
  } else {
-  vscode.window.showErrorMessage(LocalizedConstants.msgMissingNodeContext);
+  void vscode.window.showErrorMessage(LocalizedConstants.msgMissingNodeContext);
  }
 } catch (err) {
- vscode.window.showErrorMessage(
+ void vscode.window.showErrorMessage(
   localize('previewError', "Error on previewing file: {0}", utils.getErrorMessage(err, true)));
 }
 }
@@ -385,12 +385,12 @@ export class CopyPathCommand extends Command {
  let node = await getNode<HdfsFileSourceNode>(context, this.appContext);
  if (node) {
   let path = node.hdfsPath;
-  vscode.env.clipboard.writeText(path);
+  void vscode.env.clipboard.writeText(path);
  } else {
-  vscode.window.showErrorMessage(LocalizedConstants.msgMissingNodeContext);
+  void vscode.window.showErrorMessage(LocalizedConstants.msgMissingNodeContext);
  }
 } catch (err) {
- vscode.window.showErrorMessage(
+ void vscode.window.showErrorMessage(
   localize('copyPathError', "Error on copying path: {0}", utils.getErrorMessage(err, true)));
 }
 }
@@ -408,10 +408,10 @@ export class ManageAccessCommand extends Command {
  if (node) {
   new ManageAccessDialog(node.hdfsPath, await node.getFileSource()).openDialog();
  } else {
-  vscode.window.showErrorMessage(LocalizedConstants.msgMissingNodeContext);
+  void vscode.window.showErrorMessage(LocalizedConstants.msgMissingNodeContext);
  }
 } catch (err) {
- vscode.window.showErrorMessage(
+ void vscode.window.showErrorMessage(
   localize('manageAccessError', "An unexpected error occurred while opening the Manage Access dialog: {0}", utils.getErrorMessage(err, true)));
 }
 }
@@ -82,11 +82,11 @@ export class MssqlObjectExplorerNodeProvider extends ProviderBase implements azd
 } else {
  setTimeout(() => {

-  // Running after promise resolution as we need the Ops Studio-side map to have been updated
+  // Running after promise resolution as we need the ADS-side map to have been updated
   // Intentionally not awaiting or catching errors.
   // Any failure in startExpansion should be emitted in the expand complete result
   // We want this to be async and ideally return true before it completes
-  this.startExpansion(session, nodeInfo, isRefresh);
+  this.startExpansion(session, nodeInfo, isRefresh).catch(err => console.log('Error expanding Object Explorer Node ', err));
  }, 10);
 }
 return true;
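
Note: in the hunk above the expansion is intentionally not awaited, since the callback must return before the work finishes, so the fix is a trailing .catch rather than await or void. An illustrative sketch of that shape (startExpansion is a stand-in parameter, not the real method):

  function scheduleExpansion(startExpansion: () => Promise<void>): boolean {
    setTimeout(() => {
      // Deliberately not awaited; errors are logged instead of being silently dropped.
      startExpansion().catch(err => console.log('Error expanding node ', err));
    }, 10);
    return true; // returns before the expansion completes
  }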
@@ -181,14 +181,14 @@ export class MssqlObjectExplorerNodeProvider extends ProviderBase implements azd
 }

 notifyNodeChanged(node: TreeNode): void {
- this.notifyNodeChangesAsync(node);
+ void this.notifyNodeChangesAsync(node);
 }

 private async notifyNodeChangesAsync(node: TreeNode): Promise<void> {
  try {
   let session = this.getSqlClusterSessionForNode(node);
   if (!session) {
-   vscode.window.showErrorMessage(localize('sessionNotFound', "Session for node {0} does not exist", node.nodePathValue));
+   void vscode.window.showErrorMessage(localize('sessionNotFound', "Session for node {0} does not exist", node.nodePathValue));
   } else {
    let nodeInfo = node.getNodeInfo();
    let expandInfo: azdata.ExpandNodeInfo = {
@@ -64,7 +64,7 @@ export default class CodeAdapter implements IPrompter {
   return undefined;
  }

- window.showErrorMessage(err.message);
+ void window.showErrorMessage(err.message);
 });
 }
 }
@@ -92,9 +92,9 @@ export class AzureResourceProvider {
  this._client.start();
 }

-public dispose() {
+public async dispose(): Promise<void> {
  if (this._client) {
-  this._client.stop();
+  await this._client.stop();
  }
 }
@@ -47,7 +47,7 @@ export class OpenSparkJobSubmissionDialogCommand extends Command {
  let dialog = new SparkJobSubmissionDialog(sqlClusterConnection, this.appContext, this.outputChannel);
  await dialog.openDialog();
 } catch (error) {
- vscode.window.showErrorMessage(getErrorMessage(error));
+ void vscode.window.showErrorMessage(getErrorMessage(error));
 }
 }
@@ -129,11 +129,11 @@ export class OpenSparkJobSubmissionDialogFromFileCommand extends Command {
  if (node && node.hdfsPath) {
   path = node.hdfsPath;
  } else {
-  vscode.window.showErrorMessage(LocalizedConstants.msgMissingNodeContext);
+  void vscode.window.showErrorMessage(LocalizedConstants.msgMissingNodeContext);
   return;
  }
 } catch (err) {
- vscode.window.showErrorMessage(localize('sparkJobSubmission.GetFilePathFromSelectedNodeFailed', "Error Get File Path: {0}", err));
+ void vscode.window.showErrorMessage(localize('sparkJobSubmission.GetFilePathFromSelectedNodeFailed', "Error Get File Path: {0}", err));
  return;
 }
@@ -148,7 +148,7 @@ export class OpenSparkJobSubmissionDialogFromFileCommand extends Command {
  let dialog = new SparkJobSubmissionDialog(sqlClusterConnection, this.appContext, this.outputChannel);
  await dialog.openDialog(path);
 } catch (error) {
- vscode.window.showErrorMessage(getErrorMessage(error));
+ void vscode.window.showErrorMessage(getErrorMessage(error));
 }
 }
 }
@@ -166,7 +166,7 @@ export class OpenSparkJobSubmissionDialogTask {
  let dialog = new SparkJobSubmissionDialog(sqlClusterConnection, this.appContext, this.outputChannel);
  await dialog.openDialog();
 } catch (error) {
- vscode.window.showErrorMessage(getErrorMessage(error));
+ void vscode.window.showErrorMessage(getErrorMessage(error));
 }
 }
 }
@@ -80,11 +80,11 @@ export class SparkConfigurationTab {
  value: (this._path) ? SparkFileSource.HDFS.toString() : SparkFileSource.Local.toString()
 }).component();

-this._fileSourceDropDown.onValueChanged(selection => {
+this._fileSourceDropDown.onValueChanged(async selection => {
  let isLocal = selection.selected === SparkFileSource.Local.toString();
  // Disable browser button for cloud source.
  if (this._filePickerButton) {
-  this._filePickerButton.updateProperties({
+  await this._filePickerButton.updateProperties({
    enabled: isLocal,
    required: isLocal
   });
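
Note: making the onValueChanged callback async, as above, is what lets updateProperties be awaited inside it; the emitter simply receives a Promise-returning listener. A hedged sketch with invented stand-ins for the azdata component APIs (DropDown and updateProperties below are not the real interfaces):

  type Listener = (value: string) => void | Promise<void>;

  class DropDown {
    private listeners: Listener[] = [];
    onValueChanged(listener: Listener): void { this.listeners.push(listener); }
    fire(value: string): void { this.listeners.forEach(l => void l(value)); }
  }

  async function updateProperties(props: { enabled: boolean }): Promise<void> {
    console.log('properties updated:', props);
  }

  const dropDown = new DropDown();
  dropDown.onValueChanged(async selection => {
    const isLocal = selection === 'Local';
    await updateProperties({ enabled: isLocal }); // awaited instead of floating
  });
  dropDown.fire('Local');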
@@ -109,7 +109,7 @@ export class SparkConfigurationTab {
  placeHolder: localize('sparkJobSubmission.FilePathPlaceHolder', "Path to a .jar or .py file"),
  value: (this._path) ? this._path : ''
 }).component();
-this._sparkSourceFileInputBox.onTextChanged(text => {
+this._sparkSourceFileInputBox.onTextChanged(async text => {
  if (this._fileSourceDropDown.value === SparkFileSource.Local.toString()) {
   this._dataModel.updateModelByLocalPath(text);
   if (this._localUploadDestinationLabel) {
@@ -126,7 +126,7 @@ export class SparkConfigurationTab {

 // main class disable/enable is according to whether it's jar file.
 let isJarFile = this._dataModel.isJarFile();
-this._mainClassInputBox.updateProperties({ enabled: isJarFile, required: isJarFile });
+await this._mainClassInputBox.updateProperties({ enabled: isJarFile, required: isJarFile });
 if (!isJarFile) {
  // Clear main class for py file.
  this._mainClassInputBox.value = '';
@@ -281,7 +281,7 @@ export class SparkConfigurationTab {

  return undefined;
 } catch (err) {
- vscode.window.showErrorMessage(localize('sparkJobSubmission.SelectFileError', "Error in locating the file due to Error: {0}", utils.getErrorMessage(err)));
+ void vscode.window.showErrorMessage(localize('sparkJobSubmission.SelectFileError', "Error in locating the file due to Error: {0}", utils.getErrorMessage(err)));
  return undefined;
 }
 }
@@ -64,7 +64,7 @@ export class SparkJobSubmissionDialog {
  description: jobName,
  isCancelable: false,
  operation: op => {
-  this.onSubmit(op);
+  void this.onSubmit(op);
  }
 }
 );
@@ -82,11 +82,11 @@ export class SparkJobSubmissionDialog {
 this.outputChannel.appendLine(this.addInfoTag(LocalizedConstants.sparkJobSubmissionPrepareUploadingFile(this._dataModel.localFileSourcePath, this._dataModel.hdfsFolderDestinationPath)));
 op.updateStatus(azdata.TaskStatus.InProgress, LocalizedConstants.sparkJobSubmissionPrepareUploadingFile(this._dataModel.localFileSourcePath, this._dataModel.hdfsFolderDestinationPath));
 await this._dataModel.uploadFile(this._dataModel.localFileSourcePath, this._dataModel.hdfsFolderDestinationPath);
-vscode.window.showInformationMessage(LocalizedConstants.sparkJobSubmissionUploadingFileSucceeded);
+void vscode.window.showInformationMessage(LocalizedConstants.sparkJobSubmissionUploadingFileSucceeded);
 this.outputChannel.appendLine(this.addInfoTag(LocalizedConstants.sparkJobSubmissionUploadingFileSucceeded));
 op.updateStatus(azdata.TaskStatus.InProgress, LocalizedConstants.sparkJobSubmissionUploadingFileSucceeded);
 } catch (error) {
-vscode.window.showErrorMessage(LocalizedConstants.sparkJobSubmissionUploadingFileFailed(utils.getErrorMessage(error)));
+void vscode.window.showErrorMessage(LocalizedConstants.sparkJobSubmissionUploadingFileFailed(utils.getErrorMessage(error)));
 this.outputChannel.appendLine(this.addErrorTag(LocalizedConstants.sparkJobSubmissionUploadingFileFailed(utils.getErrorMessage(error))));
 op.updateStatus(azdata.TaskStatus.Failed, LocalizedConstants.sparkJobSubmissionUploadingFileFailed(utils.getErrorMessage(error)));
 this.outputChannel.appendLine(LocalizedConstants.sparkJobSubmissionEndMessage);
@@ -99,7 +99,7 @@ export class SparkJobSubmissionDialog {
 this.outputChannel.appendLine(this.addInfoTag(LocalizedConstants.sparkJobSubmissionPrepareSubmitJob(submissionSettings.config.jobName)));
 op.updateStatus(azdata.TaskStatus.InProgress, LocalizedConstants.sparkJobSubmissionPrepareSubmitJob(submissionSettings.config.jobName));
 let livyBatchId = await this._dataModel.submitBatchJobByLivy(submissionSettings);
-vscode.window.showInformationMessage(LocalizedConstants.sparkJobSubmissionSparkJobHasBeenSubmitted);
+void vscode.window.showInformationMessage(LocalizedConstants.sparkJobSubmissionSparkJobHasBeenSubmitted);
 this.outputChannel.appendLine(this.addInfoTag(LocalizedConstants.sparkJobSubmissionSparkJobHasBeenSubmitted));
 op.updateStatus(azdata.TaskStatus.InProgress, LocalizedConstants.sparkJobSubmissionSparkJobHasBeenSubmitted);
@@ -108,7 +108,7 @@ export class SparkJobSubmissionDialog {
 let appId = await this._dataModel.getApplicationID(submissionSettings, livyBatchId);

 let sparkHistoryUrl = this._dataModel.generateSparkHistoryUIUrl(submissionSettings, appId);
-vscode.window.showInformationMessage(LocalizedConstants.sparkJobSubmissionSparkHistoryLinkMessage(sparkHistoryUrl));
+void vscode.window.showInformationMessage(LocalizedConstants.sparkJobSubmissionSparkHistoryLinkMessage(sparkHistoryUrl));
 this.outputChannel.appendLine(this.addInfoTag(LocalizedConstants.sparkJobSubmissionSparkHistoryLinkMessage(sparkHistoryUrl)));
 op.updateStatus(azdata.TaskStatus.Succeeded, LocalizedConstants.sparkJobSubmissionSparkHistoryLinkMessage(sparkHistoryUrl));
@@ -121,11 +121,11 @@ export class SparkJobSubmissionDialog {
 */

 let yarnUIUrl = this._dataModel.generateYarnUIUrl(submissionSettings, appId);
-vscode.window.showInformationMessage(LocalizedConstants.sparkJobSubmissionYarnUIMessage(yarnUIUrl));
+void vscode.window.showInformationMessage(LocalizedConstants.sparkJobSubmissionYarnUIMessage(yarnUIUrl));
 this.outputChannel.appendLine(this.addInfoTag(LocalizedConstants.sparkJobSubmissionYarnUIMessage(yarnUIUrl)));
 op.updateStatus(azdata.TaskStatus.Succeeded, LocalizedConstants.sparkJobSubmissionYarnUIMessage(yarnUIUrl));
 } catch (error) {
-vscode.window.showErrorMessage(LocalizedConstants.sparkJobSubmissionGetApplicationIdFailed(utils.getErrorMessage(error)));
+void vscode.window.showErrorMessage(LocalizedConstants.sparkJobSubmissionGetApplicationIdFailed(utils.getErrorMessage(error)));
 this.outputChannel.appendLine(this.addErrorTag(LocalizedConstants.sparkJobSubmissionGetApplicationIdFailed(utils.getErrorMessage(error))));
 op.updateStatus(azdata.TaskStatus.Failed, LocalizedConstants.sparkJobSubmissionGetApplicationIdFailed(utils.getErrorMessage(error)));
 this.outputChannel.appendLine(LocalizedConstants.sparkJobSubmissionEndMessage);
@@ -134,7 +134,7 @@ export class SparkJobSubmissionDialog {

 this.outputChannel.appendLine(LocalizedConstants.sparkJobSubmissionEndMessage);
 } catch (error) {
-vscode.window.showErrorMessage(LocalizedConstants.sparkJobSubmissionSubmitJobFailed(utils.getErrorMessage(error)));
+void vscode.window.showErrorMessage(LocalizedConstants.sparkJobSubmissionSubmitJobFailed(utils.getErrorMessage(error)));
 this.outputChannel.appendLine(this.addErrorTag(LocalizedConstants.sparkJobSubmissionSubmitJobFailed(utils.getErrorMessage(error))));
 op.updateStatus(azdata.TaskStatus.Failed, LocalizedConstants.sparkJobSubmissionSubmitJobFailed(utils.getErrorMessage(error)));
 this.outputChannel.appendLine(LocalizedConstants.sparkJobSubmissionEndMessage);
@@ -19,17 +19,17 @@ export class OpenSparkYarnHistoryTask {
 let sqlClusterConnection = await SqlClusterLookUp.findSqlClusterConnection(sqlConnProfile, this.appContext);
 if (!sqlClusterConnection) {
  let name = isSpark ? 'Spark' : 'Yarn';
- vscode.window.showErrorMessage(loc.sparkConnectionRequired(name));
+ void vscode.window.showErrorMessage(loc.sparkConnectionRequired(name));
  return;
 }
 if (isSpark) {
- vscode.commands.executeCommand('vscode.open', vscode.Uri.parse(this.generateSparkHistoryUrl(sqlClusterConnection.host, sqlClusterConnection.port)));
+ void vscode.commands.executeCommand('vscode.open', vscode.Uri.parse(this.generateSparkHistoryUrl(sqlClusterConnection.host, sqlClusterConnection.port)));
 }
 else {
- vscode.commands.executeCommand('vscode.open', vscode.Uri.parse(this.generateYarnHistoryUrl(sqlClusterConnection.host, sqlClusterConnection.port)));
+ void vscode.commands.executeCommand('vscode.open', vscode.Uri.parse(this.generateYarnHistoryUrl(sqlClusterConnection.host, sqlClusterConnection.port)));
 }
 } catch (error) {
- vscode.window.showErrorMessage(getErrorMessage(error));
+ void vscode.window.showErrorMessage(getErrorMessage(error));
 }
 }
@@ -54,7 +54,7 @@ export class SqlToolsServer {
  statusView.hide();
 }, 1500);
 vscode.commands.registerCommand('mssql.loadCompletionExtension', (params: CompletionExtensionParams) => {
- this.client.sendRequest(CompletionExtLoadRequest.type, params);
+ return this.client.sendRequest(CompletionExtLoadRequest.type, params);
 });
 Telemetry.sendTelemetryEvent('startup/LanguageClientStarted', {
  installationTime: String(installationComplete - installationStart),
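
Note: returning the promise, as in the loadCompletionExtension handler above, is another accepted resolution: the result flows back to whoever executes the command instead of being dropped. A small sketch with simplified stand-ins (registerCommand and sendRequest below are invented, not the real vscode or language-client APIs):

  type Handler = (...args: unknown[]) => unknown;
  const commands = new Map<string, Handler>();

  function registerCommand(id: string, handler: Handler): void {
    commands.set(id, handler);
  }

  async function sendRequest(params: unknown): Promise<string> {
    return `loaded with ${JSON.stringify(params)}`;
  }

  registerCommand('mssql.loadCompletionExtension', (params) => {
    // Returning the promise hands responsibility for it to the caller.
    return sendRequest(params);
  });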
@@ -70,7 +70,7 @@ export class SqlToolsServer {
  return this.client;
 } catch (e) {
  Telemetry.sendTelemetryEvent('ServiceInitializingFailed');
- vscode.window.showErrorMessage(localize('failedToStartServiceErrorMsg', "Failed to start {0}", Constants.serviceName));
+ void vscode.window.showErrorMessage(localize('failedToStartServiceErrorMsg', "Failed to start {0}", Constants.serviceName));
  throw e;
 }
 }
@@ -93,10 +93,10 @@ export class SqlToolsServer {
  return Promise.all([credsStore.start(), resourceProvider.start()]).then();
 }

-dispose() {
+async dispose(): Promise<void> {
 this.disposables.forEach(d => d.dispose());
 if (this.client) {
- this.client.stop();
+ await this.client.stop();
 }
 }
 }
@@ -127,11 +127,11 @@ export class LanguageClientErrorHandler implements ErrorHandler {
 */
 showOnErrorPrompt(): void {
 Telemetry.sendTelemetryEvent(Constants.serviceName + 'Crash');
-vscode.window.showErrorMessage(
+void vscode.window.showErrorMessage(
  localize('serviceCrashMessage', "{0} component exited unexpectedly. Please restart Azure Data Studio.", Constants.serviceName),
  viewKnownIssuesAction).then(action => {
   if (action && action === viewKnownIssuesAction) {
-   vscode.env.openExternal(vscode.Uri.parse(Constants.serviceCrashLink));
+   void vscode.env.openExternal(vscode.Uri.parse(Constants.serviceCrashLink));
   }
  });
 }
@@ -328,7 +328,7 @@ export async function getOrDownloadServer(config: IConfig, handleServerEvent?: (
 // Display message to the user so they know the override is active, but only once so we don't show too many
 if (!overrideMessageDisplayed) {
  overrideMessageDisplayed = true;
- vscode.window.showInformationMessage(overrideMessage);
+ void vscode.window.showInformationMessage(overrideMessage);
 }
 console.log(overrideMessage);
 return serverFullPath;