Mirror of https://github.com/ckaczor/azuredatastudio.git, synced 2026-02-07 17:23:56 -05:00
Fix node update in bdc dashboard on reconnect (#8138)
* Fix node update in bdc dashboard on reconnect
* Fix no-floating-promises violations
* Fix opening from dashboard to always save controller node
@@ -311,7 +311,6 @@ export class ClusterController {
 				this._connectionPromise = this._dialog.showDialog();
 			}
 			const controller = await this._connectionPromise;
-			this._connectionPromise = undefined;
 			if (controller) {
 				this._username = controller._username;
 				this._password = controller._password;
@@ -325,6 +324,8 @@ export class ClusterController {
 			}
 		} catch (error) {
 			throw new ControllerError(error, errorMessage);
+		} finally {
+			this._connectionPromise = undefined;
 		}
 	}
 }
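These two hunks move the reset of _connectionPromise into a finally block, so the cached dialog promise is cleared even when connecting throws. A minimal standalone sketch of that pattern, using a placeholder Controller type and dialog callback rather than the extension's real classes:

// Sketch: cache an in-flight dialog promise and always clear it in `finally`.
interface Controller { username: string; password: string; }

class ConnectionGate {
	private connectionPromise: Promise<Controller | undefined> | undefined;

	constructor(private showDialog: () => Promise<Controller | undefined>) { }

	async connect(): Promise<Controller | undefined> {
		try {
			if (!this.connectionPromise) {
				// Reuse an already-open dialog instead of opening a second one.
				this.connectionPromise = this.showDialog();
			}
			return await this.connectionPromise;
		} finally {
			// Resetting here (rather than right after the await) guarantees the
			// cached promise is dropped even if the dialog rejects.
			this.connectionPromise = undefined;
		}
	}
}

Clearing the cache in finally is what lets a later call re-open the dialog after a failed attempt.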
@@ -12,7 +12,7 @@ import { AuthType } from '../constants';
 import { ConnectControllerDialog, ConnectControllerModel } from './connectControllerDialog';
 import { ControllerTreeDataProvider } from '../tree/controllerTreeDataProvider';
 
-export type BdcDashboardOptions = { url: string, auth: AuthType, username: string, password: string };
+export type BdcDashboardOptions = { url: string, auth: AuthType, username: string, password: string, rememberPassword: boolean };
 
 export type BdcErrorType = 'bdcStatus' | 'bdcEndpoints' | 'general';
 export type BdcErrorEvent = { error: Error, errorType: BdcErrorType };
@@ -121,12 +121,20 @@ export class BdcDashboardModel {
 	 */
 	private async promptReconnect(): Promise<void> {
 		this._clusterController = await new ConnectControllerDialog(new ConnectControllerModel(this._options)).showDialog();
+		await this.updateController();
+	}
+
+	private async updateController(): Promise<void> {
+		if (!this._clusterController) {
+			return;
+		}
 		this._treeDataProvider.addOrUpdateController(
 			this._clusterController.url,
 			this._clusterController.authType,
 			this._clusterController.username,
 			this._clusterController.password,
-			/* Remember password */false);
+			this._options.rememberPassword);
 		await this._treeDataProvider.saveControllers();
 	}
 }
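The new updateController step writes the reconnected controller's details back into the tree and persists them, carrying the user's original rememberPassword choice instead of hard-coding false. A rough sketch of that flow under simplified, assumed interfaces (the real ControllerTreeDataProvider surface is larger than this):

// Sketch only: simplified stand-ins for the extension's controller and tree types.
interface ControllerInfo { url: string; authType: string; username: string; password: string; }
interface DashboardOptions { url: string; auth: string; username: string; password: string; rememberPassword: boolean; }

interface TreeProvider {
	addOrUpdateController(url: string, auth: string, username: string, password: string, rememberPassword: boolean): void;
	saveControllers(): Promise<void>;
}

class DashboardModel {
	private clusterController: ControllerInfo | undefined;

	constructor(private options: DashboardOptions, private tree: TreeProvider) { }

	// Called after a reconnect prompt succeeds: push the corrected details into the tree.
	async updateController(reconnected: ControllerInfo): Promise<void> {
		this.clusterController = reconnected;
		this.tree.addOrUpdateController(
			reconnected.url,
			reconnected.authType,
			reconnected.username,
			reconnected.password,
			// Respect whatever the user chose when the dashboard was opened.
			this.options.rememberPassword);
		await this.tree.saveControllers();
	}
}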
@@ -62,9 +62,20 @@ function registerCommands(context: vscode.ExtensionContext, treeDataProvider: Co
 		treeDataProvider.notifyNodeChanged(node);
 	});
 
-	vscode.commands.registerCommand(ManageControllerCommand, async (info: ControllerNode | BdcDashboardOptions) => {
+	vscode.commands.registerCommand(ManageControllerCommand, async (info: ControllerNode | BdcDashboardOptions, addOrUpdateController: boolean = false) => {
 		const title: string = `${localize('bdc.dashboard.title', "Big Data Cluster Dashboard -")} ${ControllerNode.toIpAndPort(info.url)}`;
-		const dashboard: BdcDashboard = new BdcDashboard(title, new BdcDashboardModel(info, treeDataProvider));
+		if (addOrUpdateController) {
+			// The info may be wrong, but if it is then we'll prompt to reconnect when the dashboard is opened
+			// and update with the correct info then
+			treeDataProvider.addOrUpdateController(
+				info.url,
+				info.auth,
+				info.username,
+				info.password,
+				info.rememberPassword);
+			await treeDataProvider.saveControllers();
+		}
+		const dashboard: BdcDashboard = new BdcDashboard(title, new BdcDashboardModel(info, treeDataProvider, /*ignoreSslVerification*/true));
 		dashboard.showDashboard();
 	});
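Here ManageControllerCommand gains an optional trailing addOrUpdateController flag, so call sites that open the dashboard from outside the tree can ask for the node to be saved up front. A hedged sketch of registering and invoking a VS Code command with such an optional second argument; the command id, option shape, and message below are illustrative, not the extension's real identifiers:

import * as vscode from 'vscode';

// Illustrative option shape mirroring the BdcDashboardOptions fields used above.
type DashboardOptions = { url: string, auth: string, username: string, password: string, rememberPassword: boolean };

export function activate(context: vscode.ExtensionContext): void {
	const savedControllers: DashboardOptions[] = [];

	context.subscriptions.push(vscode.commands.registerCommand(
		'sketch.manageController',
		async (info: DashboardOptions, addOrUpdateController: boolean = false) => {
			if (addOrUpdateController) {
				// Save the node eagerly; a reconnect prompt can correct bad details later.
				savedControllers.push(info);
			}
			// ...open the dashboard for info.url here...
			await vscode.window.showInformationMessage(`Opening dashboard for ${info.url}`);
		}));
}

// A caller that wants the node saved passes the extra positional argument:
//   await vscode.commands.executeCommand('sketch.manageController', options, /*addOrUpdateController*/ true);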
@@ -112,14 +112,14 @@ function activateSparkFeatures(appContext: AppContext): void {
 	let outputChannel: vscode.OutputChannel = mssqlOutputChannel;
 	extensionContext.subscriptions.push(new OpenSparkJobSubmissionDialogCommand(appContext, outputChannel));
 	extensionContext.subscriptions.push(new OpenSparkJobSubmissionDialogFromFileCommand(appContext, outputChannel));
-	apiWrapper.registerTaskHandler(Constants.mssqlClusterLivySubmitSparkJobTask, (profile: azdata.IConnectionProfile) => {
-		new OpenSparkJobSubmissionDialogTask(appContext, outputChannel).execute(profile);
+	apiWrapper.registerTaskHandler(Constants.mssqlClusterLivySubmitSparkJobTask, async (profile: azdata.IConnectionProfile) => {
+		await new OpenSparkJobSubmissionDialogTask(appContext, outputChannel).execute(profile);
 	});
-	apiWrapper.registerTaskHandler(Constants.mssqlClusterLivyOpenSparkHistory, (profile: azdata.IConnectionProfile) => {
-		new OpenSparkYarnHistoryTask(appContext).execute(profile, true);
+	apiWrapper.registerTaskHandler(Constants.mssqlClusterLivyOpenSparkHistory, async (profile: azdata.IConnectionProfile) => {
+		await new OpenSparkYarnHistoryTask(appContext).execute(profile, true);
 	});
-	apiWrapper.registerTaskHandler(Constants.mssqlClusterLivyOpenYarnHistory, (profile: azdata.IConnectionProfile) => {
-		new OpenSparkYarnHistoryTask(appContext).execute(profile, false);
+	apiWrapper.registerTaskHandler(Constants.mssqlClusterLivyOpenYarnHistory, async (profile: azdata.IConnectionProfile) => {
+		await new OpenSparkYarnHistoryTask(appContext).execute(profile, false);
 	});
 }
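This hunk is the no-floating-promises part of the commit message: each Spark task handler becomes async and awaits execute, so a rejected promise is no longer silently dropped. A generic sketch of the same before/after pattern; registerTaskHandler below is a local stand-in, not the azdata or apiWrapper API:

// Stand-in task registry; a real host would invoke the handler when the task runs.
type TaskHandler = (profile: { connectionId: string }) => void | Promise<void>;
const handlers = new Map<string, TaskHandler>();

function registerTaskHandler(taskId: string, handler: TaskHandler): void {
	handlers.set(taskId, handler);
}

class HistoryTask {
	async execute(profile: { connectionId: string }, sparkHistory: boolean): Promise<void> {
		// Placeholder for opening the Spark or Yarn history page for this connection.
		console.log(`open ${sparkHistory ? 'Spark' : 'Yarn'} history for ${profile.connectionId}`);
	}
}

// Before: execute() returned a promise nobody awaited, so failures vanished.
// registerTaskHandler('livy.openSparkHistory', (profile) => {
// 	new HistoryTask().execute(profile, true);
// });

// After: the handler is async and awaits the task, so errors surface to the task host.
registerTaskHandler('livy.openSparkHistory', async (profile) => {
	await new HistoryTask().execute(profile, true);
});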
@@ -216,8 +216,9 @@ async function handleOpenClusterDashboardTask(profile: azdata.IConnectionProfile
 		url: controller.endpoint,
 		auth: profile.authenticationType === 'Integrated' ? AuthType.Integrated : AuthType.Basic,
 		username: 'admin', // Default to admin as a best-guess, we'll prompt for re-entering credentials if that fails
-		password: profile.password
-	});
+		password: profile.password,
+		rememberPassword: true
+	}, /*addOrUpdateController*/true);
 }
 
 // this method is called when your extension is deactivated