Remove ApiWrapper from mssql extension (#11336)
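This commit applies one pattern throughout: UI and task calls that used to be routed through an injected ApiWrapper (reached via AppContext) now call the vscode and azdata APIs directly, so several constructors no longer need an AppContext parameter at all. A minimal sketch of the before/after shape, assuming an extension with both API modules available; ExampleTab and reportError are illustrative names, not taken from the diff:

import * as vscode from 'vscode';
import * as azdata from 'azdata';

// Before: UI calls went through the wrapper, e.g.
//   this.appContext.apiWrapper.showErrorMessage(msg);
//   this._tab = this.apiWrapper.createTab(title);
// After: the extension talks to the APIs directly, and classes such as
// SparkAdvancedTab drop their AppContext constructor parameter.
export class ExampleTab {
	private _tab: azdata.window.DialogTab;
	public get tab(): azdata.window.DialogTab { return this._tab; }

	constructor() {
		// Direct azdata call replaces this.apiWrapper.createTab(...)
		this._tab = azdata.window.createTab('EXAMPLE');
	}
}

// Direct vscode call replaces appContext.apiWrapper.showErrorMessage(...)
export function reportError(error: unknown): void {
	vscode.window.showErrorMessage(error instanceof Error ? error.message : String(error));
}

The diff below applies this across the Spark job submission commands, dialog tabs, and the Spark/Yarn history task.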
@@ -47,12 +47,12 @@ export class OpenSparkJobSubmissionDialogCommand extends Command {
 			let dialog = new SparkJobSubmissionDialog(sqlClusterConnection, this.appContext, this.outputChannel);
 			await dialog.openDialog();
 		} catch (error) {
-			this.appContext.apiWrapper.showErrorMessage(getErrorMessage(error));
+			vscode.window.showErrorMessage(getErrorMessage(error));
 		}
 	}

 	private async selectConnection(): Promise<SqlClusterConnection> {
-		let connectionList: azdata.connection.Connection[] = await this.apiWrapper.getActiveConnections();
+		let connectionList: azdata.connection.Connection[] = await azdata.connection.getActiveConnections();
 		let connectionMap: Map<string, azdata.connection.Connection> = new Map();
 		let selectedHost: string = undefined;
 		let showConnectionDialog = false;
@@ -129,11 +129,11 @@ export class OpenSparkJobSubmissionDialogFromFileCommand extends Command {
 			if (node && node.hdfsPath) {
 				path = node.hdfsPath;
 			} else {
-				this.apiWrapper.showErrorMessage(LocalizedConstants.msgMissingNodeContext);
+				vscode.window.showErrorMessage(LocalizedConstants.msgMissingNodeContext);
 				return;
 			}
 		} catch (err) {
-			this.apiWrapper.showErrorMessage(localize('sparkJobSubmission.GetFilePathFromSelectedNodeFailed', "Error Get File Path: {0}", err));
+			vscode.window.showErrorMessage(localize('sparkJobSubmission.GetFilePathFromSelectedNodeFailed', "Error Get File Path: {0}", err));
 			return;
 		}

@@ -148,7 +148,7 @@ export class OpenSparkJobSubmissionDialogFromFileCommand extends Command {
 			let dialog = new SparkJobSubmissionDialog(sqlClusterConnection, this.appContext, this.outputChannel);
 			await dialog.openDialog(path);
 		} catch (error) {
-			this.appContext.apiWrapper.showErrorMessage(getErrorMessage(error));
+			vscode.window.showErrorMessage(getErrorMessage(error));
 		}
 	}
 }
@@ -166,7 +166,7 @@ export class OpenSparkJobSubmissionDialogTask {
 			let dialog = new SparkJobSubmissionDialog(sqlClusterConnection, this.appContext, this.outputChannel);
 			await dialog.openDialog();
 		} catch (error) {
-			this.appContext.apiWrapper.showErrorMessage(getErrorMessage(error));
+			vscode.window.showErrorMessage(getErrorMessage(error));
 		}
 	}
 }

@@ -7,9 +7,6 @@ import * as azdata from 'azdata';
 import * as nls from 'vscode-nls';
 const localize = nls.loadMessageBundle();

-import { AppContext } from '../../../appContext';
-import { ApiWrapper } from '../../../apiWrapper';
-
 export class SparkAdvancedTab {
 	private _tab: azdata.window.DialogTab;
 	public get tab(): azdata.window.DialogTab { return this._tab; }
@@ -18,12 +15,8 @@ export class SparkAdvancedTab {
 	private _referenceJARFilesInputBox: azdata.InputBoxComponent;
 	private _referencePyFilesInputBox: azdata.InputBoxComponent;

-	private get apiWrapper(): ApiWrapper {
-		return this.appContext.apiWrapper;
-	}
-
-	constructor(private appContext: AppContext) {
-		this._tab = this.apiWrapper.createTab(localize('sparkJobSubmission.AdvancedTabName', "ADVANCED"));
+	constructor() {
+		this._tab = azdata.window.createTab(localize('sparkJobSubmission.AdvancedTabName', "ADVANCED"));

 		this._tab.registerContent(async (modelView) => {
 			let builder = modelView.modelBuilder;

@@ -11,8 +11,6 @@ import * as utils from '../../../utils';
 import * as LocalizedConstants from '../../../localizedConstants';
 import * as constants from '../../../constants';

-import { AppContext } from '../../../appContext';
-import { ApiWrapper } from '../../../apiWrapper';
 import { SparkJobSubmissionModel } from './sparkJobSubmissionModel';
 import { SparkFileSource } from './sparkJobSubmissionService';

@@ -33,13 +31,9 @@ export class SparkConfigurationTab {
 	private _mainClassInputBox: azdata.InputBoxComponent;
 	private _argumentsInputBox: azdata.InputBoxComponent;

-	private get apiWrapper(): ApiWrapper {
-		return this.appContext.apiWrapper;
-	}
-
 	// If path is specified, means the default source setting for this tab is HDFS file, otherwise, it would be local file.
-	constructor(private _dataModel: SparkJobSubmissionModel, private appContext: AppContext, private _path?: string) {
-		this._tab = this.apiWrapper.createTab(localize('sparkJobSubmission.GeneralTabName', "GENERAL"));
+	constructor(private _dataModel: SparkJobSubmissionModel, private _path?: string) {
+		this._tab = azdata.window.createTab(localize('sparkJobSubmission.GeneralTabName', "GENERAL"));

 		this._tab.registerContent(async (modelView) => {
 			let builder = modelView.modelBuilder;
@@ -263,14 +257,14 @@ export class SparkConfigurationTab {
 				filters: filter
 			};

-			let fileUris: vscode.Uri[] = await this.apiWrapper.showOpenDialog(options);
+			let fileUris: vscode.Uri[] = await vscode.window.showOpenDialog(options);
 			if (fileUris && fileUris[0]) {
 				return fileUris[0].fsPath;
 			}

 			return undefined;
 		} catch (err) {
-			this.apiWrapper.showErrorMessage(localize('sparkJobSubmission.SelectFileError', "Error in locating the file due to Error: {0}", utils.getErrorMessage(err)));
+			vscode.window.showErrorMessage(localize('sparkJobSubmission.SelectFileError', "Error in locating the file due to Error: {0}", utils.getErrorMessage(err)));
 			return undefined;
 		}
 	}

@@ -10,7 +10,6 @@ import * as utils from '../../../utils';
 import * as LocalizedConstants from '../../../localizedConstants';

 import { AppContext } from '../../../appContext';
-import { ApiWrapper } from '../../../apiWrapper';
 import { SparkJobSubmissionModel } from './sparkJobSubmissionModel';
 import { SparkConfigurationTab } from './sparkConfigurationTab';
 import { SparkJobSubmissionInput } from './sparkJobSubmissionService';
@@ -24,9 +23,6 @@ export class SparkJobSubmissionDialog {
 	private _dataModel: SparkJobSubmissionModel;
 	private _sparkConfigTab: SparkConfigurationTab;
 	private _sparkAdvancedTab: SparkAdvancedTab;
-	private get apiWrapper(): ApiWrapper {
-		return this.appContext.apiWrapper;
-	}

 	constructor(
 		private sqlClusterConnection: SqlClusterConnection,
@@ -39,12 +35,12 @@ export class SparkJobSubmissionDialog {
 	}

 	public async openDialog(path?: string): Promise<void> {
-		this._dialog = this.apiWrapper.createDialog(localize('sparkJobSubmission.DialogTitleNewJob', "New Job"));
+		this._dialog = azdata.window.createModelViewDialog(localize('sparkJobSubmission.DialogTitleNewJob', "New Job"));

 		this._dataModel = new SparkJobSubmissionModel(this.sqlClusterConnection, this._dialog, this.appContext);

-		this._sparkConfigTab = new SparkConfigurationTab(this._dataModel, this.appContext, path);
-		this._sparkAdvancedTab = new SparkAdvancedTab(this.appContext);
+		this._sparkConfigTab = new SparkConfigurationTab(this._dataModel, path);
+		this._sparkAdvancedTab = new SparkAdvancedTab();

 		this._dialog.content = [this._sparkConfigTab.tab, this._sparkAdvancedTab.tab];

@@ -55,13 +51,13 @@ export class SparkJobSubmissionDialog {

 		this._dialog.registerCloseValidator(() => this.handleValidate());

-		await this.apiWrapper.openDialog(this._dialog);
+		azdata.window.openDialog(this._dialog);
 	}

 	private onClickOk(): void {
 		let jobName = localize('sparkJobSubmission.SubmitSparkJob', "{0} Spark Job Submission:",
 			this._sparkConfigTab.getInputValues()[0]);
-		this.apiWrapper.startBackgroundOperation(
+		azdata.tasks.startBackgroundOperation(
 			{
 				connection: this.sqlClusterConnection.connection,
 				displayName: jobName,

@@ -8,6 +8,7 @@ import * as vscode from 'vscode';
 import { AppContext } from '../appContext';
 import { getErrorMessage } from '../utils';
 import * as SqlClusterLookUp from '../sqlClusterLookUp';
+import * as loc from '../localizedConstants';

 export class OpenSparkYarnHistoryTask {
 	constructor(private appContext: AppContext) {
@@ -18,7 +19,7 @@ export class OpenSparkYarnHistoryTask {
 			let sqlClusterConnection = SqlClusterLookUp.findSqlClusterConnection(sqlConnProfile, this.appContext);
 			if (!sqlClusterConnection) {
 				let name = isSpark ? 'Spark' : 'Yarn';
-				this.appContext.apiWrapper.showErrorMessage(`Please connect to the Spark cluster before View ${name} History.`);
+				vscode.window.showErrorMessage(loc.sparkConnectionRequired(name));
 				return;
 			}
 			if (isSpark) {
@@ -28,7 +29,7 @@ export class OpenSparkYarnHistoryTask {
 				vscode.commands.executeCommand('vscode.open', vscode.Uri.parse(this.generateYarnHistoryUrl(sqlClusterConnection.host, sqlClusterConnection.port)));
 			}
 		} catch (error) {
-			this.appContext.apiWrapper.showErrorMessage(getErrorMessage(error));
+			vscode.window.showErrorMessage(getErrorMessage(error));
 		}
 	}