Re-add AD auth for SQL big data cluster (#6759)
* Revert "Revert "Initial AD support for BDCs (#6741)" as it breaks linux (#6758)"
This reverts commit 51660b25ef.
* Install kerberos package on linux
@@ -26,7 +26,6 @@ export class SqlClusterConnection {
 			this._connection = this.toConnection(this._profile);
 		} else {
 			this._connection = connectionInfo;
 			this._profile = this.toConnectionProfile(this._connection);
 		}
 		this._host = this._connection.options[constants.hostPropName];
 		this._port = this._connection.options[constants.knoxPortPropName];
@@ -35,7 +34,6 @@ export class SqlClusterConnection {
 	}
 
 	public get connection(): azdata.connection.Connection { return this._connection; }
 	public get profile(): azdata.IConnectionProfile { return this._profile; }
 	public get host(): string { return this._host; }
 	public get port(): number { return this._port ? Number.parseInt(this._port) : constants.defaultKnoxPort; }
 	public get user(): string { return this._user; }
@@ -50,7 +48,7 @@ export class SqlClusterConnection {
 			.every(e => options1[e] === options2[e]);
 	}
 
-	public createHdfsFileSource(): IFileSource {
+	public async createHdfsFileSource(): Promise<IFileSource> {
 		let options: IHdfsOptions = {
 			protocol: 'https',
 			host: this.host,
@@ -58,13 +56,24 @@ export class SqlClusterConnection {
 			user: this.user,
 			path: 'gateway/default/webhdfs/v1',
 			requestParams: {
-				auth: {
-					user: this.user,
-					pass: this.password
-				}
 			}
 		};
-		return FileSourceFactory.instance.createHdfsFileSource(options);
+		if (this.isIntegratedAuth()) {
+			options.requestParams.isKerberos = this.isIntegratedAuth();
+			options.requestParams.auth = undefined;
+		} else {
+			options.requestParams.auth = {
+				user: this.user,
+				pass: this.password
+			};
+		}
+		let fileSource = await FileSourceFactory.instance.createHdfsFileSource(options);
+		return fileSource;
 	}
 
+	public isIntegratedAuth(): boolean {
+		let authType: string = this._connection.options[constants.authenticationTypePropName];
+		return authType && authType.toLowerCase() === constants.integratedAuth;
+	}
+
 	public updatePassword(password: string): void {
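For reference, a minimal standalone sketch of the auth branching above; the helper name buildRequestParams, its parameters, and the sample credentials are hypothetical, not part of the commit:

// Hypothetical helper mirroring the branch above.
interface IHttpAuthentication { user: string; pass: string; }
interface IRequestParams { auth?: IHttpAuthentication; isKerberos?: boolean; }

function buildRequestParams(integratedAuth: boolean, user: string = '', password: string = ''): IRequestParams {
	const requestParams: IRequestParams = {};
	if (integratedAuth) {
		// AD/integrated auth: flag Kerberos, send no basic credentials.
		requestParams.isKerberos = true;
		requestParams.auth = undefined;
	} else {
		// SQL auth: HTTP basic auth against the Knox gateway.
		requestParams.auth = { user: user, pass: password };
	}
	return requestParams;
}

console.log(buildRequestParams(true));                     // { isKerberos: true, auth: undefined }
console.log(buildRequestParams(false, 'admin', 'secret')); // { auth: { user: 'admin', pass: 'secret' } }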
@@ -90,10 +99,12 @@ export class SqlClusterConnection {
 
 	private getMissingProperties(connectionInfo: azdata.ConnectionInfo): string[] {
 		if (!connectionInfo || !connectionInfo.options) { return undefined; }
-		return [
-			constants.hostPropName, constants.knoxPortPropName,
-			constants.userPropName, constants.passwordPropName
-		].filter(e => connectionInfo.options[e] === undefined);
+		let requiredProps = [constants.hostPropName, constants.knoxPortPropName];
+		let authType = connectionInfo.options[constants.authenticationTypePropName] && connectionInfo.options[constants.authenticationTypePropName].toLowerCase();
+		if (authType !== constants.integratedAuth) {
+			requiredProps.push(constants.userPropName, constants.passwordPropName);
+		}
+		return requiredProps.filter(e => connectionInfo.options[e] === undefined);
 	}
 
 	private toConnection(connProfile: azdata.IConnectionProfile): azdata.connection.Connection {
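The effect of the new validation: host and Knox port are always required, while user and password are required only for non-integrated connections. A self-contained sketch (the literal property names stand in for the values behind constants, which this diff does not show):

// Hypothetical stand-ins for the constants referenced above.
const hostPropName = 'host', knoxPortPropName = 'knoxport';
const userPropName = 'user', passwordPropName = 'password';
const authenticationTypePropName = 'authenticationType', integratedAuth = 'integrated';

function getMissingProperties(options: { [key: string]: any }): string[] {
	let requiredProps = [hostPropName, knoxPortPropName];
	let authType = options[authenticationTypePropName] && options[authenticationTypePropName].toLowerCase();
	if (authType !== integratedAuth) {
		requiredProps.push(userPropName, passwordPropName);
	}
	return requiredProps.filter(e => options[e] === undefined);
}

console.log(getMissingProperties({ host: 'h', knoxport: '30443', authenticationType: 'Integrated' })); // []
console.log(getMissingProperties({ host: 'h', knoxport: '30443' }));                                   // ['user', 'password']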
@@ -101,18 +112,4 @@ export class SqlClusterConnection {
 			{ connectionId: this._profile.id });
 		return connection;
 	}
-
-	private toConnectionProfile(connectionInfo: azdata.connection.Connection): azdata.IConnectionProfile {
-		let options = connectionInfo.options;
-		let connProfile: azdata.IConnectionProfile = Object.assign(<azdata.IConnectionProfile>{},
-			connectionInfo,
-			{
-				serverName: `${options[constants.hostPropName]},${options[constants.knoxPortPropName]}`,
-				userName: options[constants.userPropName],
-				password: options[constants.passwordPropName],
-				id: connectionInfo.connectionId,
-			}
-		);
-		return connProfile;
-	}
 }
@@ -16,6 +16,7 @@ import * as nls from 'vscode-nls';
 
 import * as constants from '../constants';
 import { WebHDFS, HdfsError } from './webhdfs';
+import * as auth from '../util/auth';
 
 const localize = nls.loadMessageBundle();
 
@@ -84,11 +85,13 @@ export interface IHdfsOptions {
 
 export interface IRequestParams {
 	auth?: IHttpAuthentication;
+	isKerberos?: boolean;
 	/**
 	 * Timeout in milliseconds to wait for response
 	 */
 	timeout?: number;
 	agent?: https.Agent;
+	headers?: {};
 }
 
 export interface IHdfsFileStatus {
@@ -106,10 +109,10 @@ export class FileSourceFactory {
 		return FileSourceFactory._instance;
 	}
 
-	public createHdfsFileSource(options: IHdfsOptions): IFileSource {
+	public async createHdfsFileSource(options: IHdfsOptions): Promise<IFileSource> {
 		options = options && options.host ? FileSourceFactory.removePortFromHost(options) : options;
 		let requestParams: IRequestParams = options.requestParams ? options.requestParams : {};
-		if (requestParams.auth) {
+		if (requestParams.auth || requestParams.isKerberos) {
 			// TODO Remove handling of unsigned cert once we have real certs in our Knox service
 			let agentOptions = {
 				host: options.host,
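The agentOptions literal continues in the next hunk, where it feeds new https.Agent. A hedged sketch of the whole construct (the port and the rejectUnauthorized flag are assumptions inferred from the TODO about unsigned certs; only host is visible in the diff):

import * as https from 'https';

// Assumed shape; only `host: options.host` appears in the hunks.
let agentOptions = {
	host: 'my-bdc-gateway.example.com', // hypothetical gateway host
	port: 30443,                        // hypothetical Knox port
	rejectUnauthorized: false           // tolerate the unsigned Knox cert, per the TODO
};
let agent = new https.Agent(agentOptions);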
@@ -119,6 +122,11 @@ export class FileSourceFactory {
 			};
 			let agent = new https.Agent(agentOptions);
 			requestParams['agent'] = agent;
 		}
+
+		if (requestParams.isKerberos) {
+			let kerberosToken = await auth.authenticateKerberos(options.host);
+			requestParams.headers = { Authorization: `Negotiate ${kerberosToken}` };
+		}
 		return new HdfsFileSource(WebHDFS.createClient(options, requestParams));
 	}
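The diff calls auth.authenticateKerberos but does not include util/auth.ts. Given the commit message's note about installing the kerberos package on linux, a plausible sketch of such a helper (the service-name format and the single SPNEGO step are assumptions, not the commit's actual code):

// Hypothetical sketch of util/auth.ts; not part of this diff.
import * as kerberos from 'kerberos';

export async function authenticateKerberos(host: string): Promise<string> {
	// SSPI on Windows expects 'HTTP/host'; GSSAPI elsewhere expects 'HTTP@host'.
	const service = 'HTTP' + (process.platform === 'win32' ? '/' : '@') + host;
	const client = await kerberos.initializeClient(service, {});
	// One SPNEGO step produces the base64 token placed in the Negotiate header above.
	const token = await client.step('');
	return token;
}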
@@ -63,9 +63,9 @@ export class HdfsProvider implements vscode.TreeDataProvider<TreeNode>, ITreeCha
 		}
 	}
 
-	addHdfsConnection(options: IHdfsOptions): void {
+	public async addHdfsConnection(options: IHdfsOptions): Promise<void> {
 		let displayName = `${options.user}@${options.host}:${options.port}`;
-		let fileSource = FileSourceFactory.instance.createHdfsFileSource(options);
+		let fileSource = await FileSourceFactory.instance.createHdfsFileSource(options);
 		this.addConnection(displayName, fileSource);
 	}
 
@@ -120,10 +120,10 @@ export class MssqlObjectExplorerNodeProvider extends ProviderBase implements azd
 		if (children.length === 1 && this.hasExpansionError(children)) {
 			if (children[0].errorStatusCode === 401) {
 				//Prompt for password
-				let password: string = await this.promptPassword(localize('prmptPwd', 'Please provide the password to connect to HDFS:'));
+				let password: string = await this.promptPassword(localize('prmptPwd', "Please provide the password to connect to HDFS:"));
 				if (password && password.length > 0) {
 					session.sqlClusterConnection.updatePassword(password);
-					node.updateFileSource(session.sqlClusterConnection);
+					await node.updateFileSource(session.sqlClusterConnection);
 					children = await node.getChildren(true);
 				}
 			}
@@ -181,7 +181,7 @@ export class MssqlObjectExplorerNodeProvider extends ProviderBase implements azd
 		try {
 			let session = this.getSqlClusterSessionForNode(node);
 			if (!session) {
-				this.appContext.apiWrapper.showErrorMessage(localize('sessionNotFound', 'Session for node {0} does not exist', node.nodePathValue));
+				this.appContext.apiWrapper.showErrorMessage(localize('sessionNotFound', "Session for node {0} does not exist", node.nodePathValue));
 			} else {
 				let nodeInfo = node.getNodeInfo();
 				let expandInfo: azdata.ExpandNodeInfo = {
@@ -191,7 +191,7 @@ export class MssqlObjectExplorerNodeProvider extends ProviderBase implements azd
 				await this.refreshNode(expandInfo);
 			}
 		} catch (err) {
-			mssqlOutputChannel.appendLine(localize('notifyError', 'Error notifying of node change: {0}', err));
+			mssqlOutputChannel.appendLine(localize('notifyError', "Error notifying of node change: {0}", err));
 		}
 	}
 
@@ -295,7 +295,7 @@ class SqlClusterRootNode extends TreeNode {
 
 	getNodeInfo(): azdata.NodeInfo {
 		let nodeInfo: azdata.NodeInfo = {
-			label: localize('rootLabel', 'Root'),
+			label: localize('rootLabel', "Root"),
 			isLeaf: false,
 			errorMessage: undefined,
 			metadata: undefined,
@@ -325,22 +325,27 @@ class DataServicesNode extends TreeNode {
 
 	public getChildren(refreshChildren: boolean): TreeNode[] | Promise<TreeNode[]> {
 		if (refreshChildren || !this._children) {
-			this._children = [];
-			let fileSource: IFileSource = this.session.sqlClusterConnection.createHdfsFileSource();
-			let hdfsNode = new ConnectionNode(this._context, localize('hdfsFolder', 'HDFS'), fileSource);
-			hdfsNode.parent = this;
-			this._children.push(hdfsNode);
+			return this.refreshChildren();
 		}
 		return this._children;
 	}
 
+	private async refreshChildren(): Promise<TreeNode[]> {
+		this._children = [];
+		let fileSource: IFileSource = await this.session.sqlClusterConnection.createHdfsFileSource();
+		let hdfsNode = new ConnectionNode(this._context, localize('hdfsFolder', "HDFS"), fileSource);
+		hdfsNode.parent = this;
+		this._children.push(hdfsNode);
+		return this._children;
+	}
+
 	getTreeItem(): vscode.TreeItem | Promise<vscode.TreeItem> {
 		throw new Error('Not intended for use in a file explorer view.');
 	}
 
 	getNodeInfo(): azdata.NodeInfo {
 		let nodeInfo: azdata.NodeInfo = {
-			label: localize('dataServicesLabel', 'Data Services'),
+			label: localize('dataServicesLabel', "Data Services"),
 			isLeaf: false,
 			errorMessage: undefined,
 			metadata: undefined,
@@ -352,4 +357,4 @@ class DataServicesNode extends TreeNode {
 		};
 		return nodeInfo;
 	}
-}
+}
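The getChildren change above is the usual trick for retrofitting async work onto a tree API whose signature is TreeNode[] | Promise<TreeNode[]>: when a refresh is needed, return the promise from a private async helper instead of making getChildren itself async. A reduced, hypothetical sketch of the pattern:

// Hypothetical reduction of the pattern; not the commit's code.
class LazyNode {
	private _children: string[] | undefined;

	public getChildren(refresh: boolean): string[] | Promise<string[]> {
		if (refresh || !this._children) {
			// Callers that take the sync path are unaffected; refreshers get a promise.
			return this.refreshChildren();
		}
		return this._children;
	}

	private async refreshChildren(): Promise<string[]> {
		// Stands in for awaiting createHdfsFileSource() and building the HDFS node.
		this._children = [await Promise.resolve('HDFS')];
		return this._children;
	}
}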
@@ -78,8 +78,8 @@ export abstract class TreeNode implements ITreeNode {
 		return undefined;
 	}
 
-	public updateFileSource(connection: SqlClusterConnection): void {
-		this.fileSource = connection.createHdfsFileSource();
+	public async updateFileSource(connection: SqlClusterConnection): Promise<void> {
+		this.fileSource = await connection.createHdfsFileSource();
 	}
 	/**
 	 * The value to use for this node in the node path
@@ -482,15 +482,17 @@ export class WebHDFS {
 
 		let stream = undefined;
 		let canResume: boolean = true;
-		let params = Object.assign(
+		let params: any = Object.assign(
 			{
 				method: append ? 'POST' : 'PUT',
 				url: endpoint,
 				json: true,
-				headers: { 'content-type': 'application/octet-stream' }
 			},
 			this._requestParams
 		);
+		params.headers = params.headers || {};
+		params.headers['content-type'] = 'application/octet-stream';
 
 		let req = request(params, (error, response, body) => {
 			// Handle redirect only if there was not an error (e.g. res is defined)
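Moving content-type out of the Object.assign literal matters because Object.assign merges shallowly: a headers object arriving via this._requestParams (which may now carry the Kerberos Negotiate header) would replace the literal's headers object wholesale, silently dropping content-type. Setting it after the merge keeps both. A small illustration (the token string is a placeholder):

// Illustration only: a shallow merge replaces the first headers object entirely.
let merged: any = Object.assign(
	{ headers: { 'content-type': 'application/octet-stream' } },
	{ headers: { Authorization: 'Negotiate <token>' } } // e.g. from requestParams
);
console.log(merged.headers); // { Authorization: 'Negotiate <token>' }, content-type lost

// The commit's fix: merge first, then re-apply content-type.
merged.headers = merged.headers || {};
merged.headers['content-type'] = 'application/octet-stream';
console.log(merged.headers); // both Authorization and content-type present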