Revert "Initial AD support for BDCs (#6741)" as it breaks linux (#6758)

This reverts commit 52f8984a99.
Author: Kevin Cunnane
Committed by: GitHub
Date: 2019-08-14 19:16:52 -07:00
Parent: 52f8984a99
Commit: 51660b25ef
31 changed files with 189 additions and 639 deletions


@@ -26,6 +26,7 @@ export class SqlClusterConnection {
             this._connection = this.toConnection(this._profile);
         } else {
             this._connection = connectionInfo;
+            this._profile = this.toConnectionProfile(this._connection);
         }
         this._host = this._connection.options[constants.hostPropName];
         this._port = this._connection.options[constants.knoxPortPropName];
@@ -34,6 +35,7 @@ export class SqlClusterConnection {
     }
     public get connection(): azdata.connection.Connection { return this._connection; }
+    public get profile(): azdata.IConnectionProfile { return this._profile; }
     public get host(): string { return this._host; }
     public get port(): number { return this._port ? Number.parseInt(this._port) : constants.defaultKnoxPort; }
     public get user(): string { return this._user; }
@@ -48,7 +50,7 @@ export class SqlClusterConnection {
             .every(e => options1[e] === options2[e]);
     }
-    public async createHdfsFileSource(): Promise<IFileSource> {
+    public createHdfsFileSource(): IFileSource {
         let options: IHdfsOptions = {
             protocol: 'https',
             host: this.host,
@@ -56,24 +58,13 @@ export class SqlClusterConnection {
             user: this.user,
             path: 'gateway/default/webhdfs/v1',
             requestParams: {
+                auth: {
+                    user: this.user,
+                    pass: this.password
+                }
             }
         };
-        if (this.isIntegratedAuth()) {
-            options.requestParams.isKerberos = this.isIntegratedAuth();
-            options.requestParams.auth = undefined;
-        } else {
-            options.requestParams.auth = {
-                user: this.user,
-                pass: this.password
-            };
-        }
-        let fileSource = await FileSourceFactory.instance.createHdfsFileSource(options);
-        return fileSource;
-    }
-    public isIntegratedAuth(): boolean {
-        let authType: string = this._connection.options[constants.authenticationTypePropName];
-        return authType && authType.toLowerCase() === constants.integratedAuth;
+        return FileSourceFactory.instance.createHdfsFileSource(options);
     }
     public updatePassword(password: string): void {
@@ -99,12 +90,10 @@ export class SqlClusterConnection {
     private getMissingProperties(connectionInfo: azdata.ConnectionInfo): string[] {
         if (!connectionInfo || !connectionInfo.options) { return undefined; }
-        let requiredProps = [constants.hostPropName, constants.knoxPortPropName];
-        let authType = connectionInfo.options[constants.authenticationTypePropName] && connectionInfo.options[constants.authenticationTypePropName].toLowerCase();
-        if (authType !== constants.integratedAuth) {
-            requiredProps.push(constants.userPropName, constants.passwordPropName);
-        }
-        return requiredProps.filter(e => connectionInfo.options[e] === undefined);
+        return [
+            constants.hostPropName, constants.knoxPortPropName,
+            constants.userPropName, constants.passwordPropName
+        ].filter(e => connectionInfo.options[e] === undefined);
     }
     private toConnection(connProfile: azdata.IConnectionProfile): azdata.connection.Connection {
@@ -112,4 +101,18 @@ export class SqlClusterConnection {
             { connectionId: this._profile.id });
         return connection;
     }
+    private toConnectionProfile(connectionInfo: azdata.connection.Connection): azdata.IConnectionProfile {
+        let options = connectionInfo.options;
+        let connProfile: azdata.IConnectionProfile = Object.assign(<azdata.IConnectionProfile>{},
+            connectionInfo,
+            {
+                serverName: `${options[constants.hostPropName]},${options[constants.knoxPortPropName]}`,
+                userName: options[constants.userPropName],
+                password: options[constants.passwordPropName],
+                id: connectionInfo.connectionId,
+            }
+        );
+        return connProfile;
+    }
 }
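After the revert, createHdfsFileSource always builds basic-auth WebHDFS options against the Knox gateway path; there is no integrated-auth branch left. A minimal sketch of the resulting options shape, using simplified stand-ins for the extension's IHdfsOptions and IHttpAuthentication interfaces (assumptions, not the real declarations):

// Simplified stand-ins for the extension's interfaces.
interface IHttpAuthentication { user: string; pass: string; }
interface IHdfsOptions {
    protocol: string;
    host: string;
    port: number;
    user: string;
    path: string;
    requestParams: { auth?: IHttpAuthentication };
}

// Mirrors the restored flow: every connection authenticates to the Knox-fronted
// WebHDFS endpoint with a username and password.
function buildKnoxWebHdfsOptions(host: string, port: number, user: string, password: string): IHdfsOptions {
    return {
        protocol: 'https',
        host: host,
        port: port,
        user: user,
        path: 'gateway/default/webhdfs/v1',
        requestParams: {
            auth: { user: user, pass: password }
        }
    };
}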


@@ -16,7 +16,6 @@ import * as nls from 'vscode-nls';
 import * as constants from '../constants';
 import { WebHDFS, HdfsError } from './webhdfs';
-import * as auth from '../util/auth';
 const localize = nls.loadMessageBundle();
@@ -85,13 +84,11 @@ export interface IHdfsOptions {
 export interface IRequestParams {
     auth?: IHttpAuthentication;
-    isKerberos?: boolean;
     /**
      * Timeout in milliseconds to wait for response
      */
     timeout?: number;
     agent?: https.Agent;
-    headers?: {};
 }
 export interface IHdfsFileStatus {
@@ -109,10 +106,10 @@ export class FileSourceFactory {
         return FileSourceFactory._instance;
     }
-    public async createHdfsFileSource(options: IHdfsOptions): Promise<IFileSource> {
+    public createHdfsFileSource(options: IHdfsOptions): IFileSource {
         options = options && options.host ? FileSourceFactory.removePortFromHost(options) : options;
         let requestParams: IRequestParams = options.requestParams ? options.requestParams : {};
-        if (requestParams.auth || requestParams.isKerberos) {
+        if (requestParams.auth) {
             // TODO Remove handling of unsigned cert once we have real certs in our Knox service
             let agentOptions = {
                 host: options.host,
@@ -122,11 +119,6 @@ export class FileSourceFactory {
             };
             let agent = new https.Agent(agentOptions);
             requestParams['agent'] = agent;
         }
-        if (requestParams.isKerberos) {
-            let kerberosToken = await auth.authenticateKerberos(options.host);
-            requestParams.headers = { Authorization: `Negotiate ${kerberosToken}` };
-        }
         return new HdfsFileSource(WebHDFS.createClient(options, requestParams));
     }
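The deleted branch above is what carried the Kerberos support: when requestParams.isKerberos was set, the factory negotiated a SPNEGO token for the host and attached it as an HTTP Negotiate header before creating the WebHDFS client. A sketch of that flow, with an injected token provider standing in for the removed util/auth helper (illustrative types, not the extension's real ones):

// Token provider stands in for the reverted auth.authenticateKerberos(host) helper.
type KerberosTokenProvider = (host: string) => Promise<string>;

interface IRequestParams {
    auth?: { user: string; pass: string };
    isKerberos?: boolean;
    headers?: { [name: string]: string };
}

// Sketch of the deleted path: for integrated auth no basic credentials are sent;
// a SPNEGO token is negotiated for the host and sent as "Authorization: Negotiate <token>".
async function applyKerberosHeader(host: string, requestParams: IRequestParams, getToken: KerberosTokenProvider): Promise<void> {
    if (requestParams.isKerberos) {
        const kerberosToken = await getToken(host);
        requestParams.headers = { Authorization: `Negotiate ${kerberosToken}` };
    }
}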


@@ -63,9 +63,9 @@ export class HdfsProvider implements vscode.TreeDataProvider<TreeNode>, ITreeCha
         }
     }
-    public async addHdfsConnection(options: IHdfsOptions): Promise<void> {
+    addHdfsConnection(options: IHdfsOptions): void {
         let displayName = `${options.user}@${options.host}:${options.port}`;
-        let fileSource = await FileSourceFactory.instance.createHdfsFileSource(options);
+        let fileSource = FileSourceFactory.instance.createHdfsFileSource(options);
         this.addConnection(displayName, fileSource);
     }


@@ -120,10 +120,10 @@ export class MssqlObjectExplorerNodeProvider extends ProviderBase implements azd
             if (children.length === 1 && this.hasExpansionError(children)) {
                 if (children[0].errorStatusCode === 401) {
                     //Prompt for password
-                    let password: string = await this.promptPassword(localize('prmptPwd', "Please provide the password to connect to HDFS:"));
+                    let password: string = await this.promptPassword(localize('prmptPwd', 'Please provide the password to connect to HDFS:'));
                     if (password && password.length > 0) {
                         session.sqlClusterConnection.updatePassword(password);
-                        await node.updateFileSource(session.sqlClusterConnection);
+                        node.updateFileSource(session.sqlClusterConnection);
                         children = await node.getChildren(true);
                     }
                 }
@@ -181,7 +181,7 @@ export class MssqlObjectExplorerNodeProvider extends ProviderBase implements azd
         try {
             let session = this.getSqlClusterSessionForNode(node);
             if (!session) {
-                this.appContext.apiWrapper.showErrorMessage(localize('sessionNotFound', "Session for node {0} does not exist", node.nodePathValue));
+                this.appContext.apiWrapper.showErrorMessage(localize('sessionNotFound', 'Session for node {0} does not exist', node.nodePathValue));
             } else {
                 let nodeInfo = node.getNodeInfo();
                 let expandInfo: azdata.ExpandNodeInfo = {
@@ -191,7 +191,7 @@ export class MssqlObjectExplorerNodeProvider extends ProviderBase implements azd
                 await this.refreshNode(expandInfo);
             }
         } catch (err) {
-            mssqlOutputChannel.appendLine(localize('notifyError', "Error notifying of node change: {0}", err));
+            mssqlOutputChannel.appendLine(localize('notifyError', 'Error notifying of node change: {0}', err));
         }
     }
@@ -295,7 +295,7 @@ class SqlClusterRootNode extends TreeNode {
     getNodeInfo(): azdata.NodeInfo {
         let nodeInfo: azdata.NodeInfo = {
-            label: localize('rootLabel', "Root"),
+            label: localize('rootLabel', 'Root'),
             isLeaf: false,
             errorMessage: undefined,
             metadata: undefined,
@@ -325,27 +325,22 @@ class DataServicesNode extends TreeNode {
     public getChildren(refreshChildren: boolean): TreeNode[] | Promise<TreeNode[]> {
         if (refreshChildren || !this._children) {
-            return this.refreshChildren();
+            this._children = [];
+            let fileSource: IFileSource = this.session.sqlClusterConnection.createHdfsFileSource();
+            let hdfsNode = new ConnectionNode(this._context, localize('hdfsFolder', 'HDFS'), fileSource);
+            hdfsNode.parent = this;
+            this._children.push(hdfsNode);
         }
         return this._children;
     }
-    private async refreshChildren(): Promise<TreeNode[]> {
-        this._children = [];
-        let fileSource: IFileSource = await this.session.sqlClusterConnection.createHdfsFileSource();
-        let hdfsNode = new ConnectionNode(this._context, localize('hdfsFolder', "HDFS"), fileSource);
-        hdfsNode.parent = this;
-        this._children.push(hdfsNode);
-        return this._children;
-    }
     getTreeItem(): vscode.TreeItem | Promise<vscode.TreeItem> {
         throw new Error('Not intended for use in a file explorer view.');
     }
     getNodeInfo(): azdata.NodeInfo {
         let nodeInfo: azdata.NodeInfo = {
-            label: localize('dataServicesLabel', "Data Services"),
+            label: localize('dataServicesLabel', 'Data Services'),
             isLeaf: false,
             errorMessage: undefined,
             metadata: undefined,
@@ -357,4 +352,4 @@ class DataServicesNode extends TreeNode {
         };
         return nodeInfo;
     }
-}
+}
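The substantive change in DataServicesNode is that child creation is synchronous again: before the revert, getChildren had to defer to an async refreshChildren because creating the file source could involve a Kerberos handshake. A minimal sketch of the sync-or-async contract this relies on (illustrative types only, not the extension's TreeNode hierarchy):

// Illustrative only; the real ConnectionNode/TreeNode types are richer than this.
interface Node { label: string; }

class ExampleDataServicesNode {
    private children: Node[] = [];

    // The tree contract allows either a ready array or a promise of one; after the
    // revert the HDFS child can be built inline, so the synchronous shape suffices.
    getChildren(refreshChildren: boolean): Node[] | Promise<Node[]> {
        if (refreshChildren || this.children.length === 0) {
            this.children = [{ label: 'HDFS' }]; // built synchronously, no Kerberos handshake needed
        }
        return this.children;
    }
}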


@@ -78,8 +78,8 @@ export abstract class TreeNode implements ITreeNode {
         return undefined;
     }
-    public async updateFileSource(connection: SqlClusterConnection): Promise<void> {
-        this.fileSource = await connection.createHdfsFileSource();
+    public updateFileSource(connection: SqlClusterConnection): void {
+        this.fileSource = connection.createHdfsFileSource();
     }
     /**
      * The value to use for this node in the node path


@@ -482,17 +482,15 @@ export class WebHDFS {
         let stream = undefined;
         let canResume: boolean = true;
-        let params: any = Object.assign(
+        let params = Object.assign(
             {
                 method: append ? 'POST' : 'PUT',
                 url: endpoint,
                 json: true,
+                headers: { 'content-type': 'application/octet-stream' }
             },
             this._requestParams
         );
-        params.headers = params.headers || {};
-        params.headers['content-type'] = 'application/octet-stream';
         let req = request(params, (error, response, body) => {
             // Handle redirect only if there was not an error (e.g. res is defined)
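The two header forms differ because Object.assign performs a shallow merge: when this._requestParams supplies its own headers object (as the removed Kerberos path did with its Authorization header), that object replaces the base headers wholesale and the inline content-type is lost. The change being reverted therefore set content-type after the merge; once no caller-supplied headers remain, the restored inline form is sufficient. A small plain-TypeScript illustration of that shallow-merge behaviour (not code from the extension):

// Shallow merge: a later headers object replaces the earlier one entirely.
const base = { method: 'PUT', headers: { 'content-type': 'application/octet-stream' } };
const callerParams = { headers: { Authorization: 'Negotiate <token>' } }; // e.g. the removed Kerberos header

const clobbered = Object.assign({}, base, callerParams);
console.log(clobbered.headers); // { Authorization: ... } -- content-type was lost

// Setting the header after the merge (what the reverted change did) keeps both.
const merged: any = Object.assign({}, base, callerParams);
merged.headers = merged.headers || {};
merged.headers['content-type'] = 'application/octet-stream';
console.log(merged.headers); // { Authorization: ..., 'content-type': ... }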