Initial AD support for BDCs (#6741)

Partially working Active Directory (AD) support for Big Data Clusters (BDCs), with some known issues:
- Plumbed Kerberos support through to Notebooks.
  - Temporarily using "gateway-0" for the service, as the service endpoints API doesn't yet return the correct DNS name. Will update in a separate PR once it's available.
- Plumbed Kerberos auth through to HDFS and Spark (see the sketch after this list). Only partially working, as we reuse the same token on every call.
  - Will fix in a separate PR, since this requires a refactor of the WebHDFS library: we'll need to either fetch a new token on each call or set a cookie, both of which require refactoring.
- Fixed an error where a failed Data Service node expansion blocked all Object Explorer (OE) expansion.
- Support for the SqlToolsService change to use the new cluster endpoints DMV:
  - Updated the API to add a new endpoints field that replaces IP + port.
  - Added logic to handle the case where endpoints for Yarn, Grafana, etc. are in the list.
  - Sorted the list and used the expected new localized strings.

- Updated SqlToolsService to include support for the new DMV.
- Added "gateway-0" handling in the Jupyter session as a workaround for the lack of domain names in the endpoints list.
Kevin Cunnane, 2019-08-14 18:09:41 -07:00 (committed via GitHub)
commit 52f8984a99 (parent 4e8c06f36d)
31 changed files with 639 additions and 189 deletions


@@ -147,7 +147,7 @@ export class SparkJobSubmissionModel {
                 return Promise.reject(LocalizedConstants.sparkJobSubmissionLocalFileNotExisted(localFilePath));
             }

-            let fileSource: IFileSource = this._sqlClusterConnection.createHdfsFileSource();
+            let fileSource: IFileSource = await this._sqlClusterConnection.createHdfsFileSource();
             await fileSource.writeFile(new File(localFilePath, false), hdfsFolderPath);
         } catch (error) {
             return Promise.reject(error);
@@ -160,7 +160,7 @@ export class SparkJobSubmissionModel {
                 return Promise.reject(localize('sparkJobSubmission_PathNotSpecified.', 'Property Path is not specified. '));
             }

-            let fileSource: IFileSource = this._sqlClusterConnection.createHdfsFileSource();
+            let fileSource: IFileSource = await this._sqlClusterConnection.createHdfsFileSource();
             return await fileSource.exists(path);
         } catch (error) {
             return Promise.reject(error);


@@ -11,6 +11,7 @@ const localize = nls.loadMessageBundle();
 import * as constants from '../../../constants';
 import { SqlClusterConnection } from '../../../objectExplorerNodeProvider/connection';
 import * as utils from '../../../utils';
+import * as auth from '../../../util/auth';

 export class SparkJobSubmissionService {
     private _requestPromise: (args: any) => any;
@@ -28,6 +29,10 @@ export class SparkJobSubmissionService {
     public async submitBatchJob(submissionArgs: SparkJobSubmissionInput): Promise<string> {
         try {
             let livyUrl: string = `https://${submissionArgs.host}:${submissionArgs.port}${submissionArgs.livyPath}/`;
+
+            // Get correct authentication headers
+            let headers = await this.getAuthenticationHeaders(submissionArgs);
+
             let options = {
                 uri: livyUrl,
                 method: 'POST',
@@ -41,9 +46,7 @@
                     name: submissionArgs.jobName
                 },
                 // authentication headers
-                headers: {
-                    'Authorization': 'Basic ' + Buffer.from(submissionArgs.user + ':' + submissionArgs.password).toString('base64')
-                }
+                headers: headers
             };

             // Set arguments
@@ -90,18 +93,30 @@
         }
     }

+    private async getAuthenticationHeaders(submissionArgs: SparkJobSubmissionInput) {
+        let headers = {};
+        if (submissionArgs.isIntegratedAuth) {
+            let kerberosToken = await auth.authenticateKerberos(submissionArgs.host);
+            headers = { Authorization: `Negotiate ${kerberosToken}` };
+        }
+        else {
+            headers = { Authorization: 'Basic ' + Buffer.from(submissionArgs.user + ':' + submissionArgs.password).toString('base64') };
+        }
+        return headers;
+    }
+
     public async getYarnAppId(submissionArgs: SparkJobSubmissionInput, livyBatchId: string): Promise<LivyLogResponse> {
         try {
             let livyUrl = `https://${submissionArgs.host}:${submissionArgs.port}${submissionArgs.livyPath}/${livyBatchId}/log`;
+            let headers = await this.getAuthenticationHeaders(submissionArgs);
             let options = {
                 uri: livyUrl,
                 method: 'GET',
                 json: true,
                 rejectUnauthorized: false,
                 // authentication headers
-                headers: {
-                    'Authorization': 'Basic ' + Buffer.from(submissionArgs.user + ':' + submissionArgs.password).toString('base64')
-                }
+                headers: headers
             };

             const response = await this._requestPromise(options);
@@ -145,7 +160,8 @@ export class SparkJobSubmissionInput {
         this._port = sqlClusterConnection.port;
         this._livyPath = constants.mssqlClusterLivySubmitPath;
         this._user = sqlClusterConnection.user;
-        this._passWord = sqlClusterConnection.password;
+        this._password = sqlClusterConnection.password;
+        this._isIntegratedAuth = sqlClusterConnection.isIntegratedAuth();
     }

     constructor(
@@ -160,7 +176,8 @@
         private _port?: number,
         private _livyPath?: string,
         private _user?: string,
-        private _passWord?: string) {
+        private _password?: string,
+        private _isIntegratedAuth?: boolean) {
     }

     public get jobName(): string { return this._jobName; }
@@ -174,7 +191,8 @@
     public get port(): number { return this._port; }
     public get livyPath(): string { return this._livyPath; }
     public get user(): string { return this._user; }
-    public get password(): string { return this._passWord; }
+    public get password(): string { return this._password; }
+    public get isIntegratedAuth(): boolean { return this._isIntegratedAuth; }
 }

 export enum SparkFileSource {


@@ -5,12 +5,12 @@

 import * as childProcess from 'child_process';
 import * as fs from 'fs-extra';
+import * as nls from 'vscode-nls';
 import * as path from 'path';
 import * as azdata from 'azdata';
 import * as vscode from 'vscode';
 import * as which from 'which';
+import * as constants from '../constants';
-import * as nls from 'vscode-nls';

 const localize = nls.loadMessageBundle();

 export function getDropdownValue(dropdownValue: string | azdata.CategoryValue): string {
@@ -23,8 +23,8 @@ export function getDropdownValue(dropdownValue: string | azdata.CategoryValue):

 export function getServerAddressFromName(connection: azdata.ConnectionInfo | string): string {
     // Strip TDS port number from the server URI
-    if ((<azdata.ConnectionInfo>connection).options && (<azdata.ConnectionInfo>connection).options['host']) {
-        return (<azdata.ConnectionInfo>connection).options['host'].split(',')[0].split(':')[0];
+    if ((<azdata.ConnectionInfo>connection).options && (<azdata.ConnectionInfo>connection).options[constants.hostPropName]) {
+        return (<azdata.ConnectionInfo>connection).options[constants.hostPropName].split(',')[0].split(':')[0];
     } else if ((<azdata.ConnectionInfo>connection).options && (<azdata.ConnectionInfo>connection).options['server']) {
         return (<azdata.ConnectionInfo>connection).options['server'].split(',')[0].split(':')[0];
     } else {