Initial AD support for BDCs (#6741)
Partially working AD support for BDCs, with some known issues:

- Plumbed Kerberos support through to Notebooks.
- Using "gateway-0" for the service temporarily, as the service endpoints API doesn't yet return the correct DNS name. Will update in a separate PR once it's available.
- Plumbed Kerberos auth to HDFS and Spark. Only partially working, as we use the same token on each call. Will fix in a separate PR, since it requires a refactor of the WebHDFS library: we will need to either get a new token on every call or set a cookie, both of which require refactors.
- Fixed an error where a failed Data Service node expansion blocked all Object Explorer (OE) expansion.
- Support for the SqlToolsService change to use the new cluster endpoints DMV (see the sketch after this list):
  - Updated the API to add a new endpoints field that replaces IP + port.
  - Added logic to handle the case where endpoints for Yarn, Grafana, etc. are in the list.
  - Sorted the list and used the expected new localized strings.
  - Updated SqlToolsService to include support for the new DMV.
- Added "gateway-0" handling in the Jupyter session as a workaround for the lack of domain names in the endpoints list.
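As a rough illustration of the endpoint-list handling described above, the sketch below maps endpoints returned by the new DMV (the gateway plus extras like Yarn and Grafana) to display names and sorts them. The IEndpointModel shape, the display-name map, and toSortedDisplayList are hypothetical names invented for this example, not the actual API.

    // Hypothetical sketch only: models how endpoints from the new DMV
    // could be given localized display names and sorted for listing.
    interface IEndpointModel {
        serviceName: string;  // e.g. 'gateway', 'yarn-ui', 'grafana'
        endpoint: string;     // DNS name, or IP:port until DNS names are returned
    }

    // Assumed display strings; unknown services fall back to their raw name.
    const displayNames = new Map<string, string>([
        ['gateway', 'HDFS and Spark Gateway'],
        ['yarn-ui', 'YARN Resource Management'],
        ['grafana', 'Metrics Dashboard']
    ]);

    function toSortedDisplayList(endpoints: IEndpointModel[]): { name: string, endpoint: string }[] {
        return endpoints
            .map(e => ({ name: displayNames.get(e.serviceName) || e.serviceName, endpoint: e.endpoint }))
            .sort((a, b) => a.name.localeCompare(b.name));
    }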
@@ -141,4 +141,4 @@ export function isEditorTitleFree(title: string): boolean {
     let hasTextDoc = vscode.workspace.textDocuments.findIndex(doc => doc.isUntitled && doc.fileName === title) > -1;
     let hasNotebookDoc = azdata.nb.notebookDocuments.findIndex(doc => doc.isUntitled && doc.fileName === title) > -1;
     return !hasTextDoc && !hasNotebookDoc;
 }
 }
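The hunk above checks both text documents and notebook documents before declaring a title free. A typical caller would probe candidate titles until one is unused; findFreeTitle below is a hypothetical usage sketch, not part of the commit.

    // Hypothetical usage of isEditorTitleFree: probe 'Notebook-0', 'Notebook-1', ...
    // until a title is unused by both text editors and notebook editors.
    function findFreeTitle(base: string): string {
        let i = 0;
        while (!isEditorTitleFree(`${base}-${i}`)) {
            i++;
        }
        return `${base}-${i}`;
    }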
@@ -55,10 +55,11 @@ const configBase = {

 const KNOX_ENDPOINT_SERVER = 'host';
 const KNOX_ENDPOINT_PORT = 'knoxport';
-const KNOX_ENDPOINT_KNOX = 'knox';
 const KNOX_ENDPOINT_GATEWAY = 'gateway';
 const SQL_PROVIDER = 'MSSQL';
 const USER = 'user';
+const AUTHTYPE = 'authenticationType';
+const INTEGRATED_AUTH = 'integrated';
 const DEFAULT_CLUSTER_USER_NAME = 'root';

 export class JupyterSessionManager implements nb.SessionManager {
@@ -242,13 +243,19 @@ export class JupyterSession implements nb.ISession {

         //Update server info with bigdata endpoint - Unified Connection
         if (connection.providerName === SQL_PROVIDER) {
-            let clusterEndpoint: utils.IEndpoint =
-                await this.getClusterEndpoint(connection.id, KNOX_ENDPOINT_KNOX) ||
-                await this.getClusterEndpoint(connection.id, KNOX_ENDPOINT_GATEWAY);
+            let clusterEndpoint: utils.IEndpoint = await this.getClusterEndpoint(connection.id, KNOX_ENDPOINT_GATEWAY);
             if (!clusterEndpoint) {
                 return Promise.reject(new Error(localize('connectionNotValid', "Spark kernels require a connection to a SQL Server big data cluster master instance.")));
             }
-            connection.options[KNOX_ENDPOINT_SERVER] = clusterEndpoint.ipAddress;
+            if (this.isIntegratedAuth(connection)) {
+                // Hack: for now, we need to use gateway-0 for integrated auth
+                let sqlDnsName: string = connection.options['server'].split(',')[0];
+                let parts = sqlDnsName.split('.');
+                parts[0] = 'gateway-0';
+                connection.options[KNOX_ENDPOINT_SERVER] = parts.join('.');
+            } else {
+                connection.options[KNOX_ENDPOINT_SERVER] = clusterEndpoint.ipAddress;
+            }
             connection.options[KNOX_ENDPOINT_PORT] = clusterEndpoint.port;
             connection.options[USER] = DEFAULT_CLUSTER_USER_NAME;
         }
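To make the gateway-0 hack above concrete, here is a worked example with a hypothetical DNS name (the `,port` suffix reflects the usual `host,port` format of the SQL Server `server` option):

    // 'master-0.contoso.local,31433' is a made-up server value for illustration.
    let sqlDnsName: string = 'master-0.contoso.local,31433'.split(',')[0]; // 'master-0.contoso.local'
    let parts = sqlDnsName.split('.');
    parts[0] = 'gateway-0';
    console.log(parts.join('.')); // 'gateway-0.contoso.local'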
@@ -259,8 +266,9 @@ export class JupyterSession implements nb.ISession {
             this.setHostAndPort(',', connection);

             let server = Uri.parse(utils.getLivyUrl(connection.options[KNOX_ENDPOINT_SERVER], connection.options[KNOX_ENDPOINT_PORT])).toString();
-            let doNotCallChangeEndpointParams =
-                `%_do_not_call_change_endpoint --username=${connection.options[USER]} --password=${connection.options['password']} --server=${server} --auth=Basic_Access`;
+            let doNotCallChangeEndpointParams = this.isIntegratedAuth(connection) ?
+                `%_do_not_call_change_endpoint --server=${server} --auth=Kerberos`
+                : `%_do_not_call_change_endpoint --username=${connection.options[USER]} --password=${connection.options['password']} --server=${server} --auth=Basic_Access`;
             let future = this.sessionImpl.kernel.requestExecute({
                 code: doNotCallChangeEndpointParams
             }, true);
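The ternary above chooses the sparkmagic endpoint arguments by auth type: with integrated (Kerberos) auth no username or password is sent, since the credential comes from the caller's Kerberos ticket. A hypothetical extraction of that choice into a helper, for illustration only:

    // Sketch only: mirrors the ternary in the hunk above. user/password are
    // expected only for the Basic_Access branch.
    function buildEndpointCommand(server: string, integratedAuth: boolean, user?: string, password?: string): string {
        return integratedAuth
            ? `%_do_not_call_change_endpoint --server=${server} --auth=Kerberos`
            : `%_do_not_call_change_endpoint --username=${user} --password=${password} --server=${server} --auth=Basic_Access`;
    }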
@@ -268,6 +276,10 @@ export class JupyterSession implements nb.ISession {
         }
     }

+    private isIntegratedAuth(connection: IConnectionProfile): boolean {
+        return connection.options[AUTHTYPE] && connection.options[AUTHTYPE].toLowerCase() === INTEGRATED_AUTH.toLowerCase();
+    }
+
     private isSparkKernel(kernelName: string): boolean {
         return kernelName && kernelName.toLowerCase().indexOf('spark') > -1;
     }
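Note that isIntegratedAuth above relies on short-circuiting: a missing authenticationType option yields a falsy result rather than a thrown error. A stricter, hypothetical equivalent that always returns a true boolean:

    // Hypothetical alternative, not in the commit: defaults a missing
    // authenticationType to '' so the comparison always returns a boolean.
    function isIntegratedAuthStrict(options: { [key: string]: string }): boolean {
        return (options['authenticationType'] || '').toLowerCase() === 'integrated';
    }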
@@ -330,4 +342,4 @@ interface ISparkMagicConfig {
                 }
             }
         };
     }
 }