Fix HDFS with AD auth for browse, read file scenarios (#6840)
* Fix HDFS with AD auth for browse, read
  - HDFS now fully supports expanding nodes for all levels, including using a cookie for auth
  - HDFS now supports reading files from HDFS
  - HDFS write file is broken and will be fixed (either in a PR update or a separate PR)
  - Removed the hack that used gateway-0 instead of the actual DNS name, now that DNS names are supported. Needed for testing
* Fix Jupyter error using new DMV with endpoints
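In outline, the change below combines three pieces: a Kerberos Negotiate handshake when the gateway answers 401, caching of the returned auth cookie (via tough-cookie) so later calls skip the handshake, and manual handling of WebHDFS 307 redirects. A minimal sketch of the first two, assuming a fetch-capable runtime (Node 18+); fetchWithAdAuth and getKerberosToken are illustrative names, not the extension's actual API:

import { Cookie } from 'tough-cookie';

let cachedCookie: Cookie | undefined;

// Sketch only: try the request with any cached cookie; on a 401, attach a
// Kerberos Negotiate token, retry, and cache the auth cookie the gateway
// returns so later calls skip the handshake until the cookie expires.
async function fetchWithAdAuth(url: string, getKerberosToken: () => Promise<string>): Promise<Response> {
	const headers: Record<string, string> = {};
	if (cachedCookie && cachedCookie.expiryTime() > Date.now()) {
		headers['cookie'] = `${cachedCookie.key}=${cachedCookie.value}`;
	}
	let response = await fetch(url, { headers });
	if (response.status === 401) {
		headers['Authorization'] = `Negotiate ${await getKerberosToken()}`;
		response = await fetch(url, { headers });
		const setCookie = response.headers.get('set-cookie');
		if (setCookie) {
			cachedCookie = Cookie.parse(setCookie);
		}
	}
	return response;
}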
@@ -18,24 +18,6 @@
 		"compile": "gulp compile-extension:mssql-client",
 		"update-grammar": "node ../../build/npm/update-grammar.js Microsoft/vscode-mssql syntaxes/SQL.plist ./syntaxes/sql.tmLanguage.json"
 	},
-	"dependencies": {
-		"buffer-stream-reader": "^0.1.1",
-		"bytes": "^3.1.0",
-		"dataprotocol-client": "github:Microsoft/sqlops-dataprotocolclient#1.0.0",
-		"error-ex": "^1.3.2",
-		"figures": "^2.0.0",
-		"find-remove": "1.2.1",
-		"fs-extra": "^3.0.1",
-		"kerberos": "^1.1.2",
-		"request": "^2.88.0",
-		"request-promise": "^4.2.2",
-		"service-downloader": "github:anthonydresser/service-downloader#0.1.6",
-		"stream-meter": "^1.0.4",
-		"uri-js": "^4.2.2",
-		"vscode-extension-telemetry": "0.1.0",
-		"vscode-languageclient": "5.2.1",
-		"vscode-nls": "^4.0.0"
-	},
 	"contributes": {
 		"commands": [
 			{
@@ -950,7 +932,29 @@
 			]
 		}
 	},
+	"dependencies": {
+		"buffer-stream-reader": "^0.1.1",
+		"bytes": "^3.1.0",
+		"dataprotocol-client": "github:Microsoft/sqlops-dataprotocolclient#1.0.0",
+		"error-ex": "^1.3.2",
+		"figures": "^2.0.0",
+		"find-remove": "1.2.1",
+		"fs-extra": "^3.0.1",
+		"kerberos": "^1.1.2",
+		"request": "^2.88.0",
+		"request-promise": "^4.2.2",
+		"service-downloader": "github:anthonydresser/service-downloader#0.1.6",
+		"stream-meter": "^1.0.4",
+		"through2": "^3.0.1",
+		"tough-cookie": "^3.0.1",
+		"uri-js": "^4.2.2",
+		"vscode-extension-telemetry": "0.1.0",
+		"vscode-languageclient": "5.2.1",
+		"vscode-nls": "^4.0.0"
+	},
 	"devDependencies": {
-		"@types/kerberos": "^1.1.0"
+		"@types/kerberos": "^1.1.0",
+		"@types/request": "^2.48.2",
+		"@types/through2": "^2.0.34"
 	}
 }
@@ -16,7 +16,6 @@ import * as nls from 'vscode-nls';
 
 import * as constants from '../constants';
 import { WebHDFS, HdfsError } from './webhdfs';
-import * as auth from '../util/auth';
 
 const localize = nls.loadMessageBundle();

@@ -124,10 +123,6 @@ export class FileSourceFactory
 			requestParams['agent'] = agent;
 		}
-		if (requestParams.isKerberos) {
-			let kerberosToken = await auth.authenticateKerberos(options.host);
-			requestParams.headers = { Authorization: `Negotiate ${kerberosToken}` };
-		}
 		return new HdfsFileSource(WebHDFS.createClient(options, requestParams));
 	}
@@ -6,18 +6,20 @@ import * as fs from 'fs';
 import * as querystring from 'querystring';
 import * as request from 'request';
 import * as BufferStreamReader from 'buffer-stream-reader';
+import { Cookie } from 'tough-cookie';
+import * as through from 'through2';
 import * as nls from 'vscode-nls';
+import * as auth from '../util/auth';
 import { IHdfsOptions, IRequestParams } from './fileSources';
 
 const localize = nls.loadMessageBundle();
-const ErrorMessageInvalidDataStructure =
-	localize('webhdfs.invalidDataStructure', 'Invalid Data Structure');
+const ErrorMessageInvalidDataStructure = localize('webhdfs.invalidDataStructure', "Invalid Data Structure");
 
 export class WebHDFS {
 	private _requestParams: IRequestParams;
 	private _opts: IHdfsOptions;
 	private _url: any;
 
+	private _authCookie: Cookie;
 	constructor(opts: IHdfsOptions, requestParams: IRequestParams) {
 		if (!(this instanceof WebHDFS)) {
 			return new WebHDFS(opts, requestParams);

@@ -27,7 +29,7 @@ export class WebHDFS {
 			.filter(p => !opts.hasOwnProperty(p) || !opts[p]);
 		if (missingProps && missingProps.length > 0) {
 			throw new Error(localize('webhdfs.missingProperties',
-				'Unable to create WebHDFS client due to missing options: ${0}', missingProps.join(', ')));
+				"Unable to create WebHDFS client due to missing options: ${0}", missingProps.join(', ')));
 		}
 
 		this._requestParams = requestParams || {};

@@ -44,7 +46,7 @@ export class WebHDFS {
 	private checkArgDefined(argName: string, argValue: any): void {
 		if (!argValue) {
-			throw new Error(localize('webhdfs.undefinedArgument', '\'${0}\' is undefined.', argName));
+			throw new Error(localize('webhdfs.undefinedArgument', "'${0}' is undefined.", argName));
 		}
 	}
@@ -75,11 +77,11 @@ export class WebHDFS {
 	private toStatusMessage(statusCode: number): string {
 		let statusMessage: string = undefined;
 		switch (statusCode) {
-			case 400: statusMessage = localize('webhdfs.httpError400', 'Bad Request'); break;
-			case 401: statusMessage = localize('webhdfs.httpError401', 'Unauthorized'); break;
-			case 403: statusMessage = localize('webhdfs.httpError403', 'Forbidden'); break;
-			case 404: statusMessage = localize('webhdfs.httpError404', 'Not Found'); break;
-			case 500: statusMessage = localize('webhdfs.httpError500', 'Internal Server Error'); break;
+			case 400: statusMessage = localize('webhdfs.httpError400', "Bad Request"); break;
+			case 401: statusMessage = localize('webhdfs.httpError401', "Unauthorized"); break;
+			case 403: statusMessage = localize('webhdfs.httpError403', "Forbidden"); break;
+			case 404: statusMessage = localize('webhdfs.httpError404', "Not Found"); break;
+			case 500: statusMessage = localize('webhdfs.httpError500', "Internal Server Error"); break;
 			// TODO: define more messages here
 			default: break;
 		}

@@ -136,7 +138,7 @@ export class WebHDFS {
 		return statusMessage && remoteExceptionMessage ?
 			`${statusMessage} (${remoteExceptionMessage})` :
 			statusMessage || remoteExceptionMessage || messageFromError ||
-			localize('webhdfs.unknownError', 'Unknown Error');
+			localize('webhdfs.unknownError', "Unknown Error");
 	}

 	/**
@@ -196,36 +198,94 @@ export class WebHDFS {
	 * @param opts Options for request
	 * @returns void
	 */
-	private sendRequest(method: string, url: string, opts: object,
-		callback: (error: HdfsError, response: request.Response) => void): void {
-
+	private sendRequest(method: string, urlValue: string, opts: object, callback: (error: HdfsError, response: request.Response) => void): void {
 		if (!callback) {
 			return;
 		}
 		let requestParams = Object.assign(
-			{ method: method, url: url, json: true },
+			{ method: method, url: urlValue, json: true },
 			this._requestParams,
 			opts || {}
 		);
+		this.ensureCookie(requestParams);
+		// Add a wrapper to handle unauthorized requests by adding kerberos auth steps
+		let handler = (error, response) => {
+			if (error && error.statusCode === 401 && this._requestParams.isKerberos) {
+				this.requestWithKerberosSync(requestParams, callback);
+			} else {
+				callback(error, response);
+			}
+		};
+		this.doSendRequest(requestParams, handler);
+	}
+
+	private ensureCookie(requestParams: { headers?: {} }) {
+		if (this._authCookie && this._authCookie.expiryTime() > Date.now()) {
+			requestParams.headers = requestParams.headers || {};
+			requestParams.headers['cookie'] = `${this._authCookie.key}=${this._authCookie.value}`;
+		}
+	}
+
+	private doSendRequest(requestParams: any, callback: (error: HdfsError, response: any) => void): void {
 		request(requestParams, (error, response, body) => {
 			if (!callback) { return; }
 			if (error || this.isError(response)) {
 				let hdfsError = this.parseError(response, body, error);
 				callback(hdfsError, response);
-			} else if (this.isSuccess(response)) {
+			}
+			else if (this.isSuccess(response)) {
 				callback(undefined, response);
-			} else {
-				let hdfsError = new HdfsError(
-					localize('webhdfs.unexpectedRedirect', 'Unexpected Redirect'),
-					response && response.statusCode,
-					response && response.statusMessage,
-					this.getRemoteExceptionMessage(body || response.body),
-					error
-				);
+			}
+			else {
+				let hdfsError = new HdfsError(localize('webhdfs.unexpectedRedirect', "Unexpected Redirect"), response && response.statusCode, response && response.statusMessage, this.getRemoteExceptionMessage(body || response.body), error);
 				callback(hdfsError, response);
 			}
 		});
	}
+
+	/**
+	 * Authenticates using kerberos as part of a request, and saves cookie if successful.
+	 * Ideally would use request's built-in cookie functionality but this isn't working with non-public domains.
+	 * Instead, save the cookie in this module and reuse if not expired
+	 */
+	private requestWithKerberosSync(requestParams: any, callback: (error: HdfsError, response: request.Response) => void) {
+		this.setKerberosAuthOnParams(requestParams).then(() => {
+			this.doSendRequest(requestParams, (error, response) => {
+				if (error) {
+					// Pass on the callback
+					callback(error, response);
+				}
+				else {
+					// Capture cookie for future requests
+					this.setAuthCookie(response);
+					callback(error, response);
+				}
+			});
+		}).catch((err) => {
+			callback(err, undefined);
+		});
+	}
+
+	private async setKerberosAuthOnParams(requestParams: any): Promise<void> {
+		let kerberosToken = await auth.authenticateKerberos(this._opts.host);
+		requestParams.headers = { Authorization: `Negotiate ${kerberosToken}` };
+		return requestParams;
+	}
+
+	private setAuthCookie(response: request.Response) {
+		try {
+			if (response && response.headers && response.headers['set-cookie']) {
+				let cookies: Cookie[];
+				if (response.headers['set-cookie'] instanceof Array) {
+					cookies = response.headers['set-cookie'].map(c => Cookie.parse(c));
+				}
+				else {
+					cookies = [Cookie.parse(response.headers['set-cookie'])];
+				}
+				this._authCookie = cookies[0];
+			}
+		} catch { }
+	}
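For reference on the ensureCookie/setAuthCookie pair above: tough-cookie's expiryTime() returns a millisecond timestamp (and Infinity for session cookies with no Expires/Max-Age), so the `expiryTime() > Date.now()` check treats session cookies as always reusable. A small illustration with a made-up hadoop.auth cookie string; real gateways return a signed value:

import { Cookie } from 'tough-cookie';

// Illustrative cookie string, not a real gateway response.
const c = Cookie.parse('hadoop.auth="u=root&t=kerberos"; Expires=Wed, 21 Oct 2026 07:28:00 GMT; HttpOnly');
if (c) {
	// Serialize the same way ensureCookie does: key=value, no attributes.
	const header = `${c.key}=${c.value}`;
	const stillValid = c.expiryTime() > Date.now();
	console.log(header, stillValid);
}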
 	/**
 	 * Change file permissions
 	 * @returns void

@@ -457,17 +517,6 @@ export class WebHDFS {
 	public createWriteStream(path: string, append?: boolean, opts?: object): fs.WriteStream {
 		this.checkArgDefined('path', path);
 
-		let emitError = (instance, err) => {
-			const isErrorEmitted = instance.errorEmitted;
-
-			if (!isErrorEmitted) {
-				instance.emit('error', err);
-				instance.emit('finish');
-			}
-
-			instance.errorEmitted = true;
-		};
-
 		let endpoint = this.getOperationEndpoint(
 			append ? 'append' : 'create',
 			path,

@@ -480,8 +529,6 @@ export class WebHDFS {
 			)
 		);
 
-		let stream = undefined;
-		let canResume: boolean = true;
 		let params: any = Object.assign(
 			{
 				method: append ? 'POST' : 'PUT',
@@ -493,43 +540,68 @@ export class WebHDFS {
 		params.headers = params.headers || {};
 		params.headers['content-type'] = 'application/octet-stream';
 
+		if (!this._requestParams.isKerberos) {
+			return this.doCreateWriteStream(params);
+		}
+		// Else, must add kerberos token and handle redirects
+		params.followRedirect = false;
+		let replyStream = through();
+		let handleErr = (err) => {
+			replyStream.emit('error', err);
+			replyStream.end();
+		};
+		let initRedirectedStream = () => {
+			let redirectedStream = this.doCreateWriteStream(params);
+			replyStream.pipe(redirectedStream);
+		};
+		this.requestWithRedirectAndAuth(params, initRedirectedStream, handleErr);
+		return <fs.WriteStream><any>replyStream;
+	}
+
+	private doCreateWriteStream(params: any): fs.WriteStream {
+		let emitError = (instance, err) => {
+			const isErrorEmitted = instance.errorEmitted;
+
+			if (!isErrorEmitted) {
+				instance.emit('error', err);
+				instance.emit('finish');
+			}
+
+			instance.errorEmitted = true;
+		};
+		let canResume: boolean = true;
+		let stream = undefined;
 		let req = request(params, (error, response, body) => {
 			// Handle redirect only if there was not an error (e.g. res is defined)
 			if (response && this.isRedirect(response)) {
-				let upload = request(
-					Object.assign(params, { url: response.headers.location }),
-					(err, res, bo) => {
-						if (err || this.isError(res)) {
-							emitError(req, this.parseError(res, bo, err));
-							req.end();
-						} else if (res.headers.hasOwnProperty('location')) {
-							req.emit('finish', res.headers.location);
-						} else {
-							req.emit('finish');
-						}
-					}
-				);
+				let upload = request(Object.assign(params, { url: response.headers.location }), (err, res, bo) => {
+					if (err || this.isError(res)) {
+						emitError(req, this.parseError(res, bo, err));
+						req.end();
+					}
+					else if (res.headers.hasOwnProperty('location')) {
+						req.emit('finish', res.headers.location);
+					}
+					else {
+						req.emit('finish');
+					}
+				});
 				canResume = true; // Enable resume
 				stream.pipe(upload);
 				stream.resume();
 			}
 
 			if (error && !response) {
 				// request failed, and req is not accessible in this case.
 				throw this.parseError(undefined, undefined, error);
 			}
 
 			if (error || this.isError(response)) {
 				emitError(req, this.parseError(response, body, error));
 			}
 		});
 
 		req.on('pipe', (src) => {
 			// Pause read stream
 			stream = src;
 			stream.pause();
 
 			// This is not an elegant solution but here we go
 			// Basically we don't allow pipe() method to resume reading input
 			// and set internal _readableState.flowing to false

@@ -539,13 +611,11 @@ export class WebHDFS {
 					stream._readableState.flowing = false;
 				}
 			});
 
 			// Unpipe initial request
 			src.unpipe(req);
 			req.end();
 		});
 
-		return <fs.WriteStream><any>req;
+		return <fs.WriteStream><any> req;
 	}
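The createWriteStream change relies on a placeholder-stream trick: the caller gets a through2 stream back synchronously and can start writing, while the Kerberos handshake and redirect resolve in the background; once the real upload stream exists, the placeholder is piped into it. A stripped-down sketch of that pattern (deferredWriteStream and createReal are hypothetical names, not part of the extension):

import * as through from 'through2';

// Hand back a stream synchronously; connect it to the real destination once
// async setup completes. Data written in the meantime sits in the through
// stream's internal buffer until pipe() drains it.
function deferredWriteStream(createReal: () => Promise<NodeJS.WritableStream>): NodeJS.ReadWriteStream {
	const reply = through();
	createReal()
		.then(real => { reply.pipe(real); })
		.catch(err => {
			reply.emit('error', err);
			reply.end();
		});
	return reply;
}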
 	/**

@@ -575,7 +645,7 @@ export class WebHDFS {
 		this.checkArgDefined('path', path);
 
 		let endpoint = this.getOperationEndpoint('open', path, opts);
-		let params = Object.assign(
+		let params: request.OptionsWithUrl = Object.assign(
 			{
 				method: 'GET',
 				url: endpoint,

@@ -583,13 +653,43 @@ export class WebHDFS {
 			},
 			this._requestParams
 		);
+		if (!this._requestParams.isKerberos) {
+			return <fs.ReadStream><any>this.doCreateReadStream(params);
+		}
+		// Else, must add kerberos token and handle redirects
+		params.followRedirect = false;
+		let replyStream = through();
+		let handleErr = (err) => {
+			replyStream.emit('error', err);
+			replyStream.end();
+		};
+		let initRedirectedStream = () => {
+			let redirectedStream = this.doCreateReadStream(params);
+			redirectedStream.pipe(replyStream);
+		};
+		this.requestWithRedirectAndAuth(params, initRedirectedStream, handleErr);
+
+		return <fs.ReadStream><any>replyStream;
+	}
+
+	private requestWithRedirectAndAuth(params: request.OptionsWithUrl, onRedirected: () => void, handleErr: (err: any) => void) {
+		this.requestWithKerberosSync(params, (err, response: request.Response) => {
+			if (err && err.statusCode === 307 && response.headers['location']) {
+				// It's a redirect
+				params.url = response.headers['location'];
+				this.setKerberosAuthOnParams(params)
+					.then(onRedirected)
+					.catch(handleErr);
+			}
+		});
+	}
+
+	private doCreateReadStream(params: request.OptionsWithUrl): fs.ReadStream {
 		let req: request.Request = request(params);
 
+		req.on('complete', (response) => {
+			req.emit('finish');
+		});
+
 		req.on('response', (response) => {
 			// Handle remote exceptions
 			// Remove all data handlers and parse error data

@@ -599,19 +699,17 @@ export class WebHDFS {
 					req.emit('error', this.parseError(response, data.toString()));
 					req.end();
 				});
-			} else if (this.isRedirect(response)) {
+			}
+			else if (this.isRedirect(response)) {
 				let download = request(params);
 
+				download.on('complete', (response) => {
+					req.emit('finish');
+				});
+
 				// Proxy data to original data handler
 				// Not the nicest way but hey
 				download.on('data', (dataChunk) => {
 					req.emit('data', dataChunk);
 				});
 
 				// Handle subrequest
 				download.on('response', (response) => {
 					if (this.isError(response)) {

@@ -623,11 +721,9 @@ export class WebHDFS {
 					}
 				});
 			}
 
 			// No need to interrupt the request
 			// data will be automatically sent to the data handler
 		});
 
 		return <fs.ReadStream><any>req;
 	}
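For context on requestWithRedirectAndAuth: WebHDFS OPEN (and CREATE) is a two-step protocol where the namenode or gateway replies 307 with a Location header pointing at the node that actually serves the data. With Kerberos the Negotiate header has to be recomputed for that second request, which is why the code above disables followRedirect and re-authenticates by hand. A rough sketch of the same flow, assuming Node 18+ fetch; openHdfsFile, freshAuthHeader, and the URL shape are illustrative:

// Two-step WebHDFS OPEN under Kerberos, with manual redirect handling.
async function openHdfsFile(gatewayUrl: string, path: string, freshAuthHeader: () => Promise<string>): Promise<Response> {
	const url = `${gatewayUrl}/webhdfs/v1${path}?op=OPEN`;
	// redirect: 'manual' mirrors params.followRedirect = false above.
	const first = await fetch(url, { headers: { Authorization: await freshAuthHeader() }, redirect: 'manual' });
	const location = first.headers.get('location');
	if (first.status === 307 && location) {
		// Re-authenticate against the redirect target before following it.
		return fetch(location, { headers: { Authorization: await freshAuthHeader() } });
	}
	return first;
}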
@@ -98,12 +98,6 @@ async function createSqlClusterConnInfo(sqlConnInfo: azdata.IConnectionProfile |
 	if (authType && authType.toLowerCase() !== constants.integratedAuth) {
 		clusterConnInfo.options[constants.userPropName] = 'root'; //should be the same user as sql master
 		clusterConnInfo.options[constants.passwordPropName] = credentials.password;
-	} else {
-		// Hack: for now, we need to use gateway-0 for integrated auth
-		let sqlDnsName: string = sqlConnInfo.options['server'].split(',')[0];
-		let parts = sqlDnsName.split('.');
-		parts[0] = 'gateway-0';
-		clusterConnInfo.options[constants.hostPropName] = parts.join('.');
 	}
 	clusterConnInfo = connToConnectionParam(clusterConnInfo);
@@ -2,11 +2,43 @@
 # yarn lockfile v1
 
 
+"@types/caseless@*":
+  version "0.12.2"
+  resolved "https://registry.yarnpkg.com/@types/caseless/-/caseless-0.12.2.tgz#f65d3d6389e01eeb458bd54dc8f52b95a9463bc8"
+  integrity sha512-6ckxMjBBD8URvjB6J3NcnuAn5Pkl7t3TizAg+xdlzzQGSPSmBcXf8KoIH0ua/i+tio+ZRUHEXp0HEmvaR4kt0w==
+
 "@types/kerberos@^1.1.0":
   version "1.1.0"
   resolved "https://registry.yarnpkg.com/@types/kerberos/-/kerberos-1.1.0.tgz#fb1e5bc4f7272d152f67714deb100d5de7cb3e48"
   integrity sha512-ixpV6PSSMnIVpMNCLQ0gWguC2+pBxc0LeUCv9Ugj54opVSVFXfPNYP6sMa7UHvicYGDXAyHQSAzQC8VYEIgdFQ==
 
+"@types/node@*":
+  version "12.7.2"
+  resolved "https://registry.yarnpkg.com/@types/node/-/node-12.7.2.tgz#c4e63af5e8823ce9cc3f0b34f7b998c2171f0c44"
+  integrity sha512-dyYO+f6ihZEtNPDcWNR1fkoTDf3zAK3lAABDze3mz6POyIercH0lEUawUFXlG8xaQZmm1yEBON/4TsYv/laDYg==
+
+"@types/request@^2.48.2":
+  version "2.48.2"
+  resolved "https://registry.yarnpkg.com/@types/request/-/request-2.48.2.tgz#936374cbe1179d7ed529fc02543deb4597450fed"
+  integrity sha512-gP+PSFXAXMrd5PcD7SqHeUjdGshAI8vKQ3+AvpQr3ht9iQea+59LOKvKITcQI+Lg+1EIkDP6AFSBUJPWG8GDyA==
+  dependencies:
+    "@types/caseless" "*"
+    "@types/node" "*"
+    "@types/tough-cookie" "*"
+    form-data "^2.5.0"
+
+"@types/through2@^2.0.34":
+  version "2.0.34"
+  resolved "https://registry.yarnpkg.com/@types/through2/-/through2-2.0.34.tgz#9c2a259a238dace2a05a2f8e94b786961bc27ac4"
+  integrity sha512-nhRG8+RuG/L+0fAZBQYaRflXKjTrHOKH8MFTChnf+dNVMxA3wHYYrfj0tztK0W51ABXjGfRCDc0vRkecCOrsow==
+  dependencies:
+    "@types/node" "*"
+
+"@types/tough-cookie@*":
+  version "2.3.5"
+  resolved "https://registry.yarnpkg.com/@types/tough-cookie/-/tough-cookie-2.3.5.tgz#9da44ed75571999b65c37b60c9b2b88db54c585d"
+  integrity sha512-SCcK7mvGi3+ZNz833RRjFIxrn4gI1PPR3NtuIS+6vMkvmsGjosqTJwRt5bAEFLRz+wtJMWv8+uOnZf2hi2QXTg==
+
 agent-base@4, agent-base@^4.1.0:
   version "4.2.1"
   resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-4.2.1.tgz#d89e5999f797875674c07d87f260fc41e83e8ca9"

@@ -458,6 +490,15 @@ forever-agent@~0.6.1:
   resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91"
   integrity sha1-+8cfDEGt6zf5bFd60e1C2P2sypE=
 
+form-data@^2.5.0:
+  version "2.5.0"
+  resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.5.0.tgz#094ec359dc4b55e7d62e0db4acd76e89fe874d37"
+  integrity sha512-WXieX3G/8side6VIqx44ablyULoGruSde5PNTxoUyo5CeyAMX6nVWUd0rgist/EuX655cjhUhTo1Fo3tRYqbcA==
+  dependencies:
+    asynckit "^0.4.0"
+    combined-stream "^1.0.6"
+    mime-types "^2.1.12"
+
 form-data@~2.3.2:
   version "2.3.3"
   resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.3.3.tgz#dcce52c05f644f298c6a7ab936bd724ceffbf3a6"

@@ -603,6 +644,11 @@ inherits@2, inherits@~2.0.3:
   resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de"
   integrity sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=
 
+inherits@^2.0.3:
+  version "2.0.4"
+  resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c"
+  integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==
+
 ini@~1.3.0:
   version "1.3.5"
   resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.5.tgz#eee25f56db1c9ec6085e0c22778083f596abf927"

@@ -938,6 +984,15 @@ rc@^1.2.7:
     minimist "^1.2.0"
     strip-json-comments "~2.0.1"
 
+"readable-stream@2 || 3":
+  version "3.4.0"
+  resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.4.0.tgz#a51c26754658e0a3c21dbf59163bd45ba6f447fc"
+  integrity sha512-jItXPLmrSR8jmTRmRWJXCnGJsfy85mB3Wd/uINMXA65yrnFo0cPClFIUWzo2najVNSl+mx7/4W8ttlLWJe99pQ==
+  dependencies:
+    inherits "^2.0.3"
+    string_decoder "^1.1.1"
+    util-deprecate "^1.0.1"
+
 readable-stream@^2.0.6, readable-stream@^2.1.4, readable-stream@^2.3.0, readable-stream@^2.3.5:
   version "2.3.6"
   resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.6.tgz#b11c27d88b8ff1fbe070643cf94b0c79ae1b0aaf"

@@ -1006,6 +1061,11 @@ safe-buffer@^5.0.1, safe-buffer@^5.1.1, safe-buffer@^5.1.2, safe-buffer@~5.1.0,
   resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d"
   integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==
 
+safe-buffer@~5.2.0:
+  version "5.2.0"
+  resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.0.tgz#b74daec49b1148f88c64b68d49b1e815c1f2f519"
+  integrity sha512-fZEwUGbVl7kouZs1jCdMLdt95hdIv0ZeHg6L7qPeciMZhZ+/gdesW4wgTARkrFWEpspjEATAzUGPG8N2jJiwbg==
+
 safer-buffer@^2.0.2, safer-buffer@^2.1.0, safer-buffer@~2.1.0:
   version "2.1.2"
   resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a"

@@ -1112,6 +1172,13 @@ string-width@^1.0.1:
     is-fullwidth-code-point "^2.0.0"
     strip-ansi "^4.0.0"
 
+string_decoder@^1.1.1:
+  version "1.3.0"
+  resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e"
+  integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==
+  dependencies:
+    safe-buffer "~5.2.0"
+
 string_decoder@~1.1.1:
   version "1.1.1"
   resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8"

@@ -1168,6 +1235,13 @@ tar-stream@^1.1.2, tar-stream@^1.5.2:
     to-buffer "^1.1.1"
     xtend "^4.0.0"
 
+through2@^3.0.1:
+  version "3.0.1"
+  resolved "https://registry.yarnpkg.com/through2/-/through2-3.0.1.tgz#39276e713c3302edf9e388dd9c812dd3b825bd5a"
+  integrity sha512-M96dvTalPT3YbYLaKaCuwu+j06D/8Jfib0o/PxbVt6Amhv3dUAtW6rTV1jPgJSBG83I/e04Y6xkVdVhSRhi0ww==
+  dependencies:
+    readable-stream "2 || 3"
+
 through@^2.3.8:
   version "2.3.8"
   resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5"

@@ -1185,7 +1259,7 @@ to-buffer@^1.1.1:
   resolved "https://registry.yarnpkg.com/to-buffer/-/to-buffer-1.1.1.tgz#493bd48f62d7c43fcded313a03dcadb2e1213a80"
   integrity sha512-lx9B5iv7msuFYE3dytT+KE5tap+rNYw+K4jVkb9R/asAb+pbBSM17jtunHplhBe6RRJdZx3Pn2Jph24O32mOVg==
 
-tough-cookie@>=2.3.3:
+tough-cookie@>=2.3.3, tough-cookie@^3.0.1:
   version "3.0.1"
   resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-3.0.1.tgz#9df4f57e739c26930a018184887f4adb7dca73b2"
   integrity sha512-yQyJ0u4pZsv9D4clxO69OEjLWYw+jbgspjTue4lTQZLfV0c5l1VmK2y1JK8E9ahdpltPOaAThPcp5nKPUgSnsg==

@@ -1234,7 +1308,7 @@ uri-js@^4.2.2:
   dependencies:
     punycode "^2.1.0"
 
-util-deprecate@~1.0.1:
+util-deprecate@^1.0.1, util-deprecate@~1.0.1:
   version "1.0.2"
   resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf"
   integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=
@@ -94,10 +94,20 @@ export enum Platform {
 	Others
 }
 
+interface RawEndpoint {
+	serviceName: string;
+	description?: string;
+	endpoint?: string;
+	protocol?: string;
+	ipAddress?: string;
+	port?: number;
+}
+
 export interface IEndpoint {
 	serviceName: string;
-	ipAddress: string;
-	port: number;
+	description: string;
+	endpoint: string;
+	protocol: string;
 }
 
 export function getOSPlatform(): Platform {

@@ -142,3 +152,39 @@ export function isEditorTitleFree(title: string): boolean {
 	let hasNotebookDoc = azdata.nb.notebookDocuments.findIndex(doc => doc.isUntitled && doc.fileName === title) > -1;
 	return !hasTextDoc && !hasNotebookDoc;
 }
+
+export function getClusterEndpoints(serverInfo: azdata.ServerInfo): IEndpoint[] | undefined {
+	let endpoints: RawEndpoint[] = serverInfo.options['clusterEndpoints'];
+	if (!endpoints || endpoints.length === 0) { return []; }
+
+	return endpoints.map(e => {
+		// If endpoint is missing, we're on CTP bits. All endpoints from the CTP serverInfo should be treated as HTTPS
+		let endpoint = e.endpoint ? e.endpoint : `https://${e.ipAddress}:${e.port}`;
+		let updatedEndpoint: IEndpoint = {
+			serviceName: e.serviceName,
+			description: e.description,
+			endpoint: endpoint,
+			protocol: e.protocol
+		};
+		return updatedEndpoint;
+	});
+}
+
+
+export type HostAndIp = { host: string, port: string };
+
+export function getHostAndPortFromEndpoint(endpoint: string): HostAndIp {
+	let authority = vscode.Uri.parse(endpoint).authority;
+	let hostAndPortRegex = /^(.*)([,:](\d+))/g;
+	let match = hostAndPortRegex.exec(authority);
+	if (match) {
+		return {
+			host: match[1],
+			port: match[3]
+		};
+	}
+	return {
+		host: authority,
+		port: undefined
+	};
+}
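A quick standalone check of the host/port regex used above (no vscode dependency; the authority strings stand in for what vscode.Uri.parse would return, and the hostnames are made up). Note the [,:] separator also accepts the host,port form that SQL Server connection strings use:

const hostAndPortRegex = /^(.*)([,:](\d+))/;

for (const authority of ['gateway.contoso.com:30443', 'gateway.contoso.com,30443', 'gateway.contoso.com']) {
	const match = hostAndPortRegex.exec(authority);
	console.log(match ? { host: match[1], port: match[3] } : { host: authority, port: undefined });
}
// -> { host: 'gateway.contoso.com', port: '30443' } (for both ':' and ',')
// -> { host: 'gateway.contoso.com', port: undefined }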
@@ -7,7 +7,7 @@ import { nb, ServerInfo, connection, IConnectionProfile } from 'azdata';
 import { Session, Kernel } from '@jupyterlab/services';
 import * as fs from 'fs-extra';
 import * as nls from 'vscode-nls';
-import { Uri } from 'vscode';
+import * as vscode from 'vscode';
 import * as path from 'path';
 import * as utils from '../common/utils';
 const localize = nls.loadMessageBundle();

@@ -247,16 +247,9 @@ export class JupyterSession implements nb.ISession {
 		if (!clusterEndpoint) {
 			return Promise.reject(new Error(localize('connectionNotValid', "Spark kernels require a connection to a SQL Server big data cluster master instance.")));
 		}
-		if (this.isIntegratedAuth(connection)) {
-			// Hack: for now, we need to use gateway-0 for integrated auth
-			let sqlDnsName: string = connection.options['server'].split(',')[0];
-			let parts = sqlDnsName.split('.');
-			parts[0] = 'gateway-0';
-			connection.options[KNOX_ENDPOINT_SERVER] = parts.join('.');
-		} else {
-			connection.options[KNOX_ENDPOINT_SERVER] = clusterEndpoint.ipAddress;
-		}
-		connection.options[KNOX_ENDPOINT_PORT] = clusterEndpoint.port;
+		let hostAndPort = utils.getHostAndPortFromEndpoint(clusterEndpoint.endpoint);
+		connection.options[KNOX_ENDPOINT_SERVER] = hostAndPort.host;
+		connection.options[KNOX_ENDPOINT_PORT] = hostAndPort.port;
 		connection.options[USER] = DEFAULT_CLUSTER_USER_NAME;
 	}
 	else {

@@ -265,7 +258,7 @@ export class JupyterSession implements nb.ISession {
 		this.setHostAndPort(':', connection);
 		this.setHostAndPort(',', connection);
 
-		let server = Uri.parse(utils.getLivyUrl(connection.options[KNOX_ENDPOINT_SERVER], connection.options[KNOX_ENDPOINT_PORT])).toString();
+		let server = vscode.Uri.parse(utils.getLivyUrl(connection.options[KNOX_ENDPOINT_SERVER], connection.options[KNOX_ENDPOINT_PORT])).toString();
 		let doNotCallChangeEndpointParams = this.isIntegratedAuth(connection) ?
 			`%_do_not_call_change_endpoint --server=${server} --auth=Kerberos`
 			: `%_do_not_call_change_endpoint --username=${connection.options[USER]} --password=${connection.options['password']} --server=${server} --auth=Basic_Access`;

@@ -316,7 +309,7 @@ export class JupyterSession implements nb.ISession {
 		if (!serverInfo || !serverInfo.options) {
 			return undefined;
 		}
-		let endpoints: utils.IEndpoint[] = serverInfo.options['clusterEndpoints'];
+		let endpoints: utils.IEndpoint[] = utils.getClusterEndpoints(serverInfo);
 		if (!endpoints || endpoints.length === 0) {
 			return undefined;
 		}