Add tests for Jupyter Session (#12053)

* add tests for Jupyter Session

* remove comment and fix description

* throw error if spark kernel provider is not mssql

* use localize strings and add pythonEnvVarPath test
This commit is contained in:
Lucy Zhang
2020-09-02 11:44:15 -07:00
committed by GitHub
parent f174dc1c77
commit bfe694763b
3 changed files with 71 additions and 14 deletions

View File

@@ -12,6 +12,8 @@ export const msgNo = localize('msgNo', "No");
 // Jupyter Constants ///////////////////////////////////////////////////////
 export const msgSampleCodeDataFrame = localize('msgSampleCodeDataFrame', "This sample code loads the file into a data frame and shows the first 10 results.");
+export const noBDCConnectionError = localize('noBDCConnectionError', "Spark kernels require a connection to a SQL Server Big Data Cluster master instance.");
+export const providerNotValidError = localize('providerNotValidError', "Non-MSSQL providers are not supported for spark kernels.");
 // Book view-let constants
 export const allFiles = localize('allFiles', "All Files");

View File

@@ -17,6 +17,7 @@ import { JupyterKernel } from './jupyterKernel';
 import { Deferred } from '../common/promise';
 import { JupyterServerInstallation } from './jupyterServerInstallation';
 import * as bdc from 'bdc';
+import { noBDCConnectionError, providerNotValidError } from '../common/localizedConstants';
 const configBase = {
 	'kernel_python_credentials': {
@@ -287,7 +288,7 @@ export class JupyterSession implements nb.ISession {
 		const endpoints = await this.getClusterEndpoints(connectionProfile.id);
 		const gatewayEndpoint: utils.IEndpoint = endpoints?.find(ep => ep.serviceName.toLowerCase() === KNOX_ENDPOINT_GATEWAY);
 		if (!gatewayEndpoint) {
-			return Promise.reject(new Error(localize('connectionNotValid', "Spark kernels require a connection to a SQL Server Big Data Cluster master instance.")));
+			throw new Error(noBDCConnectionError);
 		}
 		let gatewayHostAndPort = utils.getHostAndPortFromEndpoint(gatewayEndpoint.endpoint);
 		connectionProfile.options[KNOX_ENDPOINT_SERVER] = gatewayHostAndPort.host;
@@ -308,7 +309,7 @@ export class JupyterSession implements nb.ISession {
 			}
 		}
 		else {
-			connectionProfile.options[KNOX_ENDPOINT_PORT] = this.getKnoxPortOrDefault(connectionProfile);
+			throw new Error(providerNotValidError);
 		}
 		this.setHostAndPort(':', connectionProfile);
 		this.setHostAndPort(',', connectionProfile);
@@ -355,14 +356,6 @@ export class JupyterSession implements nb.ISession {
 		config.ignore_ssl_errors = utils.getIgnoreSslVerificationConfigSetting();
 	}
-	private getKnoxPortOrDefault(connectionProfile: IConnectionProfile): string {
-		let port = connectionProfile.options[KNOX_ENDPOINT_PORT];
-		if (!port) {
-			port = '30443';
-		}
-		return port;
-	}
 	private async getClusterEndpoints(profileId: string): Promise<utils.IEndpoint[]> {
 		let serverInfo: ServerInfo = await connection.getServerInfo(profileId);
 		if (!serverInfo || !serverInfo.options) {

View File

@@ -18,6 +18,7 @@ import 'mocha';
 import { JupyterSessionManager, JupyterSession } from '../../jupyter/jupyterSessionManager';
 import { Deferred } from '../../common/promise';
 import { SessionStub, KernelStub, FutureStub } from '../common';
+import { noBDCConnectionError, providerNotValidError } from '../../common/localizedConstants';
 export class TestClusterController implements bdc.IClusterController {
 	getClusterConfig(): Promise<any> {
@@ -232,7 +233,7 @@ describe('Jupyter Session', function (): void {
 		should(JSON.parse(result) === expectedResult);
 	});
-	it('should configure connection correctly', async function (): Promise<void> {
+	it('should configure connection correctly for MSSQL and SqlLogin auth type', async function (): Promise<void> {
 		let connectionProfile: IConnectionProfile = {
 			authenticationType: '',
 			connectionName: '',
@@ -256,9 +257,6 @@ describe('Jupyter Session', function (): void {
 		let credentials = { [ConnectionOptionSpecialType.password]: 'password' };
 		sinon.stub(connection, 'getCredentials').returns(Promise.resolve(credentials));
-		// Should throw error if there is no connection to big data cluster
-		await should(session.configureConnection(connectionProfile)).be.rejectedWith('Spark kernels require a connection to a SQL Server Big Data Cluster master instance.');
 		// Set up connection info to big data cluster
 		const mockServerInfo: ServerInfo = {
 			serverMajorVersion: 0,
@@ -306,4 +304,68 @@ describe('Jupyter Session', function (): void {
 		should(connectionProfile.options['knoxport']).equal('port');
 		should(connectionProfile.options['user']).equal('knoxUsername');
 	});
it('configure connection should throw error if there is no connection to big data cluster', async function (): Promise<void> {
let connectionProfile: IConnectionProfile = {
authenticationType: '',
connectionName: '',
databaseName: '',
id: 'id',
providerName: 'MSSQL',
options: {
authenticationType: 'SqlLogin',
},
password: '',
savePassword: false,
saveProfile: false,
serverName: '',
userName: ''
};
let futureMock = TypeMoq.Mock.ofType(FutureStub);
let kernelMock = TypeMoq.Mock.ofType(KernelStub);
kernelMock.setup(k => k.name).returns(() => 'spark');
kernelMock.setup(m => m.requestExecute(TypeMoq.It.isAny(), TypeMoq.It.isAny())).returns(() => futureMock.object);
mockJupyterSession.setup(s => s.kernel).returns(() => kernelMock.object);
let credentials = { [ConnectionOptionSpecialType.password]: 'password' };
sinon.stub(connection, 'getCredentials').returns(Promise.resolve(credentials));
await should(session.configureConnection(connectionProfile)).be.rejectedWith(noBDCConnectionError);
});
it('configure connection should throw error if provider is not MSSQL for spark kernel', async function (): Promise<void> {
let connectionProfile: IConnectionProfile = {
authenticationType: '',
connectionName: '',
databaseName: '',
id: 'id',
providerName: 'provider',
options: {
authenticationType: 'SqlLogin',
},
password: '',
savePassword: false,
saveProfile: false,
serverName: '',
userName: ''
};
let futureMock = TypeMoq.Mock.ofType(FutureStub);
let kernelMock = TypeMoq.Mock.ofType(KernelStub);
kernelMock.setup(k => k.name).returns(() => 'spark');
kernelMock.setup(m => m.requestExecute(TypeMoq.It.isAny(), TypeMoq.It.isAny())).returns(() => futureMock.object);
mockJupyterSession.setup(s => s.kernel).returns(() => kernelMock.object);
await should(session.configureConnection(connectionProfile)).be.rejectedWith(providerNotValidError);
});
it('should set environment variables correctly', function (): void {
let futureMock = TypeMoq.Mock.ofType(FutureStub);
let kernelMock = TypeMoq.Mock.ofType(KernelStub);
kernelMock.setup(m => m.requestExecute(TypeMoq.It.isAny(), TypeMoq.It.isAny())).returns(() => futureMock.object);
let spy = sinon.spy(kernelMock.object, 'requestExecute');
mockJupyterSession.setup(s => s.kernel).returns(() => kernelMock.object);
mockJupyterSession.setup(s => s.path).returns(() => 'path');
let newSession = new JupyterSession(mockJupyterSession.object, undefined, false, 'pythonEnvVarPath');
should(newSession).not.be.undefined();
sinon.assert.calledOnce(spy);
let args = spy.getCall(0).args;
should(args[0].code.includes('pythonEnvVarPath'));
});
 });