Add notebook integration tests (#4652)

* Add notebook integration tests
Yurong He authored on 2019-03-22 10:39:44 -07:00; committed by GitHub
parent 756f77063a
commit 71db7e10b6
5 changed files with 153 additions and 3 deletions

View File

@@ -0,0 +1,63 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import 'mocha';
import assert = require('assert');
import * as azdata from 'azdata';
import * as vscode from 'vscode';
import { context } from './testContext';
import { sqlNotebookContent, writeNotebookToFile, pySpark3KernelMetadata, sqlKernelMetadata, pythonKernelMetadata, pySparkNotebookContent } from './notebook.util';
import { getBdcServer } from './testConfig';
import { connectToServer } from './utils';
if (context.RunTest) {
suite('Notebook integration test suite', async () => {
test('Sql NB test', async function () {
this.timeout(6000);
let notebook = await openNotebook(sqlNotebookContent, sqlKernelMetadata);
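// The single SQL cell is expected to produce 3 outputs: [0] carries the '(1 row affected)' message and [2] carries the query result (the content of [1] is not asserted here).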
const expectedOutput0 = '(1 row affected)';
let cellOutputs = notebook.document.cells[0].contents.outputs;
assert(cellOutputs.length === 3, `Expected length: 3, Actual: '${cellOutputs.length}'`);
let actualOutput0 = (<azdata.nb.IDisplayData>cellOutputs[0]).data['text/html'];
assert(actualOutput0 === expectedOutput0, `Expected row count: '${expectedOutput0}', Actual: '${actualOutput0}'`);
let actualOutput2 = (<azdata.nb.IExecuteResult>cellOutputs[2]).data['application/vnd.dataresource+json'].data[0];
assert(actualOutput2[0] === '1', `Expected result: 1, Actual: '${actualOutput2[0]}'`);
});
test('Python3 notebook test', async function () {
this.timeout(6000);
let notebook = await openNotebook(pySparkNotebookContent, pythonKernelMetadata);
let cellOutputs = notebook.document.cells[0].contents.outputs;
let result = (<azdata.nb.IExecuteResult>cellOutputs[0]).data['text/plain'];
assert(result === '2', `Expected: 2, Actual: '${result}'`);
});
test('PySpark3 notebook test', async function () {
this.timeout(12000);
let notebook = await openNotebook(pySparkNotebookContent, pySpark3KernelMetadata);
let cellOutputs = notebook.document.cells[0].contents.outputs;
let sparkResult = (<azdata.nb.IStreamResult>cellOutputs[3]).text;
assert(sparkResult === '2', `Expected: 2, Actual: '${sparkResult}'`);
});
});
}
async function openNotebook(content: azdata.nb.INotebookContents, kernelMetadata: any): Promise<azdata.nb.NotebookEditor> {
let notebookConfig = vscode.workspace.getConfiguration('notebook');
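// The third argument (1) is vscode.ConfigurationTarget.Global, so the python path is applied user-wide for the test run.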
notebookConfig.update('pythonPath', process.env.PYTHON_TEST_PATH, 1);
let server = await getBdcServer();
await connectToServer(server, 6000);
let pythonNotebook = Object.assign({}, content, { metadata: kernelMetadata });
let uri = writeNotebookToFile(pythonNotebook);
let notebook = await azdata.nb.showNotebookDocument(uri);
assert(notebook.document.cells.length === 1, 'Notebook should have 1 cell');
let ran = await notebook.runCell(notebook.document.cells[0]);
assert(ran, 'Notebook runCell should succeed');
return notebook;
}

View File

@@ -0,0 +1,79 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import 'mocha';
import * as azdata from 'azdata';
import * as vscode from 'vscode';
import * as tempWrite from 'temp-write';
export class CellTypes {
public static readonly Code = 'code';
public static readonly Markdown = 'markdown';
public static readonly Raw = 'raw';
}
export const pySparkNotebookContent: azdata.nb.INotebookContents = {
cells: [{
cell_type: CellTypes.Code,
source: '1+1',
metadata: { language: 'python' },
execution_count: 1
}],
metadata: {
'kernelspec': {
'name': 'pyspark3kernel',
'display_name': 'PySpark3'
}
},
nbformat: 4,
nbformat_minor: 2
};
export const sqlNotebookContent: azdata.nb.INotebookContents = {
cells: [{
cell_type: CellTypes.Code,
source: 'select 1',
metadata: { language: 'sql' },
execution_count: 1
}],
metadata: {
'kernelspec': {
'name': 'SQL',
'display_name': 'SQL'
}
},
nbformat: 4,
nbformat_minor: 2
};
export const pySpark3KernelMetadata = {
'kernelspec': {
'name': 'pyspark3kernel',
'display_name': 'PySpark3'
}
};
export const sqlKernelMetadata = {
'kernelspec': {
'name': 'SQL',
'display_name': 'SQL'
}
};
export const pythonKernelMetadata = {
'kernelspec': {
'name': 'python3',
'display_name': 'Python 3'
}
};
export function writeNotebookToFile(pythonNotebook: azdata.nb.INotebookContents): vscode.Uri {
let notebookContentString = JSON.stringify(pythonNotebook);
let localFile = tempWrite.sync(notebookContentString, 'notebook.ipynb');
let uri = vscode.Uri.file(localFile);
return uri;
}
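As a usage sketch only (this mirrors openNotebook in the test file above and is not part of the change), the exports are meant to be combined by attaching a kernel spec to the base content, writing the result to a temporary .ipynb, and opening it through the azdata notebook API; the function name below is illustrative:

import * as azdata from 'azdata';
import { sqlNotebookContent, sqlKernelMetadata, writeNotebookToFile } from './notebook.util';

// Sketch: open the sample SQL notebook against the SQL kernel.
async function openSqlSampleNotebook(): Promise<azdata.nb.NotebookEditor> {
	// Attach the desired kernel spec to the base notebook content.
	let contents = Object.assign({}, sqlNotebookContent, { metadata: sqlKernelMetadata });
	// Persist to a temporary .ipynb so it can be opened by URI.
	let uri = writeNotebookToFile(contents);
	return azdata.nb.showNotebookDocument(uri);
}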

View File

@@ -8,6 +8,7 @@
import 'mocha';
import * as vscode from 'vscode';
import { context } from './testContext';
import assert = require('assert');
if (!context.RunTest) {
suite('integration test setup', () => {
@@ -16,6 +17,10 @@ if (!context.RunTest) {
await vscode.commands.executeCommand('test.setupIntegrationTest');
// Reload the window; this is required for some of the changes made by 'test.setupIntegrationTest' to take effect
await vscode.commands.executeCommand('workbench.action.reloadWindow');
assert(process.env.BDC_BACKEND_HOSTNAME !== undefined &&
process.env.BDC_BACKEND_USERNAME !== undefined &&
process.env.BDC_BACKEND_PWD !== undefined, 'BDC_BACKEND_HOSTNAME, BDC_BACKEND_USERNAME, BDC_BACKEND_PWD must be set using ./scripts/setbackendvariables.sh or .\\scripts\\setbackendvariables.bat');
});
});
}

View File

@@ -1,5 +1,7 @@
-@echo off pass in username password hostname of big data cluster
+@echo off pass in username password hostname of big data cluster, please use "" for BDC_BACKEND_HOSTNAME to include port.
+@echo For example: setbackendvariables.cmd sa pwd "23.101.143.196,31433" C:\Users\yuronhe\azuredatastudio-python
 set BDC_BACKEND_USERNAME=%~1
 set BDC_BACKEND_PWD=%~2
 set BDC_BACKEND_HOSTNAME=%~3
-@echo No problem reading %BDC_BACKEND_HOSTNAME%, password and %BDC_BACKEND_HOSTNAME%
+set PYTHON_TEST_PATH=%~4
+@echo No problem reading %BDC_BACKEND_USERNAME%, password, %BDC_BACKEND_HOSTNAME% and %PYTHON_TEST_PATH%

View File

@@ -2,5 +2,6 @@
 export BDC_BACKEND_USERNAME=$1
 export BDC_BACKEND_PWD=$2
 export BDC_BACKEND_HOSTNAME=$3
-echo No problem reading $BDC_BACKEND_USERNAME, password and $BDC_BACKEND_HOSTNAME
+export PYTHON_TEST_PATH=$4
+echo No problem reading $BDC_BACKEND_USERNAME, password, $BDC_BACKEND_HOSTNAME and $PYTHON_TEST_PATH
 set