Added setup and teardown for test; add variable to control run python… (#4782)

* Added setup and teardown for tests; added variables to control running python/pyspark tests; removed duplicate code in clearAllOutputs tests

* Resolve PR comments
This commit is contained in:
Yurong He
2019-04-02 15:16:54 -07:00
committed by GitHub
parent f8706abebe
commit 22c62fb524
4 changed files with 87 additions and 74 deletions

View File

@@ -6,22 +6,39 @@
'use strict';
import 'mocha';
import assert = require('assert');
import * as assert from 'assert';
import * as azdata from 'azdata';
import * as vscode from 'vscode';
import { context } from './testContext';
import { sqlNotebookContent, writeNotebookToFile, sqlKernelMetadata } from './notebook.util';
import { sqlNotebookContent, writeNotebookToFile, sqlKernelMetadata, getFileName, pySparkNotebookContent, pySpark3KernelMetadata, pythonKernelMetadata } from './notebook.util';
import { getBdcServer } from './testConfig';
import { connectToServer } from './utils';
import * as fs from 'fs';
if (context.RunTest) {
suite('Notebook integration test suite', function() {
suite('Notebook integration test suite', function () {
setup(function () {
console.log(`Start "${this.currentTest.title}"`);
});
teardown(function () {
let testName = this.currentTest.title;
try {
let fileName = getFileName(testName);
if (fs.existsSync(fileName)) {
fs.unlinkSync(fileName);
console.log(`"${fileName}" is deleted.`);
}
}
catch (err) {
console.log(err);
}
finally {
console.log(`"${testName}" is done`);
}
});
test('Sql NB test', async function () {
let testName = this.test.title;
console.log(`Start "${testName}"`);
let notebook = await openNotebook(sqlNotebookContent, sqlKernelMetadata);
let notebook = await openNotebook(sqlNotebookContent, sqlKernelMetadata, this.test.title);
const expectedOutput0 = '(1 row affected)';
let cellOutputs = notebook.document.cells[0].contents.outputs;
console.log('Got cell outputs');
@@ -31,75 +48,46 @@ if (context.RunTest) {
assert(actualOutput0 === expectedOutput0, `Expected row count: ${expectedOutput0}, Actual: ${actualOutput0}`);
let actualOutput2 = (<azdata.nb.IExecuteResult>cellOutputs[2]).data['application/vnd.dataresource+json'].data[0];
assert(actualOutput2[0] === '1', `Expected result: 1, Actual: '${actualOutput2[0]}'`);
if (fs.existsSync(notebook.document.fileName)) {
fs.unlinkSync(notebook.document.fileName);
}
console.log(`"${testName}" done`);
});
// test('Python3 notebook test', async function () {
// console.log('Start Python3 NB test');
// let notebook = await openNotebook(pySparkNotebookContent, pythonKernelMetadata);
// let cellOutputs = notebook.document.cells[0].contents.outputs;
// console.log('Got cell outputs');
// let result = (<azdata.nb.IExecuteResult>cellOutputs[0]).data['text/plain'];
// assert(result === '2', `Expected: 2, Actual: ${result}`);
// console.log('Python3 NB done');
// });
// test('Clear all outputs - Python3 notebook ', async function () {
// let notebook = await openNotebook(pySparkNotebookContent, pythonKernelMetadata);
// //Check if at least one cell with output
// let cellWithOutputs = notebook.document.cells.find(cell => cell.contents && cell.contents.outputs && cell.contents.outputs.length > 0);
// console.log("Before clearing cell outputs");
// if (cellWithOutputs) {
// let clearedOutputs = await notebook.clearAllOutputs();
// let cells = notebook.document.cells;
// cells.forEach(cell => {
// assert(cell.contents && cell.contents.outputs && cell.contents.outputs.length === 0, `Expected Output: 0, Actual: '${cell.contents.outputs.length}'`);
// });
// assert(clearedOutputs, 'Outputs of all the code cells from Python notebook should be cleared');
// console.log("After clearing cell outputs");
// }
// assert(cellWithOutputs === undefined, 'Could not find notebook cells with outputs');
// });
test('Clear all outputs - SQL notebook ', async function () {
let notebook = await openNotebook(sqlNotebookContent, sqlKernelMetadata);
let cellWithOutputs = notebook.document.cells.find(cell => cell.contents && cell.contents.outputs && cell.contents.outputs.length > 0);
console.log('Before clearing cell outputs');
if (cellWithOutputs) {
let clearedOutputs = await notebook.clearAllOutputs();
let cells = notebook.document.cells;
cells.forEach(cell => {
assert(cell.contents && cell.contents.outputs && cell.contents.outputs.length === 0, `Expected cell outputs to be empty. Actual: '${cell.contents.outputs}'`);
});
assert(clearedOutputs, 'Outputs of all the code cells from SQL notebook should be cleared');
console.log('After clearing cell outputs');
}
else {
throw new Error('Could not find notebook cells with outputs');
}
let notebook = await openNotebook(sqlNotebookContent, sqlKernelMetadata, this.test.title);
await verifyClearAllOutputs(notebook);
});
if (process.env.RUN_PYTHON3_TEST === '1') {
test('Python3 notebook test', async function () {
let notebook = await openNotebook(pySparkNotebookContent, pythonKernelMetadata, this.test.title);
let cellOutputs = notebook.document.cells[0].contents.outputs;
console.log('Got cell outputs');
let result = (<azdata.nb.IExecuteResult>cellOutputs[0]).data['text/plain'];
assert(result === '2', `Expected python result: 2, Actual: ${result}`);
});
// test('PySpark3 notebook test', async function () {
// this.timeout(12000);
// let notebook = await openNotebook(pySparkNotebookContent, pySpark3KernelMetadata);
// let cellOutputs = notebook.document.cells[0].contents.outputs;
// let sparkResult = (<azdata.nb.IStreamResult>cellOutputs[3]).text;
// assert(sparkResult === '2', `Expected: 2, Actual: ${sparkResult}`);
// });
test('Clear all outputs - Python3 notebook ', async function () {
let notebook = await openNotebook(pySparkNotebookContent, pythonKernelMetadata, this.test.title);
await verifyClearAllOutputs(notebook);
});
}
if (process.env.RUN_PYSPARK_TEST === '1') {
test('PySpark3 notebook test', async function () {
let notebook = await openNotebook(pySparkNotebookContent, pySpark3KernelMetadata, this.test.title);
let cellOutputs = notebook.document.cells[0].contents.outputs;
let sparkResult = (<azdata.nb.IStreamResult>cellOutputs[3]).text;
assert(sparkResult === '2', `Expected spark result: 2, Actual: ${sparkResult}`);
});
}
});
}
async function openNotebook(content: azdata.nb.INotebookContents, kernelMetadata: any): Promise<azdata.nb.NotebookEditor> {
async function openNotebook(content: azdata.nb.INotebookContents, kernelMetadata: any, testName: string): Promise<azdata.nb.NotebookEditor> {
let notebookConfig = vscode.workspace.getConfiguration('notebook');
notebookConfig.update('pythonPath', process.env.PYTHON_TEST_PATH, 1);
let server = await getBdcServer();
await connectToServer(server, 6000);
let pythonNotebook = Object.assign({}, content, { metadata: kernelMetadata });
let uri = writeNotebookToFile(pythonNotebook);
let uri = writeNotebookToFile(pythonNotebook, testName);
console.log(uri);
let notebook = await azdata.nb.showNotebookDocument(uri);
console.log('Notebook is opened');
@@ -111,4 +99,16 @@ async function openNotebook(content: azdata.nb.INotebookContents, kernelMetadata
assert(notebook !== undefined && notebook !== null, 'Expected notebook object is defined');
return notebook;
}
/**
 * Verifies that NotebookEditor.clearAllOutputs() empties the outputs of every cell.
 * Asserts that at least one cell has outputs before clearing, then asserts every
 * cell's outputs are empty afterwards.
 * @param notebook The opened notebook editor to verify.
 * @throws AssertionError when no cell had outputs, clearAllOutputs reports failure,
 *         or any cell still has outputs after clearing.
 */
async function verifyClearAllOutputs(notebook: azdata.nb.NotebookEditor) {
	let cellWithOutputs = notebook.document.cells.find(cell => cell.contents && cell.contents.outputs && cell.contents.outputs.length > 0);
	assert(cellWithOutputs !== undefined, 'Could not find notebook cells with outputs');
	console.log('Before clearing cell outputs');
	let clearedOutputs = await notebook.clearAllOutputs();
	let cells = notebook.document.cells;
	cells.forEach(cell => {
		assert(cell.contents && cell.contents.outputs && cell.contents.outputs.length === 0, `Expected cell outputs to be empty. Actual: '${cell.contents.outputs.length}'`);
	});
	// This helper is shared by the SQL and Python notebook tests, so the failure
	// message must stay kernel-agnostic (it previously claimed "Python notebook").
	assert(clearedOutputs, 'Outputs of all the code cells in the notebook should be cleared');
	console.log('After clearing cell outputs');
}

View File

@@ -73,14 +73,18 @@ export const pythonKernelMetadata = {
}
};
export function writeNotebookToFile(pythonNotebook: azdata.nb.INotebookContents): vscode.Uri {
export function writeNotebookToFile(pythonNotebook: azdata.nb.INotebookContents, testName: string): vscode.Uri {
let fileName = getFileName(testName);
let notebookContentString = JSON.stringify(pythonNotebook);
let localFile = path.join(os.tmpdir(), 'notebook' + Math.floor(Math.random() * 101) + '.ipynb');
while (fs.existsSync(localFile)) {
localFile = path.join(os.tmpdir(), 'notebook' + Math.floor(Math.random() * 101) + '.ipynb');
}
fs.writeFileSync(localFile, notebookContentString);
console.log(`Local file is created: '${localFile}'`);
let uri = vscode.Uri.file(localFile);
fs.writeFileSync(fileName, notebookContentString);
console.log(`Local file is created: '${fileName}'`);
let uri = vscode.Uri.file(fileName);
return uri;
}
/**
 * Builds the path of the temp-directory .ipynb file that backs a given test.
 * @param testName Title of the test that owns the notebook file.
 * @returns Full path under the OS temp directory, or undefined when testName is empty.
 */
export function getFileName(testName: string): string {
	return testName ? path.join(os.tmpdir(), `${testName}.ipynb`) : undefined;
}

View File

@@ -1,5 +1,5 @@
@echo off pass in username password hostname of big data cluster, please use "" for BDC_BACKEND_HOSTNAME to include port.
@echo For example: setbackendvariables.cmd sa pwd "23.101.143.196,31433" pythonPath standaloneSql standaloneSqlUser standaloneSqlPwd azureSql azureSqlUser azureSqlPwd
@echo For example: setbackendvariables.cmd sa pwd "23.101.143.196,31433" pythonPath standaloneSql standaloneSqlUser standaloneSqlPwd azureSql azureSqlUser azureSqlPwd 1 1
set BDC_BACKEND_USERNAME=%~1
set BDC_BACKEND_PWD=%~2
set BDC_BACKEND_HOSTNAME=%~3
@@ -13,7 +13,12 @@ set AZURE_SQL=%~8
set AZURE_SQL_USERNAME=%~9
shift
set AZURE_SQL_PWD=%~9
shift
set RUN_PYTHON3_TEST=%~9
shift
set RUN_PYSPARK_TEST=%~9
@echo No problem reading BDC cluster: %BDC_BACKEND_USERNAME%, bdc_password, %BDC_BACKEND_HOSTNAME% and %PYTHON_TEST_PATH%
@echo No problem reading Standalone SQL instance: %STANDALONE_SQL%, %STANDALONE_SQL_USERNAME% and standalone_sql_password
@echo No problem reading AZURE SQL instance: %AZURE_SQL%, %AZURE_SQL_USERNAME% and %AZURE_SQL_PWD%
@echo No problem reading AZURE SQL instance: %AZURE_SQL%, %AZURE_SQL_USERNAME% and %AZURE_SQL_PWD%
@echo No problem reading run python test: %RUN_PYTHON3_TEST% and %RUN_PYSPARK_TEST%

View File

@@ -12,8 +12,12 @@ export AZURE_SQL=%~8
export AZURE_SQL_USERNAME=%~9
export AZURE_SQL_PWD=%~10
echo No problem reading BDC cluster$BDC_BACKEND_USERNAME, password, $BDC_BACKEND_HOSTNAME and $PYTHON_TEST_PATH,
# Positional parameters past $9 require braces in bash (${11}, ${12});
# %11 / %12 is Windows batch syntax and would be exported as the literal text "%11".
export RUN_PYTHON3_TEST=${11}
export RUN_PYSPARK_TEST=${12}
echo No problem reading BDC cluster$BDC_BACKEND_USERNAME, password, $BDC_BACKEND_HOSTNAME and $PYTHON_TEST_PATH
echo No problem reading Standalone SQL instance: $STANDALONE_SQL, $STANDALONE_SQL_USERNAME and $STANDALONE_SQL_PWD
echo No problem reading AZURE SQL instance: $AZURE_SQL, $AZURE_SQL_USERNAME and $AZURE_SQL_PWD
# Plain 'echo' — '@echo' is Windows batch syntax and fails as command-not-found in bash.
echo No problem reading run python and pyspark test: $RUN_PYTHON3_TEST and $RUN_PYSPARK_TEST
set