Added setup and teardown for test; add variable to control run python… (#4782)

* Added setup and teardown for tests; add variables to control running the python/pyspark tests; remove duplicate code in the clearAllOutputs tests
* Resolve PR comments
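In outline, the change moves per-test logging and temp-file cleanup into Mocha's suite-level hooks and gates the python/pyspark tests behind environment variables. Below is a minimal self-contained sketch of that pattern, not the commit's code itself: the suite, hook, and variable names match the diff that follows, and getFileName is inlined here purely for illustration (the real helper lives in notebook.util.ts).

import 'mocha';
import * as fs from 'fs';
import * as os from 'os';
import * as path from 'path';

// Inlined for the sketch; see the notebook.util.ts hunk in the diff below.
function getFileName(testName: string): string | undefined {
	return testName ? path.join(os.tmpdir(), testName + '.ipynb') : undefined;
}

suite('Notebook integration test suite', function () {
	// setup runs before every test in the suite.
	setup(function () {
		console.log(`Start "${this.currentTest.title}"`);
	});

	// teardown runs after every test, so individual test bodies no longer
	// need their own cleanup: the temp file path is derived from the title.
	teardown(function () {
		let fileName = getFileName(this.currentTest.title);
		if (fileName && fs.existsSync(fileName)) {
			fs.unlinkSync(fileName);
		}
	});

	// Opt-in tests: registered only when the controlling variable is '1'.
	if (process.env.RUN_PYTHON3_TEST === '1') {
		test('Python3 notebook test', async function () { /* ... */ });
	}
});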
@@ -6,22 +6,39 @@
 'use strict';
 
 import 'mocha';
-import assert = require('assert');
+import * as assert from 'assert';
 
 import * as azdata from 'azdata';
 import * as vscode from 'vscode';
 import { context } from './testContext';
-import { sqlNotebookContent, writeNotebookToFile, sqlKernelMetadata } from './notebook.util';
+import { sqlNotebookContent, writeNotebookToFile, sqlKernelMetadata, getFileName, pySparkNotebookContent, pySpark3KernelMetadata, pythonKernelMetadata } from './notebook.util';
 import { getBdcServer } from './testConfig';
 import { connectToServer } from './utils';
 import * as fs from 'fs';
 
 if (context.RunTest) {
-	suite('Notebook integration test suite', function() {
+	suite('Notebook integration test suite', function () {
+		setup(function () {
+			console.log(`Start "${this.currentTest.title}"`);
+		});
+
+		teardown(function () {
+			let testName = this.currentTest.title;
+			try {
+				let fileName = getFileName(testName);
+				if (fs.existsSync(fileName)) {
+					fs.unlinkSync(fileName);
+					console.log(`"${fileName}" is deleted.`);
+				}
+			}
+			catch (err) {
+				console.log(err);
+			}
+			finally {
+				console.log(`"${testName}" is done`);
+			}
+		});
+
 		test('Sql NB test', async function () {
-			let testName = this.test.title;
-			console.log(`Start "${testName}"`);
-			let notebook = await openNotebook(sqlNotebookContent, sqlKernelMetadata);
+			let notebook = await openNotebook(sqlNotebookContent, sqlKernelMetadata, this.test.title);
 			const expectedOutput0 = '(1 row affected)';
 			let cellOutputs = notebook.document.cells[0].contents.outputs;
 			console.log('Got cell outputs');
@@ -31,75 +48,46 @@ if (context.RunTest) {
 			assert(actualOutput0 === expectedOutput0, `Expected row count: ${expectedOutput0}, Actual: ${actualOutput0}`);
 			let actualOutput2 = (<azdata.nb.IExecuteResult>cellOutputs[2]).data['application/vnd.dataresource+json'].data[0];
 			assert(actualOutput2[0] === '1', `Expected result: 1, Actual: '${actualOutput2[0]}'`);
-
-			if (fs.existsSync(notebook.document.fileName)) {
-				fs.unlinkSync(notebook.document.fileName);
-			}
-			console.log(`"${testName}" done`);
 		});
 
-		// test('Python3 notebook test', async function () {
-		// 	console.log('Start Python3 NB test');
-		// 	let notebook = await openNotebook(pySparkNotebookContent, pythonKernelMetadata);
-		// 	let cellOutputs = notebook.document.cells[0].contents.outputs;
-		// 	console.log('Got cell outputs');
-		// 	let result = (<azdata.nb.IExecuteResult>cellOutputs[0]).data['text/plain'];
-		// 	assert(result === '2', `Expected: 2, Actual: ${result}`);
-		// 	console.log('Python3 NB done');
-		// });
-
-		// test('Clear all outputs - Python3 notebook ', async function () {
-		// 	let notebook = await openNotebook(pySparkNotebookContent, pythonKernelMetadata);
-		// 	//Check if at least one cell with output
-		// 	let cellWithOutputs = notebook.document.cells.find(cell => cell.contents && cell.contents.outputs && cell.contents.outputs.length > 0);
-		// 	console.log("Before clearing cell outputs");
-		// 	if (cellWithOutputs) {
-		// 		let clearedOutputs = await notebook.clearAllOutputs();
-		// 		let cells = notebook.document.cells;
-		// 		cells.forEach(cell => {
-		// 			assert(cell.contents && cell.contents.outputs && cell.contents.outputs.length === 0, `Expected Output: 0, Actual: '${cell.contents.outputs.length}'`);
-		// 		});
-		// 		assert(clearedOutputs, 'Outputs of all the code cells from Python notebook should be cleared');
-		// 		console.log("After clearing cell outputs");
-		// 	}
-		// 	assert(cellWithOutputs === undefined, 'Could not find notebook cells with outputs');
-		// });
-
 		test('Clear all outputs - SQL notebook ', async function () {
-			let notebook = await openNotebook(sqlNotebookContent, sqlKernelMetadata);
-			let cellWithOutputs = notebook.document.cells.find(cell => cell.contents && cell.contents.outputs && cell.contents.outputs.length > 0);
-			console.log('Before clearing cell outputs');
-			if (cellWithOutputs) {
-				let clearedOutputs = await notebook.clearAllOutputs();
-				let cells = notebook.document.cells;
-				cells.forEach(cell => {
-					assert(cell.contents && cell.contents.outputs && cell.contents.outputs.length === 0, `Expected cell outputs to be empty. Actual: '${cell.contents.outputs}'`);
-				});
-				assert(clearedOutputs, 'Outputs of all the code cells from SQL notebook should be cleared');
-				console.log('After clearing cell outputs');
-			}
-			else {
-				throw new Error('Could not find notebook cells with outputs');
-			}
+			let notebook = await openNotebook(sqlNotebookContent, sqlKernelMetadata, this.test.title);
+			await verifyClearAllOutputs(notebook);
 		});
 
-		// test('PySpark3 notebook test', async function () {
-		// 	this.timeout(12000);
-		// 	let notebook = await openNotebook(pySparkNotebookContent, pySpark3KernelMetadata);
-		// 	let cellOutputs = notebook.document.cells[0].contents.outputs;
-		// 	let sparkResult = (<azdata.nb.IStreamResult>cellOutputs[3]).text;
-		// 	assert(sparkResult === '2', `Expected: 2, Actual: ${sparkResult}`);
-		// });
+		if (process.env.RUN_PYTHON3_TEST === '1') {
+			test('Python3 notebook test', async function () {
+				let notebook = await openNotebook(pySparkNotebookContent, pythonKernelMetadata, this.test.title);
+				let cellOutputs = notebook.document.cells[0].contents.outputs;
+				console.log('Got cell outputs');
+				let result = (<azdata.nb.IExecuteResult>cellOutputs[0]).data['text/plain'];
+				assert(result === '2', `Expected python result: 2, Actual: ${result}`);
+			});
+
+			test('Clear all outputs - Python3 notebook ', async function () {
+				let notebook = await openNotebook(pySparkNotebookContent, pythonKernelMetadata, this.test.title);
+				await verifyClearAllOutputs(notebook);
+			});
+		}
+
+		if (process.env.RUN_PYSPARK_TEST === '1') {
+			test('PySpark3 notebook test', async function () {
+				let notebook = await openNotebook(pySparkNotebookContent, pySpark3KernelMetadata, this.test.title);
+				let cellOutputs = notebook.document.cells[0].contents.outputs;
+				let sparkResult = (<azdata.nb.IStreamResult>cellOutputs[3]).text;
+				assert(sparkResult === '2', `Expected spark result: 2, Actual: ${sparkResult}`);
+			});
+		}
 	});
 }
 
-async function openNotebook(content: azdata.nb.INotebookContents, kernelMetadata: any): Promise<azdata.nb.NotebookEditor> {
+async function openNotebook(content: azdata.nb.INotebookContents, kernelMetadata: any, testName: string): Promise<azdata.nb.NotebookEditor> {
 	let notebookConfig = vscode.workspace.getConfiguration('notebook');
 	notebookConfig.update('pythonPath', process.env.PYTHON_TEST_PATH, 1);
 	let server = await getBdcServer();
 	await connectToServer(server, 6000);
 	let pythonNotebook = Object.assign({}, content, { metadata: kernelMetadata });
-	let uri = writeNotebookToFile(pythonNotebook);
+	let uri = writeNotebookToFile(pythonNotebook, testName);
 	console.log(uri);
 	let notebook = await azdata.nb.showNotebookDocument(uri);
 	console.log('Notebook is opened');
@@ -111,4 +99,16 @@ async function openNotebook(content: azdata.nb.INotebookContents, kernelMetadata
 	assert(notebook !== undefined && notebook !== null, 'Expected notebook object is defined');
 	return notebook;
 }
+
+async function verifyClearAllOutputs(notebook: azdata.nb.NotebookEditor) {
+	let cellWithOutputs = notebook.document.cells.find(cell => cell.contents && cell.contents.outputs && cell.contents.outputs.length > 0);
+	assert(cellWithOutputs !== undefined, 'Could not find notebook cells with outputs');
+	console.log('Before clearing cell outputs');
+	let clearedOutputs = await notebook.clearAllOutputs();
+	let cells = notebook.document.cells;
+	cells.forEach(cell => {
+		assert(cell.contents && cell.contents.outputs && cell.contents.outputs.length === 0, `Expected Output: 0, Actual: '${cell.contents.outputs.length}'`);
+	});
+	assert(clearedOutputs, 'Outputs of all the code cells from Python notebook should be cleared');
+	console.log('After clearing cell outputs');
+}
@@ -73,14 +73,18 @@ export const pythonKernelMetadata = {
 	}
 };
 
-export function writeNotebookToFile(pythonNotebook: azdata.nb.INotebookContents): vscode.Uri {
+export function writeNotebookToFile(pythonNotebook: azdata.nb.INotebookContents, testName: string): vscode.Uri {
+	let fileName = getFileName(testName);
 	let notebookContentString = JSON.stringify(pythonNotebook);
-	let localFile = path.join(os.tmpdir(), 'notebook' + Math.floor(Math.random() * 101) + '.ipynb');
-	while (fs.existsSync(localFile)) {
-		localFile = path.join(os.tmpdir(), 'notebook' + Math.floor(Math.random() * 101) + '.ipynb');
-	}
-	fs.writeFileSync(localFile, notebookContentString);
-	console.log(`Local file is created: '${localFile}'`);
-	let uri = vscode.Uri.file(localFile);
+	fs.writeFileSync(fileName, notebookContentString);
+	console.log(`Local file is created: '${fileName}'`);
+	let uri = vscode.Uri.file(fileName);
 	return uri;
 }
+
+export function getFileName(testName: string): string {
+	if (testName) {
+		return path.join(os.tmpdir(), testName + '.ipynb');
+	}
+	return undefined;
+}
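This naming change is what makes the shared teardown workable: writeNotebookToFile previously picked a random notebook<N>.ipynb name in the temp directory, so each test had to track and delete its own file. Deriving the name from the test title makes the path reproducible, and teardown can recompute it with no bookkeeping. A short usage sketch of the round trip, assuming the imports resolve as in the test file above; 'Sql NB test' stands in for any test title:

import * as fs from 'fs';
import * as os from 'os';
import * as path from 'path';
import { writeNotebookToFile, getFileName, sqlNotebookContent } from './notebook.util';

// At test time: the notebook content is written to a path derived from the title.
let uri = writeNotebookToFile(sqlNotebookContent, 'Sql NB test');
console.log(uri.fsPath === path.join(os.tmpdir(), 'Sql NB test.ipynb')); // true

// In teardown: the same path is recomputed from the title and deleted.
let fileName = getFileName('Sql NB test');
if (fileName && fs.existsSync(fileName)) {
	fs.unlinkSync(fileName);
}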
@@ -1,5 +1,5 @@
 @echo off pass in username password hostname of big data cluster, please use "" for BDC_BACKEND_HOSTNAME to include port.
-@echo For example: setbackendvariables.cmd sa pwd "23.101.143.196,31433" pythonPath standaloneSql standaloneSqlUser standaloneSqlPwd azureSql azureSqlUser azureSqlPwd
+@echo For example: setbackendvariables.cmd sa pwd "23.101.143.196,31433" pythonPath standaloneSql standaloneSqlUser standaloneSqlPwd azureSql azureSqlUser azureSqlPwd 1 1
 set BDC_BACKEND_USERNAME=%~1
 set BDC_BACKEND_PWD=%~2
 set BDC_BACKEND_HOSTNAME=%~3
@@ -13,7 +13,12 @@ set AZURE_SQL=%~8
 set AZURE_SQL_USERNAME=%~9
 shift
 set AZURE_SQL_PWD=%~9
+shift
+set RUN_PYTHON3_TEST=%~9
+shift
+set RUN_PYSPARK_TEST=%~9
 
 @echo No problem reading BDC cluster: %BDC_BACKEND_USERNAME%, bdc_password, %BDC_BACKEND_HOSTNAME% and %PYTHON_TEST_PATH%
 @echo No problem reading Standalone SQL instance: %STANDALONE_SQL%, %STANDALONE_SQL_USERNAME% and standalone_sql_password
 @echo No problem reading AZURE SQL instance: %AZURE_SQL%, %AZURE_SQL_USERNAME% and %AZURE_SQL_PWD%
+@echo No problem reading run python test: %RUN_PYTHON3_TEST% and %RUN_PYSPARK_TEST%
@@ -12,8 +12,12 @@ export AZURE_SQL=%~8
 export AZURE_SQL_USERNAME=%~9
 export AZURE_SQL_PWD=%~10
+export RUN_PYTHON3_TEST=%11
+export RUN_PYSPARK_TEST=%12
 
-echo No problem reading BDC cluster$BDC_BACKEND_USERNAME, password, $BDC_BACKEND_HOSTNAME and $PYTHON_TEST_PATH,
+echo No problem reading BDC cluster$BDC_BACKEND_USERNAME, password, $BDC_BACKEND_HOSTNAME and $PYTHON_TEST_PATH
 echo No problem reading Standalone SQL instance: $STANDALONE_SQL, $STANDALONE_SQL_USERNAME and $STANDALONE_SQL_PWD
 echo No problem reading AZURE SQL instance: $AZURE_SQL, $AZURE_SQL_USERNAME and $AZURE_SQL_PWD
+@echo No problem reading run python and pyspark test: $RUN_PYTHON3_TEST and $RUN_PYSPARK_TEST
 
 set