mirror of https://github.com/ckaczor/azuredatastudio.git, synced 2026-02-08 09:38:26 -05:00
Add integration-test code coverage (#10632)
* wip
* Update to latest vscodetestcover and add tests back in
* Update to latest version of vscodetestcover
* updates
* another yarn.lock
* update version
* Revert import path change
162  extensions/integration-tests/src/test/cms.test.ts  Normal file
@@ -0,0 +1,162 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import 'mocha';
import * as vscode from 'vscode';
import * as azdata from 'azdata';
import * as mssql from '../../../mssql';
import * as utils from './utils';
import * as uuid from './uuid';
import assert = require('assert');
import { getStandaloneServer, TestServerProfile } from './testConfig';

let cmsService: mssql.ICmsService;
let server: TestServerProfile;
let connectionId: string;
let ownerUri: string;
const SERVER_CONNECTION_TIMEOUT: number = 3000;
const TEST_CMS_NAME = `adsTestCms_${uuid.v4().asHex()}`;
const TEST_CMS_GROUP = `adsTestCmsGroup_${uuid.v4().asHex()}`;
const TEST_CMS_SERVER = `adsTestCmsServer_${uuid.v4().asHex()}`;
const TEST_CMS_REG_SERVER = `adsTestCmsRegisteredServer_${uuid.v4().asHex()}`;

suite('CMS integration test suite', () => {

	setup(async function () {
		// Set up CMS provider
		if (!cmsService) {
			cmsService = ((await vscode.extensions.getExtension(mssql.extension.name).activate() as mssql.IExtension)).cmsService;
			assert(cmsService !== undefined);
		}

		// Set up connection
		if (!server) {
			server = await getStandaloneServer();
			connectionId = await utils.connectToServer(server, SERVER_CONNECTION_TIMEOUT);
			ownerUri = await azdata.connection.getConnectionString(connectionId, true);
			console.log('Start CMS tests');
		}
		if (!ownerUri) {
			ownerUri = await azdata.connection.getConnectionString(connectionId, true);
		}
	});

	test('Create CMS Server', async function () {
		// Should fail
		await utils.assertThrowsAsync(
			async () => await cmsService.createCmsServer(undefined, 'test_description', undefined, ownerUri),
			'Cannot add a CMS server without a name or connection');
		let connection = {
			serverName: server.serverName,
			userName: server.userName,
			password: server.password,
			authenticationType: server.authenticationTypeName,
			database: server.database,
			provider: server.provider,
			version: server.version,
			engineType: server.engineType,
			options: {}
		};

		// Should create a CMS Server without an error
		await cmsService.createCmsServer(TEST_CMS_NAME, 'test_description', connection, ownerUri);
	});

	test('Add and delete registered group to/from CMS server', async function () {
		await utils.assertThrowsAsync(
			async () => await cmsService.addServerGroup(ownerUri, '', undefined, 'test_description'),
			'Cannot add a server group without a name');

		// Should create a server group
		let result = await cmsService.addServerGroup(ownerUri, '', TEST_CMS_GROUP, 'test_description');
		assert(result === true, `Server group ${TEST_CMS_GROUP} was not added to CMS server successfully`);

		let existingRegisteredServerGroupCount = (await cmsService.getRegisteredServers(ownerUri, '')).registeredServerGroups.length;

		// Shouldn't be able to create a new server group with same name
		await utils.assertThrowsAsync(
			async () => await cmsService.addServerGroup(ownerUri, '', TEST_CMS_GROUP, 'test_description'),
			'Cannot add a server group with existing name');

		let cmsResources = await cmsService.getRegisteredServers(ownerUri, '');
		assert(cmsResources.registeredServerGroups.length === existingRegisteredServerGroupCount,
			`Unexpected number of Registered Server Groups after attempting to add group that already exists. Groups : [${cmsResources.registeredServerGroups.map(g => g.name).join(', ')}]`);

		// Should remove the server group we added above
		let deleteResult = await cmsService.removeServerGroup(ownerUri, '', TEST_CMS_GROUP);
		assert(deleteResult === true, `Server group ${TEST_CMS_GROUP} was not removed successfully`);

		cmsResources = await cmsService.getRegisteredServers(ownerUri, '');
		assert(cmsResources.registeredServerGroups.find(g => g.name === TEST_CMS_GROUP) === undefined,
			`The server group ${TEST_CMS_GROUP} was not removed successfully. Groups : [${cmsResources.registeredServerGroups.map(g => g.name).join(', ')}]`);
	});

	test('Add and delete registered server to/from CMS server', async function () {

		await utils.assertThrowsAsync(
			async () => cmsService.addRegisteredServer(ownerUri, '', undefined, 'test_description', undefined),
			'Cannot add a registered server without a name or connection');

		let server = await getStandaloneServer('2019');
		let connection = {
			serverName: server.serverName,
			userName: server.userName,
			password: server.password,
			authenticationType: server.authenticationTypeName,
			database: server.database,
			provider: server.provider,
			version: server.version,
			engineType: server.engineType,
			options: {}
		};

		// Should create a registered server
		let result = await cmsService.addRegisteredServer(ownerUri, '', TEST_CMS_SERVER, 'test_description', connection);
		assert(result === true, `Registered server ${TEST_CMS_SERVER} was not added to CMS server successfully`);

		// Shouldn't be able to create a new registered server with same name
		await utils.assertThrowsAsync(
			async () => await cmsService.addRegisteredServer(ownerUri, '', TEST_CMS_SERVER, 'test_description', connection),
			'Cannot add a registered server with existing name');

		// Should remove the registered server we added above
		let deleteResult = await cmsService.removeRegisteredServer(ownerUri, '', TEST_CMS_SERVER);
		assert(deleteResult === true, `Registered server ${TEST_CMS_SERVER} was not removed correctly`);
	});

	test('Add and delete registered server to/from server group', async function () {

		// Should create a server group
		let result = await cmsService.addServerGroup(ownerUri, '', TEST_CMS_GROUP, 'test_description');
		assert(result === true, `Server group ${TEST_CMS_GROUP} was not created successfully`);

		// Make sure server group is created
		let cmsResources = await cmsService.getRegisteredServers(ownerUri, '');
		assert(cmsResources.registeredServerGroups.find(g => g.name === TEST_CMS_GROUP),
			`Registered Server Group ${TEST_CMS_GROUP} was not found after being added. Groups : [${cmsResources.registeredServerGroups.map(g => g.name).join(', ')}]`);

		// Should create a registered server under the group
		let server = await getStandaloneServer('2019');
		let connection = {
			serverName: server.serverName,
			userName: server.userName,
			password: server.password,
			authenticationType: server.authenticationTypeName,
			database: server.database,
			provider: server.provider,
			version: server.version,
			engineType: server.engineType,
			options: {}
		};
		let relativePath = cmsResources.registeredServerGroups[0].relativePath;

		result = await cmsService.addRegisteredServer(ownerUri, relativePath, TEST_CMS_REG_SERVER, 'test_description', connection);
		assert(result === true, `Registered server ${TEST_CMS_REG_SERVER} was not added to server group successfully`);

		// Should remove the server group we added above
		let deleteResult = await cmsService.removeServerGroup(ownerUri, '', TEST_CMS_GROUP);
		assert(deleteResult === true, `Server group ${TEST_CMS_GROUP} was not deleted from CMS server successfully`);
	});
});
115  extensions/integration-tests/src/test/dacpac.test.ts  Normal file
@@ -0,0 +1,115 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import 'mocha';
import * as azdata from 'azdata';
import * as utils from './utils';
import * as path from 'path';
import * as fs from 'fs';
import * as os from 'os';
import * as mssql from '../../../mssql';
import * as vscode from 'vscode';
import { getStandaloneServer } from './testConfig';
import * as assert from 'assert';
import { promisify } from 'util';

const retryCount = 24; // 2 minutes
const dacpac1: string = path.join(__dirname, '../../testData/Database1.dacpac');
suite('Dacpac integration test suite', () => {
	suiteSetup(async function () {
		await utils.sleep(5000); // To ensure the providers are registered.
		console.log(`Start dacpac tests`);
	});

	test('Deploy and extract dacpac', async function () {
		const server = await getStandaloneServer();
		await utils.connectToServer(server);

		const nodes = <azdata.objectexplorer.ObjectExplorerNode[]>await azdata.objectexplorer.getActiveConnectionNodes();
		const index = nodes.findIndex(node => node.nodePath.includes(server.serverName));
		const ownerUri = await azdata.connection.getUriForConnection(nodes[index].connectionId);
		const now = new Date();
		const databaseName = 'ADS_deployDacpac_' + now.getTime().toString();

		try {
			const dacfxService = ((await vscode.extensions.getExtension(mssql.extension.name).activate() as mssql.IExtension)).dacFx;
			assert(dacfxService, 'DacFx Service Provider is not available');

			// Deploy dacpac
			const deployResult = await dacfxService.deployDacpac(dacpac1, databaseName, false, ownerUri, azdata.TaskExecutionMode.execute);
			await utils.assertDatabaseCreationResult(databaseName, ownerUri, retryCount);
			const dbConnectionId = await utils.connectToServer({
				serverName: server.serverName,
				database: databaseName,
				userName: server.userName,
				password: server.password,
				authenticationTypeName: server.authenticationTypeName,
				providerName: server.providerName
			});
			const dbConnectionOwnerUri = await azdata.connection.getUriForConnection(dbConnectionId);
			await utils.assertTableCreationResult('dbo', 'Table1', dbConnectionOwnerUri, retryCount);
			await utils.assertTableCreationResult('dbo', 'Table2', dbConnectionOwnerUri, retryCount);
			assert(deployResult.success === true && deployResult.errorMessage === '', `Deploy dacpac should succeed. Expected: there should be no error. Actual Error message: "${deployResult.errorMessage}"`);

			// Extract dacpac
			const folderPath = path.join(os.tmpdir(), 'DacFxTest');
			if (!(await promisify(fs.exists)(folderPath))) {
				await fs.promises.mkdir(folderPath);
			}
			const packageFilePath = path.join(folderPath, `${databaseName}.dacpac`);
			const extractResult = await dacfxService.extractDacpac(databaseName, packageFilePath, databaseName, '1.0.0.0', ownerUri, azdata.TaskExecutionMode.execute);
			await utils.assertFileGenerationResult(packageFilePath, retryCount);

			assert(extractResult.success === true && extractResult.errorMessage === '', `Extract dacpac should succeed. Expected: there should be no error. Actual Error message: "${extractResult.errorMessage}"`);
		} finally {
			await utils.deleteDB(server, databaseName, ownerUri);
		}
	});

	const bacpac1: string = path.join(__dirname, '..', '..', 'testData', 'Database1.bacpac');
	test('Import and export bacpac', async function () {
		const server = await getStandaloneServer();
		await utils.connectToServer(server);

		const nodes = <azdata.objectexplorer.ObjectExplorerNode[]>await azdata.objectexplorer.getActiveConnectionNodes();
		const index = nodes.findIndex(node => node.nodePath.includes(server.serverName));
		const ownerUri = await azdata.connection.getUriForConnection(nodes[index].connectionId);
		const now = new Date();
		const databaseName = 'ADS_importBacpac_' + now.getTime().toString();

		try {
			let dacfxService = ((await vscode.extensions.getExtension(mssql.extension.name).activate() as mssql.IExtension)).dacFx;
			assert(dacfxService, 'DacFx Service Provider is not available');

			// Import bacpac
			const importResult = await dacfxService.importBacpac(bacpac1, databaseName, ownerUri, azdata.TaskExecutionMode.execute);
			await utils.assertDatabaseCreationResult(databaseName, ownerUri, retryCount);
			const dbConnectionId = await utils.connectToServer({
				serverName: server.serverName,
				database: databaseName,
				userName: server.userName,
				password: server.password,
				authenticationTypeName: server.authenticationTypeName,
				providerName: server.providerName
			});
			const dbConnectionOwnerUri = await azdata.connection.getUriForConnection(dbConnectionId);
			await utils.assertTableCreationResult('dbo', 'Table1', dbConnectionOwnerUri, retryCount, true);
			await utils.assertTableCreationResult('dbo', 'Table2', dbConnectionOwnerUri, retryCount, true);
			assert(importResult.success === true && importResult.errorMessage === '', `Expected: Import bacpac should succeed and there should be no error. Actual Error message: "${importResult.errorMessage}"`);

			// Export bacpac
			const folderPath = path.join(os.tmpdir(), 'DacFxTest');
			if (!(await promisify(fs.exists)(folderPath))) {
				await fs.promises.mkdir(folderPath);
			}
			const packageFilePath = path.join(folderPath, `${databaseName}.bacpac`);
			const exportResult = await dacfxService.exportBacpac(databaseName, packageFilePath, ownerUri, azdata.TaskExecutionMode.execute);
			await utils.assertFileGenerationResult(packageFilePath, retryCount);
			assert(exportResult.success === true && exportResult.errorMessage === '', `Expected: Export bacpac should succeed and there should be no error. Actual Error message: "${exportResult.errorMessage}"`);
		} finally {
			await utils.deleteDB(server, databaseName, ownerUri);
		}
	});
});
48  extensions/integration-tests/src/test/index.ts  Normal file
@@ -0,0 +1,48 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as path from 'path';
import * as testRunner from 'vscodetestcover';

const suite = 'Extension Integration Tests';

const mochaOptions: any = {
	ui: 'tdd',
	useColors: true,
	timeout: 60000
};

// set relevant mocha options from the environment
if (process.env.ADS_TEST_GREP) {
	mochaOptions.grep = process.env.ADS_TEST_GREP;
	console.log(`setting options.grep to: ${mochaOptions.grep}`);
}
if (process.env.ADS_TEST_INVERT_GREP) {
	mochaOptions.invert = parseInt(process.env.ADS_TEST_INVERT_GREP);
	console.log(`setting options.invert to: ${mochaOptions.invert}`);
}
if (process.env.ADS_TEST_TIMEOUT) {
	mochaOptions.timeout = parseInt(process.env.ADS_TEST_TIMEOUT);
	console.log(`setting options.timeout to: ${mochaOptions.timeout}`);
}
if (process.env.ADS_TEST_RETRIES) {
	mochaOptions.retries = parseInt(process.env.ADS_TEST_RETRIES);
	console.log(`setting options.retries to: ${mochaOptions.retries}`);
}

if (process.env.BUILD_ARTIFACTSTAGINGDIRECTORY) {
	mochaOptions.reporter = 'mocha-multi-reporters';
	mochaOptions.reporterOptions = {
		reporterEnabled: 'spec, mocha-junit-reporter',
		mochaJunitReporterReporterOptions: {
			testsuitesTitle: `${suite} ${process.platform}`,
			mochaFile: path.join(process.env.BUILD_ARTIFACTSTAGINGDIRECTORY, `test-results/${process.platform}-${suite.toLowerCase().replace(/[^\w]/g, '-')}-results.xml`)
		}
	};
}

testRunner.configure(mochaOptions, { coverConfig: '../../coverConfig.json' });

export = testRunner;
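For context, a minimal sketch (not part of this commit) of how the ADS_TEST_* environment variables read by index.ts above could be set when launching a local integration-test run. Only the variable names come from index.ts; the launcher file and the 'yarn test-integration' spawn target are assumptions.

// Hypothetical local launcher sketch: the spawn target is an assumption.
import { spawnSync } from 'child_process';

const env = {
	...process.env,
	ADS_TEST_GREP: 'Dacpac',    // run only tests whose titles match this pattern
	ADS_TEST_TIMEOUT: '120000', // per-test timeout in ms (parsed with parseInt above)
	ADS_TEST_RETRIES: '2'       // retry failing tests up to two times
};

spawnSync('yarn', ['test-integration'], { env, stdio: 'inherit' });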
375  extensions/integration-tests/src/test/notebook.test.ts  Normal file
@@ -0,0 +1,375 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import 'mocha';
import * as assert from 'assert';
import * as azdata from 'azdata';
import * as vscode from 'vscode';
import { sqlNotebookContent, writeNotebookToFile, sqlKernelMetadata, getFileName, pySparkNotebookContent, pySparkKernelMetadata, pythonKernelMetadata, sqlNotebookMultipleCellsContent, notebookContentForCellLanguageTest, sqlKernelSpec, pythonKernelSpec, pySparkKernelSpec, CellTypes } from './notebook.util';
import { getConfigValue, EnvironmentVariable_PYTHON_PATH, TestServerProfile, getStandaloneServer } from './testConfig';
import { connectToServer, sleep, testServerProfileToIConnectionProfile } from './utils';
import * as fs from 'fs';
import { isNullOrUndefined, promisify } from 'util';

suite('Notebook integration test suite', function () {
	setup(async function () {
		console.log(`Start "${this.currentTest.title}"`);
		let server = await getStandaloneServer();
		assert(server && server.serverName, 'No server could be found');
		await connectToServer(server, 6000);
	});

	teardown(async function () {
		try {
			let fileName = getFileName(this.test.title + this.invocationCount++);
			if (await promisify(fs.exists)(fileName)) {
				await fs.promises.unlink(fileName);
				console.log(`"${fileName}" is deleted.`);
			}
			await vscode.commands.executeCommand('workbench.action.closeActiveEditor');
		}
		catch (err) {
			console.log(err);
		}
		finally {
			console.log(`"${this.test.title}" is done`);
		}
	});

	test('Sql NB test @UNSTABLE@', async function () {
		let notebook = await openNotebook(sqlNotebookContent, sqlKernelMetadata, this.test.title + this.invocationCount++, true);
		await runCell(notebook);
		const expectedOutput0 = '(1 row affected)';
		let cellOutputs = notebook.document.cells[0].contents.outputs;
		console.log('Got cell outputs ---');
		if (cellOutputs) {
			cellOutputs.forEach(o => console.log(o));
		}
		assert(cellOutputs.length === 3, `Expected length: 3, Actual: ${cellOutputs.length}`);
		let actualOutput0 = (<azdata.nb.IDisplayData>cellOutputs[0]).data['text/html'];
		console.log('Got first output');
		assert(actualOutput0 === expectedOutput0, `Expected row count: ${expectedOutput0}, Actual: ${actualOutput0}`);
		let actualOutput2 = (<azdata.nb.IExecuteResult>cellOutputs[2]).data['application/vnd.dataresource+json'].data[0];
		assert(actualOutput2[0] === '1', `Expected result: 1, Actual: '${actualOutput2[0]}'`);
	});

	test('Sql NB multiple cells test @UNSTABLE@', async function () {
		let notebook = await openNotebook(sqlNotebookMultipleCellsContent, sqlKernelMetadata, this.test.title + this.invocationCount++);
		await runCells(notebook);
		const expectedOutput0 = '(1 row affected)';
		for (let i = 0; i < 3; i++) {
			let cellOutputs = notebook.document.cells[i].contents.outputs;
			console.log(`Got cell outputs --- ${i}`);

			if (cellOutputs) {
				cellOutputs.forEach(console.log);
			}

			assert(cellOutputs.length === 3, `Expected length: 3, Actual: '${cellOutputs.length}'`);
			let actualOutput0 = (<azdata.nb.IDisplayData>cellOutputs[0]).data['text/html'];
			console.log('Got first output');
			assert(actualOutput0 === expectedOutput0, `Expected row count: '${expectedOutput0}', Actual: '${actualOutput0}'`);

			const executeResult = cellOutputs[2] as azdata.nb.IExecuteResult;
			assert(Object.keys(executeResult).includes('data'), `Execute result did not include data key. It included ${Object.keys(executeResult)}`);
			const applicationDataResource = executeResult.data['application/vnd.dataresource+json'];

			assert(Object.keys(applicationDataResource).includes('data'), `Execute result did not include data key. It included ${Object.keys(applicationDataResource)}`);
			const actualOutput2 = applicationDataResource.data[0];

			assert(actualOutput2[0] === i.toString(), `Expected result: ${i.toString()}, Actual: '${actualOutput2[0]}'`);
			console.log('Sql multiple cells NB done');
		}
	});

	test('Sql NB run cells above and below test', async function () {
		let notebook = await openNotebook(sqlNotebookMultipleCellsContent, sqlKernelMetadata, this.test.title + this.invocationCount++);
		// When running all cells above a cell, ensure that only cells preceding current cell have output
		await runCells(notebook, true, undefined, notebook.document.cells[1]);
		assert(notebook.document.cells[0].contents.outputs.length === 3, `Expected length: '3', Actual: '${notebook.document.cells[0].contents.outputs.length}'`);
		assert(notebook.document.cells[1].contents.outputs.length === 0, `Expected length: '0', Actual: '${notebook.document.cells[1].contents.outputs.length}'`);
		assert(notebook.document.cells[2].contents.outputs.length === 0, `Expected length: '0', Actual: '${notebook.document.cells[2].contents.outputs.length}'`);

		await notebook.clearAllOutputs();

		// When running all cells below a cell, ensure that current cell and cells after have output
		await runCells(notebook, undefined, true, notebook.document.cells[1]);
		assert(notebook.document.cells[0].contents.outputs.length === 0, `Expected length: '0', Actual: '${notebook.document.cells[0].contents.outputs.length}'`);
		assert(notebook.document.cells[1].contents.outputs.length === 3, `Expected length: '3', Actual: '${notebook.document.cells[1].contents.outputs.length}'`);
		assert(notebook.document.cells[2].contents.outputs.length === 3, `Expected length: '3', Actual: '${notebook.document.cells[2].contents.outputs.length}'`);
	});

	test('Clear cell output - SQL notebook', async function () {
		let notebook = await openNotebook(sqlNotebookContent, sqlKernelMetadata, this.test.title + this.invocationCount++);
		await runCell(notebook);
		await verifyClearOutputs(notebook);
	});

	test('Clear all outputs - SQL notebook ', async function () {
		let notebook = await openNotebook(sqlNotebookContent, sqlKernelMetadata, this.test.title + this.invocationCount++);
		await runCell(notebook);
		await verifyClearAllOutputs(notebook);
	});

	test('sql language test', async function () {
		let language = 'sql';
		await cellLanguageTest(notebookContentForCellLanguageTest, this.test.title + this.invocationCount++, language, {
			'kernelspec': {
				'name': language,
				'display_name': language.toUpperCase()
			},
			'language_info': {
				'name': language,
				'version': '',
				'mimetype': ''
			}
		});
	});

	// TODO: Need to make this test more reliable.
	test('should not be dirty after saving notebook test @UNSTABLE@', async function () {
		// Given a notebook that's been edited (in this case, open notebook runs the 1st cell and adds an output)
		let notebook = await openNotebook(sqlNotebookContent, sqlKernelMetadata, this.test.title);
		await runCell(notebook);
		assert(notebook.document.providerId === 'sql', `Expected providerId to be sql, Actual: ${notebook.document.providerId}`);
		assert(notebook.document.kernelSpec.name === 'SQL', `Expected first kernel name: SQL, Actual: ${notebook.document.kernelSpec.name}`);
		assert(notebook.document.isDirty === true, 'Notebook should be dirty after edit');

		// When I save it, it should no longer be dirty
		let saved = await notebook.document.save();
		assert(saved === true, 'Expect initial save to succeed');
		// Note: need to sleep after save as the change events happen after save
		// We need to give back the thread or the event won't have been drained.
		// This is consistent with VSCode APIs, so keeping as-is
		await sleep(100);
		assert(notebook.document.isDirty === false, 'Notebook should not be dirty after initial save');

		// And when I edit again, should become dirty
		let edited = await notebook.edit(builder => {
			builder.insertCell({
				cell_type: CellTypes.Code,
				source: ''
			});
		});
		assert(edited === true, 'Expect edit to succeed');
		await sleep(100);
		assert(notebook.document.isDirty === true, 'Notebook should be dirty after edit');

		// Finally on 2nd save it should no longer be dirty
		saved = await notebook.document.save();
		await sleep(100);
		assert(saved === true, 'Expect save after edit to succeed');
		assert(notebook.document.isDirty === false, 'Notebook should not be dirty after 2nd save');
	});

	if (process.env['RUN_PYTHON3_TEST'] === '1') {
		test('Python3 notebook test', async function () {
			let notebook = await openNotebook(pySparkNotebookContent, pythonKernelMetadata, this.test.title + this.invocationCount++);
			await runCell(notebook);
			let cellOutputs = notebook.document.cells[0].contents.outputs;
			console.log('Got cell outputs ---');
			if (cellOutputs) {
				cellOutputs.forEach(o => console.log(JSON.stringify(o, undefined, '\t')));
			}
			let result = (<azdata.nb.IExecuteResult>cellOutputs[0]).data['text/plain'];
			assert(result === '2', `Expected python result: 2, Actual: ${result}`);
		});

		test('Clear all outputs - Python3 notebook ', async function () {
			let notebook = await openNotebook(pySparkNotebookContent, pythonKernelMetadata, this.test.title + this.invocationCount++);
			await runCell(notebook);
			await verifyClearAllOutputs(notebook);
		});

		test('python language test', async function () {
			let language = 'python';
			await cellLanguageTest(notebookContentForCellLanguageTest, this.test.title + this.invocationCount++, language, {
				'kernelspec': {
					'name': 'python3',
					'display_name': 'Python 3'
				},
				'language_info': {
					'name': language,
					'version': '',
					'mimetype': ''
				}
			});
		});

		test('Change kernel different provider SQL to Python to SQL', async function () {
			let notebook = await openNotebook(sqlNotebookContent, sqlKernelMetadata, this.test.title);
			await runCell(notebook);
			assert(notebook.document.providerId === 'sql', `Expected providerId to be sql, Actual: ${notebook.document.providerId}`);
			assert(notebook.document.kernelSpec.name === 'SQL', `Expected first kernel name: SQL, Actual: ${notebook.document.kernelSpec.name}`);

			let kernelChanged = await notebook.changeKernel(pythonKernelSpec);
			assert(notebook.document.providerId === 'jupyter', `Expected providerId to be jupyter, Actual: ${notebook.document.providerId}`);
			assert(kernelChanged && notebook.document.kernelSpec.name === 'python3', `Expected second kernel name: python3, Actual: ${notebook.document.kernelSpec.name}`);

			kernelChanged = await notebook.changeKernel(sqlKernelSpec);
			assert(notebook.document.providerId === 'sql', `Expected providerId to be sql, Actual: ${notebook.document.providerId}`);
			assert(kernelChanged && notebook.document.kernelSpec.name === 'SQL', `Expected third kernel name: SQL, Actual: ${notebook.document.kernelSpec.name}`);
		});

		test('Change kernel different provider Python to SQL to Python', async function () {
			let notebook = await openNotebook(pySparkNotebookContent, pythonKernelMetadata, this.test.title);
			await runCell(notebook);
			assert(notebook.document.providerId === 'jupyter', `Expected providerId to be jupyter, Actual: ${notebook.document.providerId}`);
			assert(notebook.document.kernelSpec.name === 'python3', `Expected first kernel name: python3, Actual: ${notebook.document.kernelSpec.name}`);

			let kernelChanged = await notebook.changeKernel(sqlKernelSpec);
			assert(notebook.document.providerId === 'sql', `Expected providerId to be sql, Actual: ${notebook.document.providerId}`);
			assert(kernelChanged && notebook.document.kernelSpec.name === 'SQL', `Expected second kernel name: SQL, Actual: ${notebook.document.kernelSpec.name}`);

			kernelChanged = await notebook.changeKernel(pythonKernelSpec);
			assert(notebook.document.providerId === 'jupyter', `Expected providerId to be jupyter, Actual: ${notebook.document.providerId}`);
			assert(kernelChanged && notebook.document.kernelSpec.name === 'python3', `Expected third kernel name: python3, Actual: ${notebook.document.kernelSpec.name}`);
		});

		test('Change kernel same provider Python to PySpark to Python', async function () {
			let notebook = await openNotebook(pySparkNotebookContent, pythonKernelMetadata, this.test.title);
			await runCell(notebook);
			assert(notebook.document.providerId === 'jupyter', `Expected providerId to be jupyter, Actual: ${notebook.document.providerId}`);
			assert(notebook.document.kernelSpec.name === 'python3', `Expected first kernel name: python3, Actual: ${notebook.document.kernelSpec.name}`);

			let kernelChanged = await notebook.changeKernel(pySparkKernelSpec);
			assert(notebook.document.providerId === 'jupyter', `Expected providerId to be jupyter, Actual: ${notebook.document.providerId}`);
			assert(kernelChanged && notebook.document.kernelSpec.name === 'pysparkkernel', `Expected second kernel name: pysparkkernel, Actual: ${notebook.document.kernelSpec.name}`);

			kernelChanged = await notebook.changeKernel(pythonKernelSpec);
			assert(notebook.document.providerId === 'jupyter', `Expected providerId to be jupyter, Actual: ${notebook.document.providerId}`);
			assert(kernelChanged && notebook.document.kernelSpec.name === 'python3', `Expected third kernel name: python3, Actual: ${notebook.document.kernelSpec.name}`);
		});
	}

	if (process.env['RUN_PYSPARK_TEST'] === '1') {
		test('PySpark notebook test', async function () {
			let notebook = await openNotebook(pySparkNotebookContent, pySparkKernelMetadata, this.test.title + this.invocationCount++);
			await runCell(notebook);
			let cellOutputs = notebook.document.cells[0].contents.outputs;
			let sparkResult = (<azdata.nb.IStreamResult>cellOutputs[3]).text;
			assert(sparkResult === '2', `Expected spark result: 2, Actual: ${sparkResult}`);
		});
	}

	/* After https://github.com/microsoft/azuredatastudio/issues/5598 is fixed, enable these tests.
	test('scala language test', async function () {
		let language = 'scala';
		await cellLanguageTest(notebookContentForCellLanguageTest, this.test.title + this.invocationCount++, language, {
			'kernelspec': {
				'name': '',
				'display_name': ''
			},
			'language_info': {
				name: language,
				version: '',
				mimetype: ''
			}
		});
	});

	test('empty language test', async function () {
		let language = '';
		await cellLanguageTest(notebookContentForCellLanguageTest, this.test.title + this.invocationCount++, language, {
			'kernelspec': {
				'name': language,
				'display_name': ''
			},
			'language_info': {
				name: language,
				version: '',
				mimetype: 'x-scala'
			}
		});
	});

	test('cplusplus language test', async function () {
		let language = 'cplusplus';
		await cellLanguageTest(notebookContentForCellLanguageTest, this.test.title + this.invocationCount++, language, {
			'kernelspec': {
				'name': '',
				'display_name': ''
			},
			'language_info': {
				name: language,
				version: '',
				mimetype: ''
			}
		});
	});
	*/
});

async function openNotebook(content: azdata.nb.INotebookContents, kernelMetadata: any, testName: string, connectToDifferentServer?: boolean): Promise<azdata.nb.NotebookEditor> {
	let notebookConfig = vscode.workspace.getConfiguration('notebook');
	notebookConfig.update('pythonPath', getConfigValue(EnvironmentVariable_PYTHON_PATH), 1);
	let server: TestServerProfile;
	if (!connectToDifferentServer) {
		server = await getStandaloneServer();
		assert(server && server.serverName, 'No server could be found in openNotebook');
		await connectToServer(server, 6000);
	}
	let notebookJson = Object.assign({}, content, { metadata: kernelMetadata });
	let uri = writeNotebookToFile(notebookJson, testName);
	console.log('Notebook uri ' + uri);
	let nbShowOptions: azdata.nb.NotebookShowOptions;
	if (server) {
		nbShowOptions = { connectionProfile: testServerProfileToIConnectionProfile(server) };
	}
	let notebook = await azdata.nb.showNotebookDocument(uri, nbShowOptions);
	return notebook;
}

async function runCells(notebook: azdata.nb.NotebookEditor, runCellsAbove?: boolean, runCellsBelow?: boolean, currentCell?: azdata.nb.NotebookCell) {
	assert(notebook !== undefined && notebook !== null, 'Expected notebook object is defined');
	let ran;
	if (runCellsAbove) {
		ran = await notebook.runAllCells(undefined, currentCell);
	} else if (runCellsBelow) {
		ran = await notebook.runAllCells(currentCell, undefined);
	} else {
		ran = await notebook.runAllCells();
	}
	assert(ran, 'Notebook runCell should succeed');
}

async function runCell(notebook: azdata.nb.NotebookEditor, cell?: azdata.nb.NotebookCell) {
	if (isNullOrUndefined(cell)) {
		cell = notebook.document.cells[0];
	}
	let ran = await notebook.runCell(cell);
	assert(ran, 'Notebook runCell should succeed');
}

async function verifyClearAllOutputs(notebook: azdata.nb.NotebookEditor): Promise<void> {
	let cellWithOutputs = notebook.document.cells.find(cell => cell.contents && cell.contents.outputs && cell.contents.outputs.length > 0);
	assert(cellWithOutputs !== undefined, 'Could not find notebook cells with outputs');
	console.log('Before clearing cell outputs');
	let clearedOutputs = await notebook.clearAllOutputs();
	let cells = notebook.document.cells;
	cells.forEach(cell => {
		assert(cell.contents && cell.contents.outputs && cell.contents.outputs.length === 0, `Expected Output: 0, Actual: '${cell.contents.outputs.length}'`);
	});
	assert(clearedOutputs, 'Outputs of all the code cells from Python notebook should be cleared');
	console.log('After clearing cell outputs');
}

async function verifyClearOutputs(notebook: azdata.nb.NotebookEditor): Promise<void> {
	let cellWithOutputs = notebook.document.cells[0].contents && notebook.document.cells[0].contents.outputs && notebook.document.cells[0].contents.outputs.length > 0;
	assert(cellWithOutputs === true, 'Expected first cell to have outputs');
	let clearedOutputs = await notebook.clearOutput(notebook.document.cells[0]);
	let firstCell = notebook.document.cells[0];
	assert(firstCell.contents && firstCell.contents.outputs && firstCell.contents.outputs.length === 0, `Expected Output: 0, Actual: '${firstCell.contents.outputs.length}'`);
	assert(clearedOutputs, 'Outputs of requested code cell should be cleared');
}

async function cellLanguageTest(content: azdata.nb.INotebookContents, testName: string, languageConfigured: string, metadataInfo: any) {
	let notebookJson = Object.assign({}, content, { metadata: metadataInfo });
	let uri = writeNotebookToFile(notebookJson, testName);
	let notebook = await azdata.nb.showNotebookDocument(uri);
	await notebook.document.save();
	let languageInNotebook = notebook.document.cells[0].contents.metadata.language;
	assert(languageInNotebook === languageConfigured, `Expected cell language is: ${languageConfigured}, Actual: ${languageInNotebook}`);
}
179  extensions/integration-tests/src/test/notebook.util.ts  Normal file
@@ -0,0 +1,179 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import 'mocha';
import * as azdata from 'azdata';
import * as vscode from 'vscode';
import * as fs from 'fs';
import * as os from 'os';
import * as path from 'path';

export class CellTypes {
	public static readonly Code = 'code';
	public static readonly Markdown = 'markdown';
	public static readonly Raw = 'raw';
}

export const pySparkNotebookContent: azdata.nb.INotebookContents = {
	cells: [{
		cell_type: CellTypes.Code,
		source: '1+1',
		metadata: { language: 'python' },
		execution_count: 1
	}],
	metadata: {
		'kernelspec': {
			'name': 'pysparkkernel',
			'display_name': 'PySpark'
		}
	},
	nbformat: 4,
	nbformat_minor: 2
};

export const notebookContentForCellLanguageTest: azdata.nb.INotebookContents = {
	cells: [{
		cell_type: CellTypes.Code,
		source: '1+1',
		metadata: {},
		execution_count: 1
	}],
	metadata: {
		'kernelspec': {
			'name': ''
		},
	},
	nbformat: 4,
	nbformat_minor: 2
};

export const pythonNotebookMultipleCellsContent: azdata.nb.INotebookContents = {
	cells: [{
		cell_type: CellTypes.Code,
		source: '1+1',
		metadata: { language: 'python' },
		execution_count: 1
	}, {
		cell_type: CellTypes.Code,
		source: '1+2',
		metadata: { language: 'python' },
		execution_count: 1
	}, {
		cell_type: CellTypes.Code,
		source: '1+3',
		metadata: { language: 'python' },
		execution_count: 1
	}, {
		cell_type: CellTypes.Code,
		source: '1+4',
		metadata: { language: 'python' },
		execution_count: 1
	}],
	metadata: {
		'kernelspec': {
			'name': 'python3',
			'display_name': 'Python 3'
		}
	},
	nbformat: 4,
	nbformat_minor: 2
};

export const sqlNotebookContent: azdata.nb.INotebookContents = {
	cells: [{
		cell_type: CellTypes.Code,
		source: 'select 1',
		metadata: { language: 'sql' },
		execution_count: 1
	}],
	metadata: {
		'kernelspec': {
			'name': 'SQL',
			'display_name': 'SQL'
		}
	},
	nbformat: 4,
	nbformat_minor: 2
};

export const sqlNotebookMultipleCellsContent: azdata.nb.INotebookContents = {
	cells: [{
		cell_type: CellTypes.Code,
		source: 'select 0',
		metadata: { language: 'sql' },
		execution_count: 1
	}, {
		cell_type: CellTypes.Code,
		source: `WAITFOR DELAY '00:00:02'\nselect 1`,
		metadata: { language: 'sql' },
		execution_count: 1
	}, {
		cell_type: CellTypes.Code,
		source: 'select 2',
		metadata: { language: 'sql' },
		execution_count: 1
	}],
	metadata: {
		'kernelspec': {
			'name': 'SQL',
			'display_name': 'SQL'
		}
	},
	nbformat: 4,
	nbformat_minor: 2
};

export const pySparkKernelMetadata = {
	'kernelspec': {
		'name': 'pysparkkernel',
		'display_name': 'PySpark'
	}
};

export const pySparkKernelSpec = {
	name: 'pyspark',
	display_name: 'PySpark'
};

export const sqlKernelMetadata = {
	'kernelspec': {
		'name': 'SQL',
		'display_name': 'SQL'
	}
};

export const sqlKernelSpec: azdata.nb.IKernelSpec = {
	name: 'SQL',
	display_name: 'SQL'
};

export const pythonKernelMetadata = {
	'kernelspec': {
		'name': 'python3',
		'display_name': 'Python 3'
	}
};

export const pythonKernelSpec: azdata.nb.IKernelSpec = {
	name: 'python3',
	display_name: 'Python 3'
};

export function writeNotebookToFile(pythonNotebook: azdata.nb.INotebookContents, testName: string): vscode.Uri {
	let fileName = getFileName(testName);
	let notebookContentString = JSON.stringify(pythonNotebook);
	// eslint-disable-next-line no-sync
	fs.writeFileSync(fileName, notebookContentString);
	console.log(`Local file is created: '${fileName}'`);
	let uri = vscode.Uri.file(fileName);
	return uri;
}

export function getFileName(testName: string): string {
	if (testName) {
		return path.join(os.tmpdir(), testName + '.ipynb');
	}
	return undefined;
}
135  extensions/integration-tests/src/test/objectExplorer.test.ts  Normal file
@@ -0,0 +1,135 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import 'mocha';
import * as azdata from 'azdata';
import { getBdcServer, TestServerProfile, getAzureServer, getStandaloneServer } from './testConfig';
import { connectToServer, createDB, deleteDB, DefaultConnectTimeoutInMs, asyncTimeout } from './utils';
import * as assert from 'assert';

suite('Object Explorer integration suite', () => {
	test.skip('BDC instance node label test', async function () {
		const expectedNodeLabel = ['Databases', 'Security', 'Server Objects'];
		const server = await getBdcServer();
		await verifyOeNode(server, DefaultConnectTimeoutInMs, expectedNodeLabel);
	});
	test('Standalone instance node label test', async function () {
		if (process.platform === 'win32') {
			const expectedNodeLabel = ['Databases', 'Security', 'Server Objects'];
			const server = await getStandaloneServer();
			await verifyOeNode(server, DefaultConnectTimeoutInMs, expectedNodeLabel);
		}
	});
	test('Azure SQL DB instance node label test @UNSTABLE@', async function () {
		const expectedNodeLabel = ['Databases', 'Security'];
		const server = await getAzureServer();
		await verifyOeNode(server, DefaultConnectTimeoutInMs, expectedNodeLabel);
	});
	test.skip('BDC instance context menu test', async function () {
		const server = await getBdcServer();
		let expectedActions: string[];
		// Properties comes from the admin-tool-ext-win extension which is for Windows only, so the item won't show up on non-Win32 platforms
		if (process.platform === 'win32') {
			expectedActions = ['Manage', 'New Query', 'New Notebook', 'Disconnect', 'Delete Connection', 'Refresh', 'Data-tier Application wizard', 'Launch Profiler', 'Properties'];
		}
		else {
			expectedActions = ['Manage', 'New Query', 'New Notebook', 'Disconnect', 'Delete Connection', 'Refresh', 'Data-tier Application wizard', 'Launch Profiler'];
		}
		return await verifyContextMenu(server, expectedActions);
	});
	test('Azure SQL DB context menu test @UNSTABLE@', async function () {
		const server = await getAzureServer();
		const expectedActions = ['Manage', 'New Query', 'New Notebook', 'Disconnect', 'Delete Connection', 'Refresh', 'Data-tier Application wizard', 'Launch Profiler'];
		await verifyContextMenu(server, expectedActions);
	});
	test('Standalone database context menu test', async function () {
		const server = await getStandaloneServer();
		let expectedActions: string[] = [];
		// Generate Scripts and Properties come from the admin-tool-ext-win extension which is for Windows only, so the item won't show up on non-Win32 platforms
		if (process.platform === 'win32') {
			expectedActions = ['Manage', 'New Query', 'New Notebook', 'Refresh', 'Backup', 'Restore', 'Data-tier Application wizard', 'Import New Database Project', 'Schema Compare', 'Import wizard', 'Generate Scripts...', 'Properties'];
		}
		else {
			expectedActions = ['Manage', 'New Query', 'New Notebook', 'Refresh', 'Backup', 'Restore', 'Data-tier Application wizard', 'Import New Database Project', 'Schema Compare', 'Import wizard'];
		}
		await verifyDBContextMenu(server, DefaultConnectTimeoutInMs, expectedActions);
	});
});


async function verifyContextMenu(server: TestServerProfile, expectedActions: string[]): Promise<void> {
	await connectToServer(server, DefaultConnectTimeoutInMs);
	const nodes = <azdata.objectexplorer.ObjectExplorerNode[]>await azdata.objectexplorer.getActiveConnectionNodes();
	assert(nodes.length > 0, `Expecting at least one active connection, actual: ${nodes.length}`);

	const index = nodes.findIndex(node => node.nodePath.includes(server.serverName));
	assert(index !== -1, `Failed to find server: "${server.serverName}" in OE tree`);

	const node = nodes[index];
	const actions = await azdata.objectexplorer.getNodeActions(node.connectionId, node.nodePath);

	const expectedString = expectedActions.join(',');
	const actualString = actions.join(',');
	return assert(expectedActions.length === actions.length && expectedString === actualString, `Expected actions: "${expectedString}", Actual actions: "${actualString}"`);
}

async function verifyOeNode(server: TestServerProfile, timeout: number, expectedNodeLabel: string[]): Promise<void> {
	await connectToServer(server, timeout);
	const nodes = <azdata.objectexplorer.ObjectExplorerNode[]>await azdata.objectexplorer.getActiveConnectionNodes();
	assert(nodes.length > 0, `Expecting at least one active connection, actual: ${nodes.length}`);

	const index = nodes.findIndex(node => node.nodePath.includes(server.serverName));
	assert(index !== -1, `Failed to find server: "${server.serverName}" in OE tree`);
	// TODO: #7146 HDFS isn't always filled in by the call to getChildren since it's loaded asynchronously. To avoid this test being flaky just removing
	// the node for now if it exists until a proper fix can be made.

	let children: azdata.objectexplorer.ObjectExplorerNode[];
	try {
		children = await asyncTimeout(nodes[index].getChildren(), timeout);
	} catch (e) {
		return assert.fail('getChildren() timed out...', e);
	}

	const nonHDFSChildren = children.filter(c => c.label !== 'HDFS');
	const actualLabelsString = nonHDFSChildren.map(c => c.label).join(',');
	const expectedLabelString = expectedNodeLabel.join(',');
	return assert(expectedNodeLabel.length === nonHDFSChildren.length && expectedLabelString === actualLabelsString, `Expected node label: "${expectedLabelString}", Actual: "${actualLabelsString}"`);
}

async function verifyDBContextMenu(server: TestServerProfile, timeoutinMS: number, expectedActions: string[]): Promise<void> {

	await connectToServer(server, timeoutinMS);

	const nodes = <azdata.objectexplorer.ObjectExplorerNode[]>await azdata.objectexplorer.getActiveConnectionNodes();
	assert(nodes.length > 0, `Expecting at least one active connection, actual: ${nodes.length}`);

	const index = nodes.findIndex(node => node.nodePath.includes(server.serverName));
	assert(index !== -1, `Failed to find server: "${server.serverName}" in OE tree`);

	const ownerUri = await azdata.connection.getUriForConnection(nodes[index].connectionId);
	const dbName: string = 'ads_test_VerifyDBContextMenu_' + new Date().getTime().toString();
	try {
		await createDB(dbName, ownerUri);

		const serverNode = nodes[index];
		const children = await serverNode.getChildren();

		assert(children[0].label === 'Databases', `Expected Databases node. Actual ${children[0].label}`);
		const databasesFolder = children[0];

		const databases = await databasesFolder.getChildren();
		assert(databases.length > 2, `No database present, cannot test further`); // System Databases folder and at least one database

		const actions = await azdata.objectexplorer.getNodeActions(databases[1].connectionId, databases[1].nodePath);

		const expectedString = expectedActions.join(',');
		const actualString = actions.join(',');
		return assert(expectedActions.length === actions.length && expectedString === actualString, `Expected actions: "${expectedString}", Actual actions: "${actualString}"`);
	}
	finally {
		await deleteDB(server, dbName, ownerUri);
	}
}
341  extensions/integration-tests/src/test/schemaCompare.test.ts  Normal file
@@ -0,0 +1,341 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import 'mocha';
|
||||
import * as azdata from 'azdata';
|
||||
import * as vscode from 'vscode';
|
||||
import * as utils from './utils';
|
||||
import * as mssql from '../../../mssql';
|
||||
import * as os from 'os';
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
import * as assert from 'assert';
|
||||
import { getStandaloneServer } from './testConfig';
|
||||
import { promisify } from 'util';
|
||||
|
||||
let schemaCompareService: mssql.ISchemaCompareService;
|
||||
let dacfxService: mssql.IDacFxService;
|
||||
const dacpac1: string = path.join(__dirname, '..', '..', 'testData', 'Database1.dacpac');
|
||||
const dacpac2: string = path.join(__dirname, '..', '..', 'testData', 'Database2.dacpac');
|
||||
const includeExcludeSourceDacpac: string = path.join(__dirname, '..', '..', 'testData', 'SchemaCompareIncludeExcludeSource.dacpac');
|
||||
const includeExcludeTargetDacpac: string = path.join(__dirname, '..', '..', 'testData', 'SchemaCompareIncludeExcludeTarget.dacpac');
|
||||
const SERVER_CONNECTION_TIMEOUT: number = 3000;
|
||||
const retryCount = 24; // 2 minutes
|
||||
const folderPath = path.join(os.tmpdir(), 'SchemaCompareTest');
|
||||
|
||||
suite('Schema compare integration test suite', () => {
|
||||
suiteSetup(async function () {
|
||||
let attempts: number = 20;
|
||||
while (attempts > 0) {
|
||||
schemaCompareService = ((await vscode.extensions.getExtension(mssql.extension.name).activate() as mssql.IExtension)).schemaCompare;
|
||||
if (schemaCompareService) {
|
||||
break;
|
||||
}
|
||||
attempts--;
|
||||
await utils.sleep(1000); // To ensure the providers are registered.
|
||||
}
|
||||
dacfxService = ((await vscode.extensions.getExtension(mssql.extension.name).activate() as mssql.IExtension)).dacFx;
|
||||
console.log(`Start schema compare tests`);
|
||||
});
|
||||
	test('Schema compare dacpac to dacpac comparison and scmp', async function () {
		assert(schemaCompareService, 'Schema Compare Service Provider is not available');
		const now = new Date();
		const operationId = 'testOperationId_' + now.getTime().toString();

		let source: mssql.SchemaCompareEndpointInfo = {
			endpointType: mssql.SchemaCompareEndpointType.Dacpac,
			packageFilePath: dacpac1,
			serverDisplayName: '',
			serverName: '',
			databaseName: '',
			ownerUri: '',
			connectionDetails: undefined
		};
		let target: mssql.SchemaCompareEndpointInfo = {
			endpointType: mssql.SchemaCompareEndpointType.Dacpac,
			packageFilePath: dacpac2,
			serverDisplayName: '',
			serverName: '',
			databaseName: '',
			ownerUri: '',
			connectionDetails: undefined
		};

		let schemaCompareResult = await schemaCompareService.schemaCompare(operationId, source, target, azdata.TaskExecutionMode.execute, null);
		assertSchemaCompareResult(schemaCompareResult, operationId, 4);

		// save to scmp
		const filepath = path.join(folderPath, `ads_schemaCompare_${now.getTime().toString()}.scmp`);
		if (!(await promisify(fs.exists)(folderPath))) {
			await fs.promises.mkdir(folderPath);
		}
		const saveScmpResult = await schemaCompareService.schemaCompareSaveScmp(source, target, azdata.TaskExecutionMode.execute, null, filepath, [], []);
		assert(saveScmpResult.success && !saveScmpResult.errorMessage, `Save scmp should succeed. Expected: there should be no error. Actual Error message: "${saveScmpResult.errorMessage}"`);
		assert(await promisify(fs.exists)(filepath), `File ${filepath} is expected to be present`);

		// open scmp
		const openScmpResult = await schemaCompareService.schemaCompareOpenScmp(filepath);
		assert(openScmpResult.success && !openScmpResult.errorMessage, `Open scmp should succeed. Expected: there should be no error. Actual Error message: "${openScmpResult.errorMessage}"`);
		assert(openScmpResult.sourceEndpointInfo.packageFilePath === source.packageFilePath, `Expected: source packageFilePath to be ${source.packageFilePath}, Actual: ${openScmpResult.sourceEndpointInfo.packageFilePath}`);
		assert(openScmpResult.targetEndpointInfo.packageFilePath === target.packageFilePath, `Expected: target packageFilePath to be ${target.packageFilePath}, Actual: ${openScmpResult.targetEndpointInfo.packageFilePath}`);
	});
	test('Schema compare database to database comparison, script generation, and scmp', async function () {
		let server = await getStandaloneServer();
		await utils.connectToServer(server, SERVER_CONNECTION_TIMEOUT);

		let nodes = <azdata.objectexplorer.ObjectExplorerNode[]>await azdata.objectexplorer.getActiveConnectionNodes();
		assert(nodes.length > 0, `Expecting at least one active connection, actual: ${nodes.length}`);

		let index = nodes.findIndex(node => node.nodePath.includes(server.serverName));
		assert(index !== -1, `Failed to find server: "${server.serverName}" in OE tree`);

		const ownerUri = await azdata.connection.getUriForConnection(nodes[index].connectionId);
		const now = new Date();

		const operationId = 'testOperationId_' + now.getTime().toString();
		const sourceDB: string = 'ads_schemaCompare_sourceDB_' + now.getTime().toString();
		const targetDB: string = 'ads_schemaCompare_targetDB_' + now.getTime().toString();

		try {
			assert(dacfxService, 'DacFx Service Provider is not available');
			let result1 = await dacfxService.deployDacpac(dacpac1, sourceDB, true, ownerUri, azdata.TaskExecutionMode.execute);
			let result2 = await dacfxService.deployDacpac(dacpac2, targetDB, true, ownerUri, azdata.TaskExecutionMode.execute);

			assert(result1.success === true, 'Deploy source database should succeed');
			assert(result2.success === true, 'Deploy target database should succeed');
			await utils.assertDatabaseCreationResult(sourceDB, ownerUri, retryCount);
			await utils.assertDatabaseCreationResult(targetDB, ownerUri, retryCount);

			assert(schemaCompareService, 'Schema Compare Service Provider is not available');

			let source: mssql.SchemaCompareEndpointInfo = {
				endpointType: mssql.SchemaCompareEndpointType.Database,
				packageFilePath: '',
				serverDisplayName: '',
				serverName: server.serverName,
				databaseName: sourceDB,
				ownerUri: ownerUri,
				connectionDetails: undefined
			};
			let target: mssql.SchemaCompareEndpointInfo = {
				endpointType: mssql.SchemaCompareEndpointType.Database,
				packageFilePath: '',
				serverDisplayName: '',
				serverName: server.serverName,
				databaseName: targetDB,
				ownerUri: ownerUri,
				connectionDetails: undefined
			};

			let schemaCompareResult = await schemaCompareService.schemaCompare(operationId, source, target, azdata.TaskExecutionMode.execute, null);
			assertSchemaCompareResult(schemaCompareResult, operationId, 4);

			let status = await schemaCompareService.schemaCompareGenerateScript(schemaCompareResult.operationId, server.serverName, targetDB, azdata.TaskExecutionMode.script);

			// TODO: add wait for tasks to complete
			// script generation might take too long and the 'success' status does not mean that the script is created.
			await assertScriptGenerationResult(status, target.serverName, target.databaseName);

			// save to scmp
			const filepath = path.join(folderPath, `ads_schemaCompare_${now.getTime().toString()}.scmp`);
			if (!(await promisify(fs.exists)(folderPath))) {
				await fs.promises.mkdir(folderPath);
			}
			const saveScmpResult = await schemaCompareService.schemaCompareSaveScmp(source, target, azdata.TaskExecutionMode.execute, null, filepath, [], []);
			assert(saveScmpResult.success && !saveScmpResult.errorMessage, `Save scmp should succeed. Expected: there should be no error. Actual Error message: "${saveScmpResult.errorMessage}"`);
			assert(await promisify(fs.exists)(filepath), `File ${filepath} is expected to be present`);

			// open scmp
			const openScmpResult = await schemaCompareService.schemaCompareOpenScmp(filepath);
			assert(openScmpResult.success && !openScmpResult.errorMessage, `Open scmp should succeed. Expected: there should be no error. Actual Error message: "${openScmpResult.errorMessage}"`);
			assert(openScmpResult.sourceEndpointInfo.databaseName === source.databaseName, `Expected: source database to be ${source.databaseName}, Actual: ${openScmpResult.sourceEndpointInfo.databaseName}`);
			assert(openScmpResult.targetEndpointInfo.databaseName === target.databaseName, `Expected: target database to be ${target.databaseName}, Actual: ${openScmpResult.targetEndpointInfo.databaseName}`);

			await fs.promises.unlink(filepath);
		}
		finally {
			await utils.deleteDB(server, sourceDB, ownerUri);
			await utils.deleteDB(server, targetDB, ownerUri);
		}
	});
	test('Schema compare dacpac to database comparison, script generation, and scmp', async function () {
		let server = await getStandaloneServer();
		await utils.connectToServer(server, SERVER_CONNECTION_TIMEOUT);

		let nodes = <azdata.objectexplorer.ObjectExplorerNode[]>await azdata.objectexplorer.getActiveConnectionNodes();
		assert(nodes.length > 0, `Expecting at least one active connection, actual: ${nodes.length}`);

		let index = nodes.findIndex(node => node.nodePath.includes(server.serverName));
		assert(index !== -1, `Failed to find server: "${server.serverName}" in OE tree`);

		const ownerUri = await azdata.connection.getUriForConnection(nodes[index].connectionId);
		const now = new Date();
		const operationId = 'testOperationId_' + now.getTime().toString();
		const targetDB: string = 'ads_schemaCompare_targetDB_' + now.getTime().toString();

		try {
			assert(dacfxService, 'DacFx Service Provider is not available');
			let result = await dacfxService.deployDacpac(dacpac2, targetDB, true, ownerUri, azdata.TaskExecutionMode.execute);

			assert(result.success === true, 'Deploy database 2 (target) should succeed');

			let source: mssql.SchemaCompareEndpointInfo = {
				endpointType: mssql.SchemaCompareEndpointType.Dacpac,
				packageFilePath: dacpac1,
				serverDisplayName: '',
				serverName: '',
				databaseName: '',
				ownerUri: ownerUri,
				connectionDetails: undefined
			};
			let target: mssql.SchemaCompareEndpointInfo = {
				endpointType: mssql.SchemaCompareEndpointType.Database,
				packageFilePath: '',
				serverDisplayName: '',
				serverName: server.serverName,
				databaseName: targetDB,
				ownerUri: ownerUri,
				connectionDetails: undefined
			};

			assert(schemaCompareService, 'Schema Compare Service Provider is not available');

			let schemaCompareResult = await schemaCompareService.schemaCompare(operationId, source, target, azdata.TaskExecutionMode.execute, null);
			assertSchemaCompareResult(schemaCompareResult, operationId, 4);

			let status = await schemaCompareService.schemaCompareGenerateScript(schemaCompareResult.operationId, server.serverName, targetDB, azdata.TaskExecutionMode.script);
			await assertScriptGenerationResult(status, target.serverName, target.databaseName);

			// save to scmp
			const filepath = path.join(folderPath, `ads_schemaCompare_${now.getTime().toString()}.scmp`);
			if (!(await promisify(fs.exists)(folderPath))) {
				await fs.promises.mkdir(folderPath);
			}
			const saveScmpResult = await schemaCompareService.schemaCompareSaveScmp(source, target, azdata.TaskExecutionMode.execute, null, filepath, [], []);
			assert(saveScmpResult.success && !saveScmpResult.errorMessage, `Save scmp should succeed. Expected: there should be no error. Actual Error message: "${saveScmpResult.errorMessage}"`);
			assert(await promisify(fs.exists)(filepath), `File ${filepath} is expected to be present`);

			// open scmp
			const openScmpResult = await schemaCompareService.schemaCompareOpenScmp(filepath);
			assert(openScmpResult.success && !openScmpResult.errorMessage, `Open scmp should succeed. Expected: there should be no error. Actual Error message: "${openScmpResult.errorMessage}"`);
			assert(openScmpResult.sourceEndpointInfo.packageFilePath === source.packageFilePath, `Expected: source packageFilePath to be ${source.packageFilePath}, Actual: ${openScmpResult.sourceEndpointInfo.packageFilePath}`);
			assert(openScmpResult.targetEndpointInfo.databaseName === target.databaseName, `Expected: target database to be ${target.databaseName}, Actual: ${openScmpResult.targetEndpointInfo.databaseName}`);
		}
		finally {
			await utils.deleteDB(server, targetDB, ownerUri);
		}
	});
	test('Schema compare dacpac to dacpac comparison with include exclude', async function () {
		assert(schemaCompareService, 'Schema Compare Service Provider is not available');
		const operationId = 'testOperationId_' + new Date().getTime().toString();

		let source: mssql.SchemaCompareEndpointInfo = {
			endpointType: mssql.SchemaCompareEndpointType.Dacpac,
			packageFilePath: includeExcludeSourceDacpac,
			serverDisplayName: '',
			serverName: '',
			databaseName: '',
			ownerUri: '',
			connectionDetails: undefined
		};
		let target: mssql.SchemaCompareEndpointInfo = {
			endpointType: mssql.SchemaCompareEndpointType.Dacpac,
			packageFilePath: includeExcludeTargetDacpac,
			serverDisplayName: '',
			serverName: '',
			databaseName: '',
			ownerUri: '',
			connectionDetails: undefined
		};

		const deploymentOptionsResult = await schemaCompareService.schemaCompareGetDefaultOptions();
		let deploymentOptions = deploymentOptionsResult.defaultDeploymentOptions;
		const schemaCompareResult = await schemaCompareService.schemaCompare(operationId, source, target, azdata.TaskExecutionMode.execute, deploymentOptions);
		assertSchemaCompareResult(schemaCompareResult, operationId, 5);

		// try to exclude table t2 and it should fail because a dependency is still included
		const t2Difference = schemaCompareResult.differences.find(e => e.sourceValue && e.sourceValue[1] === 't2' && e.name === 'SqlTable');
		assert(t2Difference !== undefined, 'The difference Table t2 should be found. Should not be undefined');
		const excludeResult = await schemaCompareService.schemaCompareIncludeExcludeNode(operationId, t2Difference, false, azdata.TaskExecutionMode.execute);
		assertIncludeExcludeResult(excludeResult, false, 1, 0);
		assert(excludeResult.blockingDependencies[0].sourceValue[1] === 'v1', `Blocking dependency should be view v1. Actual: ${excludeResult.blockingDependencies[0].sourceValue[1]}`);

		// Exclude the view v1 that t2 was a dependency for and it should succeed and t2 should also be excluded
		const v1Difference = schemaCompareResult.differences.find(e => e.sourceValue && e.sourceValue[1] === 'v1' && e.name === 'SqlView');
		assert(v1Difference !== undefined, 'The difference View v1 should be found. Should not be undefined');
		const excludeResult2 = await schemaCompareService.schemaCompareIncludeExcludeNode(operationId, v1Difference, false, azdata.TaskExecutionMode.execute);
		assertIncludeExcludeResult(excludeResult2, true, 0, 1);
		assert(excludeResult2.affectedDependencies[0].sourceValue[1] === 't2', `Table t2 should be the affected dependency. Actual: ${excludeResult2.affectedDependencies[0].sourceValue[1]}`);
		assert(excludeResult2.affectedDependencies[0].included === false, 'Table t2 should be excluded as a result of excluding v1. Actual: true');

		// including the view v1 should also include the table t2
		const includeResult = await schemaCompareService.schemaCompareIncludeExcludeNode(operationId, v1Difference, true, azdata.TaskExecutionMode.execute);
		assertIncludeExcludeResult(includeResult, true, 0, 1);
		assert(includeResult.affectedDependencies[0].sourceValue[1] === 't2', `Table t2 should be the affected dependency. Actual: ${includeResult.affectedDependencies[0].sourceValue[1]}`);
		assert(includeResult.affectedDependencies[0].included === true, 'Table t2 should be included as a result of including v1. Actual: false');

		// excluding views from the comparison should make it so t2 can be excluded
		deploymentOptions.excludeObjectTypes.push(mssql.SchemaObjectType.Views);
		await schemaCompareService.schemaCompare(operationId, source, target, azdata.TaskExecutionMode.execute, deploymentOptions);
		const excludeResult3 = await schemaCompareService.schemaCompareIncludeExcludeNode(operationId, t2Difference, false, azdata.TaskExecutionMode.execute);
		assertIncludeExcludeResult(excludeResult3, true, 0, 0);
	});
});

function assertIncludeExcludeResult(result: mssql.SchemaCompareIncludeExcludeResult, expectedSuccess: boolean, expectedBlockingDependenciesLength: number, expectedAffectedDependenciesLength: number): void {
	assert(result.success === expectedSuccess, `Operation success should have been ${expectedSuccess}. Actual: ${result.success}`);
	if (result.blockingDependencies) {
		assert(result.blockingDependencies.length === expectedBlockingDependenciesLength, `Expected ${expectedBlockingDependenciesLength} blocking dependencies. Actual: ${result.blockingDependencies.length}`);
	} else if (expectedBlockingDependenciesLength !== 0) {
		throw new Error(`Expected blockingDependencies length to be ${expectedBlockingDependenciesLength} but blockingDependencies was undefined`);
	}
	if (result.affectedDependencies) {
		assert(result.affectedDependencies.length === expectedAffectedDependenciesLength, `Expected ${expectedAffectedDependenciesLength} affected dependencies. Actual: ${result.affectedDependencies.length}`);
	} else if (expectedAffectedDependenciesLength !== 0) {
		throw new Error(`Expected affectedDependencies length to be ${expectedAffectedDependenciesLength} but affectedDependencies was undefined`);
	}
}

function assertSchemaCompareResult(schemaCompareResult: mssql.SchemaCompareResult, operationId: string, expectedDifferenceCount: number): void {
	assert(schemaCompareResult.areEqual === false, `Expected: the schemas to not be equal. Actual: Equal`);
	assert(schemaCompareResult.errorMessage === null, `Expected: there should be no error. Actual Error message: "${schemaCompareResult.errorMessage}"`);
	assert(schemaCompareResult.success === true, `Expected: success in schema compare, Actual: Failure`);
	assert(schemaCompareResult.differences.length === expectedDifferenceCount, `Expected: ${expectedDifferenceCount} differences. Actual differences: "${schemaCompareResult.differences.length}"`);
	assert(schemaCompareResult.operationId === operationId, `Operation Id expected to be same as passed. Expected: ${operationId}, Actual: ${schemaCompareResult.operationId}`);
}

async function assertScriptGenerationResult(resultstatus: azdata.ResultStatus, server: string, database: string): Promise<void> {
	// TODO: add more validation
	assert(resultstatus.success === true, `Expected: success true Actual: "${resultstatus.success}" Error Message: "${resultstatus.errorMessage}"`);
	const taskService = azdata.dataprotocol.getProvider<azdata.TaskServicesProvider>('MSSQL', azdata.DataProviderType.TaskServicesProvider);
	const tasks = await taskService.getAllTasks({ listActiveTasksOnly: true });
	let foundTask: azdata.TaskInfo;
	tasks.tasks.forEach(t => {
		if (t.serverName === server && t.databaseName === database && t.taskExecutionMode === azdata.TaskExecutionMode.script) {
			foundTask = t;
		}
	});
	assert(foundTask, 'Could not find Script task');
	assert(foundTask.isCancelable, 'The task should be cancellable');

	if (foundTask.status !== azdata.TaskStatus.Succeeded) {
		// wait for all tasks to complete before exiting the test and cleaning up the db, otherwise tasks fail
		let retry = 10;
		let allCompleted = false;
		while (retry > 0 && !allCompleted) {
			retry--;
			await utils.sleep(1000);
			allCompleted = true;
			let tasks = await taskService.getAllTasks({ listActiveTasksOnly: true });
			tasks.tasks.forEach(t => {
				if (t.status !== azdata.TaskStatus.Succeeded) {
					allCompleted = false;
				}
			});
		}
		// TODO: add proper validation for task completion to ensure all tasks successfully complete before exiting the test
		// (one possible polling helper is sketched after this function)
		assert(tasks !== null && tasks.tasks.length > 0, 'Tasks should still show in the list. This is to ensure that the tasks actually complete.');
	}
}
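
// A possible follow-up for the TODOs above (an illustrative sketch, not used by the tests yet):
// poll the task service until the script task for the given server/database is no longer active,
// rather than only checking that some tasks are still listed. The helper name and the retry/delay
// values here are assumptions, not part of the existing test code.
async function waitForScriptTaskCompletion(server: string, database: string, retries: number = 24): Promise<boolean> {
	const taskService = azdata.dataprotocol.getProvider<azdata.TaskServicesProvider>('MSSQL', azdata.DataProviderType.TaskServicesProvider);
	while (retries-- > 0) {
		// Only active tasks are returned, so the script task disappearing from this list
		// (or reporting Succeeded) is treated as completion.
		const active = await taskService.getAllTasks({ listActiveTasksOnly: true });
		const scriptTask = active.tasks.find(t => t.serverName === server && t.databaseName === database && t.taskExecutionMode === azdata.TaskExecutionMode.script);
		if (!scriptTask || scriptTask.status === azdata.TaskStatus.Succeeded) {
			return true;
		}
		await utils.sleep(5000);
	}
	return false;
}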
171
extensions/integration-tests/src/test/testConfig.ts
Normal file
@@ -0,0 +1,171 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

/*
	TODO: Due to a runtime error, I duplicated this file at these 2 locations:
	$/extensions/integration-test/src/testConfig.ts
	$/test/smoke/src/sql/testConfig.ts
	for now, make sure to keep both files in sync.
*/

interface ITestServerProfile {
	serverName: string;
	userName: string;
	password: string;
	authenticationType: AuthenticationType;
	database: string;
	provider: ConnectionProvider;
	version: string;
	engineType: EngineType;
}

interface INameDisplayNamePair {
	name: string;
	displayName: string;
}

export enum AuthenticationType {
	Windows,
	SqlLogin
}

export enum ConnectionProvider {
	SQLServer
}

export enum EngineType {
	Standalone,
	Azure,
	BigDataCluster
}

let connectionProviderMapping: { [key: string]: { name: string; displayName: string } } = {};
let authenticationTypeMapping: { [key: string]: { name: string; displayName: string } } = {};
connectionProviderMapping[ConnectionProvider.SQLServer] = { name: 'MSSQL', displayName: 'Microsoft SQL Server' };

authenticationTypeMapping[AuthenticationType.SqlLogin] = { name: 'SqlLogin', displayName: 'SQL Login' };
authenticationTypeMapping[AuthenticationType.Windows] = { name: 'Integrated', displayName: 'Windows Authentication' };

export function getConfigValue(name: string): string {
	let configValue = process.env[name];
	return configValue ? configValue.toString() : '';
}

export const EnvironmentVariable_BDC_SERVER: string = 'BDC_BACKEND_HOSTNAME';
export const EnvironmentVariable_BDC_USERNAME: string = 'BDC_BACKEND_USERNAME';
export const EnvironmentVariable_BDC_PASSWORD: string = 'BDC_BACKEND_PWD';
export const EnvironmentVariable_STANDALONE_SERVER: string = 'STANDALONE_SQL';
export const EnvironmentVariable_STANDALONE_USERNAME: string = 'STANDALONE_SQL_USERNAME';
export const EnvironmentVariable_STANDALONE_PASSWORD: string = 'STANDALONE_SQL_PWD';
export const EnvironmentVariable_AZURE_SERVER: string = 'AZURE_SQL';
export const EnvironmentVariable_AZURE_USERNAME: string = 'AZURE_SQL_USERNAME';
export const EnvironmentVariable_AZURE_PASSWORD: string = 'AZURE_SQL_PWD';
export const EnvironmentVariable_PYTHON_PATH: string = 'PYTHON_TEST_PATH';
export const EnvironmentVariable_STANDALONE_SERVER_2019: string = 'STANDALONE_SQL_2019';
export const EnvironmentVariable_STANDALONE_USERNAME_2019: string = 'STANDALONE_SQL_USERNAME_2019';
export const EnvironmentVariable_STANDALONE_PASSWORD_2019: string = 'STANDALONE_SQL_PWD_2019';

export interface TestConnectionInfo {
	readonly serverName: string;
	readonly database: string;
	readonly userName: string;
	readonly password: string;
	readonly providerName: string;
	readonly authenticationTypeName: string;
}
export class TestServerProfile implements TestConnectionInfo {
	constructor(private _profile: ITestServerProfile) { }
	public get serverName(): string { return this._profile.serverName; }
	public get userName(): string { return this._profile.userName; }
	public get password(): string { return this._profile.password; }
	public get database(): string { return this._profile.database; }
	public get version(): string { return this._profile.version; }
	public get provider(): ConnectionProvider { return this._profile.provider; }
	public get providerName(): string { return getEnumMappingEntry(connectionProviderMapping, this.provider).name; }
	public get providerDisplayName(): string { return getEnumMappingEntry(connectionProviderMapping, this.provider).displayName; }
	public get authenticationType(): AuthenticationType { return this._profile.authenticationType; }
	public get authenticationTypeName(): string { return getEnumMappingEntry(authenticationTypeMapping, this.authenticationType).name; }
	public get authenticationTypeDisplayName(): string { return getEnumMappingEntry(authenticationTypeMapping, this.authenticationType).displayName; }
	public get engineType(): EngineType { return this._profile.engineType; }
}

let TestingServers: TestServerProfile[] = [
	new TestServerProfile(
		{
			serverName: getConfigValue(EnvironmentVariable_STANDALONE_SERVER),
			userName: getConfigValue(EnvironmentVariable_STANDALONE_USERNAME),
			password: getConfigValue(EnvironmentVariable_STANDALONE_PASSWORD),
			authenticationType: AuthenticationType.SqlLogin,
			database: 'master',
			provider: ConnectionProvider.SQLServer,
			version: '2017',
			engineType: EngineType.Standalone
		}),
	new TestServerProfile(
		{
			serverName: getConfigValue(EnvironmentVariable_AZURE_SERVER),
			userName: getConfigValue(EnvironmentVariable_AZURE_USERNAME),
			password: getConfigValue(EnvironmentVariable_AZURE_PASSWORD),
			authenticationType: AuthenticationType.SqlLogin,
			database: 'master',
			provider: ConnectionProvider.SQLServer,
			version: '2012',
			engineType: EngineType.Azure
		}),
	new TestServerProfile(
		{
			serverName: getConfigValue(EnvironmentVariable_BDC_SERVER),
			userName: getConfigValue(EnvironmentVariable_BDC_USERNAME),
			password: getConfigValue(EnvironmentVariable_BDC_PASSWORD),
			authenticationType: AuthenticationType.SqlLogin,
			database: 'master',
			provider: ConnectionProvider.SQLServer,
			version: '2019',
			engineType: EngineType.BigDataCluster
		}),
	new TestServerProfile(
		{
			serverName: getConfigValue(EnvironmentVariable_STANDALONE_SERVER_2019),
			userName: getConfigValue(EnvironmentVariable_STANDALONE_USERNAME_2019),
			password: getConfigValue(EnvironmentVariable_STANDALONE_PASSWORD_2019),
			authenticationType: AuthenticationType.SqlLogin,
			database: 'master',
			provider: ConnectionProvider.SQLServer,
			version: '2019',
			engineType: EngineType.Standalone
		})
];

function getEnumMappingEntry(mapping: any, enumValue: any): INameDisplayNamePair {
	let entry = mapping[enumValue];
	if (entry) {
		return entry;
	} else {
		throw new Error(`Unknown enum type: ${enumValue.toString()}`);
	}
}

export async function getAzureServer(): Promise<TestServerProfile> {
	let servers = await getTestingServers();
	return servers.filter(s => s.engineType === EngineType.Azure)[0];
}

export async function getStandaloneServer(version: '2017' | '2019' = '2017'): Promise<TestServerProfile> {
	let servers = await getTestingServers();
	return servers.filter(s => s.version === version && s.engineType === EngineType.Standalone)[0];
}
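
// Usage note (illustrative): callers default to the SQL Server 2017 profile configured through the
// STANDALONE_SQL* environment variables, and can opt into the 2019 profile explicitly, for example:
//   const server2019 = await getStandaloneServer('2019');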

export async function getBdcServer(): Promise<TestServerProfile> {
	let servers = await getTestingServers();
	return servers.filter(s => s.version === '2019' && s.engineType === EngineType.BigDataCluster)[0];
}

export async function getTestingServers(): Promise<TestServerProfile[]> {
	return TestingServers;
}
294
extensions/integration-tests/src/test/utils.ts
Normal file
@@ -0,0 +1,294 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as assert from 'assert';
import * as azdata from 'azdata';
import * as vscode from 'vscode';
import * as fs from 'fs';
import { TestServerProfile, TestConnectionInfo } from './testConfig';
import { isNullOrUndefined, promisify } from 'util';

// default server connection timeout
export const DefaultConnectTimeoutInMs: number = 10000;

/**
 * @param connectionInfo test connection profile
 * @param timeout optional timeout parameter
 * Returns connection id for a new connection
 */
export async function connectToServer(connectionInfo: TestConnectionInfo, timeout: number = DefaultConnectTimeoutInMs): Promise<string> {
	let connectionProfile: azdata.IConnectionProfile = {
		serverName: connectionInfo.serverName,
		databaseName: connectionInfo.database,
		authenticationType: connectionInfo.authenticationTypeName,
		providerName: connectionInfo.providerName,
		connectionName: '',
		userName: connectionInfo.userName,
		password: connectionInfo.password,
		savePassword: false,
		groupFullName: undefined,
		saveProfile: true,
		id: undefined,
		groupId: undefined,
		options: {}
	};
	await ensureConnectionViewOpened();
	let result = <azdata.ConnectionResult>await azdata.connection.connect(connectionProfile);
	assert(result.connected, `Failed to connect to "${connectionProfile.serverName}", error code: ${result.errorCode}, error message: ${result.errorMessage}`);

	// workaround: wait for OE to load
	await pollTimeout(async () => {
		const nodes = await azdata.objectexplorer.getActiveConnectionNodes();
		let found = nodes.some(node => {
			return node.connectionId === result.connectionId;
		});
		if (found === undefined) {
			found = false;
		}
		return found;
	}, 1000, timeout);

	return result.connectionId;
}

export class PromiseCancelledError extends Error { }
/**
 * Wait for a promise to resolve but timeout after a certain amount of time.
 * It will throw a PromiseCancelledError when it times out.
 * @param p promise to wait on
 * @param timeout time to wait
 */
export async function asyncTimeout<T>(p: Thenable<T>, timeout: number): Promise<(T | undefined)> {
	const timeoutPromise = new Promise<T>((done, reject) => {
		setTimeout(() => {
			reject(new PromiseCancelledError('Promise did not resolve in time'));
		}, timeout);
	});

	return Promise.race([p, timeoutPromise]);
}

export async function pollTimeout(predicate: () => Thenable<boolean>, intervalDelay: number, timeoutTime: number): Promise<boolean> {
	let interval: NodeJS.Timer;
	return new Promise(pollOver => {
		const complete = (success = false) => {
			clearInterval(interval);
			pollOver(success);
		};
		interval = setInterval(async () => {
			const predResult = await predicate();
			if (predResult) {
				complete(true);
			}
		}, intervalDelay);
		setTimeout(complete, timeoutTime);
	});
}

export async function ensureConnectionViewOpened() {
	await vscode.commands.executeCommand('dataExplorer.servers.focus');
}

export async function sleep(ms: number): Promise<{}> {
	return new Promise(resolve => setTimeout(resolve, ms));
}

export async function createDB(dbName: string, ownerUri: string): Promise<void> {
	let query = `BEGIN TRY
			CREATE DATABASE ${dbName}
			SELECT 1 AS NoError
		END TRY
		BEGIN CATCH
			SELECT ERROR_MESSAGE() AS ErrorMessage;
		END CATCH`;

	let dbCreatedResult = await runQuery(query, ownerUri);
	assert(dbCreatedResult.columnInfo[0].columnName !== 'ErrorMessage', 'DB creation threw error');
}

export async function deleteDB(server: TestServerProfile, dbName: string, ownerUri: string): Promise<void> {
	let query = `BEGIN TRY
			ALTER DATABASE ${dbName}
			SET OFFLINE
			WITH ROLLBACK IMMEDIATE
			DROP DATABASE ${dbName}
			SELECT 1 AS NoError
		END TRY
		BEGIN CATCH
			SELECT ERROR_MESSAGE() AS ErrorMessage;
		END CATCH`;

	ownerUri = await ensureServerConnected(server, ownerUri);
	let dbDeleteResult = await runQuery(query, ownerUri);
	assert(dbDeleteResult.columnInfo[0].columnName !== 'ErrorMessage', 'DB deletion threw error');
}

async function ensureServerConnected(server: TestServerProfile, ownerUri: string): Promise<string> {
	try {
		// The queries might fail if the connection is removed.
		// Check if the connection is present - if not, create a new connection and use the OwnerUri from there.
		let connection = await azdata.connection.getConnection(ownerUri);
		if (isNullOrUndefined(connection)) {
			let connectionId = await connectToServer(server);
			return azdata.connection.getUriForConnection(connectionId);
		}
	}
	catch (ex) {
		console.error('utils.ensureServerConnected : Failed to get or create connection');
		console.error(ex); // not throwing here because it is a safety net and the actual query will throw if it failed.
	}
	return ownerUri;
}

export async function runQuery(query: string, ownerUri: string): Promise<azdata.SimpleExecuteResult> {
	try {
		let queryProvider = azdata.dataprotocol.getProvider<azdata.QueryProvider>('MSSQL', azdata.DataProviderType.QueryProvider);
		let result = await queryProvider.runQueryAndReturn(ownerUri, query);
		return result;
	}
	catch (ex) {
		console.error('utils.runQuery : Failed to run query');
		console.error(ex);
		throw ex;
	}
}

export async function assertThrowsAsync(fn: () => Promise<any>, msg: string): Promise<void> {
	let f = () => {
		// Empty
	};
	try {
		await fn();
	} catch (e) {
		f = () => { throw e; };
	} finally {
		assert.throws(f, msg);
	}
}
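
// Usage note (illustrative): the callback is expected to reject for the assertion to pass, e.g.
//   await assertThrowsAsync(async () => { throw new Error('expected failure'); }, 'should have thrown');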

/**
 *
 * @param databaseName name of database to check for
 * @param ownerUri owner uri
 * @param retryCount number of times to retry with a 5 second wait between each try
 * Checks for database getting created for operations that have async database creation
 */
export async function assertDatabaseCreationResult(databaseName: string, ownerUri: string, retryCount: number): Promise<void> {
	let result: azdata.SimpleExecuteResult;
	while (retryCount > 0) {
		--retryCount;
		// add state=0 to the query to make sure the database is online
		const query = `BEGIN TRY
				SELECT name FROM sys.databases WHERE name='${databaseName}' AND state=0
			END TRY
			BEGIN CATCH
				SELECT ERROR_MESSAGE() AS ErrorMessage;
			END CATCH`;
		try {
			result = await runQuery(query, ownerUri);
			if (result.rowCount > 0) {
				break;
			}
		}
		catch {
			// an exception will be thrown by the SQL Tools Service if no results are returned - ignore it.
		}

		await sleep(5000);
	}

	assert(result.rowCount === 1, `Database ${databaseName} should be created`);
	assert(result.columnInfo[0].columnName !== 'ErrorMessage', 'Checking for db creation threw error');
}

/**
 *
 * @param filepath File path to check for
 * @param retryCount number of times to retry with a 5 second wait between each try
 * Checks for file getting created for async file generation and deletes file
 */
export async function assertFileGenerationResult(filepath: string, retryCount: number): Promise<void> {
	let exists = false;
	while (retryCount > 0 && !exists) {
		--retryCount;
		exists = await promisify(fs.exists)(filepath);
		await sleep(5000);
	}

	assert(exists, `File ${filepath} is expected to be present`);
	assert((await fs.promises.readFile(filepath)).byteLength > 0, `File ${filepath} should not be empty`);
	await fs.promises.unlink(filepath);
}

/**
 *
 * @param schema schema to look for
 * @param tableName table to look for
 * @param ownerUri owner uri
 * @param retryCount number of times to retry with a 5 second wait between each try
 * @param checkForData whether or not to check if the table has data
 * Checks for table existing
 */
export async function assertTableCreationResult(schema: string, tableName: string, ownerUri: string, retryCount: number, checkForData?: boolean): Promise<void> {
	let result: azdata.SimpleExecuteResult;
	while (retryCount > 0) {
		--retryCount;
		let query = `BEGIN TRY
				SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA = '${schema}' AND TABLE_NAME = '${tableName}'
			END TRY
			BEGIN CATCH
				SELECT ERROR_MESSAGE() AS ErrorMessage;
			END CATCH`;
		result = await runQuery(query, ownerUri);
		if (result.rowCount > 0) {
			break;
		}
		await sleep(5000);
	}

	assert(result.rowCount === 1, `Table ${tableName} should be created. ${result.rowCount} rows were found`);
	assert(result.columnInfo[0].columnName !== 'ErrorMessage', `Checking for table creation threw error ${result.rows[0][0].displayValue}`);

	if (checkForData) {
		while (retryCount > 0) {
			--retryCount;
			let query = `BEGIN TRY
					SELECT * FROM ${tableName}
				END TRY
				BEGIN CATCH
					SELECT ERROR_MESSAGE() AS ErrorMessage;
				END CATCH`;
			result = await runQuery(query, ownerUri);
			if (result.rowCount > 0) {
				break;
			}
			await sleep(5000);
		}

		assert(result.rowCount > 0, `Table ${tableName} should have at least one row of data. ${result.rowCount} rows were found`);
		assert(result.columnInfo[0].columnName !== 'ErrorMessage', `Checking for table creation threw error ${result.rows[0][0].displayValue}`);
	}
}

export function testServerProfileToIConnectionProfile(serverProfile: TestServerProfile): azdata.IConnectionProfile {
	return {
		serverName: serverProfile.serverName,
		databaseName: serverProfile.database,
		authenticationType: serverProfile.authenticationTypeName,
		providerName: serverProfile.providerName,
		connectionName: '',
		userName: serverProfile.userName,
		password: serverProfile.password,
		savePassword: false,
		groupFullName: undefined,
		saveProfile: true,
		id: undefined,
		groupId: undefined,
		options: {}
	};
}
108
extensions/integration-tests/src/test/uuid.ts
Normal file
@@ -0,0 +1,108 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

/**
 * Represents a UUID as defined by rfc4122.
 */
export interface UUID {

	/**
	 * @returns the canonical representation in sets of hexadecimal numbers separated by dashes.
	 */
	asHex(): string;
}

class ValueUUID implements UUID {

	constructor(public _value: string) {
		// empty
	}

	public asHex(): string {
		return this._value;
	}
}

class V4UUID extends ValueUUID {

	private static readonly _chars = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f'];

	private static readonly _timeHighBits = ['8', '9', 'a', 'b'];

	private static _oneOf(array: string[]): string {
		return array[Math.floor(array.length * Math.random())];
	}

	private static _randomHex(): string {
		return V4UUID._oneOf(V4UUID._chars);
	}

	constructor() {
		super([
			V4UUID._randomHex(),
			V4UUID._randomHex(),
			V4UUID._randomHex(),
			V4UUID._randomHex(),
			V4UUID._randomHex(),
			V4UUID._randomHex(),
			V4UUID._randomHex(),
			V4UUID._randomHex(),
			'-',
			V4UUID._randomHex(),
			V4UUID._randomHex(),
			V4UUID._randomHex(),
			V4UUID._randomHex(),
			'-',
			'4',
			V4UUID._randomHex(),
			V4UUID._randomHex(),
			V4UUID._randomHex(),
			'-',
			V4UUID._oneOf(V4UUID._timeHighBits),
			V4UUID._randomHex(),
			V4UUID._randomHex(),
			V4UUID._randomHex(),
			'-',
			V4UUID._randomHex(),
			V4UUID._randomHex(),
			V4UUID._randomHex(),
			V4UUID._randomHex(),
			V4UUID._randomHex(),
			V4UUID._randomHex(),
			V4UUID._randomHex(),
			V4UUID._randomHex(),
			V4UUID._randomHex(),
			V4UUID._randomHex(),
			V4UUID._randomHex(),
			V4UUID._randomHex(),
		].join(''));
	}
}

export function v4(): UUID {
	return new V4UUID();
}

const _UUIDPattern = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i;

export function isUUID(value: string): boolean {
	return _UUIDPattern.test(value);
}

/**
 * Parses a UUID that is of the format xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx.
 * @param value A uuid string.
 */
export function parse(value: string): UUID {
	if (!isUUID(value)) {
		throw new Error('invalid uuid');
	}

	return new ValueUUID(value);
}

export function generateUuid(): string {
	return v4().asHex();
}
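
// Usage note (illustrative): tests can use these helpers to give server-side artifacts unique names, e.g.
//   const testDbName = `adsTest_${v4().asHex()}`;
// generateUuid() returns the same canonical xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx string form.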
Block a user