Unit Test Cleanup (#141)

This is a fairly large set of changes to the unit tests that improves how well each test isolates the code it exercises.

* Unit tests for query execution have been split into separate files for the different classes.
* Unit tests have been added for the ResultSet class, which previously had no tests.
* The InMemoryStreamWrapper has been improved to share memory between readers and writers, creating a simulated filesystem (see the sketch after this list).
* A mock ConnectionService is now used to cut down on noisy exceptions and to prevent "row stealing". Unfortunately this lowers code coverage; however, since the tests that touched the connection service were not really testing it, this keeps us honest. It does mean more unit tests will need to be added for the connection service.
* The await mechanism for query execution has been standardized.
* The mechanism for getting WorkspaceService mocks and mock FileStreamFactories has been cleaned up.
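To illustrate the simulated-filesystem idea, here is a minimal sketch of the pattern: one dictionary of named in-memory buffers that both the writer and reader sides resolve, so rows written by a ResultSet under test can be read straight back without touching disk. The `InMemoryFileStore` type and its members are hypothetical stand-ins for illustration, not the actual test wrapper's API.

```csharp
using System.Collections.Concurrent;
using System.IO;

// Hypothetical sketch of a "simulated filesystem" for tests: every reader and
// writer created for the same file name resolves the same in-memory buffer.
public class InMemoryFileStore
{
    private readonly ConcurrentDictionary<string, MemoryStream> files =
        new ConcurrentDictionary<string, MemoryStream>();

    // Register an empty shared buffer under a generated file name
    public string CreateFile()
    {
        string name = Path.GetRandomFileName();
        files[name] = new MemoryStream();
        return name;
    }

    // Append bytes to the shared buffer for the given file name
    public void Write(string name, byte[] bytes)
    {
        MemoryStream shared = files[name];
        shared.Seek(0, SeekOrigin.End);
        shared.Write(bytes, 0, bytes.Length);
    }

    // Read back whatever has been written so far for that file name
    public Stream OpenRead(string name)
    {
        return new MemoryStream(files[name].ToArray(), writable: false);
    }

    // Drop the buffer when the owning result set disposes its output file
    public void DisposeFile(string name)
    {
        MemoryStream removed;
        files.TryRemove(name, out removed);
    }
}
```

A mocked file stream factory can then hand out readers and writers backed by a store like this, which is what lets the ResultSet tests read back the rows they have just written through the same factory.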

* Refactor the query execution tests into their own files

* Removing tests from ExecuteTests.cs that were moved to separate files

* Adding tests for ResultSet class

* Adding test for the FOR XML/JSON component of the ResultSet class

* Setting up shared storage between file stream readers/writers

* Standardizing on Workspace mocking, awaiting execution completion (see the sketch after this list)

* Adding comment for ResultSet class
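On the "awaiting execution completion" point: the standardized pattern amounts to bridging the query's completion notification to an awaitable task instead of sleeping or polling in the test. A rough sketch of that pattern, with a hypothetical FakeQuery standing in for the real service-layer query type (in the real tests the query runs against the mocked WorkspaceService and ConnectionService described above rather than a live connection):

```csharp
using System;
using System.Threading.Tasks;
using Xunit;

public class QueryCompletionAwaitTests
{
    // Hypothetical stand-in for the service layer's query object; the real
    // type raises its completion notification through its own callback shape.
    private class FakeQuery
    {
        public event Action Completed;

        public void Execute()
        {
            // ... execute batches and write results ...
            Completed?.Invoke();
        }
    }

    [Fact]
    public async Task AwaitsCompletionInsteadOfSleeping()
    {
        var query = new FakeQuery();

        // Bridge the completion callback to a task the test can await
        var completion = new TaskCompletionSource<bool>();
        query.Completed += () => completion.TrySetResult(true);

        query.Execute();

        // Deterministic wait: no Thread.Sleep, no polling loops
        bool completed = await completion.Task;
        Assert.True(completed);
    }
}
```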
Benjamin Russell, 2016-11-10 11:42:31 -08:00 (committed by GitHub)
parent 9ff9a02932 · commit ec94d986a8
16 changed files with 1171 additions and 1069 deletions


@@ -8,7 +8,6 @@ using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Data.Common;
using System.Linq;
-using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.SqlTools.ServiceLayer.QueryExecution.Contracts;
@@ -17,6 +16,10 @@ using Microsoft.SqlTools.ServiceLayer.Utility;
namespace Microsoft.SqlTools.ServiceLayer.QueryExecution
{
+/// <summary>
+/// Class that represents a resultset that was generated from a query. Contains logic for
+/// storing and retrieving results. Is contained by a Batch class.
+/// </summary>
public class ResultSet : IDisposable
{
#region Constants
@@ -35,11 +38,21 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution
#region Member Variables
+/// <summary>
+/// The reader to use for this resultset
+/// </summary>
+private readonly StorageDataReader dataReader;
/// <summary>
/// For IDisposable pattern, whether or not object has been disposed
/// </summary>
private bool disposed;
+/// <summary>
+/// A list of offsets into the buffer file that correspond to where rows start
+/// </summary>
+private readonly LongList<long> fileOffsets;
/// <summary>
/// The factory to use to get reading/writing handlers
/// </summary>
@@ -63,12 +76,7 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution
-/// <summary>
-/// Whether the resultSet is in the process of being disposed
-/// </summary>
-private bool isBeingDisposed;
/// <summary>
/// All save tasks currently saving this ResultSet
/// </summary>
-private ConcurrentDictionary<string, Task> saveTasks;
+private readonly ConcurrentDictionary<string, Task> saveTasks;
#endregion
@@ -82,11 +90,11 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution
// Sanity check to make sure we got a reader
Validate.IsNotNull(nameof(reader), SR.QueryServiceResultSetReaderNull);
-DataReader = new StorageDataReader(reader);
+dataReader = new StorageDataReader(reader);
// Initialize the storage
outputFileName = factory.CreateFile();
-FileOffsets = new LongList<long>();
+fileOffsets = new LongList<long>();
// Store the factory
fileStreamFactory = factory;
@@ -100,29 +108,13 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution
/// Whether the resultSet is in the process of being disposed
/// </summary>
/// <returns></returns>
-internal bool IsBeingDisposed
-{
-    get
-    {
-        return isBeingDisposed;
-    }
-}
+internal bool IsBeingDisposed { get; private set; }
/// <summary>
/// The columns for this result set
/// </summary>
public DbColumnWrapper[] Columns { get; private set; }
-/// <summary>
-/// The reader to use for this resultset
-/// </summary>
-private StorageDataReader DataReader { get; set; }
-/// <summary>
-/// A list of offsets into the buffer file that correspond to where rows start
-/// </summary>
-private LongList<long> FileOffsets { get; set; }
/// <summary>
/// Maximum number of characters to store for a field
/// </summary>
@@ -178,14 +170,14 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution
if (isSingleColumnXmlJsonResultSet)
{
// Iterate over all the rows and process them into a list of string builders
-IEnumerable<string> rowValues = FileOffsets.Select(rowOffset => fileStreamReader.ReadRow(rowOffset, Columns)[0].DisplayValue);
+IEnumerable<string> rowValues = fileOffsets.Select(rowOffset => fileStreamReader.ReadRow(rowOffset, Columns)[0].DisplayValue);
rows = new[] { new[] { string.Join(string.Empty, rowValues) } };
}
else
{
// Figure out which rows we need to read back
-IEnumerable<long> rowOffsets = FileOffsets.Skip(startRow).Take(rowCount);
+IEnumerable<long> rowOffsets = fileOffsets.Skip(startRow).Take(rowCount);
// Iterate over the rows we need and process them into output
rows = rowOffsets.Select(rowOffset =>
@@ -216,18 +208,22 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution
using (IFileStreamWriter fileWriter = fileStreamFactory.GetWriter(outputFileName, MaxCharsToStore, MaxXmlCharsToStore))
{
// If we can initialize the columns using the column schema, use that
-if (!DataReader.DbDataReader.CanGetColumnSchema())
+if (!dataReader.DbDataReader.CanGetColumnSchema())
{
throw new InvalidOperationException(SR.QueryServiceResultSetNoColumnSchema);
}
-Columns = DataReader.Columns;
-long currentFileOffset = 0;
+Columns = dataReader.Columns;
-while (await DataReader.ReadAsync(cancellationToken))
+long currentFileOffset = 0;
+while (await dataReader.ReadAsync(cancellationToken))
{
+// Store the beginning of the row
+long rowStart = currentFileOffset;
+currentFileOffset += fileWriter.WriteRow(dataReader);
+// Add the row to the list of rows we have only if the row was successfully written
RowCount++;
-FileOffsets.Add(currentFileOffset);
-currentFileOffset += fileWriter.WriteRow(DataReader);
+fileOffsets.Add(rowStart);
}
}
// Check if resultset is 'for xml/json'. If it is, set isJson/isXml value in column metadata
@@ -251,7 +247,7 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution
return;
}
-isBeingDisposed = true;
+IsBeingDisposed = true;
// Check if saveTasks are running for this ResultSet
if (!saveTasks.IsEmpty)
{
@@ -263,7 +259,7 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution
fileStreamFactory.DisposeFile(outputFileName);
}
disposed = true;
-isBeingDisposed = false;
+IsBeingDisposed = false;
});
}
else
@@ -274,7 +270,7 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution
fileStreamFactory.DisposeFile(outputFileName);
}
disposed = true;
-isBeingDisposed = false;
+IsBeingDisposed = false;
}
}