Add serialization service support (#840)

Added a method that handles serialization requests.
Added support for splitting a save across multiple requests to reduce overall message size.
Added unit tests.
String changes were generated with a new version of the string tool.
A separate PR will follow for the build & localization changes so this can run on Mac
without .NET Core 1.0.
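
For illustration, a rough sketch of how a client might split a save across the two new requests, "serialize/start" and "serialize/continue". The contract types below are the ones added in this commit; the file path, column names, and row batches are made up.

// Assumes: using Microsoft.SqlTools.ServiceLayer.QueryExecution.Contracts;
// firstBatch / lastBatch are DbCellValue[][] chunks of the result set (hypothetical).
var start = new SerializeDataStartRequestParams
{
    FilePath = "results.csv",            // hypothetical output path
    SaveFormat = "csv",
    Columns = new[] { new ColumnInfo("Id", "Int"), new ColumnInfo("Name", "NVarChar") },
    Rows = firstBatch,
    IsLastBatch = false                  // more batches will follow for this file
};
// sent as the "serialize/start" request

var rest = new SerializeDataContinueRequestParams
{
    FilePath = start.FilePath,           // keys the batch to the in-progress file
    Rows = lastBatch,
    IsLastBatch = true                   // final batch: the writer is flushed and closed
};
// sent as the "serialize/continue" request
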
This commit is contained in:
Kevin Cunnane
2019-08-06 16:50:42 -07:00
committed by GitHub
parent 92bb281cdd
commit 7ef82feea7
18 changed files with 7649 additions and 16794 deletions

10 file diffs suppressed because they are too large

View File

@@ -139,6 +139,13 @@ QueryServiceResultSetNoColumnSchema = Could not retrieve column schema for resul
QueryServiceExecutionPlanNotFound = Could not retrieve an execution plan from the result set
############################################################################
# Serialization Service
SerializationServiceUnsupportedFormat (string formatName) = Unsupported Save Format: {0}
SerializationServiceRequestInProgress (string filePath) = A request for file {0} is already in progress
SerializationServiceRequestNotFound (string filePath) = Cannot serialize more data as no request for file {0} could be found
############################################################################
# Language Service
@@ -798,4 +805,4 @@ ExtractInvalidVersion = Invalid version '{0}' passed. Version must be in the for
PublishChangesTaskName = Apply schema compare changes
SchemaCompareExcludeIncludeNodeNotFound = Failed to find the specified change in the model
OpenScmpConnectionBasedModelParsingError = Error encountered while trying to parse connection information for endpoint '{0}' with error message '{1}'
SchemaCompareSessionNotFound = Could not find the schema compare session to cancel

View File

@@ -2006,6 +2006,24 @@
<target state="new">Could not find the schema compare session to cancel</target>
<note></note>
</trans-unit>
<trans-unit id="SerializationServiceUnsupportedFormat">
<source>Unsupported Save Format: {0}</source>
<target state="new">Unsupported Save Format: {0}</target>
<note>.
Parameters: 0 - formatName (string) </note>
</trans-unit>
<trans-unit id="SerializationServiceRequestInProgress">
<source>A request for file {0} is already in progress</source>
<target state="new">A request for file {0} is already in progress</target>
<note>.
Parameters: 0 - filePath (string) </note>
</trans-unit>
<trans-unit id="SerializationServiceRequestNotFound">
<source>Cannot serialize more data as no request for file {0} could be found</source>
<target state="new">Cannot serialize more data as no request for file {0} could be found</target>
<note>.
Parameters: 0 - filePath (string) </note>
</trans-unit>
</body>
</file>
</xliff>

View File

@@ -3,6 +3,7 @@
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System;
using System.Collections;
using System.Collections.Generic;
using System.Data;
using System.Data.Common;
@@ -93,104 +94,8 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.Contracts
DataType = column.DataType;
DataTypeName = column.DataTypeName.ToLowerInvariant();
// Determine the SqlDbType
SqlDbType type;
if (Enum.TryParse(DataTypeName, true, out type))
{
SqlDbType = type;
}
else
{
switch (DataTypeName)
{
case "numeric":
SqlDbType = SqlDbType.Decimal;
break;
case "sql_variant":
SqlDbType = SqlDbType.Variant;
break;
case "timestamp":
SqlDbType = SqlDbType.VarBinary;
break;
case "sysname":
SqlDbType = SqlDbType.NVarChar;
break;
default:
SqlDbType = DataTypeName.EndsWith(".sys.hierarchyid") ? SqlDbType.NVarChar : SqlDbType.Udt;
break;
}
}
// We want the display name for the column to always exist
ColumnName = string.IsNullOrEmpty(column.ColumnName)
? SR.QueryServiceColumnNull
: column.ColumnName;
switch (DataTypeName)
{
case "varchar":
case "nvarchar":
IsChars = true;
Debug.Assert(ColumnSize.HasValue);
if (ColumnSize.Value == int.MaxValue)
{
//For Yukon, special case nvarchar(max) with column name == "Microsoft SQL Server 2005 XML Showplan" -
//assume it is an XML showplan.
//Please note this field must be in sync with a similar field defined in QESQLBatch.cs.
//This is not the best fix that we could do but we are trying to minimize code impact
//at this point. Post Yukon we should review this code again and avoid
//hard-coding special column name in multiple places.
const string yukonXmlShowPlanColumn = "Microsoft SQL Server 2005 XML Showplan";
if (column.ColumnName == yukonXmlShowPlanColumn)
{
// Indicate that this is xml to apply the right size limit
// Note we leave chars type as well to use the right retrieval mechanism.
IsXml = true;
}
IsLong = true;
}
break;
case "text":
case "ntext":
IsChars = true;
IsLong = true;
break;
case "xml":
IsXml = true;
IsLong = true;
break;
case "binary":
case "image":
IsBytes = true;
IsLong = true;
break;
case "varbinary":
case "rowversion":
IsBytes = true;
Debug.Assert(ColumnSize.HasValue);
if (ColumnSize.Value == int.MaxValue)
{
IsLong = true;
}
break;
case "sql_variant":
IsSqlVariant = true;
break;
default:
if (!AllServerDataTypes.Contains(DataTypeName))
{
// treat all UDT's as long/bytes data types to prevent the CLR from attempting
// to load the UDT assembly into our process to call ToString() on the object.
IsUdt = true;
IsBytes = true;
IsLong = true;
}
break;
}
DetermineSqlDbType();
AddNameAndDataFields(column.ColumnName);
if (IsUdt)
{
@@ -215,6 +120,19 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.Contracts
}
}
public DbColumnWrapper(ColumnInfo columnInfo)
{
DataTypeName = columnInfo.DataTypeName.ToLowerInvariant();
DetermineSqlDbType();
DataType = TypeConvertor.ToNetType(this.SqlDbType);
if (DataType == typeof(String))
{
this.ColumnSize = int.MaxValue;
}
AddNameAndDataFields(columnInfo.Name);
}
/// <summary>
/// Default constructor, used for deserializing JSON RPC only
/// </summary>
@@ -299,5 +217,176 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.Contracts
#endregion
private void DetermineSqlDbType()
{
// Determine the SqlDbType
SqlDbType type;
if (Enum.TryParse(DataTypeName, true, out type))
{
SqlDbType = type;
}
else
{
switch (DataTypeName)
{
case "numeric":
SqlDbType = SqlDbType.Decimal;
break;
case "sql_variant":
SqlDbType = SqlDbType.Variant;
break;
case "timestamp":
SqlDbType = SqlDbType.VarBinary;
break;
case "sysname":
SqlDbType = SqlDbType.NVarChar;
break;
default:
SqlDbType = DataTypeName.EndsWith(".sys.hierarchyid") ? SqlDbType.NVarChar : SqlDbType.Udt;
break;
}
}
}
private void AddNameAndDataFields(string columnName)
{
// We want the display name for the column to always exist
ColumnName = string.IsNullOrEmpty(columnName)
? SR.QueryServiceColumnNull
: columnName;
switch (DataTypeName)
{
case "varchar":
case "nvarchar":
IsChars = true;
Debug.Assert(ColumnSize.HasValue);
if (ColumnSize.Value == int.MaxValue)
{
//For Yukon, special case nvarchar(max) with column name == "Microsoft SQL Server 2005 XML Showplan" -
//assume it is an XML showplan.
//Please note this field must be in sync with a similar field defined in QESQLBatch.cs.
//This is not the best fix that we could do but we are trying to minimize code impact
//at this point. Post Yukon we should review this code again and avoid
//hard-coding special column name in multiple places.
const string yukonXmlShowPlanColumn = "Microsoft SQL Server 2005 XML Showplan";
if (columnName == yukonXmlShowPlanColumn)
{
// Indicate that this is xml to apply the right size limit
// Note we leave chars type as well to use the right retrieval mechanism.
IsXml = true;
}
IsLong = true;
}
break;
case "text":
case "ntext":
IsChars = true;
IsLong = true;
break;
case "xml":
IsXml = true;
IsLong = true;
break;
case "binary":
case "image":
IsBytes = true;
IsLong = true;
break;
case "varbinary":
case "rowversion":
IsBytes = true;
Debug.Assert(ColumnSize.HasValue);
if (ColumnSize.Value == int.MaxValue)
{
IsLong = true;
}
break;
case "sql_variant":
IsSqlVariant = true;
break;
default:
if (!AllServerDataTypes.Contains(DataTypeName))
{
// treat all UDT's as long/bytes data types to prevent the CLR from attempting
// to load the UDT assembly into our process to call ToString() on the object.
IsUdt = true;
IsBytes = true;
IsLong = true;
}
break;
}
}
}
/// <summary>
/// Convert a base data type to another base data type
/// </summary>
public sealed class TypeConvertor
{
private static Dictionary<SqlDbType,Type> _typeMap = new Dictionary<SqlDbType,Type>();
static TypeConvertor()
{
_typeMap[SqlDbType.BigInt] = typeof(Int64);
_typeMap[SqlDbType.Binary] = typeof(Byte[]);
_typeMap[SqlDbType.Bit] = typeof(Boolean);
_typeMap[SqlDbType.Char] = typeof(String);
_typeMap[SqlDbType.DateTime] = typeof(DateTime);
_typeMap[SqlDbType.Decimal] = typeof(Decimal);
_typeMap[SqlDbType.Float] = typeof(Double);
_typeMap[SqlDbType.Image] = typeof(Byte[]);
_typeMap[SqlDbType.Int] = typeof(Int32);
_typeMap[SqlDbType.Money] = typeof(Decimal);
_typeMap[SqlDbType.NChar] = typeof(String);
_typeMap[SqlDbType.NText] = typeof(String);
_typeMap[SqlDbType.NVarChar] = typeof(String);
_typeMap[SqlDbType.Real] = typeof(Single);
_typeMap[SqlDbType.UniqueIdentifier] = typeof(Guid);
_typeMap[SqlDbType.SmallDateTime] = typeof(DateTime);
_typeMap[SqlDbType.SmallInt] = typeof(Int16);
_typeMap[SqlDbType.SmallMoney] = typeof(Decimal);
_typeMap[SqlDbType.Text] = typeof(String);
_typeMap[SqlDbType.Timestamp] = typeof(Byte[]);
_typeMap[SqlDbType.TinyInt] = typeof(Byte);
_typeMap[SqlDbType.VarBinary] = typeof(Byte[]);
_typeMap[SqlDbType.VarChar] = typeof(String);
_typeMap[SqlDbType.Variant] = typeof(Object);
// Note: treating as string
_typeMap[SqlDbType.Xml] = typeof(String);
}
private TypeConvertor()
{
}
/// <summary>
/// Convert TSQL type to .Net data type
/// </summary>
/// <param name="sqlDbType"></param>
/// <returns></returns>
public static Type ToNetType(SqlDbType sqlDbType)
{
Type netType;
if (!_typeMap.TryGetValue(sqlDbType, out netType))
{
netType = typeof(String);
}
return netType;
}
}
}
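
A quick sketch of the new ColumnInfo-based construction path (illustrative; assumes using System, using System.Data, and this namespace):

// "DateTime" lower-cases to "datetime", which Enum.TryParse resolves to SqlDbType.DateTime,
// and TypeConvertor then maps that to System.DateTime.
var wrapper = new DbColumnWrapper(new ColumnInfo("ModifiedDate", "DateTime"));
// wrapper.DataType == typeof(DateTime)

// Types with no entry in the map fall back to typeof(String); string columns built from
// ColumnInfo also get ColumnSize = int.MaxValue so the writers treat them as unbounded text.
Type fallback = TypeConvertor.ToNetType(SqlDbType.Udt);   // typeof(String)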

View File

@@ -3,14 +3,77 @@
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//
using System.Data;
using Microsoft.SqlTools.Hosting.Protocol.Contracts;
using Microsoft.SqlTools.Utility;
namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.Contracts
{
public class ColumnInfo
{
/// <summary>
/// Name of this column
/// </summary>
public string Name { get; set; }
public string DataTypeName { get; set; }
public ColumnInfo()
{
}
public ColumnInfo(string name, string dataTypeName)
{
this.Name = name;
this.DataTypeName = dataTypeName;
}
}
public interface ISerializationParams
{
/// <summary>
/// Path to file that the serialized results will be stored in
/// </summary>
string FilePath { get; set; }
/// <summary>
/// Results that are to be serialized into 'SaveFormat' format
/// </summary>
DbCellValue[][] Rows { get; set; }
/// <summary>
/// Whether the current set of Rows passed in is the last for this file
/// </summary>
bool IsLastBatch { get; set; }
}
/// <summary>
/// Class used for storing results and how the results are to be serialized
/// </summary>
public class SaveResultsInfo
public class SerializeDataContinueRequestParams : ISerializationParams
{
/// <summary>
/// Path to file that the serialized results will be stored in
/// </summary>
public string FilePath { get; set; }
/// <summary>
/// Results that are to be serialized into 'SaveFormat' format
/// </summary>
public DbCellValue[][] Rows { get; set; }
/// <summary>
/// Whether the current set of Rows passed in is the last for this file
/// </summary>
public bool IsLastBatch { get; set; }
}
/// <summary>
/// Class used for storing results and how the results are to be serialized
/// </summary>
public class SerializeDataStartRequestParams : GeneralRequestDetails, ISerializationParams
{
/// <summary>
/// String representation of the type that service is supposed to serialize to
@@ -21,37 +84,97 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.Contracts
/// <summary>
/// Path to file that the serialized results will be stored in
/// </summary>
public string SavePath { get; set; }
public string FilePath { get; set; }
/// <summary>
/// Results that are to be serialized into 'SaveFormat' format
/// </summary>
public DbCellValue[][] Rows { get; set; }
/// <summary>
/// Whether the current set of Rows passed in is the last for this file
/// </summary>
public bool IsLast { get; set; }
public ColumnInfo[] Columns { get; set; }
/// <summary>
/// Whether this is the only request expected for this file.
/// </summary>
public bool IsLastBatch { get; set; }
public SerializeDataStartRequestParams()
{
}
/// <summary>
/// Constructor
/// </summary>
public SaveResultsInfo(string saveFormat,
public SerializeDataStartRequestParams(string saveFormat,
string savePath,
DbCellValue[][] rows,
bool isLast)
{
this.SaveFormat = saveFormat;
this.SavePath = savePath;
this.FilePath = savePath;
this.Rows = rows;
this.IsLast = isLast;
this.IsLastBatch = isLast;
}
internal bool IncludeHeaders
{
get { return this.GetOptionValue<bool>(SerializationOptionsHelper.IncludeHeaders); }
set { this.SetOptionValue<bool>(SerializationOptionsHelper.IncludeHeaders, value); }
}
internal string Delimiter
{
get { return this.GetOptionValue<string>(SerializationOptionsHelper.Delimiter); }
set { this.SetOptionValue<string>(SerializationOptionsHelper.Delimiter, value); }
}
internal string LineSeparator
{
get { return this.GetOptionValue<string>(SerializationOptionsHelper.LineSeparator); }
set { this.SetOptionValue<string>(SerializationOptionsHelper.LineSeparator, value); }
}
internal string TextIdentifier
{
get { return this.GetOptionValue<string>(SerializationOptionsHelper.TextIdentifier); }
set { this.SetOptionValue<string>(SerializationOptionsHelper.TextIdentifier, value); }
}
internal string Encoding
{
get { return this.GetOptionValue<string>(SerializationOptionsHelper.Encoding); }
set { this.SetOptionValue<string>(SerializationOptionsHelper.Encoding, value); }
}
internal bool Formatted
{
get { return this.GetOptionValue<bool>(SerializationOptionsHelper.Formatted); }
set { this.SetOptionValue<bool>(SerializationOptionsHelper.Formatted, value); }
}
}
public class SaveAsRequest
public class SerializeDataResult
{
public static readonly
RequestType<SaveResultsInfo, SaveResultRequestResult> Type =
RequestType<SaveResultsInfo, SaveResultRequestResult>.Create("query/saveAs");
public string Messages { get; set; }
public bool Succeeded { get; set; }
}
public class SerializeStartRequest
{
public static readonly RequestType<SerializeDataStartRequestParams, SerializeDataResult> Type = RequestType<SerializeDataStartRequestParams, SerializeDataResult>.Create("serialize/start");
}
public class SerializeContinueRequest
{
public static readonly RequestType<SerializeDataContinueRequestParams, SerializeDataResult> Type = RequestType<SerializeDataContinueRequestParams, SerializeDataResult>.Create("serialize/continue");
}
class SerializationOptionsHelper
{
internal const string IncludeHeaders = "includeHeaders";
internal const string Delimiter = "delimiter";
internal const string LineSeparator = "lineSeparator";
internal const string TextIdentifier = "textIdentifier";
internal const string Encoding = "encoding";
internal const string Formatted = "formatted";
}
}
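
The writer-specific settings (headers, delimiter, line separator, text identifier, encoding, formatted output) travel in the GeneralRequestDetails options bag under the SerializationOptionsHelper keys and are read back through the internal accessors above. A minimal sketch, assuming assembly-internal access and made-up values:

var startParams = new SerializeDataStartRequestParams
{
    FilePath = "results.csv",                      // hypothetical
    SaveFormat = "csv",
    Columns = new[] { new ColumnInfo("Id", "Int") },
    Rows = new DbCellValue[0][],
    IsLastBatch = true,
    // Stored via SetOptionValue under "includeHeaders", "delimiter", "lineSeparator":
    IncludeHeaders = true,
    Delimiter = ",",
    LineSeparator = "\r\n"
};
// DataSerializer reads these back (via GetOptionValue) when it builds the
// SaveResultsAsCsvRequestParams for the CSV writer.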

View File

@@ -0,0 +1,295 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Composition;
using System.Diagnostics;
using System.Threading.Tasks;
using Microsoft.SqlTools.Extensibility;
using Microsoft.SqlTools.Hosting;
using Microsoft.SqlTools.Hosting.Protocol;
using Microsoft.SqlTools.ServiceLayer.QueryExecution.Contracts;
using Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage;
using Microsoft.SqlTools.Utility;
namespace Microsoft.SqlTools.ServiceLayer.QueryExecution
{
[Export(typeof(IHostedService))]
public class SerializationService : HostedService<SerializationService>, IComposableService
{
private ConcurrentDictionary<string, DataSerializer> inProgressSerializations;
public SerializationService()
{
inProgressSerializations = new ConcurrentDictionary<string, DataSerializer>();
}
public override void InitializeService(IProtocolEndpoint serviceHost)
{
Logger.Write(TraceEventType.Verbose, "SerializationService initialized");
serviceHost.SetRequestHandler(SerializeStartRequest.Type, HandleSerializeStartRequest);
serviceHost.SetRequestHandler(SerializeContinueRequest.Type, HandleSerializeContinueRequest);
}
/// <summary>
/// Begin to process request to save a resultSet to a file in CSV format
/// </summary>
internal async Task HandleSerializeStartRequest(SerializeDataStartRequestParams serializeParams,
RequestContext<SerializeDataResult> requestContext)
{
try
{
Validate.IsNotNull(nameof(serializeParams), serializeParams);
Validate.IsNotNullOrWhitespaceString("FilePath", serializeParams.FilePath);
DataSerializer serializer = null;
bool hasSerializer = inProgressSerializations.TryGetValue(serializeParams.FilePath, out serializer);
if (hasSerializer)
{
throw new Exception(SR.SerializationServiceRequestInProgress(serializeParams.FilePath));
}
serializer = new DataSerializer(serializeParams);
if (!serializeParams.IsLastBatch)
{
inProgressSerializations.AddOrUpdate(serializer.FilePath, serializer, (key, old) => serializer);
}
Func<Task<SerializeDataResult>> writeData = () =>
{
return Task.Factory.StartNew(() =>
{
var result = serializer.ProcessRequest(serializeParams);
return result;
});
};
await HandleRequest(writeData, requestContext, "HandleSerializeStartRequest");
}
catch (Exception ex)
{
await requestContext.SendError(ex.Message);
}
}
/// <summary>
/// Process request to save a resultSet to a file in CSV format
/// </summary>
internal async Task HandleSerializeContinueRequest(SerializeDataContinueRequestParams serializeParams,
RequestContext<SerializeDataResult> requestContext)
{
try
{
Validate.IsNotNull(nameof(serializeParams), serializeParams);
Validate.IsNotNullOrWhitespaceString("FilePath", serializeParams.FilePath);
DataSerializer serializer = null;
bool hasSerializer = inProgressSerializations.TryGetValue(serializeParams.FilePath, out serializer);
if (!hasSerializer)
{
throw new Exception(SR.SerializationServiceRequestNotFound(serializeParams.FilePath));
}
Func<Task<SerializeDataResult>> writeData = () =>
{
return Task.Factory.StartNew(() =>
{
var result = serializer.ProcessRequest(serializeParams);
if (serializeParams.IsLastBatch)
{
// Cleanup the serializer
this.inProgressSerializations.TryRemove(serializer.FilePath, out serializer);
}
return result;
});
};
await HandleRequest(writeData, requestContext, "HandleSerializeContinueRequest");
}
catch (Exception ex)
{
await requestContext.SendError(ex.Message);
}
}
private async Task HandleRequest<T>(Func<Task<T>> handler, RequestContext<T> requestContext, string requestType)
{
Logger.Write(TraceEventType.Verbose, requestType);
try
{
T result = await handler();
await requestContext.SendResult(result);
}
catch (Exception ex)
{
await requestContext.SendError(ex.Message);
}
}
}
class DataSerializer
{
private IFileStreamWriter writer;
private SerializeDataStartRequestParams requestParams;
private IList<DbColumnWrapper> columns;
public string FilePath { get; private set; }
public DataSerializer(SerializeDataStartRequestParams requestParams)
{
this.requestParams = requestParams;
this.columns = this.MapColumns(requestParams.Columns);
this.FilePath = requestParams.FilePath;
}
private IList<DbColumnWrapper> MapColumns(ColumnInfo[] columns)
{
List<DbColumnWrapper> columnWrappers = new List<DbColumnWrapper>();
foreach (ColumnInfo column in columns)
{
DbColumnWrapper wrapper = new DbColumnWrapper(column);
columnWrappers.Add(wrapper);
}
return columnWrappers;
}
public SerializeDataResult ProcessRequest(ISerializationParams serializeParams)
{
SerializeDataResult result = new SerializeDataResult();
try
{
this.WriteData(serializeParams.Rows, serializeParams.IsLastBatch);
if (serializeParams.IsLastBatch)
{
this.CloseStreams();
}
result.Succeeded = true;
}
catch (Exception ex)
{
result.Messages = ex.Message;
result.Succeeded = false;
this.CloseStreams();
}
return result;
}
public void WriteData(DbCellValue[][] rows, bool isComplete)
{
this.EnsureWriterCreated();
foreach (var row in rows)
{
SetRawObjects(row);
writer.WriteRow(row, this.columns);
}
}
private void SetRawObjects(DbCellValue[] row)
{
for (int i = 0; i < row.Length; i++)
{
try
{
// Try to set as the "correct" type
var value = Convert.ChangeType(row[i].DisplayValue, columns[i].DataType);
row[i].RawObject = value;
}
catch (Exception)
{
row[i].RawObject = row[i].DisplayValue;
}
}
}
private void EnsureWriterCreated()
{
if (this.writer == null)
{
IFileStreamFactory factory;
switch (this.requestParams.SaveFormat.ToLowerInvariant())
{
case "json":
factory = new SaveAsJsonFileStreamFactory()
{
SaveRequestParams = CreateJsonRequestParams()
};
break;
case "csv":
factory = new SaveAsCsvFileStreamFactory()
{
SaveRequestParams = CreateCsvRequestParams()
};
break;
case "xml":
factory = new SaveAsXmlFileStreamFactory()
{
SaveRequestParams = CreateXmlRequestParams()
};
break;
case "excel":
factory = new SaveAsExcelFileStreamFactory()
{
SaveRequestParams = CreateExcelRequestParams()
};
break;
default:
throw new Exception(SR.SerializationServiceUnsupportedFormat(this.requestParams.SaveFormat));
}
this.writer = factory.GetWriter(requestParams.FilePath);
}
}
private void CloseStreams()
{
// Guard the error path where the writer was never created (e.g. an unsupported format)
this.writer?.Dispose();
}
private SaveResultsAsJsonRequestParams CreateJsonRequestParams()
{
return new SaveResultsAsJsonRequestParams
{
FilePath = this.requestParams.FilePath,
BatchIndex = 0,
ResultSetIndex = 0
};
}
private SaveResultsAsExcelRequestParams CreateExcelRequestParams()
{
return new SaveResultsAsExcelRequestParams
{
FilePath = this.requestParams.FilePath,
BatchIndex = 0,
ResultSetIndex = 0,
IncludeHeaders = this.requestParams.IncludeHeaders
};
}
private SaveResultsAsCsvRequestParams CreateCsvRequestParams()
{
return new SaveResultsAsCsvRequestParams
{
FilePath = this.requestParams.FilePath,
BatchIndex = 0,
ResultSetIndex = 0,
IncludeHeaders = this.requestParams.IncludeHeaders,
Delimiter = this.requestParams.Delimiter,
LineSeperator = this.requestParams.LineSeparator,
TextIdentifier = this.requestParams.TextIdentifier,
Encoding = this.requestParams.Encoding
};
}
private SaveResultsAsXmlRequestParams CreateXmlRequestParams()
{
return new SaveResultsAsXmlRequestParams
{
FilePath = this.requestParams.FilePath,
BatchIndex = 0,
ResultSetIndex = 0,
Formatted = this.requestParams.Formatted,
Encoding = this.requestParams.Encoding
};
}
}
}
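
For reference, a minimal sketch of driving DataSerializer directly with a single-batch JSON save (illustrative only; in production the request handlers above own the serializer and its lifetime, and the path and data here are made up):

// Assumes: using System.IO; plus the QueryExecution.Contracts namespace.
var request = new SerializeDataStartRequestParams
{
    FilePath = Path.GetTempFileName(),
    SaveFormat = "json",
    Columns = new[] { new ColumnInfo("Id", "Int") },
    Rows = new[] { new[] { new DbCellValue { DisplayValue = "1", IsNull = false } } },
    IsLastBatch = true                   // single batch: the writer is disposed after writing
};
var serializer = new DataSerializer(request);
SerializeDataResult result = serializer.ProcessRequest(request);
// result.Succeeded == true on success; on failure, Messages carries the error text
// (for example the SerializationServiceUnsupportedFormat string for an unknown SaveFormat).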

View File

@@ -0,0 +1,351 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using System.Xml;
using Microsoft.SqlTools.Extensibility;
using Microsoft.SqlTools.Hosting.Protocol;
using Microsoft.SqlTools.ServiceLayer.QueryExecution;
using Microsoft.SqlTools.ServiceLayer.QueryExecution.Contracts;
using Microsoft.SqlTools.ServiceLayer.Test.Common;
using Microsoft.SqlTools.ServiceLayer.Test.Common.RequestContextMocking;
using Moq;
using Newtonsoft.Json;
using Xunit;
namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.SaveResults
{
public class SerializationServiceTests
{
private static readonly DbCellValue[][] DefaultData = new DbCellValue[3][] {
new DbCellValue[] {
new DbCellValue() { DisplayValue = "1", IsNull = false },
new DbCellValue() { DisplayValue = "Hello", IsNull = false },
new DbCellValue() { DisplayValue = "false", IsNull = false },
},
new DbCellValue[] {
new DbCellValue() { DisplayValue = "2", IsNull = false },
new DbCellValue() { DisplayValue = null, IsNull = true },
new DbCellValue() { DisplayValue = "true", IsNull = false },
},
new DbCellValue[] {
new DbCellValue() { DisplayValue = "3", IsNull = false },
new DbCellValue() { DisplayValue = "World", IsNull = false },
new DbCellValue() { DisplayValue = "True", IsNull = false },
}
};
private static readonly ColumnInfo[] DefaultColumns = {
new ColumnInfo("IntCol", "Int"),
new ColumnInfo("StringCol", "NVarChar"),
new ColumnInfo("BitCol", "Bit")
};
public SerializationServiceTests()
{
HostMock = new Mock<IProtocolEndpoint>();
ServiceProvider = ExtensionServiceProvider.CreateDefaultServiceProvider();
HostLoader.InitializeHostedServices(ServiceProvider, HostMock.Object);
SerializationService = ServiceProvider.GetService<SerializationService>();
}
protected ExtensionServiceProvider ServiceProvider { get; private set; }
protected Mock<IProtocolEndpoint> HostMock { get; private set; }
protected SerializationService SerializationService { get; private set; }
[Fact]
public async Task SaveResultsAsCsvNoHeaderSuccess()
{
await TestSaveAsCsvSuccess(false);
}
[Fact]
public async Task SaveResultsAsCsvWithHeaderSuccess()
{
await TestSaveAsCsvSuccess(true);
}
private async Task TestSaveAsCsvSuccess(bool includeHeaders)
{
await this.RunFileSaveTest(async (filePath) =>
{
// Given:
// ... A simple data set that requires 1 message
SerializeDataStartRequestParams saveParams = new SerializeDataStartRequestParams()
{
FilePath = filePath,
Columns = DefaultColumns,
Rows = DefaultData,
IsLastBatch = true,
SaveFormat = "csv",
IncludeHeaders = includeHeaders
};
// When: I attempt to save this to a file
var efv = new EventFlowValidator<SerializeDataResult>()
.AddStandardResultValidator()
.Complete();
await SerializationService.HandleSerializeStartRequest(saveParams, efv.Object);
// Then:
// ... There should not have been an error
efv.Validate();
// ... And the file should look as expected
VerifyContents.VerifyCsvMatchesData(saveParams.Rows, saveParams.Columns, saveParams.IncludeHeaders, saveParams.FilePath);
});
}
[Fact]
public async Task SaveResultsAsCsvNoHeaderMultiRequestSuccess()
{
await TestSaveAsCsvMultiRequestSuccess(false);
}
[Fact]
public async Task SaveResultsAsCsvWithHeaderMultiRequestSuccess()
{
await TestSaveAsCsvMultiRequestSuccess(true);
}
private async Task TestSaveAsCsvMultiRequestSuccess(bool includeHeaders)
{
Action<SerializeDataStartRequestParams> setParams = (serializeParams) => {
serializeParams.SaveFormat = "csv";
serializeParams.IncludeHeaders = includeHeaders;
};
Action<string> validation = (filePath) => {
VerifyContents.VerifyCsvMatchesData(DefaultData, DefaultColumns, includeHeaders, filePath);
};
await this.TestSerializeDataMultiRequestSuccess(setParams, validation);
}
[Fact]
public async Task SaveAsJsonMultiRequestSuccess()
{
Action<SerializeDataStartRequestParams> setParams = (serializeParams) => {
serializeParams.SaveFormat = "json";
};
Action<string> validation = (filePath) => {
VerifyContents.VerifyJsonMatchesData(DefaultData, DefaultColumns, filePath);
};
await this.TestSerializeDataMultiRequestSuccess(setParams, validation);
}
[Fact]
public async Task SaveAsXmlMultiRequestSuccess()
{
Action<SerializeDataStartRequestParams> setParams = (serializeParams) => {
serializeParams.SaveFormat = "xml";
};
Action<string> validation = (filePath) => {
VerifyContents.VerifyXmlMatchesData(DefaultData, DefaultColumns, filePath);
};
await this.TestSerializeDataMultiRequestSuccess(setParams, validation);
}
private async Task TestSerializeDataMultiRequestSuccess(Action<SerializeDataStartRequestParams> setStandardParams, Action<string> verify)
{
await this.RunFileSaveTest(async (filePath) =>
{
// Given:
// ... A simple data set that requires 3 messages
var startParams = new SerializeDataStartRequestParams()
{
FilePath = filePath,
Columns = DefaultColumns,
Rows = new DbCellValue[][] { DefaultData[0] },
IsLastBatch = false
};
setStandardParams(startParams);
// When I send all 3 messages
await SendAndVerifySerializeStartRequest(startParams);
var continueParams = new SerializeDataContinueRequestParams()
{
FilePath = filePath,
Rows = new DbCellValue[][] { DefaultData[1] },
IsLastBatch = false
};
await SendAndVerifySerializeContinueRequest(continueParams);
continueParams.Rows = new DbCellValue[][] { DefaultData[2] };
continueParams.IsLastBatch = true;
await SendAndVerifySerializeContinueRequest(continueParams);
// ... Then the file should look as expected
verify(filePath);
});
}
private async Task SendAndVerifySerializeStartRequest(SerializeDataStartRequestParams request1)
{
// When: I attempt to save this to a file
var efv = new EventFlowValidator<SerializeDataResult>()
.AddStandardResultValidator()
.Complete();
await SerializationService.HandleSerializeStartRequest(request1, efv.Object);
// Then:
// ... There should not have been an error
efv.Validate();
}
private async Task SendAndVerifySerializeContinueRequest(SerializeDataContinueRequestParams request1)
{
// When: I attempt to save this to a file
var efv = new EventFlowValidator<SerializeDataResult>()
.AddStandardResultValidator()
.Complete();
await SerializationService.HandleSerializeContinueRequest(request1, efv.Object);
// Then:
// ... There should not have been an error
efv.Validate();
}
private async Task RunFileSaveTest(Func<string, Task> doSave)
{
using (SelfCleaningTempFile tempFile = new SelfCleaningTempFile())
{
// Await inside the using block so the temp file is not cleaned up before the save completes
await doSave(tempFile.FilePath);
}
}
}
public static class SerializeDataEventFlowValidatorExtensions
{
public static EventFlowValidator<SerializeDataResult> AddStandardResultValidator(
this EventFlowValidator<SerializeDataResult> efv)
{
return efv.AddResultValidation(r =>
{
Assert.NotNull(r);
Assert.Null(r.Messages);
Assert.True(r.Succeeded);
});
}
}
public static class VerifyContents
{
public static void VerifyCsvMatchesData(DbCellValue[][] data, ColumnInfo[] columns, bool includeHeaders, string filePath)
{
Assert.True(File.Exists(filePath), "Expected file to have been written");
string[] lines = File.ReadAllLines(filePath);
int expectedLength = includeHeaders ? data.Length + 1 : data.Length;
Assert.Equal(expectedLength, lines.Length);
int lineIndex = 0;
if (includeHeaders)
{
AssertLineEquals(lines[lineIndex], columns.Select((c) => c.Name).ToArray());
lineIndex++;
}
for (int dataIndex = 0; dataIndex < data.Length && lineIndex < lines.Length; dataIndex++, lineIndex++)
{
AssertLineEquals(lines[lineIndex], data[dataIndex].Select((d) => GetCsvPrintValue(d)).ToArray());
}
}
private static string GetCsvPrintValue(DbCellValue d)
{
return d.IsNull ? "NULL" : d.DisplayValue;
}
private static void AssertLineEquals(string line, string[] expected)
{
var actual = line.Split(',');
Assert.True(actual.Length == expected.Length, string.Format("Line '{0}' does not match values {1}", line, string.Join(",", expected)));
for (int i = 0; i < actual.Length; i++)
{
Assert.True(expected[i] == actual[i], string.Format("Line '{0}' does not match values '{1}' as '{2}' does not equal '{3}'", line, string.Join(",", expected), expected[i], actual[i]));
}
}
public static void VerifyJsonMatchesData(DbCellValue[][] data, ColumnInfo[] columns, string filePath)
{
// ... Upon deserialization to an array of dictionaries
Assert.True(File.Exists(filePath), "Expected file to have been written");
string output = File.ReadAllText(filePath);
Dictionary<string, object>[] outputObject =
JsonConvert.DeserializeObject<Dictionary<string, object>[]>(output);
// ... There should be 2 items in the array,
// ... The item should have three fields, and three values, assigned appropriately
// ... The deserialized values should match the display value
Assert.Equal(data.Length, outputObject.Length);
for (int rowIndex = 0; rowIndex < outputObject.Length; rowIndex++)
{
Dictionary<string,object> item = outputObject[rowIndex];
Assert.Equal(columns.Length, item.Count);
for (int columnIndex = 0; columnIndex < columns.Length; columnIndex++)
{
var key = columns[columnIndex].Name;
Assert.True(item.ContainsKey(key));
DbCellValue value = data[rowIndex][columnIndex];
object expectedValue = GetJsonExpectedValue(value, columns[columnIndex]);
Assert.Equal(expectedValue, item[key]);
}
}
}
private static object GetJsonExpectedValue(DbCellValue value, ColumnInfo column)
{
if (value.IsNull)
{
return null;
}
else if (column.DataTypeName == "Int")
{
return Int64.Parse(value.DisplayValue.ToLower());
}
else if (column.DataTypeName == "Bit")
{
return Boolean.Parse(value.DisplayValue.ToLower());
}
return value.DisplayValue;
}
public static void VerifyXmlMatchesData(DbCellValue[][] data, ColumnInfo[] columns, string filePath)
{
// ... Upon deserialization to an array of dictionaries
Assert.True(File.Exists(filePath), "Expected file to have been written");
string output = File.ReadAllText(filePath);
XmlDocument xmlDoc = new XmlDocument();
xmlDoc.LoadXml(output);
// ... There should be 2 items in the array,
// ... The item should have three fields, and three values, assigned appropriately
// ... The deserialized values should match the display value
string xpath = "data/row";
var rows = xmlDoc.SelectNodes(xpath);
Assert.Equal(data.Length, rows.Count);
for (int rowIndex = 0; rowIndex < rows.Count; rowIndex++)
{
var rowValue = rows.Item(rowIndex);
var xmlCols = rowValue.ChildNodes.Cast<XmlNode>().ToArray();
Assert.Equal(columns.Length, xmlCols.Length);
for (int columnIndex = 0; columnIndex < columns.Length; columnIndex++)
{
var columnName = columns[columnIndex].Name;
var xmlColumn = xmlCols.FirstOrDefault(x => x.Name == columnName);
Assert.NotNull(xmlColumn);
DbCellValue value = data[rowIndex][columnIndex];
object expectedValue = GetXmlExpectedValue(value);
Assert.Equal(expectedValue, xmlColumn.InnerText);
}
}
}
private static string GetXmlExpectedValue(DbCellValue d)
{
return d.IsNull ? "" : d.DisplayValue;
}
}
}