Export headers in an empty result set (#1434)

* Minimal changes to make headers appear on empty result sets

* Columns for everyone!

* Updating tests - some don't pass yet

* Adding some more tests to verify the changes for column/row selection

* null default columns

* Updates to comments as per PR comments
This commit is contained in:
Benjamin Russell
2022-03-31 11:10:32 -05:00
committed by GitHub
parent 5d805bd678
commit 2ace786d95
24 changed files with 814 additions and 499 deletions

View File

@@ -3,6 +3,9 @@
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//
using System.Collections.Generic;
using Microsoft.SqlTools.ServiceLayer.QueryExecution.Contracts;
namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage
{
/// <summary>
@@ -14,7 +17,7 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage
IFileStreamReader GetReader(string fileName);
IFileStreamWriter GetWriter(string fileName);
IFileStreamWriter GetWriter(string fileName, IReadOnlyList<DbColumnWrapper> columns = null);
void DisposeFile(string fileName);

View File

@@ -16,7 +16,7 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage
public interface IFileStreamWriter : IDisposable
{
int WriteRow(StorageDataReader dataReader);
void WriteRow(IList<DbCellValue> row, IList<DbColumnWrapper> columns);
void WriteRow(IList<DbCellValue> row, IReadOnlyList<DbColumnWrapper> columns);
void Seek(long offset);
void FlushBuffer();
}

View File

@@ -4,6 +4,7 @@
//
using System;
using System.Collections.Generic;
using System.IO;
using Microsoft.SqlTools.ServiceLayer.QueryExecution.Contracts;
using Microsoft.SqlTools.ServiceLayer.SqlContext;
@@ -48,17 +49,28 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage
/// <returns>Stream reader</returns>
public IFileStreamReader GetReader(string fileName)
{
return new ServiceBufferFileStreamReader(new FileStream(fileName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite), QueryExecutionSettings);
return new ServiceBufferFileStreamReader(
new FileStream(fileName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite),
QueryExecutionSettings
);
}
/// <summary>
/// Returns a new CSV writer for writing results to a CSV file, file share is ReadWrite to allow concurrent reads/writes to the file.
/// </summary>
/// <param name="fileName">Path to the CSV output file</param>
/// <param name="columns">
/// The entire list of columns for the result set. They will be filtered down as per the
/// request params.
/// </param>
/// <returns>Stream writer</returns>
public IFileStreamWriter GetWriter(string fileName)
public IFileStreamWriter GetWriter(string fileName, IReadOnlyList<DbColumnWrapper> columns)
{
return new SaveAsCsvFileStreamWriter(new FileStream(fileName, FileMode.Create, FileAccess.ReadWrite, FileShare.ReadWrite), SaveRequestParams);
return new SaveAsCsvFileStreamWriter(
new FileStream(fileName, FileMode.Create, FileAccess.ReadWrite, FileShare.ReadWrite),
SaveRequestParams,
columns
);
}
/// <summary>

View File

@@ -20,8 +20,11 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage
#region Member Variables
private readonly SaveResultsAsCsvRequestParams saveParams;
private bool headerWritten;
private readonly char delimiter;
private readonly Encoding encoding;
private readonly string lineSeparator;
private readonly char textIdentifier;
private readonly string textIdentifierString;
#endregion
@@ -31,56 +34,39 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage
/// </summary>
/// <param name="stream">FileStream to access the CSV file output</param>
/// <param name="requestParams">CSV save as request parameters</param>
public SaveAsCsvFileStreamWriter(Stream stream, SaveResultsAsCsvRequestParams requestParams)
: base(stream, requestParams)
{
saveParams = requestParams;
}
/// <summary>
/// Writes a row of data as a CSV row. If this is the first row and the user has requested
/// it, the headers for the column will be emitted as well.
/// </summary>
/// <param name="row">The data of the row to output to the file</param>
/// <param name="columns">
/// The entire list of columns for the result set. They will be filtered down as per the
/// request params.
/// </param>
public override void WriteRow(IList<DbCellValue> row, IList<DbColumnWrapper> columns)
public SaveAsCsvFileStreamWriter(Stream stream, SaveResultsAsCsvRequestParams requestParams, IReadOnlyList<DbColumnWrapper> columns)
: base(stream, requestParams, columns)
{
char delimiter = ',';
if(!string.IsNullOrEmpty(saveParams.Delimiter))
// Parse the config
delimiter = ',';
if (!string.IsNullOrEmpty(requestParams.Delimiter))
{
// first char in string
delimiter = saveParams.Delimiter[0];
delimiter = requestParams.Delimiter[0];
}
string lineSeperator = Environment.NewLine;
if(!string.IsNullOrEmpty(saveParams.LineSeperator))
lineSeparator = Environment.NewLine;
if (!string.IsNullOrEmpty(requestParams.LineSeperator))
{
lineSeperator = saveParams.LineSeperator;
lineSeparator = requestParams.LineSeperator;
}
char textIdentifier = '"';
if(!string.IsNullOrEmpty(saveParams.TextIdentifier))
textIdentifier = '"';
if (!string.IsNullOrEmpty(requestParams.TextIdentifier))
{
// first char in string
textIdentifier = saveParams.TextIdentifier[0];
textIdentifier = requestParams.TextIdentifier[0];
}
textIdentifierString = textIdentifier.ToString();
Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);
int codepage;
Encoding encoding;
try
{
if(int.TryParse(saveParams.Encoding, out codepage))
{
encoding = Encoding.GetEncoding(codepage);
}
else
{
encoding = Encoding.GetEncoding(saveParams.Encoding);
}
encoding = int.TryParse(requestParams.Encoding, out int codePage)
? Encoding.GetEncoding(codePage)
: Encoding.GetEncoding(requestParams.Encoding);
}
catch
{
@@ -88,30 +74,38 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage
encoding = Encoding.GetEncoding("utf-8");
}
// Write out the header if we haven't already and the user chose to have it
if (saveParams.IncludeHeaders && !headerWritten)
// Output the header if the user requested it
if (requestParams.IncludeHeaders)
{
// Build the string
var selectedColumns = columns.Skip(ColumnStartIndex ?? 0).Take(ColumnCount ?? columns.Count)
.Select(c => EncodeCsvField(c.ColumnName, delimiter, textIdentifier) ?? string.Empty);
var selectedColumns = columns.Skip(ColumnStartIndex)
.Take(ColumnCount)
.Select(c => EncodeCsvField(c.ColumnName) ?? string.Empty);
string headerLine = string.Join(delimiter, selectedColumns);
// Encode it and write it out
byte[] headerBytes = encoding.GetBytes(headerLine + lineSeperator);
byte[] headerBytes = encoding.GetBytes(headerLine + lineSeparator);
FileStream.Write(headerBytes, 0, headerBytes.Length);
headerWritten = true;
}
}
/// <summary>
/// Writes a row of data as a CSV row. Headers, if the user requested them, are written
/// by the constructor, so this method only emits the row's data.
/// </summary>
/// <param name="row">The data of the row to output to the file</param>
/// <param name="columns">The columns for the row to output</param>
public override void WriteRow(IList<DbCellValue> row, IReadOnlyList<DbColumnWrapper> columns)
{
// Build the string for the row
var selectedCells = row.Skip(ColumnStartIndex ?? 0)
.Take(ColumnCount ?? columns.Count)
.Select(c => EncodeCsvField(c.DisplayValue, delimiter, textIdentifier));
var selectedCells = row.Skip(ColumnStartIndex)
.Take(ColumnCount)
.Select(c => EncodeCsvField(c.DisplayValue));
string rowLine = string.Join(delimiter, selectedCells);
// Encode it and write it out
byte[] rowBytes = encoding.GetBytes(rowLine + lineSeperator);
byte[] rowBytes = encoding.GetBytes(rowLine + lineSeparator);
FileStream.Write(rowBytes, 0, rowBytes.Length);
}
@@ -124,7 +118,7 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage
/// <list type="bullet">
/// <item><description>The field begins or ends with a space</description></item>
/// <item><description>The field begins or ends with a tab</description></item>
/// <item><description>The field contains the ListSeparator string</description></item>
/// <item><description>The field contains the delimiter string</description></item>
/// <item><description>The field contains the '\n' character</description></item>
/// <item><description>The field contains the '\r' character</description></item>
/// <item><description>The field contains the '"' character</description></item>
@@ -132,27 +126,24 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage
/// </summary>
/// <param name="field">The field to encode</param>
/// <returns>The CSV encoded version of the original field</returns>
internal static string EncodeCsvField(string field, char delimiter, char textIdentifier)
internal string EncodeCsvField(string field)
{
string strTextIdentifier = textIdentifier.ToString();
// Special case for nulls
if (field == null)
{
return "NULL";
}
// Replace all quotes in the original field with double quotes
string ret = field.Replace(textIdentifierString, textIdentifierString + textIdentifierString);
// Whether this field has special characters which require it to be embedded in quotes
bool embedInQuotes = field.IndexOfAny(new[] { delimiter, '\r', '\n', textIdentifier }) >= 0 // Contains special characters
|| field.StartsWith(" ") || field.EndsWith(" ") // Start/Ends with space
|| field.StartsWith("\t") || field.EndsWith("\t"); // Starts/Ends with tab
//Replace all quotes in the original field with double quotes
string ret = field.Replace(strTextIdentifier, strTextIdentifier + strTextIdentifier);
if (embedInQuotes)
{
ret = strTextIdentifier + $"{ret}" + strTextIdentifier;
ret = $"{textIdentifier}{ret}{textIdentifier}";
}
return ret;

View File

@@ -4,6 +4,7 @@
//
using System;
using System.Collections.Generic;
using System.IO;
using Microsoft.SqlTools.ServiceLayer.QueryExecution.Contracts;
using Microsoft.SqlTools.ServiceLayer.SqlContext;
@@ -48,17 +49,28 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage
/// <returns>Stream reader</returns>
public IFileStreamReader GetReader(string fileName)
{
return new ServiceBufferFileStreamReader(new FileStream(fileName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite), QueryExecutionSettings);
return new ServiceBufferFileStreamReader(
new FileStream(fileName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite),
QueryExecutionSettings
);
}
/// <summary>
/// Returns a new Excel writer for writing results to a Excel file, file share is ReadWrite to allow concurrent reads/writes to the file.
/// </summary>
/// <param name="fileName">Path to the Excel output file</param>
/// <param name="columns">
/// The entire list of columns for the result set. They will be filtered down as per the
/// request params.
/// </param>
/// <returns>Stream writer</returns>
public IFileStreamWriter GetWriter(string fileName)
public IFileStreamWriter GetWriter(string fileName, IReadOnlyList<DbColumnWrapper> columns)
{
return new SaveAsExcelFileStreamWriter(new FileStream(fileName, FileMode.Create, FileAccess.ReadWrite, FileShare.ReadWrite), SaveRequestParams);
return new SaveAsExcelFileStreamWriter(
new FileStream(fileName, FileMode.Create, FileAccess.ReadWrite, FileShare.ReadWrite),
SaveRequestParams,
columns
);
}
/// <summary>

View File

@@ -30,8 +30,12 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage
/// </summary>
/// <param name="stream">FileStream to access the Excel file output</param>
/// <param name="requestParams">Excel save as request parameters</param>
public SaveAsExcelFileStreamWriter(Stream stream, SaveResultsAsExcelRequestParams requestParams)
: base(stream, requestParams)
/// <param name="columns">
/// The entire list of columns for the result set. They will be filtered down as per the
/// request params.
/// </param>
public SaveAsExcelFileStreamWriter(Stream stream, SaveResultsAsExcelRequestParams requestParams, IReadOnlyList<DbColumnWrapper> columns)
: base(stream, requestParams, columns)
{
saveParams = requestParams;
helper = new SaveAsExcelFileStreamWriterHelper(stream);
@@ -47,16 +51,13 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage
/// The entire list of columns for the result set. They will be filtered down as per the
/// request params.
/// </param>
public override void WriteRow(IList<DbCellValue> row, IList<DbColumnWrapper> columns)
public override void WriteRow(IList<DbCellValue> row, IReadOnlyList<DbColumnWrapper> columns)
{
int columnStart = ColumnStartIndex ?? 0;
int columnEnd = (ColumnEndIndex != null) ? ColumnEndIndex.Value + 1 : columns.Count;
// Write out the header if we haven't already and the user chose to have it
if (saveParams.IncludeHeaders && !headerWritten)
{
sheet.AddRow();
for (int i = columnStart; i < columnEnd; i++)
for (int i = ColumnStartIndex; i <= ColumnEndIndex; i++)
{
sheet.AddCell(columns[i].ColumnName);
}
@@ -64,7 +65,7 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage
}
sheet.AddRow();
for (int i = columnStart; i < columnEnd; i++)
for (int i = ColumnStartIndex; i <= ColumnEndIndex; i++)
{
sheet.AddCell(row[i]);
}

View File

@@ -4,6 +4,7 @@
//
using System;
using System.Collections.Generic;
using System.IO;
using Microsoft.SqlTools.ServiceLayer.QueryExecution.Contracts;
using Microsoft.SqlTools.ServiceLayer.SqlContext;
@@ -52,10 +53,18 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage
/// Returns a new JSON writer for writing results to a JSON file, file share is ReadWrite to allow concurrent reads/writes to the file.
/// </summary>
/// <param name="fileName">Path to the JSON output file</param>
/// <param name="columns">
/// The entire list of columns for the result set. They will be filtered down as per the
/// request params.
/// </param>
/// <returns>Stream writer</returns>
public IFileStreamWriter GetWriter(string fileName)
public IFileStreamWriter GetWriter(string fileName, IReadOnlyList<DbColumnWrapper> columns)
{
return new SaveAsJsonFileStreamWriter(new FileStream(fileName, FileMode.Create, FileAccess.ReadWrite, FileShare.ReadWrite), SaveRequestParams);
return new SaveAsJsonFileStreamWriter(
new FileStream(fileName, FileMode.Create, FileAccess.ReadWrite, FileShare.ReadWrite),
SaveRequestParams,
columns
);
}
/// <summary>

View File

@@ -33,8 +33,12 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage
/// </summary>
/// <param name="stream">FileStream to access the JSON file output</param>
/// <param name="requestParams">JSON save as request parameters</param>
public SaveAsJsonFileStreamWriter(Stream stream, SaveResultsRequestParams requestParams)
: base(stream, requestParams)
/// <param name="columns">
/// The entire list of columns for the result set. They will be filtered down as per the
/// request params.
/// </param>
public SaveAsJsonFileStreamWriter(Stream stream, SaveResultsRequestParams requestParams, IReadOnlyList<DbColumnWrapper> columns)
: base(stream, requestParams, columns)
{
// Setup the internal state
streamWriter = new StreamWriter(stream);
@@ -53,15 +57,13 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage
/// The entire list of columns for the result set. They will be filtered down as per the
/// request params.
/// </param>
public override void WriteRow(IList<DbCellValue> row, IList<DbColumnWrapper> columns)
public override void WriteRow(IList<DbCellValue> row, IReadOnlyList<DbColumnWrapper> columns)
{
// Write the header for the object
jsonWriter.WriteStartObject();
// Write the items out as properties
int columnStart = ColumnStartIndex ?? 0;
int columnEnd = (ColumnEndIndex != null) ? ColumnEndIndex.Value + 1 : columns.Count;
for (int i = columnStart; i < columnEnd; i++)
for (int i = ColumnStartIndex; i <= ColumnEndIndex; i++)
{
jsonWriter.WritePropertyName(columns[i].ColumnName);
if (row[i].RawObject == null)

View File

@@ -7,6 +7,7 @@ using System;
using System.Collections.Generic;
using System.IO;
using Microsoft.SqlTools.ServiceLayer.QueryExecution.Contracts;
using Microsoft.SqlTools.Utility;
namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage
{
@@ -21,18 +22,31 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage
/// </summary>
/// <param name="stream">The stream that will be written to</param>
/// <param name="requestParams">The SaveAs request parameters</param>
protected SaveAsStreamWriter(Stream stream, SaveResultsRequestParams requestParams)
/// <param name="columns">
/// The entire list of columns for the result set. Used to determine which columns to
/// output.
/// </param>
protected SaveAsStreamWriter(Stream stream, SaveResultsRequestParams requestParams, IReadOnlyList<DbColumnWrapper> columns)
{
Validate.IsNotNull(nameof(stream), stream);
Validate.IsNotNull(nameof(columns), columns);
FileStream = stream;
var saveParams = requestParams;
if (requestParams.IsSaveSelection)
{
// ReSharper disable PossibleInvalidOperationException IsSaveSelection verifies these values exist
ColumnStartIndex = saveParams.ColumnStartIndex.Value;
ColumnEndIndex = saveParams.ColumnEndIndex.Value;
ColumnCount = saveParams.ColumnEndIndex.Value - saveParams.ColumnStartIndex.Value + 1;
ColumnStartIndex = requestParams.ColumnStartIndex.Value;
ColumnEndIndex = requestParams.ColumnEndIndex.Value;
// ReSharper restore PossibleInvalidOperationException
}
else
{
// Save request was for the entire result set, use default start/end
ColumnStartIndex = 0;
ColumnEndIndex = columns.Count - 1;
}
ColumnCount = ColumnEndIndex - ColumnStartIndex + 1;
}
#region Properties
@@ -40,22 +54,22 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage
/// <summary>
/// Index of the first column to write to the output file
/// </summary>
protected int? ColumnStartIndex { get; private set; }
protected int ColumnStartIndex { get; }
/// <summary>
/// Number of columns to write to the output file
/// </summary>
protected int? ColumnCount { get; private set; }
protected int ColumnCount { get; }
/// <summary>
/// Index of the last column to write to the output file
/// Index of the last column to write to the output file (inclusive).
/// </summary>
protected int? ColumnEndIndex { get; private set; }
protected int ColumnEndIndex { get; }
/// <summary>
/// The file stream to use to write the output file
/// </summary>
protected Stream FileStream { get; private set; }
protected Stream FileStream { get; }
#endregion
@@ -73,7 +87,7 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage
/// </summary>
/// <param name="row">The row of data to output</param>
/// <param name="columns">The list of columns to output</param>
public abstract void WriteRow(IList<DbCellValue> row, IList<DbColumnWrapper> columns);
public abstract void WriteRow(IList<DbCellValue> row, IReadOnlyList<DbColumnWrapper> columns);
/// <summary>
/// Not implemented, do not use.

View File

@@ -4,6 +4,7 @@
//
using System;
using System.Collections.Generic;
using System.IO;
using Microsoft.SqlTools.ServiceLayer.QueryExecution.Contracts;
using Microsoft.SqlTools.ServiceLayer.SqlContext;
@@ -45,17 +46,28 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage
/// <returns>Stream reader</returns>
public IFileStreamReader GetReader(string fileName)
{
return new ServiceBufferFileStreamReader(new FileStream(fileName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite), QueryExecutionSettings);
return new ServiceBufferFileStreamReader(
new FileStream(fileName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite),
QueryExecutionSettings
);
}
/// <summary>
/// Returns a new XML writer for writing results to a XML file, file share is ReadWrite to allow concurrent reads/writes to the file.
/// </summary>
/// <param name="fileName">Path to the XML output file</param>
/// <param name="columns">
/// The entire list of columns for the result set. They will be filtered down as per the
/// request params.
/// </param>
/// <returns>Stream writer</returns>
public IFileStreamWriter GetWriter(string fileName)
public IFileStreamWriter GetWriter(string fileName, IReadOnlyList<DbColumnWrapper> columns)
{
return new SaveAsXmlFileStreamWriter(new FileStream(fileName, FileMode.Create, FileAccess.ReadWrite, FileShare.ReadWrite), SaveRequestParams);
return new SaveAsXmlFileStreamWriter(
new FileStream(fileName, FileMode.Create, FileAccess.ReadWrite, FileShare.ReadWrite),
SaveRequestParams,
columns
);
}
/// <summary>

View File

@@ -39,8 +39,12 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage
/// </summary>
/// <param name="stream">FileStream to access the XML file output</param>
/// <param name="requestParams">XML save as request parameters</param>
public SaveAsXmlFileStreamWriter(Stream stream, SaveResultsAsXmlRequestParams requestParams)
: base(stream, requestParams)
/// <param name="columns">
/// The entire list of columns for the result set. They will be filtered down as per the
/// request params.
/// </param>
public SaveAsXmlFileStreamWriter(Stream stream, SaveResultsAsXmlRequestParams requestParams, IReadOnlyList<DbColumnWrapper> columns)
: base(stream, requestParams, columns)
{
// Setup the internal state
var encoding = GetEncoding(requestParams);
@@ -60,15 +64,13 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage
/// The entire list of columns for the result set. They will be filtered down as per the
/// request params.
/// </param>
public override void WriteRow(IList<DbCellValue> row, IList<DbColumnWrapper> columns)
public override void WriteRow(IList<DbCellValue> row, IReadOnlyList<DbColumnWrapper> columns)
{
// Write the header for the object
xmlTextWriter.WriteStartElement(ItemElementTag);
// Write the items out as properties
int columnStart = ColumnStartIndex ?? 0;
int columnEnd = ColumnEndIndex + 1 ?? columns.Count;
for (int i = columnStart; i < columnEnd; i++)
for (int i = ColumnStartIndex; i <= ColumnEndIndex; i++)
{
// Write the column name as item tag
xmlTextWriter.WriteStartElement(columns[i].ColumnName);

View File

@@ -3,7 +3,9 @@
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//
using System.Collections.Generic;
using System.IO;
using Microsoft.SqlTools.ServiceLayer.QueryExecution.Contracts;
using Microsoft.SqlTools.ServiceLayer.SqlContext;
using Microsoft.SqlTools.ServiceLayer.Utility;
@@ -40,7 +42,10 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage
/// <returns>A <see cref="ServiceBufferFileStreamReader"/></returns>
public IFileStreamReader GetReader(string fileName)
{
return new ServiceBufferFileStreamReader(new FileStream(fileName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite), QueryExecutionSettings);
return new ServiceBufferFileStreamReader(
new FileStream(fileName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite),
QueryExecutionSettings
);
}
/// <summary>
@@ -48,10 +53,17 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage
/// SSMS formatted buffer file, file share is ReadWrite to allow concurrent reads/writes to the file.
/// </summary>
/// <param name="fileName">The file to write values to</param>
/// <param name="columns">
/// Ignored in order to fulfil the <see cref="IFileStreamFactory"/> contract.
/// @TODO: Refactor this out so that save-as writers do not use the same contract as service buffer writers.
/// </param>
/// <returns>A <see cref="ServiceBufferFileStreamWriter"/></returns>
public IFileStreamWriter GetWriter(string fileName)
public IFileStreamWriter GetWriter(string fileName, IReadOnlyList<DbColumnWrapper> columns)
{
return new ServiceBufferFileStreamWriter(new FileStream(fileName, FileMode.OpenOrCreate, FileAccess.ReadWrite, FileShare.ReadWrite), QueryExecutionSettings);
return new ServiceBufferFileStreamWriter(
new FileStream(fileName, FileMode.OpenOrCreate, FileAccess.ReadWrite, FileShare.ReadWrite),
QueryExecutionSettings
);
}
/// <summary>

View File

@@ -209,7 +209,7 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage
}
[Obsolete]
public void WriteRow(IList<DbCellValue> row, IList<DbColumnWrapper> columns)
public void WriteRow(IList<DbCellValue> row, IReadOnlyList<DbColumnWrapper> columns)
{
throw new InvalidOperationException("This type of writer is meant to write values from a DbDataReader only.");
}

View File

@@ -371,7 +371,7 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution
// Open a writer for the file
//
var fileWriter = fileStreamFactory.GetWriter(outputFileName);
var fileWriter = fileStreamFactory.GetWriter(outputFileName, null);
using (fileWriter)
{
// If we can initialize the columns using the column schema, use that
@@ -528,7 +528,7 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution
}
using (var fileReader = fileFactory.GetReader(outputFileName))
using (var fileWriter = fileFactory.GetWriter(saveParams.FilePath))
using (var fileWriter = fileFactory.GetWriter(saveParams.FilePath, Columns))
{
// Iterate over the rows that are in the selected row set
for (long i = rowStartIndex; i < rowEndIndex; ++i)
@@ -557,7 +557,7 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution
{
if (failureHandler != null)
{
await failureHandler(saveParams, t.Exception.Message);
await failureHandler(saveParams, t.Exception?.Message);
}
});

View File

@@ -153,7 +153,7 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution
{
private IFileStreamWriter writer;
private SerializeDataStartRequestParams requestParams;
private IList<DbColumnWrapper> columns;
private IReadOnlyList<DbColumnWrapper> columns;
public string FilePath { get; private set; }
@@ -164,7 +164,7 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution
this.FilePath = requestParams.FilePath;
}
private IList<DbColumnWrapper> MapColumns(ColumnInfo[] columns)
private IReadOnlyList<DbColumnWrapper> MapColumns(ColumnInfo[] columns)
{
List<DbColumnWrapper> columnWrappers = new List<DbColumnWrapper>();
foreach (ColumnInfo column in columns)
@@ -258,7 +258,7 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution
default:
throw new Exception(SR.SerializationServiceUnsupportedFormat(this.requestParams.SaveFormat));
}
this.writer = factory.GetWriter(requestParams.FilePath);
this.writer = factory.GetWriter(requestParams.FilePath, columns);
}
}
public void CloseStreams()

View File

@@ -5,7 +5,9 @@
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.IO;
using Microsoft.SqlTools.ServiceLayer.QueryExecution.Contracts;
using Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage;
using Microsoft.SqlTools.ServiceLayer.SqlContext;
using Moq;
@@ -32,8 +34,8 @@ namespace Microsoft.SqlTools.ServiceLayer.Test.Common
});
mock.Setup(fsf => fsf.GetReader(It.IsAny<string>()))
.Returns<string>(output => new ServiceBufferFileStreamReader(new MemoryStream(storage[output]), new QueryExecutionSettings()));
mock.Setup(fsf => fsf.GetWriter(It.IsAny<string>()))
.Returns<string>(output => new ServiceBufferFileStreamWriter(new MemoryStream(storage[output]), new QueryExecutionSettings()));
mock.Setup(fsf => fsf.GetWriter(It.IsAny<string>(), It.IsAny<IReadOnlyList<DbColumnWrapper>>()))
.Returns<string, IReadOnlyList<DbColumnWrapper>>((output, _) => new ServiceBufferFileStreamWriter(new MemoryStream(storage[output]), new QueryExecutionSettings()));
return mock.Object;
}

View File

@@ -321,7 +321,7 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.EditData
Mock<IEditMetadataFactory> emf = new Mock<IEditMetadataFactory>();
EditSession s = new EditSession(emf.Object);
Assert.That(() => s.Initialize(initParams, c, qr, sh, fh), Throws.InstanceOf<ArgumentException>(), "I initialize it with a missing parameter. It should throw an exception");
Assert.Catch<ArgumentException>(() => s.Initialize(initParams, c, qr, sh, fh), "I initialize it with a missing parameter. It should throw an exception");
}
public static IEnumerable<object[]> InitializeNullParamsData

View File

@@ -4,8 +4,8 @@
//
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
using Microsoft.SqlTools.ServiceLayer.QueryExecution.Contracts;
@@ -18,144 +18,247 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.DataStorage
public class SaveAsCsvFileStreamWriterTests
{
[Test]
public void EncodeCsvFieldShouldWrap(
[Values("Something\rElse",
"Something\nElse",
"Something\"Else",
"Something,Else",
"\tSomething",
"Something\t",
" Something",
"Something ",
" \t\r\n\",\r\n\"\r ")] string field)
public void Constructor_NullStream()
{
// If: I CSV encode a field that has forbidden characters in it
string output = SaveAsCsvFileStreamWriter.EncodeCsvField(field, ',', '\"');
// Act
TestDelegate action = () => _ = new SaveAsCsvFileStreamWriter(
null,
new SaveResultsAsCsvRequestParams(),
Array.Empty<DbColumnWrapper>()
);
// Then: It should wrap it in quotes
Assert.True(Regex.IsMatch(output, "^\".*")
&& Regex.IsMatch(output, ".*\"$"));
// Assert
Assert.Throws<ArgumentNullException>(action);
}
[Test]
public void EncodeCsvFieldShouldNotWrap(
[Values(
"Something",
"Something valid.",
"Something\tvalid"
)] string field)
public void Constructor_NullColumns()
{
// Act
TestDelegate action = () => _ = new SaveAsCsvFileStreamWriter(
Stream.Null,
new SaveResultsAsCsvRequestParams(),
null
);
// Assert
Assert.Throws<ArgumentNullException>(action);
}
[Test]
public void Constructor_WithoutSelectionWithHeader_WritesHeaderWithAllColumns()
{
// Setup:
// ... Create a request params that has no selection made, headers should be printed
// ... Create a set of columns
// --- Create a memory location to store the output
var requestParams = new SaveResultsAsCsvRequestParams { IncludeHeaders = true };
var (columns, _) = GetTestValues(2);
using var outputStream = new MemoryStream();
byte[] output = new byte[8192];
// If: I construct a CSV file writer
using var writer = new SaveAsCsvFileStreamWriter(new MemoryStream(output), requestParams, columns);
writer.Dispose();
// Then:
// ... It should have written a line
string[] lines = ParseWriterOutput(output, Environment.NewLine);
Assert.AreEqual(1, lines.Length);
// ... It should have written a header line with two comma separated names
string[] headerValues = lines[0].Split(",");
Assert.AreEqual(2, headerValues.Length);
for (int i = 0; i < columns.Length; i++)
{
Assert.AreEqual(columns[i].ColumnName, headerValues[i]);
}
}
[Test]
public void Constructor_WithSelectionWithHeader_WritesHeaderWithSelectedColumns()
{
// Setup:
// ... Create a request params that has a column selection made, headers should be printed
// ... Create a set of columns
// --- Create a memory location to store the output
var requestParams = new SaveResultsAsCsvRequestParams
{
IncludeHeaders = true,
ColumnStartIndex = 1,
ColumnEndIndex = 2,
RowStartIndex = 0, // Including b/c it is required to be a "save selection"
RowEndIndex = 10
};
var (columns, _) = GetTestValues(4);
using var outputStream = new MemoryStream();
byte[] output = new byte[8192];
// If: I construct a CSV file writer
using var writer = new SaveAsCsvFileStreamWriter(new MemoryStream(output), requestParams, columns);
writer.Dispose();
// Then:
// ... It should have written a line
string[] lines = ParseWriterOutput(output, Environment.NewLine);
Assert.AreEqual(1, lines.Length);
// ... It should have written a header line with two comma separated names
string[] headerValues = lines[0].Split(",");
Assert.AreEqual(2, headerValues.Length);
for (int i = 0; i < 2; i++)
{
Assert.AreEqual(columns[i + 1].ColumnName, headerValues[i]);
}
}
[Test]
public void Constructor_WithoutSelectionWithoutHeader_DoesNotWriteHeader()
{
// Setup:
// ... Create a request params that has no selection made, headers should not be printed
// ... Create a set of columns
// --- Create a memory location to store the output
var requestParams = new SaveResultsAsCsvRequestParams { IncludeHeaders = false };
var (columns, _) = GetTestValues(2);
byte[] output = new byte[8192];
// If: I construct a CSV file writer
using var writer = new SaveAsCsvFileStreamWriter(new MemoryStream(output), requestParams, columns);
writer.Dispose();
// Then:
// ... It should not have written anything
string[] lines = ParseWriterOutput(output, Environment.NewLine);
Assert.IsEmpty(lines);
}
[TestCase("Something\rElse")] // Contains carriage return
[TestCase("Something\nElse")] // Contains line feed
[TestCase("Something\"Else")] // Contains default text identifier
[TestCase("Something,Else")] // Contains field separator
public void EncodeCsvField_ContainsDefaultControlCharacters_ShouldBeWrapped(string field)
{
// Setup: Create CsvFileStreamWriter using default control characters
using var writer = GetWriterForEncodingTests(null, null, null);
// If: I CSV encode a field that has forbidden characters in it
string output = writer.EncodeCsvField(field);
// Then: It should wrap it in quotes
Assert.True(Regex.IsMatch(output, "^\".*\"$", RegexOptions.Singleline));
}
[TestCase("Something\rElse")] // Contains carriage return [TODO: Don't support this]
[TestCase("Something\nElse")] // Contains line feed [TODO: Don't support this]
[TestCase("Something[Else")] // Contains default text identifier
[TestCase("Something$Else")] // Contains field separator
//[TestCase("Something||Else")] // Contains line break [TODO: Support this]
public void EncodeCsvField_ContainsNonDefaultControlCharacters_ShouldBeWrapped(string field)
{
// Setup: Create CsvFileStreamWriter using non-default control characters
var writer = GetWriterForEncodingTests("$foo", "[bar", "||");
// If: I CSV encode a field that has forbidden characters in it
string output = writer.EncodeCsvField(field);
// Then: It should wrap it in quotes
Assert.True(Regex.IsMatch(output, @"^\[.*\[$", RegexOptions.Singleline));
}
[TestCase("\tSomething")] // Starts with tab
[TestCase("Something\t")] // Ends with tab
[TestCase("\rSomething")] // Starts with carriage return
[TestCase("Something\r")] // Ends with carriage return
[TestCase("\nSomething")] // Starts with line feed
[TestCase("Something\n")] // Ends with line feed
[TestCase(" Something")] // Starts with space
[TestCase("Something ")] // Ends with space
[TestCase(" Something ")] // Starts and ends with space
public void EncodeCsvField_WhitespaceAtFrontOrBack_ShouldBeWrapped(string field)
{
// Setup: Create CsvFileStreamWriter that specifies the text identifier and field separator
var writer = GetWriterForEncodingTests(null, null, null);
// If: I CSV encode a field that has forbidden characters in it
string output = writer.EncodeCsvField(field);
// Then: It should wrap it in quotes
Assert.True(Regex.IsMatch(output, "^\".*\"$", RegexOptions.Singleline));
}
[TestCase("Something")]
[TestCase("Something valid.")]
[TestCase("Something\tvalid")]
public void EncodeCsvField_ShouldNotWrap(string field)
{
// Setup: Create CsvFileStreamWriter that specifies the text identifier and field separator
var writer = GetWriterForEncodingTests(null, null, null);
// If: I CSV encode a field that does not have forbidden characters in it
string output = SaveAsCsvFileStreamWriter.EncodeCsvField(field, ',', '\"');
string output = writer.EncodeCsvField(field);
// Then: It should not wrap it in quotes
Assert.False(Regex.IsMatch(output, "^\".*\"$"));
}
[Test]
public void EncodeCsvFieldReplace()
[TestCase(null, "Some\"thing", "\"Some\"\"thing\"")] // Default identifier
[TestCase("|$", "Some|thing", "|Some||thing|")] // Custom identifier
public void EncodeCsvField_ContainsTextIdentifier_DoublesIdentifierAndWraps(
string configuredIdentifier,
string input,
string expectedOutput)
{
// Setup: Create CsvFileStreamWriter that specifies the text identifier and field separator
var writer = GetWriterForEncodingTests(null, configuredIdentifier, null);
// If: I CSV encode a field that has a double quote in it,
string output = SaveAsCsvFileStreamWriter.EncodeCsvField("Some\"thing", ',', '\"');
string output = writer.EncodeCsvField(input);
// Then: It should be replaced with double double quotes
Assert.AreEqual("\"Some\"\"thing\"", output);
Assert.AreEqual(expectedOutput, output);
}
[Test]
public void EncodeCsvFieldNull()
public void EncodeCsvField_Null()
{
// Setup: Create CsvFileStreamWriter
var writer = GetWriterForEncodingTests(null, null, null);
// If: I CSV encode a null
string output = SaveAsCsvFileStreamWriter.EncodeCsvField(null, ',', '\"');
string output = writer.EncodeCsvField(null);
// Then: there should be a string version of null returned
Assert.AreEqual("NULL", output);
}
[Test]
public void WriteRowWithoutColumnSelectionOrHeader()
public void WriteRow_WithoutColumnSelection()
{
// Setup:
// ... Create a request params that has no selection made
// ... Create a set of data to write
// ... Create a memory location to store the data
var requestParams = new SaveResultsAsCsvRequestParams();
List<DbCellValue> data = new List<DbCellValue>
{
new DbCellValue { DisplayValue = "item1" },
new DbCellValue { DisplayValue = "item2" }
};
List<DbColumnWrapper> columns = new List<DbColumnWrapper>
{
new DbColumnWrapper(new TestDbColumn("column1")),
new DbColumnWrapper(new TestDbColumn("column2"))
};
var (columns, data) = GetTestValues(2);
byte[] output = new byte[8192];
// If: I write a row
SaveAsCsvFileStreamWriter writer = new SaveAsCsvFileStreamWriter(new MemoryStream(output), requestParams);
using (writer)
using (var writer = new SaveAsCsvFileStreamWriter(new MemoryStream(output), requestParams, columns))
{
writer.WriteRow(data, columns);
}
// Then: It should write one line with 2 items, comma delimited
string outputString = Encoding.UTF8.GetString(output).TrimEnd('\0', '\r', '\n');
string[] lines = outputString.Split(new[] { Environment.NewLine }, StringSplitOptions.None);
string[] lines = ParseWriterOutput(output, Environment.NewLine);
Assert.AreEqual(1, lines.Length);
string[] values = lines[0].Split(',');
Assert.AreEqual(2, values.Length);
}
[Test]
public void WriteRowWithHeader()
{
// Setup:
// ... Create a request params that has no selection made, headers should be printed
// ... Create a set of data to write
// ... Create a memory location to store the data
var requestParams = new SaveResultsAsCsvRequestParams
{
IncludeHeaders = true
};
List<DbCellValue> data = new List<DbCellValue>
{
new DbCellValue { DisplayValue = "item1" },
new DbCellValue { DisplayValue = "item2" }
};
List<DbColumnWrapper> columns = new List<DbColumnWrapper>
{
new DbColumnWrapper(new TestDbColumn("column1")),
new DbColumnWrapper(new TestDbColumn("column2"))
};
byte[] output = new byte[8192];
// If: I write a row
SaveAsCsvFileStreamWriter writer = new SaveAsCsvFileStreamWriter(new MemoryStream(output), requestParams);
using (writer)
{
writer.WriteRow(data, columns);
}
// Then:
// ... It should have written two lines
string outputString = Encoding.UTF8.GetString(output).TrimEnd('\0', '\r', '\n');
string[] lines = outputString.Split(new[] { Environment.NewLine }, StringSplitOptions.None);
Assert.AreEqual(2, lines.Length);
// ... It should have written a header line with two, comma separated names
string[] headerValues = lines[0].Split(',');
Assert.AreEqual(2, headerValues.Length);
for (int i = 0; i < columns.Count; i++)
{
Assert.AreEqual(columns[i].ColumnName, headerValues[i]);
}
// Note: No need to check values, it is done as part of the previous test
}
[Test]
public void WriteRowWithColumnSelection()
public void WriteRow_WithColumnSelection()
{
// Setup:
// ... Create a request params that selects n-1 columns from the front and back
@@ -166,48 +269,25 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.DataStorage
ColumnStartIndex = 1,
ColumnEndIndex = 2,
RowStartIndex = 0, // Including b/c it is required to be a "save selection"
RowEndIndex = 10,
IncludeHeaders = true // Including headers to test both column selection logic
};
List<DbCellValue> data = new List<DbCellValue>
{
new DbCellValue { DisplayValue = "item1" },
new DbCellValue { DisplayValue = "item2" },
new DbCellValue { DisplayValue = "item3" },
new DbCellValue { DisplayValue = "item4" }
};
List<DbColumnWrapper> columns = new List<DbColumnWrapper>
{
new DbColumnWrapper(new TestDbColumn("column1")),
new DbColumnWrapper(new TestDbColumn("column2")),
new DbColumnWrapper(new TestDbColumn("column3")),
new DbColumnWrapper(new TestDbColumn("column4"))
RowEndIndex = 10
};
var (columns, data) = GetTestValues(4);
byte[] output = new byte[8192];
// If: I write a row
SaveAsCsvFileStreamWriter writer = new SaveAsCsvFileStreamWriter(new MemoryStream(output), requestParams);
SaveAsCsvFileStreamWriter writer = new SaveAsCsvFileStreamWriter(new MemoryStream(output), requestParams, columns);
using (writer)
{
writer.WriteRow(data, columns);
}
// Then:
// ... It should have written two lines
string outputString = Encoding.UTF8.GetString(output).TrimEnd('\0', '\r', '\n');
string[] lines = outputString.Split(new[] { Environment.NewLine }, StringSplitOptions.None);
Assert.AreEqual(2, lines.Length);
// ... It should have written one line
var lines = ParseWriterOutput(output, Environment.NewLine);
Assert.AreEqual(1, lines.Length);
// ... It should have written a header line with two, comma separated names
string[] headerValues = lines[0].Split(',');
Assert.AreEqual(2, headerValues.Length);
for (int i = 1; i <= 2; i++)
{
Assert.AreEqual(columns[i].ColumnName, headerValues[i - 1]);
}
// ... The second line should have two, comma separated values
string[] dataValues = lines[1].Split(',');
// ... The line should have two, comma separated values
string[] dataValues = lines[0].Split(',');
Assert.AreEqual(2, dataValues.Length);
for (int i = 1; i <= 2; i++)
{
@@ -216,7 +296,7 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.DataStorage
}
[Test]
public void WriteRowWithCustomDelimiters()
public void WriteRow_CustomDelimiter()
{
// Setup:
// ... Create a request params that has custom delimiter say pipe("|") then this delimiter should be used
@@ -227,35 +307,24 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.DataStorage
Delimiter = "|",
IncludeHeaders = true
};
List<DbCellValue> data = new List<DbCellValue>
{
new DbCellValue { DisplayValue = "item1" },
new DbCellValue { DisplayValue = "item2" }
};
List<DbColumnWrapper> columns = new List<DbColumnWrapper>
{
new DbColumnWrapper(new TestDbColumn("column1")),
new DbColumnWrapper(new TestDbColumn("column2"))
};
var (columns, data) = GetTestValues(2);
byte[] output = new byte[8192];
// If: I write a row
SaveAsCsvFileStreamWriter writer = new SaveAsCsvFileStreamWriter(new MemoryStream(output), requestParams);
using (writer)
using (var writer = new SaveAsCsvFileStreamWriter(new MemoryStream(output), requestParams, columns))
{
writer.WriteRow(data, columns);
}
// Then:
// ... It should have written two lines
string outputString = Encoding.UTF8.GetString(output).TrimEnd('\0', '\r', '\n');
string[] lines = outputString.Split(new[] { Environment.NewLine }, StringSplitOptions.None);
string[] lines = ParseWriterOutput(output, Environment.NewLine);
Assert.AreEqual(2, lines.Length);
// ... It should have written a header line with two, pipe("|") separated names
string[] headerValues = lines[0].Split('|');
Assert.AreEqual(2, headerValues.Length);
for (int i = 0; i < columns.Count; i++)
for (int i = 0; i < columns.Length; i++)
{
Assert.AreEqual(columns[i].ColumnName, headerValues[i]);
}
@@ -264,144 +333,49 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.DataStorage
}
[Test]
public void WriteRowsWithCustomLineSeperator()
public void WriteRow_CustomLineSeparator()
{
// Setup:
// ... Create a request params that has custom line seperator then this seperator should be used
// ... Create a request params that has custom line separator
// ... Create a set of data to write
// ... Create a memory location to store the data
var requestParams = new SaveResultsAsCsvRequestParams
{
LineSeperator = "$$",
IncludeHeaders = true
};
List<DbCellValue> data = new List<DbCellValue>
{
new DbCellValue { DisplayValue = "item1" },
new DbCellValue { DisplayValue = "item2" }
};
List<DbColumnWrapper> columns = new List<DbColumnWrapper>
{
new DbColumnWrapper(new TestDbColumn("column1")),
new DbColumnWrapper(new TestDbColumn("column2"))
};
byte[] output;
string outputString;
string[] lines;
SaveAsCsvFileStreamWriter writer;
// If: I set default seperator and write a row
requestParams.LineSeperator = null;
output = new byte[8192];
writer = new SaveAsCsvFileStreamWriter(new MemoryStream(output), requestParams);
using (writer)
{
writer.WriteRow(data, columns);
}
// Then:
// ... It should have splitten the lines by system's default line seperator
outputString = Encoding.UTF8.GetString(output).TrimEnd('\0', '\r', '\n');
lines = outputString.Split(new[] { Environment.NewLine }, StringSplitOptions.None);
Assert.AreEqual(2, lines.Length);
// If: I set \n (line feed) as seperator and write a row
requestParams.LineSeperator = "\n";
output = new byte[8192];
writer = new SaveAsCsvFileStreamWriter(new MemoryStream(output), requestParams);
using (writer)
{
writer.WriteRow(data, columns);
}
// Then:
// ... It should have splitten the lines by \n
outputString = Encoding.UTF8.GetString(output).TrimEnd('\0', '\r', '\n');
lines = outputString.Split(new[] { '\n' }, StringSplitOptions.None);
Assert.AreEqual(2, lines.Length);
// If: I set \r\n (carriage return + line feed) as seperator and write a row
requestParams.LineSeperator = "\r\n";
output = new byte[8192];
writer = new SaveAsCsvFileStreamWriter(new MemoryStream(output), requestParams);
using (writer)
{
writer.WriteRow(data, columns);
}
// Then:
// ... It should have splitten the lines by \r\n
outputString = Encoding.UTF8.GetString(output).TrimEnd('\0', '\r', '\n');
lines = outputString.Split(new[] { "\r\n" }, StringSplitOptions.None);
Assert.AreEqual(2, lines.Length);
}
[Test]
public void WriteRowWithCustomTextIdentifier()
{
// Setup:
// ... Create a request params that has a text identifier set say single quotation marks("'") then this text identifier should be used
// ... Create a set of data to write
// ... Create a memory location to store the data
var requestParams = new SaveResultsAsCsvRequestParams()
{
TextIdentifier = "\'",
Delimiter = ";"
};
List<DbCellValue> data = new List<DbCellValue>
{
new DbCellValue { DisplayValue = "item;1" },
new DbCellValue { DisplayValue = "item,2" },
new DbCellValue { DisplayValue = "item\"3" },
new DbCellValue { DisplayValue = "item\'4" }
};
List<DbColumnWrapper> columns = new List<DbColumnWrapper>
{
new DbColumnWrapper(new TestDbColumn("column1")),
new DbColumnWrapper(new TestDbColumn("column2")),
new DbColumnWrapper(new TestDbColumn("column3")),
new DbColumnWrapper(new TestDbColumn("column4"))
};
var (columns, data) = GetTestValues(2);
byte[] output = new byte[8192];
// If: I write a row
SaveAsCsvFileStreamWriter writer = new SaveAsCsvFileStreamWriter(new MemoryStream(output), requestParams);
using (writer)
// If: I write a row
using (var writer = new SaveAsCsvFileStreamWriter(new MemoryStream(output), requestParams, columns))
{
writer.WriteRow(data, columns);
}
// Then:
// ... It should have splitten the columns by delimiter, embedded in text identifier when field contains delimiter or the text identifier
string outputString = Encoding.UTF8.GetString(output).TrimEnd('\0', '\r', '\n');
Assert.AreEqual("\'item;1\';item,2;item\"3;\'item\'\'4\'", outputString);
// ... The lines should be split by the custom line separator
var lines = ParseWriterOutput(output, "$$");
Assert.AreEqual(2, lines.Length);
}
[Test]
public void WriteRowWithCustomEncoding()
public void WriteRow_CustomEncoding()
{
// Setup:
// ... Create a request params that has custom delimiter say pipe("|") then this delimiter should be used
// ... Create a request params that uses a custom encoding
// ... Create a set of data to write
// ... Create a memory location to store the data
var requestParams = new SaveResultsAsCsvRequestParams
{
Encoding = "Windows-1252"
};
List<DbCellValue> data = new List<DbCellValue>
{
new DbCellValue { DisplayValue = "ü" }
};
List<DbColumnWrapper> columns = new List<DbColumnWrapper>
{
new DbColumnWrapper(new TestDbColumn("column1"))
};
var data = new[] { new DbCellValue { DisplayValue = "ü" } };
var columns = new[] { new DbColumnWrapper(new TestDbColumn("column1")) };
byte[] output = new byte[8192];
// If: I write a row
SaveAsCsvFileStreamWriter writer = new SaveAsCsvFileStreamWriter(new MemoryStream(output), requestParams);
using (writer)
using (var writer = new SaveAsCsvFileStreamWriter(new MemoryStream(output), requestParams, columns))
{
writer.WriteRow(data, columns);
}
@@ -414,5 +388,40 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.DataStorage
}
private static (DbColumnWrapper[] columns, DbCellValue[] cells) GetTestValues(int columnCount)
{
var data = new DbCellValue[columnCount];
var columns = new DbColumnWrapper[columnCount];
for (int i = 0; i < columnCount; i++)
{
data[i] = new DbCellValue { DisplayValue = $"item{i}"};
columns[i] = new DbColumnWrapper(new TestDbColumn($"column{i}"));
}
return (columns, data);
}
private static SaveAsCsvFileStreamWriter GetWriterForEncodingTests(string delimiter, string identifier, string lineSeparator)
{
var settings = new SaveResultsAsCsvRequestParams
{
Delimiter = delimiter,
IncludeHeaders = false,
LineSeperator = lineSeparator,
TextIdentifier = identifier,
};
var mockStream = Stream.Null;
var mockColumns = Array.Empty<DbColumnWrapper>();
return new SaveAsCsvFileStreamWriter(mockStream, settings, mockColumns);
}
private static string[] ParseWriterOutput(byte[] output, string lineSeparator)
{
string outputString = Encoding.UTF8.GetString(output).Trim('\0');
string[] lines = outputString.Split(new[] { lineSeparator }, StringSplitOptions.None);
// Make sure the file ends with a line separator, then return only the meaningful lines (dropping the trailing empty entry)
Assert.IsEmpty(lines[lines.Length - 1]);
return lines.Take(lines.Length - 1).ToArray();
}
}
}

View File

@@ -27,7 +27,7 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.DataStorage
// If:
// ... I create and then destruct a json writer
var jsonWriter = new SaveAsJsonFileStreamWriter(new MemoryStream(output), saveParams);
var jsonWriter = new SaveAsJsonFileStreamWriter(new MemoryStream(output), saveParams, Array.Empty<DbColumnWrapper>());
jsonWriter.Dispose();
// Then:
@@ -59,7 +59,7 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.DataStorage
// If:
// ... I write two rows
var jsonWriter = new SaveAsJsonFileStreamWriter(new MemoryStream(output), saveParams);
var jsonWriter = new SaveAsJsonFileStreamWriter(new MemoryStream(output), saveParams, columns);
using (jsonWriter)
{
jsonWriter.WriteRow(data, columns);
@@ -117,7 +117,7 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.DataStorage
byte[] output = new byte[8192];
// If: I write two rows
var jsonWriter = new SaveAsJsonFileStreamWriter(new MemoryStream(output), saveParams);
var jsonWriter = new SaveAsJsonFileStreamWriter(new MemoryStream(output), saveParams, columns);
using (jsonWriter)
{
jsonWriter.WriteRow(data, columns);
@@ -170,7 +170,7 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.DataStorage
// If:
// ... I write two rows
var jsonWriter = new SaveAsJsonFileStreamWriter(new MemoryStream(output), saveParams);
var jsonWriter = new SaveAsJsonFileStreamWriter(new MemoryStream(output), saveParams, columns);
using (jsonWriter)
{
jsonWriter.WriteRow(data, columns);

View File

@@ -0,0 +1,227 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//
using System;
using System.IO;
using System.Text;
using System.Xml;
using Microsoft.SqlTools.ServiceLayer.QueryExecution.Contracts;
using Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage;
using Microsoft.SqlTools.ServiceLayer.UnitTests.Utility;
using NUnit.Framework;
namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.DataStorage
{
[TestFixture]
public class SaveAsXmlFileStreamWriterTests
{
[TestCase(false)]
[TestCase(true)]
public void ConstructAndDispose(bool formatted)
{
// Setup: Create test request and storage for the output
var saveParams = new SaveResultsAsXmlRequestParams { Formatted = formatted };
var columns = Array.Empty<DbColumnWrapper>();
var output = new byte[8192];
// If: I create and dispose of an XML file writer
var xmlWriter = new SaveAsXmlFileStreamWriter(new MemoryStream(output), saveParams, columns);
xmlWriter.Dispose();
// Then:
// ... The output should be just the XML node and the root node
var rootNode = ParseOutput(output, Encoding.UTF8);
Assert.IsEmpty(rootNode.ChildNodes);
// ... If the output is formatted, there should be multiple lines
// otherwise, there should be only one line
if (formatted)
{
CollectionAssert.Contains(output, (byte)'\n');
}
else
{
CollectionAssert.DoesNotContain(output, (byte)'\n');
}
}
[Test]
public void WriteRow_WithoutColumnSelection()
{
// Setup:
// ... Create request params that has no selection made
// ... Create a set of data to write
// ... Create storage for the output
var saveParams = new SaveResultsAsXmlRequestParams();
var data = new[]
{
new DbCellValue { DisplayValue = "item1", RawObject = "item1" },
new DbCellValue { DisplayValue = "null", RawObject = null }
};
var columns = new[]
{
new DbColumnWrapper(new TestDbColumn("column1")),
new DbColumnWrapper(new TestDbColumn("column2"))
};
var output = new byte[8192];
// If: I write two rows
using (var xmlWriter = new SaveAsXmlFileStreamWriter(new MemoryStream(output), saveParams, columns))
{
xmlWriter.WriteRow(data, columns);
xmlWriter.WriteRow(data, columns);
}
// Then:
// ... XML should be well formed
var rootNode = ParseOutput(output, Encoding.UTF8);
// ... Data node should have two nodes for the two rows
Assert.AreEqual(2, rootNode.ChildNodes.Count);
for (int i = 0; i < 2; i++)
{
// ... Each row should have two nodes for the two cells
var row = rootNode.ChildNodes[i];
Assert.IsNotNull(row);
Assert.AreEqual(2, row.ChildNodes.Count);
for (int j = 0; j < 2; j++)
{
var cell = row.ChildNodes[j];
Assert.IsNotNull(cell);
// ... Node name should be column name
Assert.AreEqual(columns[j].ColumnName, cell.Name);
// ... Node value should be cell value
if (data[j].RawObject == null)
{
Assert.IsEmpty(cell.InnerText);
}
else
{
Assert.AreEqual(data[j].DisplayValue, cell.InnerText);
}
}
}
}
[Test]
public void WriteRow_WithColumnSelection()
{
// Setup:
// ... Create request params that has a selection made
// ... Create a set of data to write
// ... Create storage for the output
var saveParams = new SaveResultsAsXmlRequestParams
{
ColumnEndIndex = 2,
ColumnStartIndex = 1,
RowEndIndex = 0, // Required for being considered a "selection"
RowStartIndex = 0
};
var data = new[]
{
new DbCellValue { DisplayValue = "foo" },
new DbCellValue { DisplayValue = "item1", RawObject = "item1" },
new DbCellValue { DisplayValue = "null", RawObject = null },
new DbCellValue { DisplayValue = "bar" }
};
var columns = new[]
{
new DbColumnWrapper(new TestDbColumn("ignoredCol")),
new DbColumnWrapper(new TestDbColumn("column1")),
new DbColumnWrapper(new TestDbColumn("column2")),
new DbColumnWrapper(new TestDbColumn("ignoredCol"))
};
var output = new byte[8192];
// If: I write two rows
using (var xmlWriter = new SaveAsXmlFileStreamWriter(new MemoryStream(output), saveParams, columns))
{
xmlWriter.WriteRow(data, columns);
xmlWriter.WriteRow(data, columns);
}
// Then:
// ... XML should be well formed
var rootNode = ParseOutput(output, Encoding.UTF8);
// ... Data node should have two nodes for the two rows
Assert.AreEqual(2, rootNode.ChildNodes.Count);
for (int i = 0; i < 2; i++)
{
// ... Each row should have two nodes for the two cells
var row = rootNode.ChildNodes[i];
Assert.IsNotNull(row);
Assert.AreEqual(2, row.ChildNodes.Count);
for (int j = 0; j < 1; j++)
{
var cell = row.ChildNodes[j];
var columnIndex = j + 1;
Assert.IsNotNull(cell);
// ... Node name should be column name
Assert.AreEqual(columns[columnIndex].ColumnName, cell.Name);
// ... Node value should be cell value
if (data[columnIndex].RawObject == null)
{
Assert.IsEmpty(cell.InnerText);
}
else
{
Assert.AreEqual(data[columnIndex].DisplayValue, cell.InnerText);
}
}
}
}
[Test]
public void WriteRow_NonDefaultEncoding()
{
// Setup:
// ... Create request params that uses a special encoding
// ... Create a set of data to write
// ... Create storage for the output
var saveParams = new SaveResultsAsXmlRequestParams { Encoding = "Windows-1252" };
var data = new[] { new DbCellValue { DisplayValue = "ü", RawObject = "ü" } };
var columns = new[] { new DbColumnWrapper(new TestDbColumn("column1")) };
byte[] output = new byte[8192];
// If: I write the row
using (var xmlWriter = new SaveAsXmlFileStreamWriter(new MemoryStream(output), saveParams, columns))
{
xmlWriter.WriteRow(data, columns);
}
// Then:
// ... The XML file should have been written properly in windows-1252 encoding
Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);
var encoding = Encoding.GetEncoding("Windows-1252");
var rootNode = ParseOutput(output, encoding);
// ... The umlaut should be written using Windows-1252
Assert.IsNotNull(rootNode.ChildNodes[0]); // <row>
Assert.IsNotNull(rootNode.ChildNodes[0].ChildNodes[0]); // <column1>
Assert.AreEqual(rootNode.ChildNodes[0].ChildNodes[0].InnerText, "ü");
}
private XmlNode ParseOutput(byte[] bytes, Encoding encoding)
{
var outputString = encoding.GetString(bytes)
.TrimStart(encoding.GetString(encoding.Preamble).ToCharArray()) // Trim any BOM
.Trim('\0');
var xmlDoc = new XmlDocument();
xmlDoc.LoadXml(outputString);
// Assert: Two elements at the root, XML and the root node
Assert.AreEqual(2, xmlDoc.ChildNodes.Count);
Assert.AreEqual("xml", xmlDoc.ChildNodes[0]?.Name);
Assert.AreEqual("data", xmlDoc.ChildNodes[1]?.Name);
return xmlDoc.ChildNodes[1];
}
}
}

View File

@@ -159,7 +159,7 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.Execution
// Setup: Create a new result set with valid db data reader
var fileStreamFactory = MemoryFileSystem.GetFileStreamFactory();
ResultSet resultSet = new ResultSet(Common.Ordinal, Common.Ordinal, fileStreamFactory);
Assert.That(() => testMethod(resultSet), Throws.InstanceOf<Exception>(), "I have a result set that has not been read. I attempt to call a method on it. It should throw an exception");
Assert.Catch(() => testMethod(resultSet), "I have a result set that has not been read. I attempt to call a method on it. It should throw an exception");
}
public static IEnumerable<object[]> CallMethodWithoutReadingData
@@ -376,7 +376,7 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.Execution
ResultSet resultSet = new ResultSet(Common.Ordinal, Common.Ordinal, fileStreamFactory);
await resultSet.ReadResultToEnd(mockReader, CancellationToken.None);
Assert.That(() => actionToPerform(resultSet), Throws.InstanceOf<Exception>(), "Attempting to read an invalid row should fail");
Assert.Catch<Exception>(() => actionToPerform(resultSet), "Attempting to read an invalid row should fail");
}
public static IEnumerable<object[]> RowInvalidParameterData

View File

@@ -110,7 +110,7 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.SaveResults
// ... All the rows should have been written successfully
saveWriter.Verify(
w => w.WriteRow(It.IsAny<IList<DbCellValue>>(), It.IsAny<IList<DbColumnWrapper>>()),
w => w.WriteRow(It.IsAny<IList<DbCellValue>>(), It.IsAny<IReadOnlyList<DbColumnWrapper>>()),
Times.Exactly(Common.StandardRows));
}
@@ -150,21 +150,21 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.SaveResults
// ... All the rows should have been written successfully
saveWriter.Verify(
w => w.WriteRow(It.IsAny<IList<DbCellValue>>(), It.IsAny<IList<DbColumnWrapper>>()),
w => w.WriteRow(It.IsAny<IList<DbCellValue>>(), It.IsAny<IReadOnlyList<DbColumnWrapper>>()),
Times.Exactly((int) (saveParams.RowEndIndex - saveParams.RowStartIndex + 1)));
}
private static Mock<IFileStreamWriter> GetMockWriter()
{
var mockWriter = new Mock<IFileStreamWriter>();
mockWriter.Setup(w => w.WriteRow(It.IsAny<IList<DbCellValue>>(), It.IsAny<IList<DbColumnWrapper>>()));
mockWriter.Setup(w => w.WriteRow(It.IsAny<IList<DbCellValue>>(), It.IsAny<IReadOnlyList<DbColumnWrapper>>()));
return mockWriter;
}
private static IFileStreamFactory GetMockFactory(IFileStreamWriter writer, Func<string, IFileStreamReader> readerGenerator)
{
var mockFactory = new Mock<IFileStreamFactory>();
mockFactory.Setup(f => f.GetWriter(It.IsAny<string>()))
mockFactory.Setup(f => f.GetWriter(It.IsAny<string>(), It.IsAny<IReadOnlyList<DbColumnWrapper>>()))
.Returns(writer);
mockFactory.Setup(f => f.GetReader(It.IsAny<string>()))
.Returns(readerGenerator);

View File

@@ -473,63 +473,69 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.SaveResults
#region Private Helpers
private static IFileStreamFactory GetCsvStreamFactory(IDictionary<string, byte[]> storage, SaveResultsAsCsvRequestParams saveParams)
private static IFileStreamFactory GetCsvStreamFactory(
IDictionary<string, byte[]> storage,
SaveResultsAsCsvRequestParams saveParams)
{
Mock<IFileStreamFactory> mock = new Mock<IFileStreamFactory>();
mock.Setup(fsf => fsf.GetReader(It.IsAny<string>()))
.Returns<string>(output => new ServiceBufferFileStreamReader(new MemoryStream(storage[output]), new QueryExecutionSettings()));
mock.Setup(fsf => fsf.GetWriter(It.IsAny<string>()))
.Returns<string>(output =>
mock.Setup(fsf => fsf.GetWriter(It.IsAny<string>(), It.IsAny<IReadOnlyList<DbColumnWrapper>>()))
.Returns<string, IReadOnlyList<DbColumnWrapper>>((output, columns) =>
{
storage.Add(output, new byte[8192]);
return new SaveAsCsvFileStreamWriter(new MemoryStream(storage[output]), saveParams);
return new SaveAsCsvFileStreamWriter(new MemoryStream(storage[output]), saveParams, columns);
});
return mock.Object;
}
private static IFileStreamFactory GetJsonStreamFactory(IDictionary<string, byte[]> storage, SaveResultsAsJsonRequestParams saveParams)
private static IFileStreamFactory GetJsonStreamFactory(
IDictionary<string, byte[]> storage,
SaveResultsAsJsonRequestParams saveParams)
{
Mock<IFileStreamFactory> mock = new Mock<IFileStreamFactory>();
mock.Setup(fsf => fsf.GetReader(It.IsAny<string>()))
.Returns<string>(output => new ServiceBufferFileStreamReader(new MemoryStream(storage[output]), new QueryExecutionSettings()));
mock.Setup(fsf => fsf.GetWriter(It.IsAny<string>()))
.Returns<string>(output =>
mock.Setup(fsf => fsf.GetWriter(It.IsAny<string>(), It.IsAny<IReadOnlyList<DbColumnWrapper>>()))
.Returns<string, IReadOnlyList<DbColumnWrapper>>((output, columns) =>
{
storage.Add(output, new byte[8192]);
return new SaveAsJsonFileStreamWriter(new MemoryStream(storage[output]), saveParams);
return new SaveAsJsonFileStreamWriter(new MemoryStream(storage[output]), saveParams, columns);
});
return mock.Object;
}
private static IFileStreamFactory GetXmlStreamFactory(IDictionary<string, byte[]> storage,
private static IFileStreamFactory GetXmlStreamFactory(
IDictionary<string, byte[]> storage,
SaveResultsAsXmlRequestParams saveParams)
{
Mock<IFileStreamFactory> mock = new Mock<IFileStreamFactory>();
mock.Setup(fsf => fsf.GetReader(It.IsAny<string>()))
.Returns<string>(output => new ServiceBufferFileStreamReader(new MemoryStream(storage[output]), new QueryExecutionSettings()));
mock.Setup(fsf => fsf.GetWriter(It.IsAny<string>()))
.Returns<string>(output =>
mock.Setup(fsf => fsf.GetWriter(It.IsAny<string>(), It.IsAny<IReadOnlyList<DbColumnWrapper>>()))
.Returns<string, IReadOnlyList<DbColumnWrapper>>((output, columns) =>
{
storage.Add(output, new byte[8192]);
return new SaveAsXmlFileStreamWriter(new MemoryStream(storage[output]), saveParams);
return new SaveAsXmlFileStreamWriter(new MemoryStream(storage[output]), saveParams, columns);
});
return mock.Object;
}
/// <summary>
/// Builds a mocked <see cref="IFileStreamFactory"/> for Excel save-as tests.
/// Readers and writers operate over in-memory buffers stored in
/// <paramref name="storage"/>, keyed by the requested file name, so no disk I/O occurs.
/// </summary>
/// <param name="storage">Map from file name to its backing in-memory buffer.</param>
/// <param name="saveParams">Excel save-as request parameters forwarded to the writer.</param>
/// <returns>A mock factory whose GetReader/GetWriter produce in-memory stream wrappers.</returns>
private static IFileStreamFactory GetExcelStreamFactory(
    IDictionary<string, byte[]> storage,
    SaveResultsAsExcelRequestParams saveParams)
{
    Mock<IFileStreamFactory> mock = new Mock<IFileStreamFactory>();
    mock.Setup(fsf => fsf.GetReader(It.IsAny<string>()))
        .Returns<string>(output => new ServiceBufferFileStreamReader(new MemoryStream(storage[output]), new QueryExecutionSettings()));
    // GetWriter now takes the column list so headers can be emitted even for empty result sets
    mock.Setup(fsf => fsf.GetWriter(It.IsAny<string>(), It.IsAny<IReadOnlyList<DbColumnWrapper>>()))
        .Returns<string, IReadOnlyList<DbColumnWrapper>>((output, columns) =>
        {
            // Register a fixed-size buffer under the output name before handing it to the writer
            storage.Add(output, new byte[8192]);
            return new SaveAsExcelFileStreamWriter(new MemoryStream(storage[output]), saveParams, columns);
        });
    return mock.Object;
}

View File

@@ -1,4 +1,4 @@
//
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//
@@ -8,6 +8,7 @@ using Microsoft.SqlTools.ServiceLayer.Scripting;
using Microsoft.SqlTools.ServiceLayer.Scripting.Contracts;
using NUnit.Framework;
using Assert = NUnit.Framework.Assert;
namespace Microsoft.SqlTools.ServiceLayer.UnitTests.Scripting
{
@@ -21,7 +22,7 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.Scripting
{
var scriptingObject = new ScriptingObject() { Name = "quoted'Name", Schema = "quoted'Schema", Type = "Table" };
var urn = scriptingObject.ToUrn("server", "quoted'db");
Assert.That(urn.ToString, Is.EqualTo("Server[@Name='SERVER']/Database[@Name='quoted''db']/Table[@Name='quoted''Name' and @Schema = 'quoted''Schema']"), "Urn should have escaped Name attributes");
Assert.AreEqual("Server[@Name='SERVER']/Database[@Name='quoted''db']/Table[@Name='quoted''Name' and @Schema = 'quoted''Schema']", urn.ToString(), "Urn should have escaped Name attributes");
Assert.That(urn.Type, Is.EqualTo("Table"), "Urn Type");
// These assertions are more for educational purposes than for testing, since the methods are Urn methods in SFC.
Assert.That(urn.GetNameForType("Database"), Is.EqualTo("quoted'db"), "GetNameForType('Database')");