Export headers in an empty result set (#1434)

* Minimal changes to make headers appear on empty result sets

* Columns for everyone!

* Updating tests - some don't pass yet

* Adding some more tests to verify the changes for column/row selection

* null default columns

* Updates to comments as per PR comments
Benjamin Russell
2022-03-31 11:10:32 -05:00
committed by GitHub
parent 5d805bd678
commit 2ace786d95
24 changed files with 814 additions and 499 deletions
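
In short: save-as writers now receive the result set's column list up front, and the CSV writer emits its header line in the constructor rather than lazily in WriteRow, so an empty result set still produces a header row. A minimal usage sketch, mirroring the new unit tests below (TestDbColumn is the existing test helper; the test project's usings are assumed):

// Constructing the writer is enough to get the header - WriteRow is never called.
var requestParams = new SaveResultsAsCsvRequestParams { IncludeHeaders = true };
var columns = new[]
{
    new DbColumnWrapper(new TestDbColumn("column0")),
    new DbColumnWrapper(new TestDbColumn("column1"))
};
byte[] output = new byte[8192];
using (var writer = new SaveAsCsvFileStreamWriter(new MemoryStream(output), requestParams, columns))
{
    // No rows to write: the result set is empty.
}
// output now begins with "column0,column1" followed by the line separator.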

View File

@@ -3,6 +3,9 @@
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//
using System.Collections.Generic;
using Microsoft.SqlTools.ServiceLayer.QueryExecution.Contracts;
namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage
{
/// <summary>
@@ -14,7 +17,7 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage
IFileStreamReader GetReader(string fileName);
IFileStreamWriter GetWriter(string fileName);
IFileStreamWriter GetWriter(string fileName, IReadOnlyList<DbColumnWrapper> columns = null);
void DisposeFile(string fileName);

View File

@@ -16,7 +16,7 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage
public interface IFileStreamWriter : IDisposable
{
int WriteRow(StorageDataReader dataReader);
void WriteRow(IList<DbCellValue> row, IList<DbColumnWrapper> columns);
void WriteRow(IList<DbCellValue> row, IReadOnlyList<DbColumnWrapper> columns);
void Seek(long offset);
void FlushBuffer();
}

View File

@@ -1,9 +1,10 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//
using System;
using System.Collections.Generic;
using System.IO;
using Microsoft.SqlTools.ServiceLayer.QueryExecution.Contracts;
using Microsoft.SqlTools.ServiceLayer.SqlContext;
@@ -48,17 +49,28 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage
/// <returns>Stream reader</returns>
public IFileStreamReader GetReader(string fileName)
{
return new ServiceBufferFileStreamReader(new FileStream(fileName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite), QueryExecutionSettings);
return new ServiceBufferFileStreamReader(
new FileStream(fileName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite),
QueryExecutionSettings
);
}
/// <summary>
/// Returns a new CSV writer for writing results to a CSV file, file share is ReadWrite to allow concurrent reads/writes to the file.
/// </summary>
/// <param name="fileName">Path to the CSV output file</param>
/// <param name="columns">
/// The entire list of columns for the result set. They will be filtered down as per the
/// request params.
/// </param>
/// <returns>Stream writer</returns>
public IFileStreamWriter GetWriter(string fileName)
public IFileStreamWriter GetWriter(string fileName, IReadOnlyList<DbColumnWrapper> columns)
{
return new SaveAsCsvFileStreamWriter(new FileStream(fileName, FileMode.Create, FileAccess.ReadWrite, FileShare.ReadWrite), SaveRequestParams);
return new SaveAsCsvFileStreamWriter(
new FileStream(fileName, FileMode.Create, FileAccess.ReadWrite, FileShare.ReadWrite),
SaveRequestParams,
columns
);
}
/// <summary>

View File

@@ -1,4 +1,4 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//
@@ -20,21 +20,74 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage
#region Member Variables
private readonly SaveResultsAsCsvRequestParams saveParams;
private bool headerWritten;
private readonly char delimiter;
private readonly Encoding encoding;
private readonly string lineSeparator;
private readonly char textIdentifier;
private readonly string textIdentifierString;
#endregion
/// <summary>
/// Constructor, stores the CSV specific request params locally, chains into the base
/// constructor
/// </summary>
/// <param name="stream">FileStream to access the CSV file output</param>
/// <param name="requestParams">CSV save as request parameters</param>
public SaveAsCsvFileStreamWriter(Stream stream, SaveResultsAsCsvRequestParams requestParams)
: base(stream, requestParams)
/// <param name="columns">
/// The entire list of columns for the result set. They will be filtered down as per the
/// request params.
/// </param>
public SaveAsCsvFileStreamWriter(Stream stream, SaveResultsAsCsvRequestParams requestParams, IReadOnlyList<DbColumnWrapper> columns)
: base(stream, requestParams, columns)
{
saveParams = requestParams;
// Parse the config
delimiter = ',';
if (!string.IsNullOrEmpty(requestParams.Delimiter))
{
delimiter = requestParams.Delimiter[0];
}
lineSeparator = Environment.NewLine;
if (!string.IsNullOrEmpty(requestParams.LineSeperator))
{
lineSeparator = requestParams.LineSeperator;
}
textIdentifier = '"';
if (!string.IsNullOrEmpty(requestParams.TextIdentifier))
{
textIdentifier = requestParams.TextIdentifier[0];
}
textIdentifierString = textIdentifier.ToString();
Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);
try
{
encoding = int.TryParse(requestParams.Encoding, out int codePage)
? Encoding.GetEncoding(codePage)
: Encoding.GetEncoding(requestParams.Encoding);
}
catch
{
// Fallback encoding when specified codepage is invalid
encoding = Encoding.GetEncoding("utf-8");
}
// Output the header if the user requested it
if (requestParams.IncludeHeaders)
{
// Build the string
var selectedColumns = columns.Skip(ColumnStartIndex)
.Take(ColumnCount)
.Select(c => EncodeCsvField(c.ColumnName) ?? string.Empty);
string headerLine = string.Join(delimiter, selectedColumns);
// Encode it and write it out
byte[] headerBytes = encoding.GetBytes(headerLine + lineSeparator);
FileStream.Write(headerBytes, 0, headerBytes.Length);
}
}
/// <summary>
@@ -42,76 +95,17 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage
/// it, the headers for the column will be emitted as well.
/// </summary>
/// <param name="row">The data of the row to output to the file</param>
/// <param name="columns">
/// The entire list of columns for the result set. They will be filtered down as per the
/// request params.
/// </param>
public override void WriteRow(IList<DbCellValue> row, IList<DbColumnWrapper> columns)
/// <param name="columns">The columns for the row to output</param>
public override void WriteRow(IList<DbCellValue> row, IReadOnlyList<DbColumnWrapper> columns)
{
char delimiter = ',';
if(!string.IsNullOrEmpty(saveParams.Delimiter))
{
// first char in string
delimiter = saveParams.Delimiter[0];
}
string lineSeperator = Environment.NewLine;
if(!string.IsNullOrEmpty(saveParams.LineSeperator))
{
lineSeperator = saveParams.LineSeperator;
}
char textIdentifier = '"';
if(!string.IsNullOrEmpty(saveParams.TextIdentifier))
{
// first char in string
textIdentifier = saveParams.TextIdentifier[0];
}
Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);
int codepage;
Encoding encoding;
try
{
if(int.TryParse(saveParams.Encoding, out codepage))
{
encoding = Encoding.GetEncoding(codepage);
}
else
{
encoding = Encoding.GetEncoding(saveParams.Encoding);
}
}
catch
{
// Fallback encoding when specified codepage is invalid
encoding = Encoding.GetEncoding("utf-8");
}
// Write out the header if we haven't already and the user chose to have it
if (saveParams.IncludeHeaders && !headerWritten)
{
// Build the string
var selectedColumns = columns.Skip(ColumnStartIndex ?? 0).Take(ColumnCount ?? columns.Count)
.Select(c => EncodeCsvField(c.ColumnName, delimiter, textIdentifier) ?? string.Empty);
string headerLine = string.Join(delimiter, selectedColumns);
// Encode it and write it out
byte[] headerBytes = encoding.GetBytes(headerLine + lineSeperator);
FileStream.Write(headerBytes, 0, headerBytes.Length);
headerWritten = true;
}
// Build the string for the row
var selectedCells = row.Skip(ColumnStartIndex ?? 0)
.Take(ColumnCount ?? columns.Count)
.Select(c => EncodeCsvField(c.DisplayValue, delimiter, textIdentifier));
var selectedCells = row.Skip(ColumnStartIndex)
.Take(ColumnCount)
.Select(c => EncodeCsvField(c.DisplayValue));
string rowLine = string.Join(delimiter, selectedCells);
// Encode it and write it out
byte[] rowBytes = encoding.GetBytes(rowLine + lineSeperator);
byte[] rowBytes = encoding.GetBytes(rowLine + lineSeparator);
FileStream.Write(rowBytes, 0, rowBytes.Length);
}
@@ -124,7 +118,7 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage
/// <list type="bullet">
/// <item><description>The field begins or ends with a space</description></item>
/// <item><description>The field begins or ends with a tab</description></item>
/// <item><description>The field contains the ListSeparator string</description></item>
/// <item><description>The field contains the delimiter string</description></item>
/// <item><description>The field contains the '\n' character</description></item>
/// <item><description>The field contains the '\r' character</description></item>
/// <item><description>The field contains the '"' character</description></item>
@@ -132,27 +126,24 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage
/// </summary>
/// <param name="field">The field to encode</param>
/// <returns>The CSV encoded version of the original field</returns>
internal static string EncodeCsvField(string field, char delimiter, char textIdentifier)
internal string EncodeCsvField(string field)
{
string strTextIdentifier = textIdentifier.ToString();
// Special case for nulls
if (field == null)
{
return "NULL";
}
// Replace all quotes in the original field with double quotes
string ret = field.Replace(textIdentifierString, textIdentifierString + textIdentifierString);
// Whether this field has special characters which require it to be embedded in quotes
bool embedInQuotes = field.IndexOfAny(new[] { delimiter, '\r', '\n', textIdentifier }) >= 0 // Contains special characters
|| field.StartsWith(" ") || field.EndsWith(" ") // Start/Ends with space
|| field.StartsWith("\t") || field.EndsWith("\t"); // Starts/Ends with tab
//Replace all quotes in the original field with double quotes
string ret = field.Replace(strTextIdentifier, strTextIdentifier + strTextIdentifier);
if (embedInQuotes)
{
ret = strTextIdentifier + $"{ret}" + strTextIdentifier;
ret = $"{textIdentifier}{ret}{textIdentifier}";
}
return ret;

View File

@@ -1,9 +1,10 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//
using System;
using System.Collections.Generic;
using System.IO;
using Microsoft.SqlTools.ServiceLayer.QueryExecution.Contracts;
using Microsoft.SqlTools.ServiceLayer.SqlContext;
@@ -48,17 +49,28 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage
/// <returns>Stream reader</returns>
public IFileStreamReader GetReader(string fileName)
{
return new ServiceBufferFileStreamReader(new FileStream(fileName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite), QueryExecutionSettings);
return new ServiceBufferFileStreamReader(
new FileStream(fileName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite),
QueryExecutionSettings
);
}
/// <summary>
/// Returns a new Excel writer for writing results to an Excel file, file share is ReadWrite to allow concurrent reads/writes to the file.
/// </summary>
/// <param name="fileName">Path to the Excel output file</param>
/// <param name="columns">
/// The entire list of columns for the result set. They will be filtered down as per the
/// request params.
/// </param>
/// <returns>Stream writer</returns>
public IFileStreamWriter GetWriter(string fileName)
public IFileStreamWriter GetWriter(string fileName, IReadOnlyList<DbColumnWrapper> columns)
{
return new SaveAsExcelFileStreamWriter(new FileStream(fileName, FileMode.Create, FileAccess.ReadWrite, FileShare.ReadWrite), SaveRequestParams);
return new SaveAsExcelFileStreamWriter(
new FileStream(fileName, FileMode.Create, FileAccess.ReadWrite, FileShare.ReadWrite),
SaveRequestParams,
columns
);
}
/// <summary>

View File

@@ -1,4 +1,4 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//
@@ -25,13 +25,17 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage
#endregion
/// <summary>
/// Constructor, stores the Excel specific request params locally, chains into the base
/// constructor
/// </summary>
/// <param name="stream">FileStream to access the Excel file output</param>
/// <param name="requestParams">Excel save as request parameters</param>
public SaveAsExcelFileStreamWriter(Stream stream, SaveResultsAsExcelRequestParams requestParams)
: base(stream, requestParams)
/// <param name="columns">
/// The entire list of columns for the result set. They will be filtered down as per the
/// request params.
/// </param>
public SaveAsExcelFileStreamWriter(Stream stream, SaveResultsAsExcelRequestParams requestParams, IReadOnlyList<DbColumnWrapper> columns)
: base(stream, requestParams, columns)
{
saveParams = requestParams;
helper = new SaveAsExcelFileStreamWriterHelper(stream);
@@ -47,16 +51,13 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage
/// The entire list of columns for the result set. They will be filtered down as per the
/// request params.
/// </param>
public override void WriteRow(IList<DbCellValue> row, IList<DbColumnWrapper> columns)
public override void WriteRow(IList<DbCellValue> row, IReadOnlyList<DbColumnWrapper> columns)
{
int columnStart = ColumnStartIndex ?? 0;
int columnEnd = (ColumnEndIndex != null) ? ColumnEndIndex.Value + 1 : columns.Count;
// Write out the header if we haven't already and the user chose to have it
if (saveParams.IncludeHeaders && !headerWritten)
{
sheet.AddRow();
for (int i = columnStart; i < columnEnd; i++)
for (int i = ColumnStartIndex; i <= ColumnEndIndex; i++)
{
sheet.AddCell(columns[i].ColumnName);
}
@@ -64,7 +65,7 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage
}
sheet.AddRow();
for (int i = columnStart; i < columnEnd; i++)
for (int i = ColumnStartIndex; i <= ColumnEndIndex; i++)
{
sheet.AddCell(row[i]);
}

View File

@@ -1,9 +1,10 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//
using System;
using System.Collections.Generic;
using System.IO;
using Microsoft.SqlTools.ServiceLayer.QueryExecution.Contracts;
using Microsoft.SqlTools.ServiceLayer.SqlContext;
@@ -52,10 +53,18 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage
/// Returns a new JSON writer for writing results to a JSON file, file share is ReadWrite to allow concurrent reads/writes to the file.
/// </summary>
/// <param name="fileName">Path to the JSON output file</param>
/// <param name="columns">
/// The entire list of columns for the result set. They will be filtered down as per the
/// request params.
/// </param>
/// <returns>Stream writer</returns>
public IFileStreamWriter GetWriter(string fileName)
public IFileStreamWriter GetWriter(string fileName, IReadOnlyList<DbColumnWrapper> columns)
{
return new SaveAsJsonFileStreamWriter(new FileStream(fileName, FileMode.Create, FileAccess.ReadWrite, FileShare.ReadWrite), SaveRequestParams);
return new SaveAsJsonFileStreamWriter(
new FileStream(fileName, FileMode.Create, FileAccess.ReadWrite, FileShare.ReadWrite),
SaveRequestParams,
columns
);
}
/// <summary>

View File

@@ -1,4 +1,4 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//
@@ -33,8 +33,12 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage
/// </summary>
/// <param name="stream">FileStream to access the JSON file output</param>
/// <param name="requestParams">JSON save as request parameters</param>
public SaveAsJsonFileStreamWriter(Stream stream, SaveResultsRequestParams requestParams)
: base(stream, requestParams)
/// <param name="columns">
/// The entire list of columns for the result set. They will be filtered down as per the
/// request params.
/// </param>
public SaveAsJsonFileStreamWriter(Stream stream, SaveResultsRequestParams requestParams, IReadOnlyList<DbColumnWrapper> columns)
: base(stream, requestParams, columns)
{
// Setup the internal state
streamWriter = new StreamWriter(stream);
@@ -53,15 +57,13 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage
/// The entire list of columns for the result set. They will be filtered down as per the
/// request params.
/// </param>
public override void WriteRow(IList<DbCellValue> row, IList<DbColumnWrapper> columns)
public override void WriteRow(IList<DbCellValue> row, IReadOnlyList<DbColumnWrapper> columns)
{
// Write the header for the object
jsonWriter.WriteStartObject();
// Write the items out as properties
int columnStart = ColumnStartIndex ?? 0;
int columnEnd = (ColumnEndIndex != null) ? ColumnEndIndex.Value + 1 : columns.Count;
for (int i = columnStart; i < columnEnd; i++)
for (int i = ColumnStartIndex; i <= ColumnEndIndex; i++)
{
jsonWriter.WritePropertyName(columns[i].ColumnName);
if (row[i].RawObject == null)

View File

@@ -1,4 +1,4 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//
@@ -7,6 +7,7 @@ using System;
using System.Collections.Generic;
using System.IO;
using Microsoft.SqlTools.ServiceLayer.QueryExecution.Contracts;
using Microsoft.SqlTools.Utility;
namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage
{
@@ -21,18 +22,31 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage
/// </summary>
/// <param name="stream">The stream that will be written to</param>
/// <param name="requestParams">The SaveAs request parameters</param>
protected SaveAsStreamWriter(Stream stream, SaveResultsRequestParams requestParams)
/// <param name="columns">
/// The entire list of columns for the result set. Used to determine which columns to
/// output.
/// </param>
protected SaveAsStreamWriter(Stream stream, SaveResultsRequestParams requestParams, IReadOnlyList<DbColumnWrapper> columns)
{
Validate.IsNotNull(nameof(stream), stream);
Validate.IsNotNull(nameof(columns), columns);
FileStream = stream;
var saveParams = requestParams;
if (requestParams.IsSaveSelection)
{
// ReSharper disable PossibleInvalidOperationException IsSaveSelection verifies these values exist
ColumnStartIndex = saveParams.ColumnStartIndex.Value;
ColumnEndIndex = saveParams.ColumnEndIndex.Value;
ColumnCount = saveParams.ColumnEndIndex.Value - saveParams.ColumnStartIndex.Value + 1;
ColumnStartIndex = requestParams.ColumnStartIndex.Value;
ColumnEndIndex = requestParams.ColumnEndIndex.Value;
// ReSharper restore PossibleInvalidOperationException
}
else
{
// Save request was for the entire result set, use default start/end
ColumnStartIndex = 0;
ColumnEndIndex = columns.Count - 1;
}
ColumnCount = ColumnEndIndex - ColumnStartIndex + 1;
}
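
For illustration, the window computed above (values assumed): a save-selection request with ColumnStartIndex = 1 and ColumnEndIndex = 2 writes columns 1 and 2 with ColumnCount = 2 - 1 + 1 = 2, while a full-result-set save over five columns yields ColumnStartIndex = 0, ColumnEndIndex = 4 (inclusive) and ColumnCount = 5; the writers iterate with for (int i = ColumnStartIndex; i <= ColumnEndIndex; i++).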
#region Properties
@@ -40,22 +54,22 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage
/// <summary>
/// Index of the first column to write to the output file
/// </summary>
protected int? ColumnStartIndex { get; private set; }
protected int ColumnStartIndex { get; }
/// <summary>
/// Number of columns to write to the output file
/// </summary>
protected int? ColumnCount { get; private set; }
protected int ColumnCount { get; }
/// <summary>
/// Index of the last column to write to the output file
/// Index of the last column to write to the output file (inclusive).
/// </summary>
protected int? ColumnEndIndex { get; private set; }
protected int ColumnEndIndex { get; }
/// <summary>
/// The file stream to use to write the output file
/// </summary>
protected Stream FileStream { get; private set; }
protected Stream FileStream { get; }
#endregion
@@ -73,7 +87,7 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage
/// </summary>
/// <param name="row">The row of data to output</param>
/// <param name="columns">The list of columns to output</param>
public abstract void WriteRow(IList<DbCellValue> row, IList<DbColumnWrapper> columns);
public abstract void WriteRow(IList<DbCellValue> row, IReadOnlyList<DbColumnWrapper> columns);
/// <summary>
/// Not implemented, do not use.

View File

@@ -1,9 +1,10 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//
using System;
using System.Collections.Generic;
using System.IO;
using Microsoft.SqlTools.ServiceLayer.QueryExecution.Contracts;
using Microsoft.SqlTools.ServiceLayer.SqlContext;
@@ -45,17 +46,28 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage
/// <returns>Stream reader</returns>
public IFileStreamReader GetReader(string fileName)
{
return new ServiceBufferFileStreamReader(new FileStream(fileName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite), QueryExecutionSettings);
return new ServiceBufferFileStreamReader(
new FileStream(fileName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite),
QueryExecutionSettings
);
}
/// <summary>
/// Returns a new XML writer for writing results to an XML file, file share is ReadWrite to allow concurrent reads/writes to the file.
/// </summary>
/// <param name="fileName">Path to the XML output file</param>
/// <param name="columns">
/// The entire list of columns for the result set. They will be filtered down as per the
/// request params.
/// </param>
/// <returns>Stream writer</returns>
public IFileStreamWriter GetWriter(string fileName)
public IFileStreamWriter GetWriter(string fileName, IReadOnlyList<DbColumnWrapper> columns)
{
return new SaveAsXmlFileStreamWriter(new FileStream(fileName, FileMode.Create, FileAccess.ReadWrite, FileShare.ReadWrite), SaveRequestParams);
return new SaveAsXmlFileStreamWriter(
new FileStream(fileName, FileMode.Create, FileAccess.ReadWrite, FileShare.ReadWrite),
SaveRequestParams,
columns
);
}
/// <summary>

View File

@@ -1,4 +1,4 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//
@@ -24,10 +24,10 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage
{
// Root element name for the output XML
private const string RootElementTag = "data";
// Item element name which will be used for every row
private const string ItemElementTag = "row";
#region Member Variables
private readonly XmlTextWriter xmlTextWriter;
@@ -39,8 +39,12 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage
/// </summary>
/// <param name="stream">FileStream to access the JSON file output</param>
/// <param name="requestParams">XML save as request parameters</param>
public SaveAsXmlFileStreamWriter(Stream stream, SaveResultsAsXmlRequestParams requestParams)
: base(stream, requestParams)
/// <param name="columns">
/// The entire list of columns for the result set. They will be filtered down as per the
/// request params.
/// </param>
public SaveAsXmlFileStreamWriter(Stream stream, SaveResultsAsXmlRequestParams requestParams, IReadOnlyList<DbColumnWrapper> columns)
: base(stream, requestParams, columns)
{
// Setup the internal state
var encoding = GetEncoding(requestParams);
@@ -60,19 +64,17 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage
/// The entire list of columns for the result set. They will be filtered down as per the
/// request params.
/// </param>
public override void WriteRow(IList<DbCellValue> row, IList<DbColumnWrapper> columns)
public override void WriteRow(IList<DbCellValue> row, IReadOnlyList<DbColumnWrapper> columns)
{
// Write the header for the object
xmlTextWriter.WriteStartElement(ItemElementTag);
// Write the items out as properties
int columnStart = ColumnStartIndex ?? 0;
int columnEnd = ColumnEndIndex + 1 ?? columns.Count;
for (int i = columnStart; i < columnEnd; i++)
for (int i = ColumnStartIndex; i <= ColumnEndIndex; i++)
{
// Write the column name as item tag
xmlTextWriter.WriteStartElement(columns[i].ColumnName);
if (row[i].RawObject != null)
{
xmlTextWriter.WriteString(row[i].DisplayValue);

View File

@@ -3,7 +3,9 @@
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//
using System.Collections.Generic;
using System.IO;
using Microsoft.SqlTools.ServiceLayer.QueryExecution.Contracts;
using Microsoft.SqlTools.ServiceLayer.SqlContext;
using Microsoft.SqlTools.ServiceLayer.Utility;
@@ -40,7 +42,10 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage
/// <returns>A <see cref="ServiceBufferFileStreamReader"/></returns>
public IFileStreamReader GetReader(string fileName)
{
return new ServiceBufferFileStreamReader(new FileStream(fileName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite), QueryExecutionSettings);
return new ServiceBufferFileStreamReader(
new FileStream(fileName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite),
QueryExecutionSettings
);
}
/// <summary>
@@ -48,10 +53,17 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage
/// SSMS formatted buffer file, file share is ReadWrite to allow concurrent reads/writes to the file.
/// </summary>
/// <param name="fileName">The file to write values to</param>
/// <param name="columns">
/// Ignored in order to fulfil the <see cref="IFileStreamFactory"/> contract.
/// @TODO: Refactor this out so that save-as writers do not use the same contract as service buffer writers.
/// </param>
/// <returns>A <see cref="ServiceBufferFileStreamWriter"/></returns>
public IFileStreamWriter GetWriter(string fileName)
public IFileStreamWriter GetWriter(string fileName, IReadOnlyList<DbColumnWrapper> columns)
{
return new ServiceBufferFileStreamWriter(new FileStream(fileName, FileMode.OpenOrCreate, FileAccess.ReadWrite, FileShare.ReadWrite), QueryExecutionSettings);
return new ServiceBufferFileStreamWriter(
new FileStream(fileName, FileMode.OpenOrCreate, FileAccess.ReadWrite, FileShare.ReadWrite),
QueryExecutionSettings
);
}
/// <summary>

View File

@@ -167,7 +167,7 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage
}
else
{
// not a long field
values[i] = reader.GetValue(i);
}
}
@@ -209,7 +209,7 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage
}
[Obsolete]
public void WriteRow(IList<DbCellValue> row, IList<DbColumnWrapper> columns)
public void WriteRow(IList<DbCellValue> row, IReadOnlyList<DbColumnWrapper> columns)
{
throw new InvalidOperationException("This type of writer is meant to write values from a DbDataReader only.");
}
@@ -442,7 +442,7 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage
// Convert to a unicode byte array
byte[] bytes = Encoding.Unicode.GetBytes(sVal);
// convert char array into byte array and write it out
iTotalLen = WriteLength(bytes.Length);
iTotalLen += FileUtilities.WriteWithLength(fileStream, bytes, bytes.Length);
}

View File

@@ -306,12 +306,12 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution
}
/// <summary>
/// Generates the execution plan from the table returned
/// </summary>
/// <returns>An execution plan object</returns>
public Task<Contracts.ExecutionPlan> GetExecutionPlan()
{
// Process the action just in case it hasn't been yet
ProcessSpecialAction();
// Sanity check to make sure that results read has started
@@ -319,7 +319,7 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution
{
throw new InvalidOperationException(SR.QueryServiceResultSetNotRead);
}
// Check that this result set contains a showplan
if (!specialAction.ExpectYukonXMLShowPlan)
{
throw new Exception(SR.QueryServiceExecutionPlanNotFound);
@@ -327,7 +327,7 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution
return Task.Factory.StartNew(() =>
{
string content;
string format = null;
@@ -336,12 +336,12 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution
// Determine the format and get the first col/row of XML
content = fileStreamReader.ReadRow(0, 0, Columns)[0].DisplayValue;
if (specialAction.ExpectYukonXMLShowPlan)
{
format = "xml";
}
}
return new Contracts.ExecutionPlan
{
Format = format,
@@ -371,7 +371,7 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution
// Open a writer for the file
//
var fileWriter = fileStreamFactory.GetWriter(outputFileName);
var fileWriter = fileStreamFactory.GetWriter(outputFileName, null);
using (fileWriter)
{
// If we can initialize the columns using the column schema, use that
@@ -456,7 +456,7 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution
}
/// <summary>
/// Updates the values in a row with the
/// </summary>
/// <param name="rowId"></param>
/// <param name="dbDataReader"></param>
@@ -528,7 +528,7 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution
}
using (var fileReader = fileFactory.GetReader(outputFileName))
using (var fileWriter = fileFactory.GetWriter(saveParams.FilePath))
using (var fileWriter = fileFactory.GetWriter(saveParams.FilePath, Columns))
{
// Iterate over the rows that are in the selected row set
for (long i = rowStartIndex; i < rowEndIndex; ++i)
@@ -551,13 +551,13 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution
}
}
});
// Add exception handling to the save task
Task taskWithHandling = saveAsTask.ContinueWithOnFaulted(async t =>
{
if (failureHandler != null)
{
await failureHandler(saveParams, t.Exception.Message);
await failureHandler(saveParams, t.Exception?.Message);
}
});
@@ -691,7 +691,7 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution
}
}
finally
{
// Release the sendResultsSemphore so the next invocation gets unblocked
//
sendResultsSemphore.Release();
@@ -706,7 +706,7 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution
/// <summary>
/// If the result set represented by this class corresponds to a single XML
/// column that contains results of "for xml" query, set isXml = true
/// If the result set represented by this class corresponds to a single JSON
/// column that contains results of "for json" query, set isJson = true
/// </summary>
@@ -755,10 +755,10 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution
/// <summary>
/// Determine the special action, if any, for this result set
/// </summary>
private SpecialAction ProcessSpecialAction()
{
// Check if this result set is a showplan
if (Columns.Length == 1 && string.Compare(Columns[0].ColumnName, YukonXmlShowPlanColumn, StringComparison.OrdinalIgnoreCase) == 0)
{
specialAction.ExpectYukonXMLShowPlan = true;
@@ -780,7 +780,7 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution
{
throw new InvalidOperationException(SR.QueryServiceResultSetNotRead);
}
// NOTE: We are no longer checking to see if the data reader has rows before reading
// b/c of a quirk in SqlClient. In some scenarios, a SqlException isn't thrown until we
// read. In order to get appropriate errors back to the user, we'll read first.
// Returning false from .ReadAsync means there aren't any rows.
@@ -791,7 +791,7 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution
{
throw new InvalidOperationException(SR.QueryServiceResultSetAddNoRows);
}
using (IFileStreamWriter writer = fileStreamFactory.GetWriter(outputFileName))
{
// Write the row to the end of the file

View File

@@ -72,7 +72,7 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution
{
inProgressSerializations.AddOrUpdate(serializer.FilePath, serializer, (key, old) => serializer);
}
Logger.Write(TraceEventType.Verbose, "HandleSerializeStartRequest");
SerializeDataResult result = serializer.ProcessRequest(serializeParams);
await requestContext.SendResult(result);
@@ -153,7 +153,7 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution
{
private IFileStreamWriter writer;
private SerializeDataStartRequestParams requestParams;
private IList<DbColumnWrapper> columns;
private IReadOnlyList<DbColumnWrapper> columns;
public string FilePath { get; private set; }
@@ -164,7 +164,7 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution
this.FilePath = requestParams.FilePath;
}
private IList<DbColumnWrapper> MapColumns(ColumnInfo[] columns)
private IReadOnlyList<DbColumnWrapper> MapColumns(ColumnInfo[] columns)
{
List<DbColumnWrapper> columnWrappers = new List<DbColumnWrapper>();
foreach (ColumnInfo column in columns)
@@ -258,7 +258,7 @@ namespace Microsoft.SqlTools.ServiceLayer.QueryExecution
default:
throw new Exception(SR.SerializationServiceUnsupportedFormat(this.requestParams.SaveFormat));
}
this.writer = factory.GetWriter(requestParams.FilePath);
this.writer = factory.GetWriter(requestParams.FilePath, columns);
}
}
public void CloseStreams()

View File

@@ -5,7 +5,9 @@
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.IO;
using Microsoft.SqlTools.ServiceLayer.QueryExecution.Contracts;
using Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage;
using Microsoft.SqlTools.ServiceLayer.SqlContext;
using Moq;
@@ -32,8 +34,8 @@ namespace Microsoft.SqlTools.ServiceLayer.Test.Common
});
mock.Setup(fsf => fsf.GetReader(It.IsAny<string>()))
.Returns<string>(output => new ServiceBufferFileStreamReader(new MemoryStream(storage[output]), new QueryExecutionSettings()));
mock.Setup(fsf => fsf.GetWriter(It.IsAny<string>()))
.Returns<string>(output => new ServiceBufferFileStreamWriter(new MemoryStream(storage[output]), new QueryExecutionSettings()));
mock.Setup(fsf => fsf.GetWriter(It.IsAny<string>(), It.IsAny<IReadOnlyList<DbColumnWrapper>>()))
.Returns<string, IReadOnlyList<DbColumnWrapper>>((output, _) => new ServiceBufferFileStreamWriter(new MemoryStream(storage[output]), new QueryExecutionSettings()));
return mock.Object;
}

View File

@@ -138,7 +138,7 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.EditData
s.EditCache[rs.RowCount] = mockEdit;
// If: I create a row in the session
// Then:
// ... An exception should be thrown
Assert.Throws<InvalidOperationException>(() => s.CreateRow());
@@ -321,7 +321,7 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.EditData
Mock<IEditMetadataFactory> emf = new Mock<IEditMetadataFactory>();
EditSession s = new EditSession(emf.Object);
Assert.That(() => s.Initialize(initParams, c, qr, sh, fh), Throws.InstanceOf<ArgumentException>(), "I initialize it with a missing parameter. It should throw an exception");
Assert.Catch<ArgumentException>(() => s.Initialize(initParams, c, qr, sh, fh), "I initialize it with a missing parameter. It should throw an exception");
}
public static IEnumerable<object[]> InitializeNullParamsData
@@ -551,7 +551,7 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.EditData
s.EditCache[0] = mockEdit;
// If: I delete a row in the session
// Then:
// ... An exception should be thrown
Assert.Throws<InvalidOperationException>(() => s.DeleteRow(0));
@@ -680,7 +680,7 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.EditData
// If: I update a cell on a row that already has a pending edit
s.UpdateCell(0, 0, null);
// Then:
// ... The mock update should still be in the cache
// ... And it should have had set cell called on it
Assert.That(s.EditCache.Values, Has.Member(mockEdit.Object));
@@ -697,10 +697,10 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.EditData
s.UpdateCell(0, 0, "");
// Then:
Assert.Multiple(() =>
{
Assert.That(s.EditCache.Keys, Has.Member(0));
Assert.That(s.EditCache[0], Is.InstanceOf<RowUpdate>(), "A new update row edit should have been added to the cache");
});
}
@@ -943,7 +943,7 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.EditData
// If: I script the edit cache to a local output path
string outputPath = s.ScriptEdits(file.FilePath);
// Then:
// ... The output path used should be the same as the one we provided
Assert.AreEqual(file.FilePath, outputPath);
@@ -984,7 +984,7 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.EditData
[Test]
public async Task CommitNullSuccessHandler()
{
// Setup:
// ... Create a basic session
EditSession s = await GetBasicSession();
@@ -999,7 +999,7 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.EditData
[Test]
public async Task CommitNullFailureHandler()
{
// Setup:
// ... Create a basic session
EditSession s = await GetBasicSession();
@@ -1014,7 +1014,7 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.EditData
[Test]
public async Task CommitInProgress()
{
// Setup:
// ... Basic session and db connection
EditSession s = await GetBasicSession();
DbConnection conn = new TestSqlConnection(null);
@@ -1046,7 +1046,7 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.EditData
// If: I commit these changes (and await completion)
bool successCalled = false;
bool failureCalled = false;
s.CommitEdits(conn,
() => {
successCalled = true;
return Task.FromResult(0);

View File

@@ -1,11 +1,11 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
using Microsoft.SqlTools.ServiceLayer.QueryExecution.Contracts;
@@ -18,144 +18,247 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.DataStorage
public class SaveAsCsvFileStreamWriterTests
{
[Test]
public void EncodeCsvFieldShouldWrap(
[Values("Something\rElse",
"Something\nElse",
"Something\"Else",
"Something,Else",
"\tSomething",
"Something\t",
" Something",
"Something ",
" \t\r\n\",\r\n\"\r ")] string field)
public void Constructor_NullStream()
{
// If: I CSV encode a field that has forbidden characters in it
string output = SaveAsCsvFileStreamWriter.EncodeCsvField(field, ',', '\"');
// Act
TestDelegate action = () => _ = new SaveAsCsvFileStreamWriter(
null,
new SaveResultsAsCsvRequestParams(),
Array.Empty<DbColumnWrapper>()
);
// Then: It should wrap it in quotes
Assert.True(Regex.IsMatch(output, "^\".*")
&& Regex.IsMatch(output, ".*\"$"));
// Assert
Assert.Throws<ArgumentNullException>(action);
}
[Test]
public void EncodeCsvFieldShouldNotWrap(
[Values(
"Something",
"Something valid.",
"Something\tvalid"
)] string field)
public void Constructor_NullColumns()
{
// Act
TestDelegate action = () => _ = new SaveAsCsvFileStreamWriter(
Stream.Null,
new SaveResultsAsCsvRequestParams(),
null
);
// Assert
Assert.Throws<ArgumentNullException>(action);
}
[Test]
public void Constructor_WithoutSelectionWithHeader_WritesHeaderWithAllColumns()
{
// Setup:
// ... Create a request params that has no selection made, headers should be printed
// ... Create a set of columns
// --- Create a memory location to store the output
var requestParams = new SaveResultsAsCsvRequestParams { IncludeHeaders = true };
var (columns, _) = GetTestValues(2);
using var outputStream = new MemoryStream();
byte[] output = new byte[8192];
// If: I construct a CSV file writer
using var writer = new SaveAsCsvFileStreamWriter(new MemoryStream(output), requestParams, columns);
writer.Dispose();
// Then:
// ... It should have written a line
string[] lines = ParseWriterOutput(output, Environment.NewLine);
Assert.AreEqual(1, lines.Length);
// ... It should have written a header line with two comma separated names
string[] headerValues = lines[0].Split(",");
Assert.AreEqual(2, headerValues.Length);
for (int i = 0; i < columns.Length; i++)
{
Assert.AreEqual(columns[i].ColumnName, headerValues[i]);
}
}
[Test]
public void Constructor_WithSelectionWithHeader_WritesHeaderWithSelectedColumns()
{
// Setup:
// ... Create a request params that has a column selection made, headers should be printed
// ... Create a set of columns
// --- Create a memory location to store the output
var requestParams = new SaveResultsAsCsvRequestParams
{
IncludeHeaders = true,
ColumnStartIndex = 1,
ColumnEndIndex = 2,
RowStartIndex = 0, // Including b/c it is required to be a "save selection"
RowEndIndex = 10
};
var (columns, _) = GetTestValues(4);
using var outputStream = new MemoryStream();
byte[] output = new byte[8192];
// If: I construct a CSV file writer
using var writer = new SaveAsCsvFileStreamWriter(new MemoryStream(output), requestParams, columns);
writer.Dispose();
// Then:
// ... It should have written a line
string[] lines = ParseWriterOutput(output, Environment.NewLine);
Assert.AreEqual(1, lines.Length);
// ... It should have written a header line with two comma separated names
string[] headerValues = lines[0].Split(",");
Assert.AreEqual(2, headerValues.Length);
for (int i = 0; i < 2; i++)
{
Assert.AreEqual(columns[i + 1].ColumnName, headerValues[i]);
}
}
[Test]
public void Constructor_WithoutSelectionWithoutHeader_DoesNotWriteHeader()
{
// Setup:
// ... Create a request params that has no selection made, headers should not be printed
// ... Create a set of columns
// --- Create a memory location to store the output
var requestParams = new SaveResultsAsCsvRequestParams { IncludeHeaders = false };
var (columns, _) = GetTestValues(2);
byte[] output = new byte[8192];
// If: I construct a CSV file writer
using var writer = new SaveAsCsvFileStreamWriter(new MemoryStream(output), requestParams, columns);
writer.Dispose();
// Then:
// ... It should not have written anything
string[] lines = ParseWriterOutput(output, Environment.NewLine);
Assert.IsEmpty(lines);
}
[TestCase("Something\rElse")] // Contains carriage return
[TestCase("Something\nElse")] // Contains line feed
[TestCase("Something\"Else")] // Contains default text identifier
[TestCase("Something,Else")] // Contains field separator
public void EncodeCsvField_ContainsDefaultControlCharacters_ShouldBeWrapped(string field)
{
// Setup: Create CsvFileStreamWriter using default control characters
using var writer = GetWriterForEncodingTests(null, null, null);
// If: I CSV encode a field that has forbidden characters in it
string output = writer.EncodeCsvField(field);
// Then: It should wrap it in quotes
Assert.True(Regex.IsMatch(output, "^\".*\"$", RegexOptions.Singleline));
}
[TestCase("Something\rElse")] // Contains carriage return [TODO: Don't support this]
[TestCase("Something\nElse")] // Contains line feed [TODO: Don't support this]
[TestCase("Something[Else")] // Contains default text identifier
[TestCase("Something$Else")] // Contains field separator
//[TestCase("Something||Else")] // Contains line break [TODO: Support this]
public void EncodeCsvField_ContainsNonDefaultControlCharacters_ShouldBeWrapped(string field)
{
// Setup: Create CsvFileStreamWriter using non-default control characters
var writer = GetWriterForEncodingTests("$foo", "[bar", "||");
// If: I CSV encode a field that has forbidden characters in it
string output = writer.EncodeCsvField(field);
// Then: It should wrap it in quotes
Assert.True(Regex.IsMatch(output, @"^\[.*\[$", RegexOptions.Singleline));
}
[TestCase("\tSomething")] // Starts with tab
[TestCase("Something\t")] // Ends with tab
[TestCase("\rSomething")] // Starts with carriage return
[TestCase("Something\r")] // Ends with carriage return
[TestCase("\nSomething")] // Starts with line feed
[TestCase("Something\n")] // Ends with line feed
[TestCase(" Something")] // Starts with space
[TestCase("Something ")] // Ends with space
[TestCase(" Something ")] // Starts and ends with space
public void EncodeCsvField_WhitespaceAtFrontOrBack_ShouldBeWrapped(string field)
{
// Setup: Create CsvFileStreamWriter that specifies the text identifier and field separator
var writer = GetWriterForEncodingTests(null, null, null);
// If: I CSV encode a field that has forbidden characters in it
string output = writer.EncodeCsvField(field);
// Then: It should wrap it in quotes
Assert.True(Regex.IsMatch(output, "^\".*\"$", RegexOptions.Singleline));
}
[TestCase("Something")]
[TestCase("Something valid.")]
[TestCase("Something\tvalid")]
public void EncodeCsvField_ShouldNotWrap(string field)
{
// Setup: Create CsvFileStreamWriter that specifies the text identifier and field separator
var writer = GetWriterForEncodingTests(null, null, null);
// If: I CSV encode a field that does not have forbidden characters in it
string output = SaveAsCsvFileStreamWriter.EncodeCsvField(field, ',', '\"');
string output = writer.EncodeCsvField(field);
// Then: It should not wrap it in quotes
Assert.False(Regex.IsMatch(output, "^\".*\"$"));
}
[Test]
public void EncodeCsvFieldReplace()
[TestCase(null, "Some\"thing", "\"Some\"\"thing\"")] // Default identifier
[TestCase("|$", "Some|thing", "|Some||thing|")] // Custom identifier
public void EncodeCsvField_ContainsTextIdentifier_DoublesIdentifierAndWraps(
string configuredIdentifier,
string input,
string expectedOutput)
{
// Setup: Create CsvFileStreamWriter that specifies the text identifier and field separator
var writer = GetWriterForEncodingTests(null, configuredIdentifier, null);
// If: I CSV encode a field that has a double quote in it,
string output = SaveAsCsvFileStreamWriter.EncodeCsvField("Some\"thing", ',', '\"');
string output = writer.EncodeCsvField(input);
// Then: It should be replaced with double double quotes
Assert.AreEqual("\"Some\"\"thing\"", output);
Assert.AreEqual(expectedOutput, output);
}
[Test]
public void EncodeCsvFieldNull()
public void EncodeCsvField_Null()
{
// Setup: Create CsvFileStreamWriter
var writer = GetWriterForEncodingTests(null, null, null);
// If: I CSV encode a null
string output = SaveAsCsvFileStreamWriter.EncodeCsvField(null, ',', '\"');
string output = writer.EncodeCsvField(null);
// Then: there should be a string version of null returned
Assert.AreEqual("NULL", output);
}
[Test]
public void WriteRowWithoutColumnSelectionOrHeader()
public void WriteRow_WithoutColumnSelection()
{
// Setup:
// ... Create a request params that has no selection made
// ... Create a set of data to write
// ... Create a memory location to store the data
var requestParams = new SaveResultsAsCsvRequestParams();
List<DbCellValue> data = new List<DbCellValue>
{
new DbCellValue { DisplayValue = "item1" },
new DbCellValue { DisplayValue = "item2" }
};
List<DbColumnWrapper> columns = new List<DbColumnWrapper>
{
new DbColumnWrapper(new TestDbColumn("column1")),
new DbColumnWrapper(new TestDbColumn("column2"))
};
var (columns, data) = GetTestValues(2);
byte[] output = new byte[8192];
// If: I write a row
SaveAsCsvFileStreamWriter writer = new SaveAsCsvFileStreamWriter(new MemoryStream(output), requestParams);
using (writer)
using (var writer = new SaveAsCsvFileStreamWriter(new MemoryStream(output), requestParams, columns))
{
writer.WriteRow(data, columns);
}
// Then: It should write one line with 2 items, comma delimited
string outputString = Encoding.UTF8.GetString(output).TrimEnd('\0', '\r', '\n');
string[] lines = outputString.Split(new[] { Environment.NewLine }, StringSplitOptions.None);
string[] lines = ParseWriterOutput(output, Environment.NewLine);
Assert.AreEqual(1, lines.Length);
string[] values = lines[0].Split(',');
Assert.AreEqual(2, values.Length);
}
[Test]
public void WriteRowWithHeader()
{
// Setup:
// ... Create a request params that has no selection made, headers should be printed
// ... Create a set of data to write
// ... Create a memory location to store the data
var requestParams = new SaveResultsAsCsvRequestParams
{
IncludeHeaders = true
};
List<DbCellValue> data = new List<DbCellValue>
{
new DbCellValue { DisplayValue = "item1" },
new DbCellValue { DisplayValue = "item2" }
};
List<DbColumnWrapper> columns = new List<DbColumnWrapper>
{
new DbColumnWrapper(new TestDbColumn("column1")),
new DbColumnWrapper(new TestDbColumn("column2"))
};
byte[] output = new byte[8192];
// If: I write a row
SaveAsCsvFileStreamWriter writer = new SaveAsCsvFileStreamWriter(new MemoryStream(output), requestParams);
using (writer)
{
writer.WriteRow(data, columns);
}
// Then:
// ... It should have written two lines
string outputString = Encoding.UTF8.GetString(output).TrimEnd('\0', '\r', '\n');
string[] lines = outputString.Split(new[] { Environment.NewLine }, StringSplitOptions.None);
Assert.AreEqual(2, lines.Length);
// ... It should have written a header line with two, comma separated names
string[] headerValues = lines[0].Split(',');
Assert.AreEqual(2, headerValues.Length);
for (int i = 0; i < columns.Count; i++)
{
Assert.AreEqual(columns[i].ColumnName, headerValues[i]);
}
// Note: No need to check values, it is done as part of the previous test
}
[Test]
public void WriteRowWithColumnSelection()
public void WriteRow_WithColumnSelection()
{
// Setup:
// ... Create a request params that selects n-1 columns from the front and back
@@ -166,48 +269,25 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.DataStorage
ColumnStartIndex = 1,
ColumnEndIndex = 2,
RowStartIndex = 0, // Including b/c it is required to be a "save selection"
RowEndIndex = 10,
IncludeHeaders = true // Including headers to test both column selection logic
};
List<DbCellValue> data = new List<DbCellValue>
{
new DbCellValue { DisplayValue = "item1" },
new DbCellValue { DisplayValue = "item2" },
new DbCellValue { DisplayValue = "item3" },
new DbCellValue { DisplayValue = "item4" }
};
List<DbColumnWrapper> columns = new List<DbColumnWrapper>
{
new DbColumnWrapper(new TestDbColumn("column1")),
new DbColumnWrapper(new TestDbColumn("column2")),
new DbColumnWrapper(new TestDbColumn("column3")),
new DbColumnWrapper(new TestDbColumn("column4"))
RowEndIndex = 10
};
var (columns, data) = GetTestValues(4);
byte[] output = new byte[8192];
// If: I write a row
SaveAsCsvFileStreamWriter writer = new SaveAsCsvFileStreamWriter(new MemoryStream(output), requestParams);
SaveAsCsvFileStreamWriter writer = new SaveAsCsvFileStreamWriter(new MemoryStream(output), requestParams, columns);
using (writer)
{
writer.WriteRow(data, columns);
}
// Then:
// ... It should have written two lines
string outputString = Encoding.UTF8.GetString(output).TrimEnd('\0', '\r', '\n');
string[] lines = outputString.Split(new[] { Environment.NewLine }, StringSplitOptions.None);
Assert.AreEqual(2, lines.Length);
// ... It should have written one line
var lines = ParseWriterOutput(output, Environment.NewLine);
Assert.AreEqual(1, lines.Length);
// ... It should have written a header line with two, comma separated names
string[] headerValues = lines[0].Split(',');
Assert.AreEqual(2, headerValues.Length);
for (int i = 1; i <= 2; i++)
{
Assert.AreEqual(columns[i].ColumnName, headerValues[i - 1]);
}
// ... The second line should have two, comma separated values
string[] dataValues = lines[1].Split(',');
// ... The line should have two, comma separated values
string[] dataValues = lines[0].Split(',');
Assert.AreEqual(2, dataValues.Length);
for (int i = 1; i <= 2; i++)
{
@@ -216,7 +296,7 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.DataStorage
}
[Test]
public void WriteRowWithCustomDelimiters()
public void WriteRow_CustomDelimiter()
{
// Setup:
// ... Create a request params that has custom delimiter say pipe("|") then this delimiter should be used
@@ -227,35 +307,24 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.DataStorage
Delimiter = "|",
IncludeHeaders = true
};
List<DbCellValue> data = new List<DbCellValue>
{
new DbCellValue { DisplayValue = "item1" },
new DbCellValue { DisplayValue = "item2" }
};
List<DbColumnWrapper> columns = new List<DbColumnWrapper>
{
new DbColumnWrapper(new TestDbColumn("column1")),
new DbColumnWrapper(new TestDbColumn("column2"))
};
var (columns, data) = GetTestValues(2);
byte[] output = new byte[8192];
// If: I write a row
SaveAsCsvFileStreamWriter writer = new SaveAsCsvFileStreamWriter(new MemoryStream(output), requestParams);
using (writer)
using (var writer = new SaveAsCsvFileStreamWriter(new MemoryStream(output), requestParams, columns))
{
writer.WriteRow(data, columns);
}
// Then:
// ... It should have written two lines
string outputString = Encoding.UTF8.GetString(output).TrimEnd('\0', '\r', '\n');
string[] lines = outputString.Split(new[] { Environment.NewLine }, StringSplitOptions.None);
string[] lines = ParseWriterOutput(output, Environment.NewLine);
Assert.AreEqual(2, lines.Length);
// ... It should have written a header line with two, pipe("|") separated names
string[] headerValues = lines[0].Split('|');
Assert.AreEqual(2, headerValues.Length);
for (int i = 0; i < columns.Count; i++)
for (int i = 0; i < columns.Length; i++)
{
Assert.AreEqual(columns[i].ColumnName, headerValues[i]);
}
@@ -264,144 +333,49 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.DataStorage
}
[Test]
public void WriteRowsWithCustomLineSeperator()
public void WriteRow_CustomLineSeparator()
{
// Setup:
// ... Create a request params that has custom line seperator then this seperator should be used
// ... Create a request params that has custom line separator
// ... Create a set of data to write
// ... Create a memory location to store the data
var requestParams = new SaveResultsAsCsvRequestParams
{
LineSeperator = "$$",
IncludeHeaders = true
};
List<DbCellValue> data = new List<DbCellValue>
{
new DbCellValue { DisplayValue = "item1" },
new DbCellValue { DisplayValue = "item2" }
};
List<DbColumnWrapper> columns = new List<DbColumnWrapper>
{
new DbColumnWrapper(new TestDbColumn("column1")),
new DbColumnWrapper(new TestDbColumn("column2"))
};
byte[] output;
string outputString;
string[] lines;
SaveAsCsvFileStreamWriter writer;
// If: I set default seperator and write a row
requestParams.LineSeperator = null;
output = new byte[8192];
writer = new SaveAsCsvFileStreamWriter(new MemoryStream(output), requestParams);
using (writer)
{
writer.WriteRow(data, columns);
}
// Then:
// ... It should have splitten the lines by system's default line seperator
outputString = Encoding.UTF8.GetString(output).TrimEnd('\0', '\r', '\n');
lines = outputString.Split(new[] { Environment.NewLine }, StringSplitOptions.None);
Assert.AreEqual(2, lines.Length);
// If: I set \n (line feed) as seperator and write a row
requestParams.LineSeperator = "\n";
output = new byte[8192];
writer = new SaveAsCsvFileStreamWriter(new MemoryStream(output), requestParams);
using (writer)
{
writer.WriteRow(data, columns);
}
// Then:
// ... It should have splitten the lines by \n
outputString = Encoding.UTF8.GetString(output).TrimEnd('\0', '\r', '\n');
lines = outputString.Split(new[] { '\n' }, StringSplitOptions.None);
Assert.AreEqual(2, lines.Length);
// If: I set \r\n (carriage return + line feed) as seperator and write a row
requestParams.LineSeperator = "\r\n";
output = new byte[8192];
writer = new SaveAsCsvFileStreamWriter(new MemoryStream(output), requestParams);
using (writer)
{
writer.WriteRow(data, columns);
}
// Then:
// ... It should have splitten the lines by \r\n
outputString = Encoding.UTF8.GetString(output).TrimEnd('\0', '\r', '\n');
lines = outputString.Split(new[] { "\r\n" }, StringSplitOptions.None);
Assert.AreEqual(2, lines.Length);
}
[Test]
public void WriteRowWithCustomTextIdentifier()
{
// Setup:
// ... Create a request params that has a text identifier set say single quotation marks("'") then this text identifier should be used
// ... Create a set of data to write
// ... Create a memory location to store the data
var requestParams = new SaveResultsAsCsvRequestParams()
{
TextIdentifier = "\'",
Delimiter = ";"
};
List<DbCellValue> data = new List<DbCellValue>
{
new DbCellValue { DisplayValue = "item;1" },
new DbCellValue { DisplayValue = "item,2" },
new DbCellValue { DisplayValue = "item\"3" },
new DbCellValue { DisplayValue = "item\'4" }
};
List<DbColumnWrapper> columns = new List<DbColumnWrapper>
{
new DbColumnWrapper(new TestDbColumn("column1")),
new DbColumnWrapper(new TestDbColumn("column2")),
new DbColumnWrapper(new TestDbColumn("column3")),
new DbColumnWrapper(new TestDbColumn("column4"))
};
var (columns, data) = GetTestValues(2);
byte[] output = new byte[8192];
// If: I write a row
SaveAsCsvFileStreamWriter writer = new SaveAsCsvFileStreamWriter(new MemoryStream(output), requestParams);
using (writer)
// If: I set the custom separator and write a row
using (var writer = new SaveAsCsvFileStreamWriter(new MemoryStream(output), requestParams, columns))
{
writer.WriteRow(data, columns);
}
// Then:
// ... It should have split the columns by the delimiter, wrapping a field in the text identifier when it contains the delimiter or the text identifier
string outputString = Encoding.UTF8.GetString(output).TrimEnd('\0', '\r', '\n');
Assert.AreEqual("\'item;1\';item,2;item\"3;\'item\'\'4\'", outputString);
// ... The lines should be split by the custom line separator
var lines = ParseWriterOutput(output, "$$");
Assert.AreEqual(2, lines.Length);
}
[Test]
public void WriteRowWithCustomEncoding()
public void WriteRow_CustomEncoding()
{
// Setup:
// ... Create a request params that has custom delimiter say pipe("|") then this delimiter should be used
// ... Create a request params that uses a custom encoding
// ... Create a set of data to write
// ... Create a memory location to store the data
var requestParams = new SaveResultsAsCsvRequestParams
{
Encoding = "Windows-1252"
};
List<DbCellValue> data = new List<DbCellValue>
{
new DbCellValue { DisplayValue = "ü" }
};
List<DbColumnWrapper> columns = new List<DbColumnWrapper>
{
new DbColumnWrapper(new TestDbColumn("column1"))
};
var data = new[] { new DbCellValue { DisplayValue = "ü" } };
var columns = new[] { new DbColumnWrapper(new TestDbColumn("column1")) };
byte[] output = new byte[8192];
// If: I write a row
SaveAsCsvFileStreamWriter writer = new SaveAsCsvFileStreamWriter(new MemoryStream(output), requestParams);
using (writer)
using (var writer = new SaveAsCsvFileStreamWriter(new MemoryStream(output), requestParams, columns))
{
writer.WriteRow(data, columns);
}
@@ -414,5 +388,40 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.DataStorage
}
private static (DbColumnWrapper[] columns, DbCellValue[] cells) GetTestValues(int columnCount)
{
var data = new DbCellValue[columnCount];
var columns = new DbColumnWrapper[columnCount];
for (int i = 0; i < columnCount; i++)
{
data[i] = new DbCellValue { DisplayValue = $"item{i}"};
columns[i] = new DbColumnWrapper(new TestDbColumn($"column{i}"));
}
return (columns, data);
}
private static SaveAsCsvFileStreamWriter GetWriterForEncodingTests(string delimiter, string identifier, string lineSeparator)
{
var settings = new SaveResultsAsCsvRequestParams
{
Delimiter = delimiter,
IncludeHeaders = false,
LineSeperator = lineSeparator,
TextIdentifier = identifier,
};
var mockStream = Stream.Null;
var mockColumns = Array.Empty<DbColumnWrapper>();
return new SaveAsCsvFileStreamWriter(mockStream, settings, mockColumns);
}
private static string[] ParseWriterOutput(byte[] output, string lineSeparator)
{
string outputString = Encoding.UTF8.GetString(output).Trim('\0');
string[] lines = outputString.Split(new[] { lineSeparator }, StringSplitOptions.None);
// Make sure the file ends with a line separator and return only the meaningful lines
Assert.IsEmpty(lines[lines.Length - 1]);
return lines.Take(lines.Length - 1).ToArray();
}
}
}
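For orientation (not part of the diff), here is a minimal sketch of how a separator test reads once it is composed from the helpers introduced above — GetTestValues, the three-argument SaveAsCsvFileStreamWriter constructor, and ParseWriterOutput. The "$$" separator and the method name are illustrative values, not taken from the commit:

[Test]
public void WriteRow_ExampleCustomSeparator()
{
    // Setup: two columns/cells and request params that use "$$" as the line separator
    var requestParams = new SaveResultsAsCsvRequestParams
    {
        LineSeperator = "$$", // the contract property really is spelled "Seperator"
        IncludeHeaders = false
    };
    var (columns, data) = GetTestValues(2);
    byte[] output = new byte[8192];

    // If: I write a single row with the new three-argument writer
    using (var writer = new SaveAsCsvFileStreamWriter(new MemoryStream(output), requestParams, columns))
    {
        writer.WriteRow(data, columns);
    }

    // Then: the output should contain exactly one data line, terminated by "$$"
    string[] lines = ParseWriterOutput(output, "$$");
    Assert.AreEqual(1, lines.Length);
}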

View File

@@ -1,4 +1,4 @@
//
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//
@@ -27,7 +27,7 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.DataStorage
// If:
// ... I create and then destruct a json writer
var jsonWriter = new SaveAsJsonFileStreamWriter(new MemoryStream(output), saveParams);
var jsonWriter = new SaveAsJsonFileStreamWriter(new MemoryStream(output), saveParams, Array.Empty<DbColumnWrapper>());
jsonWriter.Dispose();
// Then:
@@ -59,7 +59,7 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.DataStorage
// If:
// ... I write two rows
var jsonWriter = new SaveAsJsonFileStreamWriter(new MemoryStream(output), saveParams);
var jsonWriter = new SaveAsJsonFileStreamWriter(new MemoryStream(output), saveParams, columns);
using (jsonWriter)
{
jsonWriter.WriteRow(data, columns);
@@ -117,7 +117,7 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.DataStorage
byte[] output = new byte[8192];
// If: I write two rows
var jsonWriter = new SaveAsJsonFileStreamWriter(new MemoryStream(output), saveParams);
var jsonWriter = new SaveAsJsonFileStreamWriter(new MemoryStream(output), saveParams, columns);
using (jsonWriter)
{
jsonWriter.WriteRow(data, columns);
@@ -158,7 +158,7 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.DataStorage
new DbCellValue {DisplayValue = "1", RawObject = 1},
new DbCellValue {DisplayValue = "1.234", RawObject = 1.234},
new DbCellValue {DisplayValue = "2017-07-08T00:00:00", RawObject = new DateTime(2017, 07, 08)},
};
List<DbColumnWrapper> columns = new List<DbColumnWrapper>
{
@@ -170,7 +170,7 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.DataStorage
// If:
// ... I write two rows
var jsonWriter = new SaveAsJsonFileStreamWriter(new MemoryStream(output), saveParams);
var jsonWriter = new SaveAsJsonFileStreamWriter(new MemoryStream(output), saveParams, columns);
using (jsonWriter)
{
jsonWriter.WriteRow(data, columns);

View File

@@ -0,0 +1,227 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//
using System;
using System.IO;
using System.Text;
using System.Xml;
using Microsoft.SqlTools.ServiceLayer.QueryExecution.Contracts;
using Microsoft.SqlTools.ServiceLayer.QueryExecution.DataStorage;
using Microsoft.SqlTools.ServiceLayer.UnitTests.Utility;
using NUnit.Framework;
namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.DataStorage
{
[TestFixture]
public class SaveAsXmlFileStreamWriterTests
{
[TestCase(false)]
[TestCase(true)]
public void ConstructAndDispose(bool formatted)
{
// Setup: Create test request and storage for the output
var saveParams = new SaveResultsAsXmlRequestParams { Formatted = formatted };
var columns = Array.Empty<DbColumnWrapper>();
var output = new byte[8192];
// If: I create and dispose of an XML file writer
var xmlWriter = new SaveAsXmlFileStreamWriter(new MemoryStream(output), saveParams, columns);
xmlWriter.Dispose();
// Then:
// ... The output should be just the XML declaration and the root node
var rootNode = ParseOutput(output, Encoding.UTF8);
Assert.IsEmpty(rootNode.ChildNodes);
// ... If the output is formatted, there should be multiple lines
// otherwise, there should be only one line
if (formatted)
{
CollectionAssert.Contains(output, (byte)'\n');
}
else
{
CollectionAssert.DoesNotContain(output, (byte)'\n');
}
}
[Test]
public void WriteRow_WithoutColumnSelection()
{
// Setup:
// ... Create request params that has no selection made
// ... Create a set of data to write
// ... Create storage for the output
var saveParams = new SaveResultsAsXmlRequestParams();
var data = new[]
{
new DbCellValue { DisplayValue = "item1", RawObject = "item1" },
new DbCellValue { DisplayValue = "null", RawObject = null }
};
var columns = new[]
{
new DbColumnWrapper(new TestDbColumn("column1")),
new DbColumnWrapper(new TestDbColumn("column2"))
};
var output = new byte[8192];
// If: I write two rows
using (var xmlWriter = new SaveAsXmlFileStreamWriter(new MemoryStream(output), saveParams, columns))
{
xmlWriter.WriteRow(data, columns);
xmlWriter.WriteRow(data, columns);
}
// Then:
// ... XML should be well formed
var rootNode = ParseOutput(output, Encoding.UTF8);
// ... Data node should have two nodes for the two rows
Assert.AreEqual(2, rootNode.ChildNodes.Count);
for (int i = 0; i < 2; i++)
{
// ... Each row should have two nodes for the two cells
var row = rootNode.ChildNodes[i];
Assert.IsNotNull(row);
Assert.AreEqual(2, row.ChildNodes.Count);
for (int j = 0; j < 2; j++)
{
var cell = row.ChildNodes[j];
Assert.IsNotNull(cell);
// ... Node name should be column name
Assert.AreEqual(columns[j].ColumnName, cell.Name);
// ... Node value should be cell value
if (data[j].RawObject == null)
{
Assert.IsEmpty(cell.InnerText);
}
else
{
Assert.AreEqual(data[j].DisplayValue, cell.InnerText);
}
}
}
}
[Test]
public void WriteRow_WithColumnSelection()
{
// Setup:
// ... Create request params that has a selection made
// ... Create a set of data to write
// ... Create storage for the output
var saveParams = new SaveResultsAsXmlRequestParams
{
ColumnEndIndex = 2,
ColumnStartIndex = 1,
RowEndIndex = 0, // Required for being considered a "selection"
RowStartIndex = 0
};
var data = new[]
{
new DbCellValue { DisplayValue = "foo" },
new DbCellValue { DisplayValue = "item1", RawObject = "item1" },
new DbCellValue { DisplayValue = "null", RawObject = null },
new DbCellValue { DisplayValue = "bar" }
};
var columns = new[]
{
new DbColumnWrapper(new TestDbColumn("ignoredCol")),
new DbColumnWrapper(new TestDbColumn("column1")),
new DbColumnWrapper(new TestDbColumn("column2")),
new DbColumnWrapper(new TestDbColumn("ignoredCol"))
};
var output = new byte[8192];
// If: I write two rows
using (var xmlWriter = new SaveAsXmlFileStreamWriter(new MemoryStream(output), saveParams, columns))
{
xmlWriter.WriteRow(data, columns);
xmlWriter.WriteRow(data, columns);
}
// Then:
// ... XML should be well formed
var rootNode = ParseOutput(output, Encoding.UTF8);
// ... Data node should have two nodes for the two rows
Assert.AreEqual(2, rootNode.ChildNodes.Count);
for (int i = 0; i < 2; i++)
{
// ... Each row should have two nodes for the two cells
var row = rootNode.ChildNodes[i];
Assert.IsNotNull(row);
Assert.AreEqual(2, row.ChildNodes.Count);
for (int j = 0; j < 2; j++)
{
var cell = row.ChildNodes[j];
var columnIndex = j + 1;
Assert.IsNotNull(cell);
// ... Node name should be column name
Assert.AreEqual(columns[columnIndex].ColumnName, cell.Name);
// ... Node value should be cell value
if (data[columnIndex].RawObject == null)
{
Assert.IsEmpty(cell.InnerText);
}
else
{
Assert.AreEqual(data[columnIndex].DisplayValue, cell.InnerText);
}
}
}
}
[Test]
public void WriteRow_NonDefaultEncoding()
{
// Setup:
// ... Create request params that uses a special encoding
// ... Create a set of data to write
// ... Create storage for the output
var saveParams = new SaveResultsAsXmlRequestParams { Encoding = "Windows-1252" };
var data = new[] { new DbCellValue { DisplayValue = "ü", RawObject = "ü" } };
var columns = new[] { new DbColumnWrapper(new TestDbColumn("column1")) };
byte[] output = new byte[8192];
// If: I write the row
using (var xmlWriter = new SaveAsXmlFileStreamWriter(new MemoryStream(output), saveParams, columns))
{
xmlWriter.WriteRow(data, columns);
}
// Then:
// ... The XML file should have been written properly in windows-1252 encoding
Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);
var encoding = Encoding.GetEncoding("Windows-1252");
var rootNode = ParseOutput(output, encoding);
// ... The umlaut should be written using Windows-1252
Assert.IsNotNull(rootNode.ChildNodes[0]); // <row>
Assert.IsNotNull(rootNode.ChildNodes[0].ChildNodes[0]); // <column1>
Assert.AreEqual("ü", rootNode.ChildNodes[0].ChildNodes[0].InnerText);
}
private XmlNode ParseOutput(byte[] bytes, Encoding encoding)
{
var outputString = encoding.GetString(bytes)
.TrimStart(encoding.GetString(encoding.Preamble).ToCharArray()) // Trim any BOM
.Trim('\0');
var xmlDoc = new XmlDocument();
xmlDoc.LoadXml(outputString);
// Assert: Two elements at the root, XML and the root node
Assert.AreEqual(2, xmlDoc.ChildNodes.Count);
Assert.AreEqual("xml", xmlDoc.ChildNodes[0]?.Name);
Assert.AreEqual("data", xmlDoc.ChildNodes[1]?.Name);
return xmlDoc.ChildNodes[1];
}
}
}
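The non-default-encoding test above depends on the legacy code-page encodings being available. On .NET Core these come from the System.Text.Encoding.CodePages package rather than the framework itself (an assumption about this project's package references); the registration pattern, as a sketch, is:

using System.Text;

// Register the code-page provider once per process; without it,
// Encoding.GetEncoding("Windows-1252") fails on .NET Core.
Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);
Encoding windows1252 = Encoding.GetEncoding("Windows-1252");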

View File

@@ -1,4 +1,4 @@
//
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//
@@ -159,7 +159,7 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.Execution
// Setup: Create a new result set with valid db data reader
var fileStreamFactory = MemoryFileSystem.GetFileStreamFactory();
ResultSet resultSet = new ResultSet(Common.Ordinal, Common.Ordinal, fileStreamFactory);
Assert.That(() => testMethod(resultSet), Throws.InstanceOf<Exception>(), "I have a result set that has not been read. I attempt to call a method on it. It should throw an exception");
Assert.Catch(() => testMethod(resultSet), "I have a result set that has not been read. I attempt to call a method on it. It should throw an exception");
}
public static IEnumerable<object[]> CallMethodWithoutReadingData
@@ -200,7 +200,7 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.Execution
$"Complete Check failed.\r\n\t\t resultSummariesFromUpdatedCallback:{string.Join("\r\n\t\t\t", resultSummariesFromUpdatedCallback)}");
// ... The no of rows in the final updateResultSet/AvailableResultSet should be equal to that in the Complete Result Set.
//
Assert.True(resultSummaryFromCompleteCallback.RowCount == resultSummariesFromUpdatedCallback.Last().RowCount,
$"The row counts of the complete Result Set and Final update result set do not match"
@@ -310,7 +310,7 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.Execution
// If:
// ... I attempt to read back the results
// Then:
// ... I should only get one row
//
var task = resultSet.GetSubset(0, 10);
@@ -320,7 +320,7 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.Execution
}
[Test, Sequential]
public async Task GetSubsetInvalidParameters([Values(-1,20,0)] int startRow,
[Values(0,0,-1)] int rowCount)
{
// If:
@@ -338,7 +338,7 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.Execution
}
[Test]
public async Task GetSubsetSuccess([Values(0,1)]int startRow,
[Values(3,20)] int rowCount)
{
// If:
@@ -376,7 +376,7 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.Execution
ResultSet resultSet = new ResultSet(Common.Ordinal, Common.Ordinal, fileStreamFactory);
await resultSet.ReadResultToEnd(mockReader, CancellationToken.None);
Assert.That(() => actionToPerform(resultSet), Throws.InstanceOf<Exception>(), "Attempting to read an invalid row should fail");
Assert.Catch<Exception>(() => actionToPerform(resultSet), "Attempting to read an invalid row should fail");
}
public static IEnumerable<object[]> RowInvalidParameterData
@@ -423,7 +423,7 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.Execution
[Test]
public async Task AddRowNoRows()
{
// Setup:
// ... Create a standard result set with standard data
var fileFactory = MemoryFileSystem.GetFileStreamFactory();
var mockReader = GetReader(Common.StandardTestDataSet, false, Constants.StandardQuery);
@@ -434,7 +434,7 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.Execution
var emptyReader = GetReader(new[] {new TestResultSet(5, 0)}, false, Constants.StandardQuery);
// If: I add a row with a reader that has no rows
// Then:
// ... I should get an exception
Assert.ThrowsAsync<InvalidOperationException>(() => resultSet.AddRow(emptyReader));
@@ -454,17 +454,17 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.Execution
// ... Create a mock reader that will throw on read
var throwingReader = GetReader(new[] {new TestResultSet(5, 0)}, true, Constants.StandardQuery);
Assert.ThrowsAsync<TestDbException>(() => resultSet.AddRow(throwingReader), "I add a row with a reader that throws on read. I should get an exception");
// ... The row count should not have changed
Assert.AreEqual(Common.StandardRows, resultSet.RowCount);
}
[Test]
public async Task AddRowSuccess()
{
// Setup:
// ... Create a standard result set with standard data
var fileFactory = MemoryFileSystem.GetFileStreamFactory();
var mockReader = GetReader(Common.StandardTestDataSet, false, Constants.StandardQuery);
@@ -490,7 +490,7 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.Execution
[Test]
public async Task UpdateRowNoRows()
{
// Setup:
// ... Create a standard result set with standard data
var fileFactory = MemoryFileSystem.GetFileStreamFactory();
var mockReader = GetReader(Common.StandardTestDataSet, false, Constants.StandardQuery);
@@ -501,7 +501,7 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.Execution
var emptyReader = GetReader(new[] { new TestResultSet(5, 0) }, false, Constants.StandardQuery);
// If: I add a row with a reader that has no rows
// Then:
// ... I should get an exception
Assert.ThrowsAsync<InvalidOperationException>(() => resultSet.UpdateRow(0, emptyReader));
@@ -512,7 +512,7 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.Execution
[Test]
public async Task UpdateRowSuccess()
{
// Setup:
// ... Create a standard result set with standard data
var fileFactory = MemoryFileSystem.GetFileStreamFactory();
var mockReader = GetReader(Common.StandardTestDataSet, false, Constants.StandardQuery);

View File

@@ -27,7 +27,7 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.SaveResults
// If: I attempt to save with a null set of params
// Then: I should get a null argument exception
ResultSet rs = new ResultSet(
Common.Ordinal, Common.Ordinal,
MemoryFileSystem.GetFileStreamFactory());
Assert.Throws<ArgumentNullException>(() => rs.SaveAs(
null,
@@ -41,7 +41,7 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.SaveResults
// If: I attempt to save with a null set of params
// Then: I should get a null argument exception
ResultSet rs = new ResultSet(
Common.Ordinal, Common.Ordinal,
MemoryFileSystem.GetFileStreamFactory());
Assert.Throws<ArgumentNullException>(() => rs.SaveAs(
new SaveResultsRequestParams(),
@@ -54,11 +54,11 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.SaveResults
// If: I attempt to save a result set that hasn't completed execution
// Then: I should get an invalid operation exception
ResultSet rs = new ResultSet(
Common.Ordinal, Common.Ordinal,
MemoryFileSystem.GetFileStreamFactory());
Assert.Throws<InvalidOperationException>(() => rs.SaveAs(
new SaveResultsRequestParams(),
MemoryFileSystem.GetFileStreamFactory(),
null, null));
}
@@ -78,7 +78,7 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.SaveResults
// Then: I should get an invalid operation exception
var requestParams = new SaveResultsRequestParams {FilePath = Constants.OwnerUri};
Assert.Throws<InvalidOperationException>(() => rs.SaveAs(
requestParams, GetMockFactory(GetMockWriter().Object, null),
null, null));
}
@@ -110,7 +110,7 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.SaveResults
// ... All the rows should have been written successfully
saveWriter.Verify(
w => w.WriteRow(It.IsAny<IList<DbCellValue>>(), It.IsAny<IList<DbColumnWrapper>>()),
w => w.WriteRow(It.IsAny<IList<DbCellValue>>(), It.IsAny<IReadOnlyList<DbColumnWrapper>>()),
Times.Exactly(Common.StandardRows));
}
@@ -150,21 +150,21 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.SaveResults
// ... All the rows should have been written successfully
saveWriter.Verify(
w => w.WriteRow(It.IsAny<IList<DbCellValue>>(), It.IsAny<IList<DbColumnWrapper>>()),
w => w.WriteRow(It.IsAny<IList<DbCellValue>>(), It.IsAny<IReadOnlyList<DbColumnWrapper>>()),
Times.Exactly((int) (saveParams.RowEndIndex - saveParams.RowStartIndex + 1)));
}
private static Mock<IFileStreamWriter> GetMockWriter()
{
var mockWriter = new Mock<IFileStreamWriter>();
mockWriter.Setup(w => w.WriteRow(It.IsAny<IList<DbCellValue>>(), It.IsAny<IList<DbColumnWrapper>>()));
mockWriter.Setup(w => w.WriteRow(It.IsAny<IList<DbCellValue>>(), It.IsAny<IReadOnlyList<DbColumnWrapper>>()));
return mockWriter;
}
private static IFileStreamFactory GetMockFactory(IFileStreamWriter writer, Func<string, IFileStreamReader> readerGenerator)
{
var mockFactory = new Mock<IFileStreamFactory>();
mockFactory.Setup(f => f.GetWriter(It.IsAny<string>()))
mockFactory.Setup(f => f.GetWriter(It.IsAny<string>(), It.IsAny<IReadOnlyList<DbColumnWrapper>>()))
.Returns(writer);
mockFactory.Setup(f => f.GetReader(It.IsAny<string>()))
.Returns(readerGenerator);

View File

@@ -1,4 +1,4 @@
//
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//
@@ -50,7 +50,7 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.SaveResults
[Test]
public async Task SaveResultAsCsvFailure()
{
// Given:
// ... A working query and workspace service
WorkspaceService<SqlToolsSettings> ws = Common.GetPrimedWorkspaceService(Constants.StandardQuery);
ConcurrentDictionary<string, byte[]> storage;
@@ -96,7 +96,7 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.SaveResults
[Test]
public async Task SaveResultsAsCsvSuccess()
{
// Given:
// ... A working query and workspace service
WorkspaceService<SqlToolsSettings> ws = Common.GetPrimedWorkspaceService(Constants.StandardQuery);
ConcurrentDictionary<string, byte[]> storage;
@@ -164,7 +164,7 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.SaveResults
[Test]
public async Task SaveResultAsJsonFailure()
{
// Given:
// ... A working query and workspace service
WorkspaceService<SqlToolsSettings> ws = Common.GetPrimedWorkspaceService(Constants.StandardQuery);
ConcurrentDictionary<string, byte[]> storage;
@@ -208,7 +208,7 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.SaveResults
[Test]
public async Task SaveResultsAsJsonSuccess()
{
// Given:
// ... A working query and workspace service
WorkspaceService<SqlToolsSettings> ws = Common.GetPrimedWorkspaceService(Constants.StandardQuery);
ConcurrentDictionary<string, byte[]> storage;
@@ -246,7 +246,7 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.SaveResults
}
#endregion
#region XML tests
[Test]
@@ -275,7 +275,7 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.SaveResults
[Test]
public async Task SaveResultAsXmlFailure()
{
// Given:
// ... A working query and workspace service
WorkspaceService<SqlToolsSettings> ws = Common.GetPrimedWorkspaceService(Constants.StandardQuery);
ConcurrentDictionary<string, byte[]> storage;
@@ -319,7 +319,7 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.SaveResults
[Test]
public async Task SaveResultsAsXmlSuccess()
{
// Given:
// ... A working query and workspace service
WorkspaceService<SqlToolsSettings> ws = Common.GetPrimedWorkspaceService(Constants.StandardQuery);
ConcurrentDictionary<string, byte[]> storage;
@@ -342,7 +342,7 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.SaveResults
Formatted = true
};
qes.XmlFileFactory = GetXmlStreamFactory(storage, saveParams);
var efv = new EventFlowValidator<SaveResultRequestResult>()
.AddStandardResultValidator()
.Complete();
@@ -359,9 +359,9 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.SaveResults
}
#endregion
#region Excel Tests
[Test]
public async Task SaveResultsExcelNonExistentQuery()
{
@@ -388,7 +388,7 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.SaveResults
[Test]
public async Task SaveResultAsExcelFailure()
{
// Given:
// ... A working query and workspace service
WorkspaceService<SqlToolsSettings> ws = Common.GetPrimedWorkspaceService(Constants.StandardQuery);
ConcurrentDictionary<string, byte[]> storage;
@@ -432,7 +432,7 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.SaveResults
[Test]
public async Task SaveResultsAsExcelSuccess()
{
// Given:
// ... A working query and workspace service
WorkspaceService<SqlToolsSettings> ws = Common.GetPrimedWorkspaceService(Constants.StandardQuery);
ConcurrentDictionary<string, byte[]> storage;
@@ -468,70 +468,76 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.QueryExecution.SaveResults
// ... There should not have been an error
efv.Validate();
}
#endregion
#region Private Helpers
private static IFileStreamFactory GetCsvStreamFactory(IDictionary<string, byte[]> storage, SaveResultsAsCsvRequestParams saveParams)
private static IFileStreamFactory GetCsvStreamFactory(
IDictionary<string, byte[]> storage,
SaveResultsAsCsvRequestParams saveParams)
{
Mock<IFileStreamFactory> mock = new Mock<IFileStreamFactory>();
mock.Setup(fsf => fsf.GetReader(It.IsAny<string>()))
.Returns<string>(output => new ServiceBufferFileStreamReader(new MemoryStream(storage[output]), new QueryExecutionSettings()));
mock.Setup(fsf => fsf.GetWriter(It.IsAny<string>()))
.Returns<string>(output =>
mock.Setup(fsf => fsf.GetWriter(It.IsAny<string>(), It.IsAny<IReadOnlyList<DbColumnWrapper>>()))
.Returns<string, IReadOnlyList<DbColumnWrapper>>((output, columns) =>
{
storage.Add(output, new byte[8192]);
return new SaveAsCsvFileStreamWriter(new MemoryStream(storage[output]), saveParams);
return new SaveAsCsvFileStreamWriter(new MemoryStream(storage[output]), saveParams, columns);
});
return mock.Object;
}
private static IFileStreamFactory GetJsonStreamFactory(IDictionary<string, byte[]> storage, SaveResultsAsJsonRequestParams saveParams)
private static IFileStreamFactory GetJsonStreamFactory(
IDictionary<string, byte[]> storage,
SaveResultsAsJsonRequestParams saveParams)
{
Mock<IFileStreamFactory> mock = new Mock<IFileStreamFactory>();
mock.Setup(fsf => fsf.GetReader(It.IsAny<string>()))
.Returns<string>(output => new ServiceBufferFileStreamReader(new MemoryStream(storage[output]), new QueryExecutionSettings()));
mock.Setup(fsf => fsf.GetWriter(It.IsAny<string>()))
.Returns<string>(output =>
mock.Setup(fsf => fsf.GetWriter(It.IsAny<string>(), It.IsAny<IReadOnlyList<DbColumnWrapper>>()))
.Returns<string, IReadOnlyList<DbColumnWrapper>>((output, columns) =>
{
storage.Add(output, new byte[8192]);
return new SaveAsJsonFileStreamWriter(new MemoryStream(storage[output]), saveParams);
return new SaveAsJsonFileStreamWriter(new MemoryStream(storage[output]), saveParams, columns);
});
return mock.Object;
}
private static IFileStreamFactory GetXmlStreamFactory(IDictionary<string, byte[]> storage,
private static IFileStreamFactory GetXmlStreamFactory(
IDictionary<string, byte[]> storage,
SaveResultsAsXmlRequestParams saveParams)
{
Mock<IFileStreamFactory> mock = new Mock<IFileStreamFactory>();
mock.Setup(fsf => fsf.GetReader(It.IsAny<string>()))
.Returns<string>(output => new ServiceBufferFileStreamReader(new MemoryStream(storage[output]), new QueryExecutionSettings()));
mock.Setup(fsf => fsf.GetWriter(It.IsAny<string>()))
.Returns<string>(output =>
mock.Setup(fsf => fsf.GetWriter(It.IsAny<string>(), It.IsAny<IReadOnlyList<DbColumnWrapper>>()))
.Returns<string, IReadOnlyList<DbColumnWrapper>>((output, columns) =>
{
storage.Add(output, new byte[8192]);
return new SaveAsXmlFileStreamWriter(new MemoryStream(storage[output]), saveParams);
return new SaveAsXmlFileStreamWriter(new MemoryStream(storage[output]), saveParams, columns);
});
return mock.Object;
}
private static IFileStreamFactory GetExcelStreamFactory(IDictionary<string, byte[]> storage,
private static IFileStreamFactory GetExcelStreamFactory(
IDictionary<string, byte[]> storage,
SaveResultsAsExcelRequestParams saveParams)
{
Mock<IFileStreamFactory> mock = new Mock<IFileStreamFactory>();
mock.Setup(fsf => fsf.GetReader(It.IsAny<string>()))
.Returns<string>(output => new ServiceBufferFileStreamReader(new MemoryStream(storage[output]), new QueryExecutionSettings()));
mock.Setup(fsf => fsf.GetWriter(It.IsAny<string>()))
.Returns<string>(output =>
mock.Setup(fsf => fsf.GetWriter(It.IsAny<string>(), It.IsAny<IReadOnlyList<DbColumnWrapper>>()))
.Returns<string, IReadOnlyList<DbColumnWrapper>>((output, columns) =>
{
storage.Add(output, new byte[8192]);
return new SaveAsExcelFileStreamWriter(new MemoryStream(storage[output]), saveParams);
return new SaveAsExcelFileStreamWriter(new MemoryStream(storage[output]), saveParams, columns);
});
return mock.Object;
}
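As a hedged illustration (not part of the diff) of the signature change these helpers mock: GetWriter now takes the full column list alongside the file name, and the concrete writers receive it through their constructors. A minimal usage sketch that reuses the GetCsvStreamFactory helper above; the file name and column are illustrative values:

// Hypothetical usage of the mocked factory; columns flow from the caller into the writer
var storage = new Dictionary<string, byte[]>();
IFileStreamFactory factory = GetCsvStreamFactory(storage, new SaveResultsAsCsvRequestParams());

var columns = new[] { new DbColumnWrapper(new TestDbColumn("column1")) };
using (IFileStreamWriter writer = factory.GetWriter("results.csv", columns))
{
    writer.WriteRow(new[] { new DbCellValue { DisplayValue = "item1" } }, columns);
}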

View File

@@ -1,4 +1,4 @@
//
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//
@@ -8,6 +8,7 @@ using Microsoft.SqlTools.ServiceLayer.Scripting;
using Microsoft.SqlTools.ServiceLayer.Scripting.Contracts;
using NUnit.Framework;
using Assert = NUnit.Framework.Assert;
namespace Microsoft.SqlTools.ServiceLayer.UnitTests.Scripting
{
@@ -21,7 +22,7 @@ namespace Microsoft.SqlTools.ServiceLayer.UnitTests.Scripting
{
var scriptingObject = new ScriptingObject() { Name = "quoted'Name", Schema = "quoted'Schema", Type = "Table" };
var urn = scriptingObject.ToUrn("server", "quoted'db");
Assert.That(urn.ToString, Is.EqualTo("Server[@Name='SERVER']/Database[@Name='quoted''db']/Table[@Name='quoted''Name' and @Schema = 'quoted''Schema']"), "Urn should have escaped Name attributes");
Assert.AreEqual("Server[@Name='SERVER']/Database[@Name='quoted''db']/Table[@Name='quoted''Name' and @Schema = 'quoted''Schema']", urn.ToString(), "Urn should have escaped Name attributes");
Assert.That(urn.Type, Is.EqualTo("Table"), "Urn Type");
// These assertions are more for educational purposes than for testing, since the methods are Urn methods in SFC.
Assert.That(urn.GetNameForType("Database"), Is.EqualTo("quoted'db"), "GetNameForType('Database')");