Separating Migration into its own project (#1828)

* Moving Migration service to its own project

* Adding loc files to the project

* Adding Migration to build

* Adding Migration Integration Tests

* Trying loops

* Fixing params

* Fixing indent

* Cleaning up YAML

* Getting command line arg for auto flush log

* Adding TDE service
Aasim Khan
2023-01-30 10:30:28 -08:00
committed by GitHub
parent 82171740bc
commit ab5a1e6c85
38 changed files with 6163 additions and 64 deletions

View File

@@ -1,3 +1,44 @@
parameters:
- name: platforms
type: object
default:
- name: 'osx.10.11-x64'
displayName: 'osx'
archiveName: 'osx-x64'
archiveFileFormat: 'tar.gz'
archiveType: 'tar'
- name: 'osx-arm64'
displayName: 'osx arm'
archiveName: 'osx-arm64'
archiveFileFormat: 'tar.gz'
archiveType: 'tar'
- name: 'rhel.7.2-x64'
displayName: 'rhel'
archiveName: 'rhel-x64'
archiveFileFormat: 'tar.gz'
archiveType: 'tar'
- name: 'win-x64'
displayName: 'Windows 64 bit'
archiveName: 'win-x64'
archiveFileFormat: 'zip'
archiveType: 'zip'
- name: 'win-x86'
displayName: 'Windows 32 bit'
archiveName: 'win-x86'
archiveFileFormat: 'zip'
archiveType: 'zip'
# - name: 'win10-arm64'
# displayName: 'Windows 10 ARM 64 bit'
# archiveFileFormat: 'zip'
# - name: 'win10-arm'
# displayName: 'Windows 10 ARM'
# archiveFileFormat: 'zip'
- name: projects
type: object
default:
- 'Microsoft.SqlTools.ServiceLayer'
- 'Microsoft.SqlTools.Migration'
steps:
- task: UseDotNet@2
displayName: 'Use defined .NET Core sdk'
@@ -109,7 +150,7 @@ steps:
inputs:
ConnectedServiceName: 'Code Signing'
FolderPath: '$(Build.SourcesDirectory)/artifacts/publish'
Pattern: 'Microsoft.SqlTools.ManagedBatchParser.dll,MicrosoftSqlToolsCredentials.exe,MicrosoftSqlToolsServiceLayer.exe,SqlSerializationService.exe,SqlToolsResourceProviderService.exe,Microsoft.SqlTools.Hosting.dll,Microsoft.SqlTools.ResourceProvider.Core.dll,Microsoft.SqlTools.ResourceProvider.DefaultImpl.dll,MicrosoftSqlToolsCredentials.dll,MicrosoftSqlToolsServiceLayer.dll,Newtonsoft.Json.dll,SqlSerializationService.dll,SqlToolsResourceProviderService.dll,System.Data.SqlClient.dll,System.Net.Sockets.dll'
Pattern: 'Microsoft.SqlTools.ManagedBatchParser.dll,MicrosoftSqlToolsCredentials.exe,MicrosoftSqlToolsServiceLayer.exe,SqlSerializationService.exe,SqlToolsResourceProviderService.exe,Microsoft.SqlTools.Hosting.dll,Microsoft.SqlTools.ResourceProvider.Core.dll,Microsoft.SqlTools.ResourceProvider.DefaultImpl.dll,MicrosoftSqlToolsCredentials.dll,MicrosoftSqlToolsServiceLayer.dll,Newtonsoft.Json.dll,SqlSerializationService.dll,SqlToolsResourceProviderService.dll,System.Data.SqlClient.dll,System.Net.Sockets.dll,MicrosoftSqlToolsMigration.exe,MicrosoftSqlToolsMigration.dll'
signConfigType: inlineSignParams
inlineOperation: |
[
@@ -199,57 +240,15 @@ steps:
MaxConcurrency: '50'
MaxRetryAttempts: '5'
- ${{ each project in parameters.projects }}:
- ${{ each platform in parameters.platforms }}:
- task: ArchiveFiles@1
displayName: 'Archive osx build'
displayName: 'Archive ${{ platform.displayName }} ${{ project }} build'
inputs:
rootFolder: '$(Build.SourcesDirectory)/artifacts/publish/Microsoft.SqlTools.ServiceLayer/osx.10.11-x64/net6.0'
rootFolder: '$(Build.SourcesDirectory)/artifacts/publish/${{ project }}/${{ platform.name }}/net6.0'
includeRootFolder: false
archiveType: tar
archiveFile: '$(Build.SourcesDirectory)/artifacts/package/Microsoft.SqlTools.ServiceLayer-osx-x64-net6.0.tar.gz'
- task: ArchiveFiles@1
displayName: 'Archive osx arm build'
inputs:
rootFolder: '$(Build.SourcesDirectory)/artifacts/publish/Microsoft.SqlTools.ServiceLayer/osx-arm64/net6.0'
includeRootFolder: false
archiveType: tar
archiveFile: '$(Build.SourcesDirectory)/artifacts/package/Microsoft.SqlTools.ServiceLayer-osx-arm64-net6.0.tar.gz'
- task: ArchiveFiles@1
displayName: 'Archive rhel build'
inputs:
rootFolder: '$(Build.SourcesDirectory)/artifacts/publish/Microsoft.SqlTools.ServiceLayer/rhel.7.2-x64/net6.0'
includeRootFolder: false
archiveType: tar
archiveFile: '$(Build.SourcesDirectory)/artifacts/package/Microsoft.SqlTools.ServiceLayer-rhel-x64-net6.0.tar.gz'
- task: ArchiveFiles@1
displayName: 'Archive windows 64 bit build'
inputs:
rootFolder: '$(Build.SourcesDirectory)/artifacts/publish/Microsoft.SqlTools.ServiceLayer/win-x64/net6.0'
includeRootFolder: false
archiveFile: '$(Build.SourcesDirectory)/artifacts/package/Microsoft.SqlTools.ServiceLayer-win-x64-net6.0.zip'
- task: ArchiveFiles@1
displayName: 'Archive windows 32 bit build'
inputs:
rootFolder: '$(Build.SourcesDirectory)/artifacts/publish/Microsoft.SqlTools.ServiceLayer/win-x86/net6.0'
includeRootFolder: false
archiveFile: '$(Build.SourcesDirectory)/artifacts/package/Microsoft.SqlTools.ServiceLayer-win-x86-net6.0.zip'
# - task: ArchiveFiles@1
# displayName: 'Archive windows10 arm 32 bit build'
# inputs:
# rootFolder: '$(Build.SourcesDirectory)/artifacts/publish/Microsoft.SqlTools.ServiceLayer/win10-arm/net6.0'
# includeRootFolder: false
# archiveFile: '$(Build.SourcesDirectory)/artifacts/package/Microsoft.SqlTools.ServiceLayer-win10-arm-net6.0.zip'
# - task: ArchiveFiles@1
# displayName: 'Archive windows10 arm 64 bit build'
# inputs:
# rootFolder: '$(Build.SourcesDirectory)/artifacts/publish/Microsoft.SqlTools.ServiceLayer/win10-arm64/net6.0'
# includeRootFolder: false
# archiveFile: '$(Build.SourcesDirectory)/artifacts/package/Microsoft.SqlTools.ServiceLayer-win10-arm64-net6.0.zip'
archiveType: ${{ platform.archiveType }}
archiveFile: '$(Build.SourcesDirectory)/artifacts/package/${{ project }}-${{ platform.archiveName }}-net6.0.${{ platform.archiveFileFormat }}'
- task: PublishBuildArtifacts@1
displayName: 'Publish Artifact: build archives'

View File

@@ -21,6 +21,7 @@ net6projectArray=(
"./src/Microsoft.SqlTools.ResourceProvider.Core"
"./src/Microsoft.SqlTools.ResourceProvider.DefaultImpl"
"./src/Microsoft.SqlTools.ServiceLayer"
"./src/Microsoft.SqlTools.Migration"
"./test/Microsoft.Kusto.ServiceLayer.UnitTests"
"./test/Microsoft.SqlTools.ManagedBatchParser.IntegrationTests"
"./test/Microsoft.SqlTools.ServiceLayer.IntegrationTests"

View File

@@ -56,6 +56,14 @@ public class BuildPlan
// The set of projects that we want to call dotnet pack on, which require publishing to be done first
public string[] PackagePublishedProjects { get; set; }
public string[] DotnetToolProjects { get; set; }
public Project[] Projects{ get; set; }
}
public class Project
{
public string Name { get; set; }
public string[] PackageProjects { get; set; }
public string[] TestProjects { get; set; }
}
var buildPlan = JsonConvert.DeserializeObject<BuildPlan>(
@@ -429,8 +437,29 @@ Task("OnlyPublish")
.IsDependentOn("CodeGen")
.Does(() =>
{
var packageName = buildPlan.PackageName;
foreach (var project in buildPlan.MainProjects)
PublishProject(buildPlan.PackageName, buildPlan.MainProjects);
});
/// <summary>
/// Build, publish and package artifacts.
/// Targets all RIDs specified in build.json unless restricted by RestrictToLocalRuntime.
/// No dependencies on other tasks to support quick builds.
/// </summary>
Task("PublishExternalProjects")
.IsDependentOn("Setup")
.IsDependentOn("SrGen")
.IsDependentOn("CodeGen")
.Does(() =>
{
foreach(var project in buildPlan.Projects)
{
PublishProject(project.Name, project.PackageProjects);
}
});
void PublishProject(string packageName, string[] projects)
{
foreach (var project in projects)
{
var projectFolder = System.IO.Path.Combine(sourceFolder, project);
foreach (var framework in buildPlan.Frameworks)
@@ -483,7 +512,9 @@ Task("OnlyPublish")
}
CreateRunScript(System.IO.Path.Combine(publishFolder, project, "default"), scriptFolder);
}
});
}
/// <summary>
/// Alias for OnlyPublish.
@@ -492,6 +523,7 @@ Task("OnlyPublish")
Task("AllPublish")
.IsDependentOn("Restore")
.IsDependentOn("OnlyPublish")
.IsDependentOn("PublishExternalProjects")
.Does(() =>
{
});
@@ -514,6 +546,7 @@ Task("LocalPublish")
.IsDependentOn("Restore")
.IsDependentOn("RestrictToLocalRuntime")
.IsDependentOn("OnlyPublish")
.IsDependentOn("PublishExternalProjects")
.Does(() =>
{
});

View File

@@ -33,7 +33,8 @@
"Microsoft.SqlTools.ResourceProvider",
"Microsoft.Kusto.ServiceLayer",
"Microsoft.SqlTools.ServiceLayer",
"Microsoft.SqlTools.ManagedBatchParser"
"Microsoft.SqlTools.ManagedBatchParser",
"Microsoft.SqlTools.Migration"
],
"FxBuildProjects": [
"Microsoft.SqlTools.ManagedBatchParser"
@@ -57,6 +58,18 @@
],
"DotnetToolProjects": [
"Microsoft.SqlTools.ServiceLayer",
"Microsoft.Kusto.ServiceLayer"
"Microsoft.Kusto.ServiceLayer",
"Microsoft.SqlTools.Migration"
],
"Projects": [
{
"Name": "Microsoft.SqlTools.Migration",
"PackageProjects": [
"Microsoft.SqlTools.Migration"
],
"TestProjects": [
"Microsoft.SqlTools.Migration.UnitTests"
]
}
]
}

View File

@@ -92,6 +92,10 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.SqlTools.Resource
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.SqlTools.ResourceProvider.DefaultImpl", "src\Microsoft.SqlTools.ResourceProvider.DefaultImpl\Microsoft.SqlTools.ResourceProvider.DefaultImpl.csproj", "{07296730-DAB7-4B0B-9D09-ABD9A5025D68}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.SqlTools.Migration", "src\Microsoft.SqlTools.Migration\Microsoft.SqlTools.Migration.csproj", "{22DB0C12-6848-4503-AD1C-DAD6A1D631AE}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.SqlTools.Migration.IntegrationTests", "test\Microsoft.SqlTools.Migration.IntegrationTests\Microsoft.SqlTools.Migration.IntegrationTests.csproj", "{5C7F4DAC-F794-4C21-A031-DCAAFAF3C0A9}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
@@ -213,6 +217,18 @@ Global
{07296730-DAB7-4B0B-9D09-ABD9A5025D68}.Integration|Any CPU.Build.0 = Debug|Any CPU
{07296730-DAB7-4B0B-9D09-ABD9A5025D68}.Release|Any CPU.ActiveCfg = Release|Any CPU
{07296730-DAB7-4B0B-9D09-ABD9A5025D68}.Release|Any CPU.Build.0 = Release|Any CPU
{22DB0C12-6848-4503-AD1C-DAD6A1D631AE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{22DB0C12-6848-4503-AD1C-DAD6A1D631AE}.Debug|Any CPU.Build.0 = Debug|Any CPU
{22DB0C12-6848-4503-AD1C-DAD6A1D631AE}.Integration|Any CPU.ActiveCfg = Debug|Any CPU
{22DB0C12-6848-4503-AD1C-DAD6A1D631AE}.Integration|Any CPU.Build.0 = Debug|Any CPU
{22DB0C12-6848-4503-AD1C-DAD6A1D631AE}.Release|Any CPU.ActiveCfg = Release|Any CPU
{22DB0C12-6848-4503-AD1C-DAD6A1D631AE}.Release|Any CPU.Build.0 = Release|Any CPU
{5C7F4DAC-F794-4C21-A031-DCAAFAF3C0A9}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{5C7F4DAC-F794-4C21-A031-DCAAFAF3C0A9}.Debug|Any CPU.Build.0 = Debug|Any CPU
{5C7F4DAC-F794-4C21-A031-DCAAFAF3C0A9}.Integration|Any CPU.ActiveCfg = Debug|Any CPU
{5C7F4DAC-F794-4C21-A031-DCAAFAF3C0A9}.Integration|Any CPU.Build.0 = Debug|Any CPU
{5C7F4DAC-F794-4C21-A031-DCAAFAF3C0A9}.Release|Any CPU.ActiveCfg = Release|Any CPU
{5C7F4DAC-F794-4C21-A031-DCAAFAF3C0A9}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
@@ -238,6 +254,8 @@ Global
{AFCDED82-B659-4BE1-86ED-0F4F8BC661AE} = {AB9CA2B8-6F70-431C-8A1D-67479D8A7BE4}
{212F6FE0-4E73-438B-B4BE-7F3A3B3D7378} = {2BBD7364-054F-4693-97CD-1C395E3E84A9}
{07296730-DAB7-4B0B-9D09-ABD9A5025D68} = {2BBD7364-054F-4693-97CD-1C395E3E84A9}
{22DB0C12-6848-4503-AD1C-DAD6A1D631AE} = {2BBD7364-054F-4693-97CD-1C395E3E84A9}
{5C7F4DAC-F794-4C21-A031-DCAAFAF3C0A9} = {AB9CA2B8-6F70-431C-8A1D-67479D8A7BE4}
EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution
SolutionGuid = {B31CDF4B-2851-45E5-8C5F-BE97125D9DD8}

View File

@@ -50,6 +50,12 @@
"LclFile": "src\\Microsoft.Kusto.ServiceLayer\\Localization\\LCL\\{Lang}\\sr.xlf.lcl",
"CopyOption": "LangIDOnName",
"OutputPath": "src\\Microsoft.Kusto.ServiceLayer\\Localization\\transXliff"
},
{
"SourceFile": "src\\Microsoft.SqlTools.Migration\\Localization\\sr.xlf",
"LclFile": "src\\Microsoft.SqlTools.Migration\\Localization\\LCL\\{Lang}\\sr.xlf.lcl",
"CopyOption": "LangIDOnName",
"OutputPath": "src\\Microsoft.SqlTools.Migration\\Localization\\transXliff"
}
]
}

View File

@@ -206,8 +206,11 @@ namespace Microsoft.SqlTools.Extensibility
{
service.InitializeService(this);
this.initializedServices.Add(service);
if (this.options.InitializeServiceCallback != null)
{
this.options.InitializeServiceCallback(this, service);
}
}
/// <summary>
/// Registers and initializes the given services

View File

@@ -35,7 +35,7 @@ namespace Microsoft.SqlTools.Utility
private static string logFileFullPath;
internal static TraceSource TraceSource { get; set; }
internal static string LogFileFullPath
public static string LogFileFullPath
{
get => logFileFullPath;
private set

View File

@@ -0,0 +1,44 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//
using Microsoft.SqlTools.Hosting.Protocol.Contracts;
namespace Microsoft.SqlTools.Migration.Contracts
{
/// <summary>
/// Parameters for the certificate migration progress event
/// </summary>
public class CertificateMigrationProgressParams
{
/// <summary>
/// Database name
/// </summary>
public string Name { get; set; }
/// <summary>
/// Message related to the success status. true should describe a positive outcome. false should have an error.
/// </summary>
public string Message { get; set; }
/// <summary>
/// Result of migration
/// </summary>
public bool Success { get; set; }
}
/// <summary>
/// Certificate migration progress event. This event should be registered at the client.
/// </summary>
public class CertificateMigrationProgressEvent
{
/// <summary>
/// Name and parameters for the event definition.
/// </summary>
public static readonly
EventType<CertificateMigrationProgressParams> Type =
EventType<CertificateMigrationProgressParams>.Create("migration/tdemigrationprogress");
}
}
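A handler driving the TDE certificate migration would raise one of these events per database as it finishes. The sketch below is illustrative only and not part of this diff: it assumes the hosting protocol's RequestContext<T> exposes SendEvent (as other SqlTools services use), and the class and helper names are hypothetical.

// Illustrative sketch, not from this commit: raising per-database progress
// while certificates are migrated. Assumes RequestContext<T>.SendEvent exists
// in Microsoft.SqlTools.Hosting.Protocol; the names here are hypothetical.
using System.Threading.Tasks;
using Microsoft.SqlTools.Hosting.Protocol;
using Microsoft.SqlTools.Migration.Contracts;

internal static class CertificateMigrationProgressSketch
{
    internal static Task ReportDatabaseProgress(
        RequestContext<CertificateMigrationResult> requestContext,
        string databaseName,
        bool success,
        string message)
    {
        return requestContext.SendEvent(
            CertificateMigrationProgressEvent.Type,
            new CertificateMigrationProgressParams
            {
                Name = databaseName,
                Success = success,
                Message = message
            });
    }
}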

View File

@@ -0,0 +1,111 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//
using System.Collections.Generic;
using Microsoft.SqlTools.Hosting.Protocol.Contracts;
namespace Microsoft.SqlTools.Migration.Contracts
{
/// <summary>
/// Parameters for the certificate migration operation
/// </summary>
public class CertificateMigrationParams
{
/// <summary>
/// List of databases whose certificates will be migrated
/// </summary>
public List<string> EncryptedDatabases { get; set; } = new List<string>();
/// <summary>
/// Source connection string to the server
/// </summary>
public string SourceSqlConnectionString { get; set; }
/// <summary>
/// Target subscription id
/// </summary>
public string TargetSubscriptionId { get; set; }
/// <summary>
/// Target resource group name
/// </summary>
public string TargetResourceGroupName { get; set; }
/// <summary>
/// Target managed instance name
/// </summary>
public string TargetManagedInstanceName { get; set; }
/// <summary>
/// Place where certificates will be exported
/// </summary>
public string NetworkSharePath { get; set; }
/// <summary>
/// Domain for the user credentials able to read from the shared path
/// </summary>
public string NetworkShareDomain { get; set; }
/// <summary>
/// Username for the credentials able to read from the shared path
/// </summary>
public string NetworkShareUserName { get; set; }
/// <summary>
/// Password for the credentials able to read from the shared path
/// </summary>
public string NetworkSharePassword { get; set; }
/// <summary>
/// Access token for the ARM client
/// </summary>
public string AccessToken { get; set; }
}
/// <summary>
/// Result for the certificate migration operation
/// </summary>
public class CertificateMigrationResult
{
/// <summary>
/// List of the status of each certificate migration result attempted.
/// </summary>
public List<CertificateMigrationEntryResult> MigrationStatuses { get; set; } = new List<CertificateMigrationEntryResult>();
}
/// <summary>
/// Result for an individual database certificate migration
/// </summary>
public class CertificateMigrationEntryResult
{
/// <summary>
/// The name of the database this result represents
/// </summary>
public string DbName { get; set; }
/// <summary>
/// The result of the migration.
/// </summary>
public bool Success { get; set; }
/// <summary>
/// Description of the success status or the error message encountered when the migration was not successful
/// </summary>
public string Message { get; set; }
}
/// <summary>
/// Certificate migration request definition
/// </summary>
public class CertificateMigrationRequest
{
/// <summary>
/// Name, parameter, and return type for the certificate migration operation
/// </summary>
public static readonly
RequestType<CertificateMigrationParams, CertificateMigrationResult> Type =
RequestType<CertificateMigrationParams, CertificateMigrationResult>.Create("migration/tdemigration");
}
}
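To see how this contract gets wired up, a service would typically bind the request type to a handler during initialization. This is a minimal sketch, not taken from the diff, under the assumption that the hosting layer exposes SetRequestHandler on its protocol endpoint (as other SqlTools services do); the class and handler names are hypothetical.

// Illustrative sketch, not from this commit: binding migration/tdemigration
// to a handler. Assumes IProtocolEndpoint.SetRequestHandler and
// RequestContext<T>.SendResult are available; names are hypothetical.
using System.Threading.Tasks;
using Microsoft.SqlTools.Hosting.Protocol;
using Microsoft.SqlTools.Migration.Contracts;

internal class CertificateMigrationHandlerSketch
{
    public void InitializeService(IProtocolEndpoint serviceHost)
    {
        serviceHost.SetRequestHandler(CertificateMigrationRequest.Type, HandleCertificateMigrationRequest);
    }

    internal async Task HandleCertificateMigrationRequest(
        CertificateMigrationParams parameters,
        RequestContext<CertificateMigrationResult> requestContext)
    {
        var result = new CertificateMigrationResult();
        foreach (string database in parameters.EncryptedDatabases)
        {
            // Migrate the certificate for this database, then record the outcome.
            result.MigrationStatuses.Add(new CertificateMigrationEntryResult
            {
                DbName = database,
                Success = true,
                Message = "Certificate migrated"
            });
        }
        await requestContext.SendResult(result);
    }
}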

View File

@@ -0,0 +1,181 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//
using Microsoft.SqlServer.Migration.SkuRecommendation.Contracts.Models;
using Microsoft.SqlServer.Migration.SkuRecommendation.Models.Sql;
using Microsoft.SqlTools.Hosting.Protocol.Contracts;
using System.Collections.Generic;
namespace Microsoft.SqlTools.Migration.Contracts
{
public class GetSkuRecommendationsParams
{
/// <summary>
/// Folder from which collected performance data will be read
/// </summary>
public string DataFolder { get; set; }
/// <summary>
/// Interval at which collected performance data was originally queried, in seconds
/// </summary>
public int PerfQueryIntervalInSec { get; set; }
/// <summary>
/// List of target platforms to consider when generating recommendations
/// </summary>
public List<string> TargetPlatforms { get; set; }
/// <summary>
/// Name of the SQL instance to generate recommendations for
/// </summary>
public string TargetSqlInstance { get; set; }
/// <summary>
/// Target percentile to use when performing perf data aggregation
/// </summary>
public int TargetPercentile { get; set; }
/// <summary>
/// Scaling ("comfort") factor when evalulating performance requirements
/// </summary>
public int ScalingFactor { get; set; }
/// <summary>
/// Start time of collected data points to consider
///
/// TO-DO: do we really need this? it's pretty safe to assume that most users would want us to evaluate all the collected data and not just part of it
/// </summary>
public string StartTime { get; set; }
/// <summary>
/// End time of collected data points to consider
///
/// TO-DO: do we really need this? it's pretty safe to assume that most users would want us to evaluate all the collected data and not just part of it
/// </summary>
public string EndTime { get; set; }
/// <summary>
/// Whether or not to consider preview SKUs when generating SKU recommendations
/// </summary>
public bool IncludePreviewSkus { get; set; }
/// <summary>
/// List of databases to consider when generating recommendations
/// </summary>
public List<string> DatabaseAllowList { get; set; }
}
public class GetSkuRecommendationsResult
{
/// <summary>
/// List of SQL DB recommendation results, if applicable
/// </summary>
public List<SkuRecommendationResult> SqlDbRecommendationResults { get; set; }
/// <summary>
/// How long the SQL DB recommendations took to generate, in milliseconds. Equal to -1 if SQL DB recommendations are not applicable.
/// </summary>
public long SqlDbRecommendationDurationInMs { get; set; }
/// <summary>
/// List of SQL MI recommendation results, if applicable
/// </summary>
public List<SkuRecommendationResult> SqlMiRecommendationResults { get; set; }
/// <summary>
/// How long the SQL MI recommendations took to generate, in milliseconds. Equal to -1 if SQL MI recommendations are not applicable.
/// </summary>
public long SqlMiRecommendationDurationInMs { get; set; }
/// <summary>
/// List of SQL VM recommendation results, if applicable
/// </summary>
public List<SkuRecommendationResult> SqlVmRecommendationResults { get; set; }
/// <summary>
/// How long the SQL VM recommendations took to generate, in milliseconds. Equal to -1 if SQL VM recommendations are not applicable.
/// </summary>
public long SqlVmRecommendationDurationInMs { get; set; }
/// <summary>
/// List of SQL DB recommendation results generated by the elastic model, if applicable
/// </summary>
public List<SkuRecommendationResult> ElasticSqlDbRecommendationResults { get; set; }
/// <summary>
/// How long the SQL DB recommendations took to generate using the elastic model, in milliseconds. Equal to -1 if SQL DB elastic recommendations are not applicable.
/// </summary>
public long ElasticSqlDbRecommendationDurationInMs { get; set; }
/// <summary>
/// List of SQL MI recommendation results generated by the elastic model, if applicable
/// </summary>
public List<SkuRecommendationResult> ElasticSqlMiRecommendationResults { get; set; }
/// <summary>
/// How long the SQL MI recommendations took to generate using the elastic model, in milliseconds. Equal to -1 if SQL MI elastic recommendations are not applicable.
/// </summary>
public long ElasticSqlMiRecommendationDurationInMs { get; set; }
/// <summary>
/// List of SQL VM recommendation results generated by the elastic model, if applicable
/// </summary>
public List<SkuRecommendationResult> ElasticSqlVmRecommendationResults { get; set; }
/// <summary>
/// How long the SQL VM recommendations took to generate using the elastic model, in milliseconds. Equal to -1 if SQL VM elastic recommendations are not applicable.
/// </summary>
public long ElasticSqlVmRecommendationDurationInMs { get; set; }
/// <summary>
/// SQL instance requirements, representing an aggregated view of the performance requirements of the source instance
/// </summary>
public SqlInstanceRequirements InstanceRequirements { get; set; }
/// <summary>
/// File paths where the recommendation reports were saved
/// </summary>
public List<string> SkuRecommendationReportPaths { get; set; }
/// <summary>
/// File paths where the recommendation reports generated by the elastic model were saved
/// </summary>
public List<string> ElasticSkuRecommendationReportPaths { get; set; }
}
// Helper class containing recommendation results, durations, and report paths, which is recommendation model-agnostic
internal class RecommendationResultSet
{
internal List<SkuRecommendationResult> sqlDbResults;
internal List<SkuRecommendationResult> sqlMiResults;
internal List<SkuRecommendationResult> sqlVmResults;
internal long sqlDbDurationInMs;
internal long sqlMiDurationInMs;
internal long sqlVmDurationInMs;
internal string sqlDbReportPath;
internal string sqlMiReportPath;
internal string sqlVmReportPath;
// Create a new empty RecommendationResultSet
internal RecommendationResultSet()
{
this.sqlDbResults = new List<SkuRecommendationResult>();
this.sqlMiResults = new List<SkuRecommendationResult>();
this.sqlVmResults = new List<SkuRecommendationResult>();
this.sqlDbDurationInMs = -1;
this.sqlMiDurationInMs = -1;
this.sqlVmDurationInMs = -1;
this.sqlDbReportPath = "";
this.sqlMiReportPath = "";
this.sqlVmReportPath = "";
}
}
public class GetSkuRecommendationsRequest
{
public static readonly
RequestType<GetSkuRecommendationsParams, GetSkuRecommendationsResult> Type =
RequestType<GetSkuRecommendationsParams, GetSkuRecommendationsResult>.Create("migration/getskurecommendations");
}
}
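To make the request shape concrete, here is a caller-side sketch of the parameter object, not part of the diff; every value is a placeholder, including the target platform strings, which are illustrative rather than a documented list.

// Illustrative sketch, not from this commit: example payload for
// migration/getskurecommendations. All values are placeholders.
using System.Collections.Generic;
using Microsoft.SqlTools.Migration.Contracts;

var skuParams = new GetSkuRecommendationsParams
{
    DataFolder = @"C:\temp\perfdata",    // folder written by perf data collection
    PerfQueryIntervalInSec = 30,         // should match the interval used during collection
    TargetPlatforms = new List<string> { "AzureSqlManagedInstance", "AzureSqlVirtualMachine" },
    TargetSqlInstance = @"MYSERVER\MYINSTANCE",
    TargetPercentile = 95,
    ScalingFactor = 100,
    IncludePreviewSkus = false,
    DatabaseAllowList = new List<string> { "AdventureWorks" }
};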

View File

@@ -0,0 +1,17 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//
namespace Microsoft.SqlTools.Migration.Contracts
{
/// <summary>
/// Describes an item returned by SQL Assessment RPC methods
/// </summary>
public class ImpactedObjectInfo
{
public string Name { get; set; }
public string ImpactDetail { get; set; }
public string ObjectType { get; set; }
}
}

View File

@@ -0,0 +1,90 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//
namespace Microsoft.SqlTools.Migration.Contracts
{
/// <summary>
/// Describes an item returned by SQL Assessment RPC methods
/// </summary>
public class MigrationAssessmentInfo
{
/// <summary>
/// Gets or sets assessment ruleset version.
/// </summary>
public string RulesetVersion { get; set; }
/// <summary>
/// Gets or sets assessment ruleset name
/// </summary>
public string RulesetName { get; set; }
/// <summary>
/// Gets or sets assessment ruleset name
/// </summary>
public string RuleId { get; set; }
/// <summary>
/// Gets or sets the assessed object's name.
/// </summary>
public string TargetType { get; set; }
/// <summary>
/// Gets or sets the database name.
/// </summary>
public string DatabaseName { get; set; }
/// <summary>
/// Gets or sets the server name.
/// </summary>
public string ServerName { get; set; }
/// <summary>
/// Gets or sets check's ID.
/// </summary>
public string CheckId { get; set; }
/// <summary>
/// Gets or sets tags assigned to this item.
/// </summary>
public string[] Tags { get; set; }
/// <summary>
/// Gets or sets a display name for this item.
/// </summary>
public string DisplayName { get; set; }
/// <summary>
/// Gets or sets a brief description of the item's purpose.
/// </summary>
public string Description { get; set; }
/// <summary>
/// Gets or sets a <see cref="string"/> containing
/// a link to a page providing a detailed explanation
/// of the best practice.
/// </summary>
public string HelpLink { get; set; }
/// <summary>
/// Gets or sets a <see cref="string"/> indicating
/// the severity level assigned to this item.
/// Values are: "Information", "Warning", "Critical".
/// </summary>
public string Level { get; set; }
public string Message { get; set; }
public string AppliesToMigrationTargetPlatform { get; set; }
public string IssueCategory { get; set; }
public ImpactedObjectInfo[] ImpactedObjects { get; set; }
/// <summary>
/// This flag is set if the assessment result is a blocker for migration to Target Platform.
/// </summary>
public bool DatabaseRestoreFails { get; set; }
}
}

View File

@@ -0,0 +1,66 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//
using Microsoft.SqlTools.Hosting.Protocol.Contracts;
using Microsoft.SqlServer.Migration.Assessment.Common.Contracts.Models;
namespace Microsoft.SqlTools.Migration.Contracts
{
public class MigrationAssessmentsParams
{
/// <summary>
/// Connection string to the server being assessed
/// </summary>
public string ConnectionString { get; set; }
/// <summary>
/// List of databases to assess
/// </summary>
public string[] Databases { get; set; }
/// <summary>
/// Folder path to XEvents files to be assessed, if applicable. Empty string to disable XEvents assessment.
/// </summary>
public string XEventsFilesFolderPath { get; set; }
}
public class MigrationAssessmentResult
{
/// <summary>
/// Errors that happen while running the assessment
/// </summary>
public ErrorModel[] Errors { get; set; }
/// <summary>
/// Result of the assessment
/// </summary>
public ServerAssessmentProperties AssessmentResult { get; set; }
/// <summary>
/// Start time of the assessment
/// </summary>
public string StartTime { get; set; }
/// <summary>
/// End time of the assessment
/// </summary>
public string EndedTime { get; set; }
/// <summary>
/// Contains the raw assessment response
/// </summary>
public ISqlMigrationAssessmentModel RawAssessmentResult { get; set; }
/// <summary>
/// File path where the assessment report was saved
/// </summary>
public string AssessmentReportPath { get; set; }
}
/// <summary>
/// Retrieves migration assessment results for the server and databases described in MigrationAssessmentsParams
/// </summary>
public class MigrationAssessmentsRequest
{
public static readonly
RequestType<MigrationAssessmentsParams, MigrationAssessmentResult> Type =
RequestType<MigrationAssessmentsParams, MigrationAssessmentResult>.Create("migration/getassessments");
}
}
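For a concrete picture of the assessment payload, the sketch below (not from the diff) fills in the parameters the way a caller might; the connection string and database names are placeholders, and an empty XEventsFilesFolderPath disables XEvents assessment as the contract states.

// Illustrative sketch, not from this commit: example payload for
// migration/getassessments. All values are placeholders.
using Microsoft.SqlTools.Migration.Contracts;

var assessmentParams = new MigrationAssessmentsParams
{
    ConnectionString = "Server=myserver;Database=master;Integrated Security=true;",
    Databases = new[] { "AdventureWorks", "WideWorldImporters" },
    XEventsFilesFolderPath = string.Empty   // empty string disables XEvents assessment
};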

View File

@@ -0,0 +1,133 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//
using Microsoft.SqlServer.Migration.Assessment.Common.Contracts.TargetAssessment.Models;
namespace Microsoft.SqlTools.Migration.Contracts
{
public class ServerAssessmentProperties
{
/// <summary>
/// Name of the server
/// </summary>
public string Name { get; set; }
/// <summary>
/// CPU core count of the server host
/// </summary>
public long CpuCoreCount { get; set; }
/// <summary>
/// Server host physical memory size
/// </summary>
public double PhysicalServerMemory { get; set; }
/// <summary>
/// Host operating system of the SQL server
/// </summary>
public string ServerHostPlatform { get; set; }
/// <summary>
/// Version of the SQL server
/// </summary>
public string ServerVersion { get; set; }
/// <summary>
/// SQL server engine edition
/// </summary>
public string ServerEngineEdition { get; set; }
/// <summary>
/// SQL server edition
/// </summary>
public string ServerEdition { get; set; }
/// <summary>
/// Flag indicating whether the SQL Server instance is part of a failover cluster
/// </summary>
public bool IsClustered { get; set; }
/// <summary>
/// Total number of user databases assessed
/// </summary>
public long NumberOfUserDatabases { get; set; }
/// <summary>
/// Returns the assessment status
/// </summary>
public int SqlAssessmentStatus { get; set; }
/// <summary>
/// Count of databases assessed
/// </summary>
public long AssessedDatabaseCount { get; set; }
/// <summary>
/// Assessed server readiness for the SQL Managed Instance target
/// </summary>
public IServerTargetReadiness SQLManagedInstanceTargetReadiness { get; set; }
/// <summary>
/// Server assessment results
/// </summary>
public MigrationAssessmentInfo[] Items { get; set; }
/// <summary>
/// Server assessment errors
/// </summary>
public ErrorModel[] Errors { get; set; }
/// <summary>
/// List of databases that are assessed
/// </summary>
public DatabaseAssessmentProperties[] Databases { get; set; }
}
public class DatabaseAssessmentProperties
{
/// <summary>
/// Name of the database
/// </summary>
public string Name { get; set; }
/// <summary>
/// Compatibility level of the database
/// </summary>
public string CompatibilityLevel { get; set; }
/// <summary>
/// Size of the database
/// </summary>
public double DatabaseSize { get; set; }
/// <summary>
/// Flag that indicates if the database is replicated
/// </summary>
public bool IsReplicationEnabled { get; set; }
/// <summary>
/// Time taken for assessing the database
/// </summary>
public double AssessmentTimeInMilliseconds { get; set; }
/// <summary>
/// Database Assessment Results
/// </summary>
public MigrationAssessmentInfo[] Items { get; set; }
/// <summary>
/// Database assessment errors
/// </summary>
public ErrorModel[] Errors { get; set; }
/// <summary>
/// Flags that indicate if the database is ready for migration
/// </summary>
public IDatabaseTargetReadiness SQLManagedInstanceTargetReadiness { get; set; }
}
public class ErrorModel
{
/// <summary>
/// Id of the assessment error
/// </summary>
public string ErrorId { get; set; }
/// <summary>
/// Error message
/// </summary>
public string Message { get; set; }
/// <summary>
/// Summary of the Error
/// </summary>
public string ErrorSummary { get; set; }
/// <summary>
/// Possible causes for the error
/// </summary>
public string PossibleCauses { get; set; }
/// <summary>
/// Possible mitigation for the error
/// </summary>
public string Guidance { get; set; }
}
}

View File

@@ -0,0 +1,112 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//
using Microsoft.SqlTools.Hosting.Protocol.Contracts;
using System;
using System.Collections.Generic;
namespace Microsoft.SqlTools.Migration.Contracts
{
public class StartPerfDataCollectionParams
{
/// <summary>
/// Connection string for the database connection
/// </summary>
public string ConnectionString { get; set; }
/// <summary>
/// Folder to which collected performance data will be written
/// </summary>
public string DataFolder { get; set; }
/// <summary>
/// Interval at which performance data will be collected, in seconds
/// </summary>
public int PerfQueryIntervalInSec { get; set; }
/// <summary>
/// Interval at which static (common) data will be collected, in seconds
/// </summary>
public int StaticQueryIntervalInSec { get; set; }
/// <summary>
/// Number of iterations of performance data collection to run before aggregating and saving to disk
/// </summary>
public int NumberOfIterations { get; set; }
}
public class StopPerfDataCollectionParams
{
// TO-DO: currently stopping data collection doesn't require any parameters
}
public class RefreshPerfDataCollectionParams
{
/// <summary>
/// The last time data collection status was refreshed
/// </summary>
public DateTime LastRefreshedTime { get; set; }
}
public class StartPerfDataCollectionResult
{
/// <summary>
/// The time data collection started
/// </summary>
public DateTime DateTimeStarted { get; set; }
}
public class StopPerfDataCollectionResult
{
/// <summary>
/// The time data collection stopped
/// </summary>
public DateTime DateTimeStopped { get; set; }
}
public class RefreshPerfDataCollectionResult
{
/// <summary>
/// List of status messages captured during data collection
/// </summary>
public List<string> Messages { get; set; }
/// <summary>
/// List of error messages captured during data collection
/// </summary>
public List<string> Errors { get; set; }
/// <summary>
/// The last time data collection status was refreshed
/// </summary>
public DateTime RefreshTime { get; set; }
/// <summary>
/// Whether or not data collection is currently running
/// </summary>
public bool IsCollecting { get; set; }
}
public class StartPerfDataCollectionRequest
{
public static readonly
RequestType<StartPerfDataCollectionParams, StartPerfDataCollectionResult> Type =
RequestType<StartPerfDataCollectionParams, StartPerfDataCollectionResult>.Create("migration/startperfdatacollection");
}
public class StopPerfDataCollectionRequest
{
public static readonly
RequestType<StopPerfDataCollectionParams, StopPerfDataCollectionResult> Type =
RequestType<StopPerfDataCollectionParams, StopPerfDataCollectionResult>.Create("migration/stopperfdatacollection");
}
public class RefreshPerfDataCollectionRequest
{
public static readonly
RequestType<RefreshPerfDataCollectionParams, RefreshPerfDataCollectionResult> Type =
RequestType<RefreshPerfDataCollectionParams, RefreshPerfDataCollectionResult>.Create("migration/refreshperfdatacollection");
}
}
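Taken together, these three requests describe a simple caller-side lifecycle: start collection against a connection, poll its status with refresh, then stop it. The sketch below, not part of the diff, shows only the parameter objects for each stage; the connection string, folder, and intervals are placeholders.

// Illustrative sketch, not from this commit: the caller-side lifecycle of
// performance data collection. All concrete values are placeholders.
using System;
using Microsoft.SqlTools.Migration.Contracts;

var start = new StartPerfDataCollectionParams
{
    ConnectionString = "Server=myserver;Database=master;Integrated Security=true;",
    DataFolder = @"C:\temp\perfdata",
    PerfQueryIntervalInSec = 30,      // raw perf counters every 30 seconds
    StaticQueryIntervalInSec = 3600,  // static/common data once an hour
    NumberOfIterations = 20           // aggregate and persist after 20 iterations
};

// Poll periodically; messages and errors accumulated since LastRefreshedTime
// come back in RefreshPerfDataCollectionResult along with IsCollecting.
var refresh = new RefreshPerfDataCollectionParams { LastRefreshedTime = DateTime.UtcNow };

// Stopping currently takes no arguments (see the TO-DO above).
var stop = new StopPerfDataCollectionParams();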

View File

@@ -0,0 +1,135 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//
using Microsoft.SqlServer.DataCollection.Common.Contracts.OperationsInfrastructure;
using Microsoft.SqlTools.Hosting.Protocol.Contracts;
using System.Collections.Generic;
namespace Microsoft.SqlTools.Migration.Contracts
{
/// <summary>
/// Represents the steps in login migration.
/// </summary>
public enum LoginMigrationStep
{
/// <summary>
/// Run pre-migration validations
/// </summary>
StartValidations,
/// <summary>
/// Step to hash passwords and migrate logins
/// </summary>
MigrateLogins,
/// <summary>
/// Step to establish user-to-login mappings from source to target
/// </summary>
EstablishUserMapping,
/// <summary>
/// Step to migrate server roles
/// </summary>
MigrateServerRoles,
/// <summary>
/// Step to establish server role mappings
/// </summary>
EstablishServerRoleMapping,
/// <summary>
/// Step to map all the grant/deny permissions for logins
/// </summary>
SetLoginPermissions,
/// <summary>
/// Step to map all server roles grant/deny permissions
/// </summary>
SetServerRolePermissions
}
public class StartLoginMigrationParams
{
/// <summary>
/// Connection string to connect to source
/// </summary>
public string SourceConnectionString { get; set; }
/// <summary>
/// Connection string to connect to target
/// </summary>
public string TargetConnectionString { get; set; }
/// <summary>
/// List of logins to migrate
/// </summary>
public List<string> LoginList { get; set; }
/// <summary>
/// Azure Active Directory domain name (required for Windows Auth)
/// </summary>
public string AADDomainName { get; set; }
}
public class LoginMigrationResult
{
/// <summary>
/// Exceptions encountered during this login migration step, if any
/// </summary>
public IDictionary<string, IEnumerable<ReportableException>> ExceptionMap { get; set; }
/// <summary>
/// The login migration step that just completed
/// </summary>
public LoginMigrationStep CompletedStep { get; set; }
/// <summary>
/// How long this step took
/// </summary>
public string ElapsedTime { get; set; }
}
public class StartLoginMigrationRequest
{
public static readonly
RequestType<StartLoginMigrationParams, LoginMigrationResult> Type =
RequestType<StartLoginMigrationParams, LoginMigrationResult>.Create("migration/startloginmigration");
}
public class ValidateLoginMigrationRequest
{
public static readonly
RequestType<StartLoginMigrationParams, LoginMigrationResult> Type =
RequestType<StartLoginMigrationParams, LoginMigrationResult>.Create("migration/validateloginmigration");
}
public class MigrateLoginsRequest
{
public static readonly
RequestType<StartLoginMigrationParams, LoginMigrationResult> Type =
RequestType<StartLoginMigrationParams, LoginMigrationResult>.Create("migration/migratelogins");
}
public class EstablishUserMappingRequest
{
public static readonly
RequestType<StartLoginMigrationParams, LoginMigrationResult> Type =
RequestType<StartLoginMigrationParams, LoginMigrationResult>.Create("migration/establishusermapping");
}
public class MigrateServerRolesAndSetPermissionsRequest
{
public static readonly
RequestType<StartLoginMigrationParams, LoginMigrationResult> Type =
RequestType<StartLoginMigrationParams, LoginMigrationResult>.Create("migration/migrateserverrolesandsetpermissions");
}
public class LoginMigrationNotification
{
public static readonly
EventType<LoginMigrationResult> Type =
EventType<LoginMigrationResult>.Create("migration/loginmigrationnotification");
}
}
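All of the login-migration requests above share StartLoginMigrationParams and LoginMigrationResult, with intermediate progress flowing through LoginMigrationNotification. The sketch below, not from the diff, shows how a handler might raise that notification; it assumes RequestContext<T>.SendEvent from the hosting protocol, and the helper name is hypothetical.

// Illustrative sketch, not from this commit: notifying the client that a
// login migration step completed. Assumes RequestContext<T>.SendEvent exists.
using System.Threading.Tasks;
using Microsoft.SqlTools.Hosting.Protocol;
using Microsoft.SqlTools.Migration.Contracts;

internal static class LoginMigrationProgressSketch
{
    internal static Task NotifyStepCompleted(
        RequestContext<LoginMigrationResult> requestContext,
        LoginMigrationStep completedStep,
        string elapsedTime)
    {
        return requestContext.SendEvent(
            LoginMigrationNotification.Type,
            new LoginMigrationResult
            {
                CompletedStep = completedStep,
                ElapsedTime = elapsedTime
            });
    }
}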

View File

@@ -0,0 +1,21 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//
using Microsoft.SqlTools.Hosting.Protocol.Contracts;
namespace Microsoft.SqlTools.Migration.Contracts
{
public class ValidateNetworkFileShareRequestParams
{
public string Path { get; set; }
public string Username { get; set; }
public string Password { get; set; }
}
public class ValidateNetworkFileShareRequest
{
public static readonly RequestType<ValidateNetworkFileShareRequestParams, bool> Type = RequestType<ValidateNetworkFileShareRequestParams, bool>.Create("migration/validateNetworkFileShare");
}
}

View File

@@ -0,0 +1,20 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//
using Microsoft.SqlTools.Hosting.Protocol.Contracts;
namespace Microsoft.SqlTools.Migration.Contracts
{
public class ValidateWindowsAccountRequestParams
{
public string Username { get; set; }
public string Password { get; set; }
}
public class ValidateWindowsAccountRequest
{
public static readonly RequestType<ValidateWindowsAccountRequestParams, bool> Type = RequestType<ValidateWindowsAccountRequestParams, bool>.Create("migration/validateWindowsAccount");
}
}

View File

@@ -0,0 +1,62 @@
// WARNING:
// This file was generated by the Microsoft DataWarehouse String Resource Tool 7.0.0.0
// from information in sr.strings
// DO NOT MODIFY THIS FILE'S CONTENTS, THEY WILL BE OVERWRITTEN
//
namespace Microsoft.SqlTools.Migration
{
using System;
using System.Reflection;
using System.Resources;
using System.Globalization;
[System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
internal class SR
{
protected SR()
{ }
public static CultureInfo Culture
{
get
{
return Keys.Culture;
}
set
{
Keys.Culture = value;
}
}
[System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
public class Keys
{
static ResourceManager resourceManager = new ResourceManager("Microsoft.SqlTools.Migration.Localization.SR", typeof(SR).GetTypeInfo().Assembly);
static CultureInfo _culture = null;
private Keys()
{ }
public static CultureInfo Culture
{
get
{
return _culture;
}
set
{
_culture = value;
}
}
public static string GetString(string key)
{
return resourceManager.GetString(key, _culture);
}
}
}
}
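The generated SR class is the accessor srgen produces from sr.strings: once strings are added under the [strings] section, code inside the Migration assembly can read them through Keys.GetString. The sketch below is not from the diff; the resource key is hypothetical and only shows the call shape (the class is internal, so callers live in the same assembly).

// Illustrative sketch, not from this commit: reading a localized string through
// the generated accessor from inside the Migration assembly.
// "TdeMigrationStarted" is a hypothetical resource key.
using System.Globalization;

namespace Microsoft.SqlTools.Migration
{
    internal static class LocalizationUsageSketch
    {
        internal static string Example()
        {
            SR.Culture = CultureInfo.GetCultureInfo("es");
            return SR.Keys.GetString("TdeMigrationStarted");
        }
    }
}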

View File

@@ -0,0 +1,120 @@
<?xml version="1.0" encoding="utf-8"?>
<root>
<!--
Microsoft ResX Schema
Version 2.0
The primary goals of this format is to allow a simple XML format
that is mostly human readable. The generation and parsing of the
various data types are done through the TypeConverter classes
associated with the data types.
Example:
... ado.net/XML headers & schema ...
<resheader name="resmimetype=">text/microsoft-resx</resheader>
<resheader name="version=">2.0</resheader>
<resheader name="reader=">System.Resources.ResXResourceReader, System.Windows.Forms, ...</resheader>
<resheader name="writer=">System.Resources.ResXResourceWriter, System.Windows.Forms, ...</resheader>
<data name="Name1="><value>this is my long string</value><comment>this is a comment</comment></data>
<data name="Color1" type="System.Drawing.Color, System.Drawing=">Blue</data>
<data name="Bitmap1" mimetype="application/x-microsoft.net.object.binary.base64=">
<value>[base64 mime encoded serialized .NET Framework object]</value>
</data>
<data name="Icon1" type="System.Drawing.Icon, System.Drawing" mimetype="application/x-microsoft.net.object.bytearray.base64=">
<value>[base64 mime encoded string representing a byte array form of the .NET Framework object]</value>
<comment>This is a comment</comment>
</data>
There are any number of "resheader" rows that contain simple
name/value pairs.
Each data row contains a name, and value. The row also contains a
type or mimetype. Type corresponds to a .NET class that support
text/value conversion through the TypeConverter architecture.
Classes that don't support this are serialized and stored with the
mimetype set.
The mimetype is used for serialized objects, and tells the
ResXResourceReader how to depersist the object. This is currently not
extensible. For a given mimetype the value must be set accordingly:
Note - application/x-microsoft.net.object.binary.base64 is the format
that the ResXResourceWriter will generate, however the reader can
read any of the formats listed below.
mimetype: application/x-microsoft.net.object.binary.base64
value : The object must be serialized with
: System.Runtime.Serialization.Formatters.Binary.BinaryFormatter
: and then encoded with base64 encoding.
mimetype: application/x-microsoft.net.object.soap.base64
value : The object must be serialized with
: System.Runtime.Serialization.Formatters.Soap.SoapFormatter
: and then encoded with base64 encoding.
mimetype: application/x-microsoft.net.object.bytearray.base64
value : The object must be serialized into a byte array
: using a System.ComponentModel.TypeConverter
: and then encoded with base64 encoding.
-->
<xsd:schema id="root" xmlns="" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:msdata="urn:schemas-microsoft-com:xml-msdata=">
<xsd:import namespace="http://www.w3.org/XML/1998/namespace" />
<xsd:element name="root" msdata:IsDataSet="true=">
<xsd:complexType>
<xsd:choice maxOccurs="unbounded">
<xsd:element name="metadata">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="value" type="xsd:string" minOccurs="0" />
</xsd:sequence>
<xsd:attribute name="name" use="required" type="xsd:string" />
<xsd:attribute name="type" type="xsd:string" />
<xsd:attribute name="mimetype" type="xsd:string" />
<xsd:attribute ref="xml:space" />
</xsd:complexType>
</xsd:element>
<xsd:element name="assembly">
<xsd:complexType>
<xsd:attribute name="alias" type="xsd:string" />
<xsd:attribute name="name" type="xsd:string" />
</xsd:complexType>
</xsd:element>
<xsd:element name="data">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" />
<xsd:element name="comment" type="xsd:string" minOccurs="0" msdata:Ordinal="2" />
</xsd:sequence>
<xsd:attribute name="name" type="xsd:string" use="required" msdata:Ordinal="1" />
<xsd:attribute name="type" type="xsd:string" msdata:Ordinal="3" />
<xsd:attribute name="mimetype" type="xsd:string" msdata:Ordinal="4" />
<xsd:attribute ref="xml:space" />
</xsd:complexType>
</xsd:element>
<xsd:element name="resheader">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" />
</xsd:sequence>
<xsd:attribute name="name" type="xsd:string" use="required" />
</xsd:complexType>
</xsd:element>
</xsd:choice>
</xsd:complexType>
</xsd:element>
</xsd:schema>
<resheader name="resmimetype">
<value>text/microsoft-resx</value>
</resheader>
<resheader name="version">
<value>2.0</value>
</resheader>
<resheader name="reader">
<value>System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
</resheader>
<resheader name="writer">
<value>System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
</resheader>
</root>

View File

@@ -0,0 +1,24 @@
# String resource file
#
# When processed by the String Resource Tool, this file generates
# both a .CS and a .RESX file with the same name as the file.
# The .CS file contains a class which can be used to access these
# string resources, including the ability to format in
# parameters, which are identified with the .NET {x} format
# (see String.Format help).
#
# Comments below assume the file name is SR.strings.
#
# Lines starting with a semicolon ";" are also treated as comments, but
# in a future version they will be extracted and made available in LocStudio
# Put your comments to localizers _before_ the string they apply to.
#
# SMO build specific comment
# after generating the .resx file, run srgen on it and get the .resx file
# please remember to also check that .resx in, along with the
# .strings and .cs files
[strings]
############################################################################
# Migration Service

View File

@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="utf-8"?>
<xliff version="1.2" xmlns="urn:oasis:names:tc:xliff:document:1.2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="urn:oasis:names:tc:xliff:document:1.2 xliff-core-1.2-transitional.xsd">
<file datatype="xml" original="sr.resx" source-language="en">
<body />
</file>
</xliff>

View File

@@ -0,0 +1,39 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<AssemblyName>MicrosoftSqlToolsMigration</AssemblyName>
<OutputType>Exe</OutputType>
<ValidateExecutableReferencesMatchSelfContained>false</ValidateExecutableReferencesMatchSelfContained>
<EnableDefaultItems>false</EnableDefaultItems>
<EnableDefaultCompileItems>false</EnableDefaultCompileItems>
<EnableDefaultEmbeddedResourceItems>false</EnableDefaultEmbeddedResourceItems>
<EnableDefaultNoneItems>false</EnableDefaultNoneItems>
<EmbeddedResourceUseDependentUponConvention>false</EmbeddedResourceUseDependentUponConvention>
<GenerateAssemblyInfo>false</GenerateAssemblyInfo>
<DefineConstants>$(DefineConstants);NETCOREAPP1_0;TRACE</DefineConstants>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
<PreserveCompilationContext>true</PreserveCompilationContext>
<RuntimeIdentifiers>$(ToolsServiceTargetRuntimes)</RuntimeIdentifiers>
<NoWarn>NU1605</NoWarn>
<ErrorOnDuplicatePublishOutputFiles>false</ErrorOnDuplicatePublishOutputFiles>
<Nullable>enable</Nullable>
</PropertyGroup>
<PropertyGroup>
<GenerateRuntimeConfigurationFiles>true</GenerateRuntimeConfigurationFiles>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.SqlServer.Assessment" />
<PackageReference Include="Microsoft.SqlServer.Migration.Logins" />
<PackageReference Include="Microsoft.SqlServer.Migration.Assessment" />
<PackageReference Include="Microsoft.SqlServer.Migration.Tde" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="../Microsoft.SqlTools.Hosting/Microsoft.SqlTools.Hosting.csproj" />
</ItemGroup>
<ItemGroup>
<Compile Include="**\*.cs" Exclude="**/obj/**/*.cs" />
</ItemGroup>
<ItemGroup>
<EmbeddedResource Include="Localization\*.resx" />
<None Include="Localization\sr.strings" />
</ItemGroup>
</Project>

File diff suppressed because it is too large.

View File

@@ -0,0 +1,35 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//
using System;
using System.Threading;
using System.Threading.Tasks;
using Azure.Core;
namespace Microsoft.SqlTools.Migration.Models
{
/// <summary>
/// Temporary token credential used to interact with the ArmClient class.
/// The token passed to this class should be a newly requested token, because this class doesn't renew the token.
/// Once MSAL is rolled out on ADS, we will implement a way to use the same ADS token cache configured by ADS.
/// </summary>
internal class StaticTokenCredential : TokenCredential
{
private readonly AccessToken _token;
/// <summary>
/// Build credentials using a token that will not change.
/// </summary>
/// <param name="accessToken">Newly created token that should last for the duration of the whole operation.</param>
public StaticTokenCredential(string accessToken) => _token = new AccessToken(
accessToken: accessToken,
expiresOn: DateTimeOffset.Now.AddHours(1)); // Default to an hour, the current lifetime of a newly created token.
public override AccessToken GetToken(TokenRequestContext requestContext, CancellationToken cancellationToken)
=> _token;
public override ValueTask<AccessToken> GetTokenAsync(TokenRequestContext requestContext, CancellationToken cancellationToken)
=> new ValueTask<AccessToken>(_token);
}
}
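Because the class only exists to satisfy Azure.Core's TokenCredential contract, its typical consumer is an ARM client built from the access token supplied by the caller (for example, the AccessToken field on CertificateMigrationParams). A minimal sketch follows, not part of the diff, assuming the Azure.ResourceManager package is referenced; since the credential type is internal, the sketch lives in the same assembly.

// Illustrative sketch, not from this commit: authenticating Azure Resource
// Manager calls with a caller-supplied token. Assumes Azure.ResourceManager
// is referenced; the helper class name is hypothetical.
using Azure.ResourceManager;

namespace Microsoft.SqlTools.Migration.Models
{
    internal static class StaticTokenCredentialUsageSketch
    {
        internal static ArmClient CreateArmClient(string accessToken)
        {
            return new ArmClient(new StaticTokenCredential(accessToken));
        }
    }
}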

View File

@@ -0,0 +1,64 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//
using System;
using System.IO;
using System.Diagnostics;
using Microsoft.SqlTools.Extensibility;
using Microsoft.SqlTools.Utility;
namespace Microsoft.SqlTools.Migration
{
internal class Program
{
private const string ServiceName = "MicrosoftSqlToolsMigration.exe";
internal static void Main(string[] args)
{
try
{
CommandOptions commandOptions = new CommandOptions(args, ServiceName);
if (commandOptions.ShouldExit)
{
return;
}
string logFilePath = "MicrosoftSqlToolsMigration";
if (!string.IsNullOrWhiteSpace(commandOptions.LogFilePath))
{
logFilePath = Path.Combine(commandOptions.LogFilePath, logFilePath);
}
else
{
logFilePath = Logger.GenerateLogFilePath(logFilePath);
}
Logger.Initialize(SourceLevels.Verbose, logFilePath, "Migration", commandOptions.AutoFlushLog);
Logger.Verbose("Starting SqlTools Migration Server...");
ExtensionServiceHost serviceHost = new ExtensionServiceHost(
new ExtensibleServiceHostOptions
{
HostName = "Migration",
HostProfileId = "SqlTools.Migration",
HostVersion = new Version(1, 0, 0, 0),
InitializeServiceCallback = (server, service) => { }
});
serviceHost.RegisterAndInitializeService(new MigrationService());
serviceHost.WaitForExit();
Logger.Verbose("SqlTools Migration Server exiting....");
}
catch (Exception ex)
{
Logger.Error(ex);
throw;
}
}
}
}

View File

@@ -0,0 +1,47 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("SqlTools Editor Services Host Protocol Library")]
[assembly: AssemblyDescription("Provides message types and client/server APIs for the SqlTools Editor Services JSON protocol.")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("Microsoft")]
[assembly: AssemblyProduct("SqlTools Editor Services")]
[assembly: AssemblyCopyright("© Microsoft Corporation. All rights reserved.")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("78caf6c3-5955-4b15-a302-2bd6b7871d5b")]
// Version information for an assembly consists of the following four values:
//
// Major Version
// Minor Version
// Build Number
// Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
[assembly: AssemblyInformationalVersion("1.0.0.0")]
[assembly: InternalsVisibleTo("Microsoft.SqlTools.Migration.IntegrationTests")]
// Allowing internals visible access to Moq library to help testing
[assembly: InternalsVisibleTo("DynamicProxyGenAssembly2")]

View File

@@ -0,0 +1,382 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//
using Microsoft.SqlServer.DataCollection.Common.Contracts.Advisor;
using Microsoft.SqlServer.DataCollection.Common.Contracts.ErrorHandling;
using Microsoft.SqlServer.DataCollection.Common.Contracts.SqlQueries;
using Microsoft.SqlServer.DataCollection.Common.ErrorHandling;
using Microsoft.SqlServer.Migration.SkuRecommendation;
using Microsoft.SqlServer.Migration.SkuRecommendation.Contracts.Models;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
namespace Microsoft.SqlTools.Migration
{
/// <summary>
/// Controller to manage the collection, aggregation, and persistence of SQL performance and static data for SKU recommendation.
/// </summary>
public class SqlDataQueryController : IDisposable
{
// Timers to control performance and static data collection intervals
private IList<System.Timers.Timer> timers = new List<System.Timers.Timer>() { };
private int perfQueryIntervalInSec;
private int numberOfIterations;
// Output folder to store data in
private string outputFolder;
// Name of the server handled by this controller
private string serverName;
// Data collector and cache
private DataPointsCollector dataCollector = null;
private SqlPerfDataPointsCache perfDataCache = null;
// Whether or not this controller has been disposed
private bool disposedValue = false;
private ISqlAssessmentLogger _logger;
// since this "console app" doesn't have any console to write to, store any messages so that they can be periodically fetched
private List<KeyValuePair<string, DateTime>> messages;
private List<KeyValuePair<string, DateTime>> errors;
/// <summary>
/// Create a new SqlDataQueryController.
/// </summary>
/// <param name="connectionString">SQL connection string</param>
/// <param name="outputFolder">Output folder to save results to</param>
/// <param name="perfQueryIntervalInSec">Interval, in seconds, at which perf counters are collected</param>
/// <param name="numberOfIterations">Number of iterations of perf counter collection before aggreagtion</param>
/// <param name="staticQueryIntervalInSec">Interval, in seconds, at which static/common counters are colltected</param>
/// <param name="logger">Logger</param>
public SqlDataQueryController(
string connectionString,
string outputFolder,
int perfQueryIntervalInSec,
int numberOfIterations,
int staticQueryIntervalInSec,
ISqlAssessmentLogger logger = null)
{
this.outputFolder = outputFolder;
this.perfQueryIntervalInSec = perfQueryIntervalInSec;
this.numberOfIterations = numberOfIterations;
this._logger = logger ?? new DefaultPerfDataCollectionLogger();
this.messages = new List<KeyValuePair<string, DateTime>>();
this.errors = new List<KeyValuePair<string, DateTime>>();
perfDataCache = new SqlPerfDataPointsCache(this.outputFolder, _logger);
dataCollector = new DataPointsCollector(new string[] { connectionString }, _logger);
// set up timers to run perf/static collection at specified intervals
System.Timers.Timer perfDataCollectionTimer = new System.Timers.Timer();
perfDataCollectionTimer.Elapsed += (sender, e) => PerfDataQueryEvent();
perfDataCollectionTimer.Interval = perfQueryIntervalInSec * 1000;
timers.Add(perfDataCollectionTimer);
System.Timers.Timer staticDataCollectionTimer = new System.Timers.Timer();
staticDataCollectionTimer.Elapsed += (sender, e) => StaticDataQueryAndPersistEvent();
staticDataCollectionTimer.Interval = staticQueryIntervalInSec * 1000;
timers.Add(staticDataCollectionTimer);
}
/// <summary>
/// Start this SqlDataQueryController.
/// </summary>
public void Start()
{
foreach (var timer in timers)
{
timer.Start();
}
}
/// <summary>
/// Returns whether or not this SqlDataQueryController is currently running.
/// </summary>
public bool IsRunning()
{
return this.timers.All(timer => timer.Enabled);
}
/// <summary>
/// Collect performance data, adding the collected points to the cache.
/// </summary>
private void PerfDataQueryEvent()
{
try
{
int currentIteration = perfDataCache.CurrentIteration;
// Get raw perf data points
var validationResult = dataCollector.CollectPerfDataPoints(CancellationToken.None, TimeSpan.FromSeconds(this.perfQueryIntervalInSec)).Result.FirstOrDefault();
if (validationResult != null && validationResult.Status == SqlAssessmentStatus.Completed)
{
IList<ISqlPerfDataPoints> result = validationResult.SqlPerfDataPoints;
perfDataCache.AddingPerfData(result);
serverName = this.perfDataCache.ServerName;
this.messages.Add(new KeyValuePair<string, DateTime>(
string.Format("Performance data query iteration: {0} of {1}, collected {2} data points.", currentIteration, numberOfIterations, result.Count),
DateTime.UtcNow));
// perform aggregation and persistence once enough iterations have completed
if (currentIteration == numberOfIterations)
{
PerfDataAggregateAndPersistEvent();
}
}
else if (validationResult != null && validationResult.Status == SqlAssessmentStatus.Error)
{
var error = validationResult.Errors.FirstOrDefault();
Logging(error);
}
}
catch (Exception e)
{
Logging(e);
}
}
/// <summary>
/// Aggregate and persist the cached points, saving the aggregated points to disk.
/// </summary>
internal void PerfDataAggregateAndPersistEvent()
{
try
{
// Aggregate the records in the Cache
int rawDataPointsCount = this.perfDataCache.GetRawDataPointsCount();
this.perfDataCache.AggregatingPerfData();
int aggregatedDataPointsCount = this.perfDataCache.GetAggregatedDataPointsCount();
// Persist into local csv.
if (aggregatedDataPointsCount > 0)
{
this.perfDataCache.PersistingCacheAsCsv();
this.messages.Add(new KeyValuePair<string, DateTime>(
string.Format("Aggregated {0} raw data points to {1} performance counters, and saved to {2}.", rawDataPointsCount, aggregatedDataPointsCount, this.outputFolder),
DateTime.UtcNow));
}
}
catch (Exception e)
{
Logging(e);
}
}
/// <summary>
/// Collect and persist static data, saving the collected points to disk.
/// </summary>
private void StaticDataQueryAndPersistEvent()
{
try
{
var validationResult = this.dataCollector.CollectCommonDataPoints(CancellationToken.None).Result.FirstOrDefault();
if (validationResult != null && validationResult.Status == SqlAssessmentStatus.Completed)
{
// Common data result
IList<ISqlCommonDataPoints> staticDataResult = new List<ISqlCommonDataPoints>();
staticDataResult.Add(validationResult.SqlCommonDataPoints);
serverName = staticDataResult.Select(p => p.ServerName).FirstOrDefault();
// Save to csv
var persistor = new DataPointsPersistor(this.outputFolder);
persistor.SaveCommonDataPoints(staticDataResult, serverName);
this.messages.Add(new KeyValuePair<string, DateTime>(
string.Format("Collected static configuration data, and saved to {0}.", this.outputFolder),
DateTime.UtcNow));
}
else if (validationResult != null && validationResult.Status == SqlAssessmentStatus.Error)
{
var error = validationResult.Errors.FirstOrDefault();
Logging(error);
}
}
catch (Exception e)
{
Logging(e);
}
}
/// <summary>
/// Log exceptions to file.
/// </summary>
/// <param name="ex">Exception to log</param>
private void Logging(Exception ex)
{
this.errors.Add(new KeyValuePair<string, DateTime>(ex.Message, DateTime.UtcNow));
var error = new UnhandledSqlExceptionErrorModel(ex, ErrorScope.General);
_logger.Log(error, ErrorLevel.Error, TelemetryScope.PerfCollection);
_logger.Log(TelemetryScope.PerfCollection, ex.Message);
}
/// <summary>
/// Log errors to file.
/// </summary>
/// <param name="error">Error to log</param>
private void Logging(IErrorModel error)
{
this.errors.Add(new KeyValuePair<string, DateTime>(error.RawException.Message, DateTime.UtcNow));
_logger.Log(error, ErrorLevel.Error, TelemetryScope.PerfCollection);
_logger.Log(TelemetryScope.PerfCollection, error.RawException.Message);
}
/// <summary>
/// Fetches the latest messages, and then clears the message list.
/// </summary>
/// <param name="startTime">Only return messages from after this time</param>
/// <returns>List of queued messages</returns>
public List<string> FetchLatestMessages(DateTime startTime)
{
List<string> latestMessages = this.messages.Where(kvp => kvp.Value > startTime).Select(kvp => kvp.Key).ToList();
this.messages.Clear();
return latestMessages;
}
/// <summary>
/// Fetches the latest errors, and then clears the error list.
/// </summary>
/// <param name="startTime">Only return errors from after this time</param>
/// <returns>List of queued errors</returns>
public List<string> FetchLatestErrors(DateTime startTime)
{
List<string> latestErrors = this.errors.Where(kvp => kvp.Value > startTime).Select(kvp => kvp.Key).ToList();
this.errors.Clear();
return latestErrors;
}
/// <summary>
/// Dispose of this SqlDataQueryController.
/// </summary>
public void Dispose()
{
// Do not change this code. Put cleanup code in 'Dispose(bool disposing)' method
Dispose(disposing: true);
GC.SuppressFinalize(this);
}
protected virtual void Dispose(bool disposing)
{
if (!disposedValue)
{
if (disposing)
{
foreach (var timer in timers)
{
timer.Stop();
}
if (perfDataCache.CurrentIteration > 2)
{
PerfDataAggregateAndPersistEvent(); // flush cache if there are enough data points
}
this.perfDataCache = null;
}
disposedValue = true;
}
}
}
/// <summary>
/// Cache to store intermediate SQL performance data before it is aggregated and persisted for SKU recommendation.
/// </summary>
public class SqlPerfDataPointsCache
{
public string ServerName { get; private set; }
public int CurrentIteration { get; private set; }
private string outputFolder;
private ISqlAssessmentLogger logger;
private IList<IList<ISqlPerfDataPoints>> perfDataPoints = new List<IList<ISqlPerfDataPoints>>();
private IList<AggregatedPerformanceCounters> perfAggregated = new List<AggregatedPerformanceCounters>();
/// <summary>
/// Create a new SqlPerfDataPointsCache.
/// </summary>
/// <param name="outputFolder">Output folder to save results to</param>
/// <param name="logger">Logger</param>
public SqlPerfDataPointsCache(string outputFolder, ISqlAssessmentLogger logger = null)
{
this.outputFolder = outputFolder;
this.logger = logger ?? new DefaultPerfDataCollectionLogger();
CurrentIteration = 1;
}
/// <summary>
/// Add the collected data points to the cache.
/// </summary>
/// <param name="result">Collected data points</param>
public void AddingPerfData(IList<ISqlPerfDataPoints> result)
{
ServerName = result.Select(p => p.ServerName).FirstOrDefault();
perfDataPoints.Add(result);
CurrentIteration++;
}
/// <summary>
/// Return the number of raw data points.
/// </summary>
public int GetRawDataPointsCount()
{
// flatten list
return perfDataPoints.SelectMany(x => x).Count();
}
/// <summary>
/// Return the number of aggregated data points.
/// </summary>
public int GetAggregatedDataPointsCount()
{
return perfAggregated.Count;
}
/// <summary>
/// Aggregate the cached data points.
/// </summary>
public void AggregatingPerfData()
{
try
{
var aggregator = new CounterAggregator(logger);
perfAggregated = aggregator.AggregateDatapoints(perfDataPoints);
}
catch (Exception)
{
// rethrow without resetting the original stack trace
throw;
}
finally
{
perfDataPoints.Clear();
// reset the iteration counter
CurrentIteration = 1;
}
}
/// <summary>
/// Save the cached and aggregated data points to disk.
/// </summary>
public void PersistingCacheAsCsv()
{
// Save to csv
var persistor = new DataPointsPersistor(outputFolder);
persistor.SavePerfDataPoints(perfAggregated, machineId: ServerName, overwrite: false);
}
}
}
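
For orientation, here is a minimal usage sketch of the controller's lifecycle: construct it, start the timers, poll the queued messages and errors, then dispose to stop collection and flush the cache. The connection string, output folder, and intervals are illustrative placeholders, not values taken from this change.

using System;
using System.Threading;
using Microsoft.SqlTools.Migration;

class ControllerUsageSketch
{
    static void Main()
    {
        // Placeholder connection string and output folder
        var controller = new SqlDataQueryController(
            connectionString: "Server=localhost;Integrated Security=true;",
            outputFolder: @"C:\temp\sku-data",
            perfQueryIntervalInSec: 30,
            numberOfIterations: 20,
            staticQueryIntervalInSec: 3600);

        controller.Start();
        DateTime lastFetch = DateTime.UtcNow;

        // Poll a few times for queued messages and errors, the way the hosting
        // service would when responding to refresh requests.
        for (int i = 0; i < 3 && controller.IsRunning(); i++)
        {
            Thread.Sleep(TimeSpan.FromSeconds(30));
            foreach (string message in controller.FetchLatestMessages(lastFetch))
            {
                Console.WriteLine(message);
            }
            foreach (string error in controller.FetchLatestErrors(lastFetch))
            {
                Console.Error.WriteLine(error);
            }
            lastFetch = DateTime.UtcNow;
        }

        // Stops the timers; the cache is flushed only if more than two iterations completed.
        controller.Dispose();
    }
}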

View File

@@ -0,0 +1,31 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//
using System.Collections.Generic;
using System.Linq;
using Microsoft.SqlServer.DataCollection.Common.Contracts.OperationsInfrastructure;
namespace Microsoft.SqlTools.Migration.Utils
{
internal static class ExtensionMethods
{
public static void AddExceptions(this IDictionary<string, IEnumerable<ReportableException>> exceptionMap1, IDictionary<string, IEnumerable<ReportableException>> exceptionMap2)
{
foreach (var keyValuePair2 in exceptionMap2)
{
// If the dictionary already contains the key then merge them
if (exceptionMap1.ContainsKey(keyValuePair2.Key))
{
foreach (var value in keyValuePair2.Value)
{
// Append returns a new sequence instead of mutating in place, so reassign the result
exceptionMap1[keyValuePair2.Key] = exceptionMap1[keyValuePair2.Key].Append(value);
}
continue;
}
exceptionMap1.Add(keyValuePair2);
}
}
}
}
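
AddExceptions merges the second map into the first: keys that already exist have their exception sequences combined, and new keys are added as-is. Below is a hedged sketch of that behavior; the server-name keys are hypothetical and the exception sequences are left empty, since constructing ReportableException instances requires assessment plumbing that is out of scope here, and the class is internal, so real callers live inside the Migration assembly.

using System.Collections.Generic;
using System.Linq;
using Microsoft.SqlServer.DataCollection.Common.Contracts.OperationsInfrastructure;
using Microsoft.SqlTools.Migration.Utils;

static class AddExceptionsSketch
{
    static void Demo()
    {
        // Hypothetical keys; empty sequences keep the sketch self-contained.
        var assessmentErrors = new Dictionary<string, IEnumerable<ReportableException>>
        {
            ["Server1"] = Enumerable.Empty<ReportableException>()
        };
        var skuErrors = new Dictionary<string, IEnumerable<ReportableException>>
        {
            ["Server1"] = Enumerable.Empty<ReportableException>(), // merged into the existing key
            ["Server2"] = Enumerable.Empty<ReportableException>()  // added as a new key
        };

        assessmentErrors.AddExceptions(skuErrors);
        // assessmentErrors now contains "Server1" (merged values) and "Server2".
    }
}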

View File

@@ -0,0 +1,17 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//
using System;
namespace Microsoft.SqlTools.Migration.Utils
{
internal static class MigrationServiceHelper
{
public static string FormatTimeSpan(TimeSpan ts)
{
return String.Format("{0:00}:{1:00}:{2:00}.{3:00}", ts.Hours, ts.Minutes, ts.Seconds, ts.Milliseconds / 10);
}
}
}
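
For reference, the format yields hours, minutes, seconds, and hundredths of a second; whole days are not included, so the hour field wraps for spans of 24 hours or more. An illustrative call (the helper is internal, so in practice it is invoked from inside the Migration assembly or a friend assembly):

using System;
using Microsoft.SqlTools.Migration.Utils;

// 1 hour, 2 minutes, 5.43 seconds
TimeSpan elapsed = TimeSpan.FromMilliseconds(3725430);
string formatted = MigrationServiceHelper.FormatTimeSpan(elapsed);
// formatted == "01:02:05.43" (milliseconds reduced to hundredths)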

View File

@@ -0,0 +1,8 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//
using NUnit.Framework;
[assembly: NonParallelizable]

View File

@@ -0,0 +1,43 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<GenerateAssemblyInfo>false</GenerateAssemblyInfo>
<DebugType>portable</DebugType>
<AssemblyName>Microsoft.SqlTools.Migration.IntegrationTests</AssemblyName>
<PackageId>Microsoft.SqlTools.Migration.IntegrationTests</PackageId>
<GenerateRuntimeConfigurationFiles>true</GenerateRuntimeConfigurationFiles>
<DefineConstants>$(DefineConstants);TRACE</DefineConstants>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="../../src/Microsoft.SqlTools.Hosting/Microsoft.SqlTools.Hosting.csproj" />
<ProjectReference Include="../../src/Microsoft.SqlTools.Migration/Microsoft.SqlTools.Migration.csproj" />
<ProjectReference Include="../Microsoft.SqlTools.ServiceLayer.Test.Common/Microsoft.SqlTools.ServiceLayer.Test.Common.csproj" />
<ProjectReference Include="../Microsoft.SqlTools.Test.CompletionExtension/Microsoft.SqlTools.Test.CompletionExtension.csproj" />
</ItemGroup>
<Choose>
<When Condition="'$(BUILD_DOTNET_TOOL)' == 'true'">
<ItemGroup>
<Compile Remove="Migration\*" />
</ItemGroup>
</When>
</Choose>
<ItemGroup>
<PackageReference Include="Moq" />
<PackageReference Include="System.Net.Http" />
<PackageReference Include="Microsoft.NET.Test.Sdk" />
<PackageReference Include="nunit" />
<PackageReference Include="nunit3testadapter" />
<PackageReference Include="nunit.console" />
<PackageReference Include="coverlet.collector">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
</ItemGroup>
<ItemGroup>
<Service Include="{82a7f48d-3b50-4b1e-b82e-3ada8210c358}" />
</ItemGroup>
<PropertyGroup Condition=" '$(Configuration)' == 'Integration' ">
<DefineConstants>$(DefineConstants);WINDOWS_ONLY_BUILD</DefineConstants>
</PropertyGroup>
</Project>

View File

@@ -0,0 +1,5 @@
ServerName,DatabaseName,LogicalName,PhysicalFullName,FileType,SizeMB,IsMemoryOptimizedDataOptionEnabled,TimeDataCollected
TEST,test,test,C:\Program Files\Microsoft SQL Server\MSSQL10_50.MSSQLSERVER\MSSQL\DATA\test.mdf,Rows,3,False,2021-10-28 19:08:03
TEST,test,test_log,C:\Program Files\Microsoft SQL Server\MSSQL10_50.MSSQLSERVER\MSSQL\DATA\test_log.ldf,Log,1,False,2021-10-28 19:08:03
TEST,test1,AdventureWorks2008R2_Data,C:\Program Files\Microsoft SQL Server\MSSQL10_50.MSSQLSERVER\MSSQL\DATA\test1.mdf,Rows,195.9375,False,2021-10-28 19:08:03
TEST,test1,AdventureWorks2008R2_Log,C:\Program Files\Microsoft SQL Server\MSSQL10_50.MSSQLSERVER\MSSQL\DATA\test1_1.LDF,Log,3.75,False,2021-10-28 19:08:03

View File

@@ -0,0 +1,2 @@
ServerName,Edition,HyperthreadRatio,IsClustered,IsHadrEnabled,LogicalCpuCount,MaxServerMemoryInUse,NumberCoresUsed,NumberOfUserDatabases,PhysicalCpuCount,ProductVersion,SqlStartTime,SumOfUserDatabasesSize,TempDbSize,NumOfLogins,TimeDataCollected
TEST,Enterprise Edition (64-bit),2,False,False,2,2147483647,2,2,1,10.50.6592.0,2021-10-26 19:26:06,203,8,13,2021-10-28 19:08:03

View File

@@ -0,0 +1,121 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.SqlTools.Hosting.Protocol;
using Microsoft.SqlTools.Migration.IntegrationTests.Utility;
using Microsoft.SqlTools.Migration.Contracts;
using Microsoft.SqlTools.ServiceLayer.Test.Common;
using Microsoft.SqlTools.ServiceLayer.Test.Common.RequestContextMocking;
using Moq;
using NUnit.Framework;
namespace Microsoft.SqlTools.Migration.IntegrationTests.Migration
{
public class MigrationServiceTests
{
[Test]
public async Task TestHandleMigrationAssessmentRequest()
{
using (SelfCleaningTempFile queryTempFile = new SelfCleaningTempFile())
{
var connectionResult = await LiveConnectionHelper.InitLiveConnectionInfoAsync("master", queryTempFile.FilePath);
var requestParams = new MigrationAssessmentsParams()
{
ConnectionString = connectionResult.ConnectionInfo.ConnectionDetails.ConnectionString
};
var requestContext = new Mock<RequestContext<MigrationAssessmentResult>>();
MigrationService service = new MigrationService();
await service.HandleMigrationAssessmentsRequest(requestParams, requestContext.Object);
requestContext.VerifyAll();
}
}
[Test]
[Ignore("Disable failing test")]
public async Task TestHandleMigrationGetSkuRecommendationsRequest()
{
GetSkuRecommendationsResult result = null;
var requestParams = new GetSkuRecommendationsParams()
{
DataFolder = Path.Combine("..", "..", "..", "Migration", "Data"),
TargetPlatforms = new List<string> { "AzureSqlManagedInstance" },
TargetSqlInstance = "Test",
TargetPercentile = 95,
StartTime = new DateTime(2020, 01, 01).ToString("yyyy-MM-dd HH:mm:ss"),
EndTime = DateTime.UtcNow.ToString("yyyy-MM-dd HH:mm:ss"),
PerfQueryIntervalInSec = 30,
ScalingFactor = 1,
DatabaseAllowList = new List<string> { "test", "test1" }
};
var requestContext = RequestContextMocks.Create<GetSkuRecommendationsResult>(r => result = r).AddErrorHandling(null);
MigrationService service = new MigrationService();
await service.HandleGetSkuRecommendationsRequest(requestParams, requestContext.Object);
Assert.IsNotNull(result, "Get SKU Recommendation result is null");
Assert.IsNotNull(result.SqlMiRecommendationResults, "Get MI SKU Recommendation baseline result is null");
Assert.IsNotNull(result.ElasticSqlMiRecommendationResults, "Get MI SKU Recommendation elastic result is null");
// TODO: Include Negative Justification in future when we start recommending more than one SKU.
Assert.Greater(result.SqlMiRecommendationResults.First().PositiveJustifications.Count, 0, "No positive justification for MI SKU Recommendation result");
Assert.Greater(result.ElasticSqlMiRecommendationResults.First().PositiveJustifications.Count, 0, "No positive justification for MI SKU elastic Recommendation result");
Assert.IsNotNull(result.InstanceRequirements);
Assert.AreEqual("TEST", result.InstanceRequirements.InstanceId);
Assert.AreEqual(2, result.InstanceRequirements.DatabaseLevelRequirements.Count);
Assert.AreEqual(4, result.InstanceRequirements.DatabaseLevelRequirements.Sum(db => db.FileLevelRequirements.Count));
}
[Test]
public async Task TestHandleStartStopPerfDataCollectionRequest()
{
StartPerfDataCollectionResult result = null;
using (SelfCleaningTempFile queryTempFile = new SelfCleaningTempFile())
{
var connectionResult = await LiveConnectionHelper.InitLiveConnectionInfoAsync("master", queryTempFile.FilePath);
string folderPath = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.LocalApplicationData), "SkuRecommendationTest");
Directory.CreateDirectory(folderPath);
var requestParams = new StartPerfDataCollectionParams()
{
ConnectionString = connectionResult.ConnectionInfo.ConnectionDetails.ConnectionString,
DataFolder = folderPath,
PerfQueryIntervalInSec = 30,
NumberOfIterations = 20,
StaticQueryIntervalInSec = 3600,
};
var requestContext = RequestContextMocks.Create<StartPerfDataCollectionResult>(r => result = r).AddErrorHandling(null);
MigrationService service = new MigrationService();
await service.HandleStartPerfDataCollectionRequest(requestParams, requestContext.Object);
Assert.IsNotNull(result, "Start Perf Data Collection result is null");
Assert.IsNotNull(result.DateTimeStarted, "Time perf data collection started is null");
// Stop data collection
StopPerfDataCollectionResult stopResult = null;
var stopRequestParams = new StopPerfDataCollectionParams()
{
};
var stopRequestContext = RequestContextMocks.Create<StopPerfDataCollectionResult>(r => stopResult = r).AddErrorHandling(null);
await service.HandleStopPerfDataCollectionRequest(stopRequestParams, stopRequestContext.Object);
Assert.IsNotNull(stopResult, "Stop Perf Data Collection result is null");
Assert.IsNotNull(stopResult.DateTimeStopped, "Time perf data collection stopped is null");
}
}
}
}

View File

@@ -0,0 +1,160 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//
using System;
using Microsoft.Data.SqlClient;
using System.IO;
using System.Reflection;
using System.Threading.Tasks;
using Microsoft.SqlServer.Management.Common;
using Microsoft.SqlTools.ServiceLayer.Connection;
using Microsoft.SqlTools.ServiceLayer.Connection.Contracts;
using Microsoft.SqlTools.ServiceLayer.Test.Common;
using Microsoft.SqlTools.ServiceLayer.Workspace.Contracts;
using NUnit.Framework;
using System.Threading;
namespace Microsoft.SqlTools.Migration.IntegrationTests.Utility
{
public class LiveConnectionException : Exception
{
public LiveConnectionException(string message)
: base(message) { }
}
public class LiveConnectionHelper
{
public static string GetTestSqlFile(string fileName = null)
{
string filePath = null;
if (string.IsNullOrEmpty(fileName))
{
filePath = Path.Combine(Path.GetDirectoryName(Assembly.GetEntryAssembly().Location), "sqltest.sql");
}
else
{
filePath = Path.Combine(Path.GetDirectoryName(Assembly.GetEntryAssembly().Location), fileName + ".sql");
}
if (File.Exists(filePath))
{
File.Delete(filePath);
}
File.WriteAllText(filePath, "SELECT * FROM sys.objects\n");
return filePath;
}
public static TestConnectionResult InitLiveConnectionInfo(string databaseName = null, string ownerUri = null)
=> InitLiveConnectionInfoAsync(databaseName, ownerUri, ServiceLayer.Connection.ConnectionType.Default).ConfigureAwait(false).GetAwaiter().GetResult();
public static async Task<TestConnectionResult> InitLiveConnectionInfoAsync(string databaseName = "master", string ownerUri = null,
string connectionType = ServiceLayer.Connection.ConnectionType.Default, TestServerType serverType = TestServerType.OnPrem)
{
ScriptFile scriptFile = null;
if (string.IsNullOrEmpty(ownerUri))
{
ownerUri = GetTestSqlFile();
scriptFile = TestServiceProvider.Instance.WorkspaceService.Workspace.GetFile(ownerUri);
ownerUri = scriptFile.ClientUri;
}
if (string.IsNullOrEmpty(databaseName))
{
databaseName = "master";
}
ConnectParams connectParams = TestServiceProvider.Instance.ConnectionProfileService.GetConnectionParameters(serverType, databaseName);
// try to connect up to 3 times, sleeping in between retries
const int RetryCount = 3;
const int RetryDelayMs = 15000;
for (int attempt = 0; attempt < RetryCount; ++attempt)
{
var connectionService = GetLiveTestConnectionService();
var connectionResult =
await connectionService.Connect(new ConnectParams
{
OwnerUri = ownerUri,
Connection = connectParams.Connection,
Type = connectionType
});
if (!string.IsNullOrEmpty(connectionResult.ErrorMessage))
{
Console.WriteLine(connectionResult.ErrorMessage);
}
ConnectionInfo connInfo;
connectionService.TryFindConnection(ownerUri, out connInfo);
// if the connection wasn't successful then cleanup and try again (up to max retry count)
if (connInfo == null)
{
connectionService.Disconnect(new DisconnectParams()
{
OwnerUri = ownerUri
});
// don't sleep on the final iteration since we won't try again
if (attempt < RetryCount - 1)
{
Thread.Sleep(RetryDelayMs);
}
}
else
{
return new TestConnectionResult() { ConnectionInfo = connInfo, ScriptFile = scriptFile };
}
}
throw new LiveConnectionException(string.Format("Could not establish a connection to {0}:{1}",
connectParams.Connection.ServerName, connectParams.Connection.DatabaseName));
}
public static ConnectionInfo InitLiveConnectionInfoForDefinition(string databaseName = null)
{
using (SelfCleaningTempFile queryTempFile = new SelfCleaningTempFile())
{
ConnectParams connectParams = TestServiceProvider.Instance.ConnectionProfileService.GetConnectionParameters(TestServerType.OnPrem, databaseName);
string ownerUri = queryTempFile.FilePath;
InitLiveConnectionInfo(databaseName, ownerUri);
var connectionService = GetLiveTestConnectionService();
ConnectionInfo connInfo;
connectionService.TryFindConnection(ownerUri, out connInfo);
Assert.NotNull(connInfo);
return connInfo;
}
}
public static ServerConnection InitLiveServerConnectionForDefinition(ConnectionInfo connInfo)
{
SqlConnection sqlConn = new SqlConnection(ConnectionService.BuildConnectionString(connInfo.ConnectionDetails));
return new ServerConnection(sqlConn);
}
/// <summary>
/// Creates a test sql connection factory instance
/// </summary>
public static ISqlConnectionFactory GetLiveTestSqlConnectionFactory()
{
// connect to a real server instance
return ConnectionService.Instance.ConnectionFactory;
}
public static ConnectionService GetLiveTestConnectionService()
{
// connect to a real server instance
return ConnectionService.Instance;
}
public class TestConnectionResult
{
public ConnectionInfo ConnectionInfo { get; set; }
public ScriptFile ScriptFile { get; set; }
public TextDocumentPosition TextDocumentPosition { get; set; }
}
}
}