Update for extension XLFs (#14650)

* updates to extension XLFs

* revert dacpac

* dacpac updated
Alex Ma authored on 2021-03-12 10:55:23 -08:00, committed by GitHub
parent f83925ce38
commit 8acc28e344
8 changed files with 5527 additions and 2530 deletions
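The XLFs updated here carry only the English source strings that are handed off for translation. For orientation, the trans-unit ids under the extensions/mssql/dist/* file sections correspond to localization keys declared in the extension's TypeScript source, while the command-title and configuration entries at the top of the diff appear to back %key% placeholders in the extension's package manifest. The following is a minimal sketch of that mapping, assuming the usual vscode-nls pattern; the identifiers and values shown are illustrative and are not copied from the actual mssql extension code.

```typescript
// Minimal sketch (not actual mssql extension code) of how ids in these XLFs
// typically originate when strings are declared with vscode-nls: the first
// argument to localize() becomes the <trans-unit id> and the second argument
// becomes the English <source> text exported to the XLF.
import * as nls from 'vscode-nls';

const localize = nls.loadMessageBundle();

// e.g. <trans-unit id="mssql.owner"><source xml:lang="en">Owner</source></trans-unit>
const ownerLabel: string = localize('mssql.owner', 'Owner');

// Placeholders such as {0}/{1} in the <source> text are positional arguments.
// localPath and hdfsFolder are hypothetical values used only for illustration.
const localPath = '/tmp/job.py';
const hdfsFolder = '/jobs';
const uploadMsg: string = localize(
    'sparkJobSubmission.PrepareUploadingFile',
    'Uploading file from local {0} to HDFS folder: {1}',
    localPath,
    hdfsFolder);
```

Translated copies returned by the localization pipeline typically fill in a matching target element next to each source, and those are what get converted into the per-language runtime bundles.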

@@ -73,6 +73,10 @@
<trans-unit id="title.configurePython">
<source xml:lang="en">Configure Python for Notebooks</source>
</trans-unit>
<trans-unit id="title.openClusterDashboard">
<source xml:lang="en">Cluster
Dashboard</source>
</trans-unit>
<trans-unit id="title.searchServers">
<source xml:lang="en">Search: Servers</source>
</trans-unit>
@@ -88,12 +92,27 @@
<trans-unit id="title.showLogFile">
<source xml:lang="en">Show Log File</source>
</trans-unit>
<trans-unit id="mssql.disabled">
<source xml:lang="en">Disabled</source>
</trans-unit>
<trans-unit id="mssql.enabled">
<source xml:lang="en">Enabled</source>
</trans-unit>
<trans-unit id="mssql.exportNotebookToSql">
<source xml:lang="en">Export Notebook as SQL</source>
</trans-unit>
<trans-unit id="mssql.exportSqlAsNotebook">
<source xml:lang="en">Export SQL as Notebook</source>
</trans-unit>
<trans-unit id="mssql.configuration.title">
<source xml:lang="en">MSSQL configuration</source>
</trans-unit>
<trans-unit id="mssql.query.displayBitAsNumber">
<source xml:lang="en">Should BIT columns be displayed as numbers (1 or 0)? If false, BIT columns will be displayed as 'true' or 'false'</source>
</trans-unit>
<trans-unit id="mssql.query.maxXmlCharsToStore">
<source xml:lang="en">Number of XML characters to store after running a query</source>
</trans-unit>
<trans-unit id="mssql.format.alignColumnDefinitionsInColumns">
<source xml:lang="en">Should column definitions be aligned?</source>
</trans-unit>
@@ -121,7 +140,91 @@
<trans-unit id="mssql.logFilesRemovalLimit">
<source xml:lang="en">Maximum number of old files to remove upon startup that have expired mssql.logRetentionMinutes. Files that do not get cleaned up due to this limitation get cleaned up next time Azure Data Studio starts up.</source>
</trans-unit>
<trans-unit id="ignorePlatformWarning">
<trans-unit id="mssql.intelliSense.enableIntelliSense">
<source xml:lang="en">Should IntelliSense be enabled</source>
</trans-unit>
<trans-unit id="mssql.intelliSense.enableErrorChecking">
<source xml:lang="en">Should IntelliSense error checking be enabled</source>
</trans-unit>
<trans-unit id="mssql.intelliSense.enableSuggestions">
<source xml:lang="en">Should IntelliSense suggestions be enabled</source>
</trans-unit>
<trans-unit id="mssql.intelliSense.enableQuickInfo">
<source xml:lang="en">Should IntelliSense quick info be enabled</source>
</trans-unit>
<trans-unit id="mssql.intelliSense.lowerCaseSuggestions">
<source xml:lang="en">Should IntelliSense suggestions be lowercase</source>
</trans-unit>
<trans-unit id="mssql.query.setRowCount">
<source xml:lang="en">Maximum number of rows to return before the server stops processing your query.</source>
</trans-unit>
<trans-unit id="mssql.query.textSize">
<source xml:lang="en">Maximum size of text and ntext data returned from a SELECT statement</source>
</trans-unit>
<trans-unit id="mssql.query.executionTimeout">
<source xml:lang="en">An execution time-out of 0 indicates an unlimited wait (no time-out)</source>
</trans-unit>
<trans-unit id="mssql.query.noCount">
<source xml:lang="en">Enable SET NOCOUNT option</source>
</trans-unit>
<trans-unit id="mssql.query.noExec">
<source xml:lang="en">Enable SET NOEXEC option</source>
</trans-unit>
<trans-unit id="mssql.query.parseOnly">
<source xml:lang="en">Enable SET PARSEONLY option</source>
</trans-unit>
<trans-unit id="mssql.query.arithAbort">
<source xml:lang="en">Enable SET ARITHABORT option</source>
</trans-unit>
<trans-unit id="mssql.query.statisticsTime">
<source xml:lang="en">Enable SET STATISTICS TIME option</source>
</trans-unit>
<trans-unit id="mssql.query.statisticsIO">
<source xml:lang="en">Enable SET STATISTICS IO option</source>
</trans-unit>
<trans-unit id="mssql.query.xactAbortOn">
<source xml:lang="en">Enable SET XACT_ABORT ON option</source>
</trans-unit>
<trans-unit id="mssql.query.transactionIsolationLevel">
<source xml:lang="en">Enable SET TRANSACTION ISOLATION LEVEL option</source>
</trans-unit>
<trans-unit id="mssql.query.deadlockPriority">
<source xml:lang="en">Enable SET DEADLOCK_PRIORITY option</source>
</trans-unit>
<trans-unit id="mssql.query.lockTimeout">
<source xml:lang="en">Enable SET LOCK TIMEOUT option (in milliseconds)</source>
</trans-unit>
<trans-unit id="mssql.query.queryGovernorCostLimit">
<source xml:lang="en">Enable SET QUERY_GOVERNOR_COST_LIMIT</source>
</trans-unit>
<trans-unit id="mssql.query.ansiDefaults">
<source xml:lang="en">Enable SET ANSI_DEFAULTS</source>
</trans-unit>
<trans-unit id="mssql.query.quotedIdentifier">
<source xml:lang="en">Enable SET QUOTED_IDENTIFIER</source>
</trans-unit>
<trans-unit id="mssql.query.ansiNullDefaultOn">
<source xml:lang="en">Enable SET ANSI_NULL_DFLT_ON</source>
</trans-unit>
<trans-unit id="mssql.query.implicitTransactions">
<source xml:lang="en">Enable SET IMPLICIT_TRANSACTIONS</source>
</trans-unit>
<trans-unit id="mssql.query.cursorCloseOnCommit">
<source xml:lang="en">Enable SET CURSOR_CLOSE_ON_COMMIT</source>
</trans-unit>
<trans-unit id="mssql.query.ansiPadding">
<source xml:lang="en">Enable SET ANSI_PADDING</source>
</trans-unit>
<trans-unit id="mssql.query.ansiWarnings">
<source xml:lang="en">Enable SET ANSI_WARNINGS</source>
</trans-unit>
<trans-unit id="mssql.query.ansiNulls">
<source xml:lang="en">Enable SET ANSI_NULLS</source>
</trans-unit>
<trans-unit id="mssql.query.alwaysEncryptedParameterization">
<source xml:lang="en">Enable Parameterization for Always Encrypted</source>
</trans-unit>
<trans-unit id="mssql.ignorePlatformWarning">
<source xml:lang="en">[Optional] Do not show unsupported platform warnings</source>
</trans-unit>
<trans-unit id="onprem.databaseProperties.recoveryModel">
@@ -242,10 +345,28 @@
<source xml:lang="en">The SQL Server language record name</source>
</trans-unit>
<trans-unit id="mssql.connectionOptions.columnEncryptionSetting.displayName">
<source xml:lang="en">Column encryption</source>
<source xml:lang="en">Always Encrypted</source>
</trans-unit>
<trans-unit id="mssql.connectionOptions.columnEncryptionSetting.description">
<source xml:lang="en">Default column encryption setting for all the commands on the connection</source>
<source xml:lang="en">Enables or disables Always Encrypted for the connection</source>
</trans-unit>
<trans-unit id="mssql.connectionOptions.enclaveAttestationProtocol.displayName">
<source xml:lang="en">Attestation Protocol</source>
</trans-unit>
<trans-unit id="mssql.connectionOptions.enclaveAttestationProtocol.description">
<source xml:lang="en">Specifies a protocol for attesting a server-side enclave used with Always Encrypted with secure enclaves</source>
</trans-unit>
<trans-unit id="mssql.connectionOptions.enclaveAttestationProtocol.categoryValues.AAS">
<source xml:lang="en">Azure Attestation</source>
</trans-unit>
<trans-unit id="mssql.connectionOptions.enclaveAttestationProtocol.categoryValues.HGS">
<source xml:lang="en">Host Guardian Service</source>
</trans-unit>
<trans-unit id="mssql.connectionOptions.enclaveAttestationUrl.displayName">
<source xml:lang="en">Enclave Attestation URL</source>
</trans-unit>
<trans-unit id="mssql.connectionOptions.enclaveAttestationUrl.description">
<source xml:lang="en">Specifies an endpoint for attesting a server-side enclave used with Always Encrypted with secure enclaves</source>
</trans-unit>
<trans-unit id="mssql.connectionOptions.encrypt.displayName">
<source xml:lang="en">Encrypt</source>
@@ -362,7 +483,22 @@
<source xml:lang="en">Type system version</source>
</trans-unit>
<trans-unit id="mssql.connectionOptions.typeSystemVersion.description">
<source xml:lang="en">Indicates which server type system then provider will expose through the DataReader</source>
<source xml:lang="en">Indicates which server type system the provider will expose through the DataReader</source>
</trans-unit>
<trans-unit id="databasesListProperties.name">
<source xml:lang="en">Name</source>
</trans-unit>
<trans-unit id="databasesListProperties.status">
<source xml:lang="en">Status</source>
</trans-unit>
<trans-unit id="databasesListProperties.size">
<source xml:lang="en">Size (MB)</source>
</trans-unit>
<trans-unit id="databasesListProperties.lastBackup">
<source xml:lang="en">Last backup</source>
</trans-unit>
<trans-unit id="objectsListProperties.name">
<source xml:lang="en">Name</source>
</trans-unit>
</body></file>
<file original="extensions/mssql/dist/localizedConstants" source-language="en" datatype="plaintext"><body>
@@ -381,6 +517,12 @@
<trans-unit id="mssql.ownerPostfix">
<source xml:lang="en"> - Owner</source>
</trans-unit>
<trans-unit id="mssql.owner">
<source xml:lang="en">Owner</source>
</trans-unit>
<trans-unit id="mssql.group">
<source xml:lang="en">Group</source>
</trans-unit>
<trans-unit id="mssql.owningGroupPostfix">
<source xml:lang="en"> - Owning Group</source>
</trans-unit>
@@ -403,7 +545,7 @@
<source xml:lang="en">Delete</source>
</trans-unit>
<trans-unit id="mssql.stickyHeader">
<source xml:lang="en">Sticky</source>
<source xml:lang="en">Sticky Bit</source>
</trans-unit>
<trans-unit id="mssql.inheritDefaultsLabel">
<source xml:lang="en">Inherit Defaults</source>
@@ -429,6 +571,12 @@
<trans-unit id="mssql.namedUsersAndGroups">
<source xml:lang="en">Named Users and Groups</source>
</trans-unit>
<trans-unit id="mssql.defaultUserAndGroups">
<source xml:lang="en">Default User and Groups</source>
</trans-unit>
<trans-unit id="mssql.userOrGroupIcon">
<source xml:lang="en">User or Group Icon</source>
</trans-unit>
<trans-unit id="mssql.apply">
<source xml:lang="en">Apply</source>
</trans-unit>
@@ -438,45 +586,48 @@
<trans-unit id="mssql.errorApplyingAclChanges">
<source xml:lang="en">Unexpected error occurred while applying changes : {0}</source>
</trans-unit>
<trans-unit id="sparkJobSubmission_LocalFileDestinationHint">
<trans-unit id="sparkJobSubmission.LocalFileDestinationHint">
<source xml:lang="en">Local file will be uploaded to HDFS. </source>
</trans-unit>
<trans-unit id="sparkJobSubmission_SubmissionEndMessage">
<trans-unit id="sparkJobSubmission.SubmissionEndMessage">
<source xml:lang="en">.......................... Submit Spark Job End ............................</source>
</trans-unit>
<trans-unit id="sparkJobSubmission_PrepareUploadingFile">
<trans-unit id="sparkJobSubmission.PrepareUploadingFile">
<source xml:lang="en">Uploading file from local {0} to HDFS folder: {1}</source>
</trans-unit>
<trans-unit id="sparkJobSubmission_UploadingFileSucceeded">
<trans-unit id="sparkJobSubmission.UploadingFileSucceeded">
<source xml:lang="en">Upload file to cluster Succeeded!</source>
</trans-unit>
<trans-unit id="sparkJobSubmission_UploadingFileFailed">
<trans-unit id="sparkJobSubmission.UploadingFileFailed">
<source xml:lang="en">Upload file to cluster Failed. {0}</source>
</trans-unit>
<trans-unit id="sparkJobSubmission_PrepareSubmitJob">
<trans-unit id="sparkJobSubmission.PrepareSubmitJob">
<source xml:lang="en">Submitting job {0} ... </source>
</trans-unit>
<trans-unit id="sparkJobSubmission_SubmitJobFinished">
<trans-unit id="sparkJobSubmission.SubmitJobFinished">
<source xml:lang="en">The Spark Job has been submitted.</source>
</trans-unit>
<trans-unit id="sparkJobSubmission_SubmitJobFailed">
<trans-unit id="sparkJobSubmission.SubmitJobFailed">
<source xml:lang="en">Spark Job Submission Failed. {0} </source>
</trans-unit>
<trans-unit id="sparkJobSubmission_YarnUIMessage">
<trans-unit id="sparkJobSubmission.YarnUIMessage">
<source xml:lang="en">YarnUI Url: {0} </source>
</trans-unit>
<trans-unit id="sparkJobSubmission_SparkHistoryLinkMessage">
<trans-unit id="sparkJobSubmission.SparkHistoryLinkMessage">
<source xml:lang="en">Spark History Url: {0} </source>
</trans-unit>
<trans-unit id="sparkJobSubmission_GetApplicationIdFailed">
<trans-unit id="sparkJobSubmission.GetApplicationIdFailed">
<source xml:lang="en">Get Application Id Failed. {0}</source>
</trans-unit>
<trans-unit id="sparkJobSubmission_LocalFileNotExisted">
<trans-unit id="sparkJobSubmission.LocalFileNotExisted">
<source xml:lang="en">Local file {0} does not existed. </source>
</trans-unit>
<trans-unit id="sparkJobSubmission_NoSqlBigDataClusterFound">
<trans-unit id="sparkJobSubmission.NoSqlBigDataClusterFound">
<source xml:lang="en">No SQL Server Big Data Cluster found.</source>
</trans-unit>
<trans-unit id="sparkConnectionRequired">
<source xml:lang="en">Please connect to the Spark cluster before View {0} History.</source>
</trans-unit>
</body></file>
<file original="extensions/mssql/dist/objectExplorerNodeProvider/fileSources" source-language="en" datatype="plaintext"><body>
<trans-unit id="maxSizeNotice">
@@ -501,13 +652,27 @@
</trans-unit>
</body></file>
<file original="extensions/mssql/dist/sparkFeature/dialog/sparkJobSubmission/sparkJobSubmissionService" source-language="en" datatype="plaintext"><body>
<trans-unit id="sparkJobSubmission_LivyNoBatchIdReturned">
<trans-unit id="sparkJobSubmission.LivyNoBatchIdReturned">
<source xml:lang="en">No Spark job batch id is returned from response.{0}[Error] {1}</source>
</trans-unit>
<trans-unit id="sparkJobSubmission_LivyNoLogReturned">
<trans-unit id="sparkJobSubmission.LivyNoLogReturned">
<source xml:lang="en">No log is returned within response.{0}[Error] {1}</source>
</trans-unit>
</body></file>
<file original="extensions/mssql/dist/sqlClusterLookUp" source-language="en" datatype="plaintext"><body>
<trans-unit id="promptBDCUsername">
<source xml:lang="en">{0}Please provide the username to connect to the BDC Controller:</source>
</trans-unit>
<trans-unit id="promptBDCPassword">
<source xml:lang="en">Please provide the password to connect to the BDC Controller</source>
</trans-unit>
<trans-unit id="bdcConnectError">
<source xml:lang="en">Error: {0}. </source>
</trans-unit>
<trans-unit id="usernameAndPasswordRequired">
<source xml:lang="en">Username and password are required</source>
</trans-unit>
</body></file>
<file original="extensions/mssql/dist/objectExplorerNodeProvider/hdfsCommands" source-language="en" datatype="plaintext"><body>
<trans-unit id="allFiles">
<source xml:lang="en">All Files</source>
@@ -622,14 +787,20 @@
<trans-unit id="msgSampleCodeDataFrame">
<source xml:lang="en">This sample code loads the file into a data frame and shows the first 10 results.</source>
</trans-unit>
<trans-unit id="mssql.errorConvertingToNotebook">
<source xml:lang="en">An error occurred converting the SQL document to a Notebook. Error : {0}</source>
</trans-unit>
<trans-unit id="mssql.errorConvertingToSQL">
<source xml:lang="en">An error occurred converting the Notebook document to SQL. Error : {0}</source>
</trans-unit>
<trans-unit id="notebookFileType">
<source xml:lang="en">Notebooks</source>
</trans-unit>
<trans-unit id="unsupportedFileType">
<source xml:lang="en">Only .ipynb Notebooks are supported</source>
</trans-unit>
<trans-unit id="fileNotFound">
<source xml:lang="en">Unable to find the file specified</source>
<trans-unit id="noController">
<source xml:lang="en">Could not find the controller endpoint for this instance</source>
</trans-unit>
</body></file>
<file original="extensions/mssql/dist/hdfs/hdfsModel" source-language="en" datatype="plaintext"><body>
@@ -658,136 +829,181 @@
<trans-unit id="selectOtherServer">
<source xml:lang="en">Select other SQL Server</source>
</trans-unit>
<trans-unit id="sparkJobSubmission_PleaseSelectSqlWithCluster">
<trans-unit id="sparkJobSubmission.PleaseSelectSqlWithCluster">
<source xml:lang="en">Please select SQL Server with Big Data Cluster.</source>
</trans-unit>
<trans-unit id="sparkJobSubmission_NoSqlSelected">
<trans-unit id="sparkJobSubmission.NoSqlSelected">
<source xml:lang="en">No SQL Server is selected.</source>
</trans-unit>
<trans-unit id="errorNotSqlBigDataCluster">
<source xml:lang="en">The selected server does not belong to a SQL Server Big Data Cluster</source>
</trans-unit>
<trans-unit id="sparkJobSubmission_GetFilePathFromSelectedNodeFailed">
<trans-unit id="sparkJobSubmission.GetFilePathFromSelectedNodeFailed">
<source xml:lang="en">Error Get File Path: {0}</source>
</trans-unit>
</body></file>
<file original="extensions/mssql/dist/sparkFeature/dialog/sparkJobSubmission/sparkJobSubmissionDialog" source-language="en" datatype="plaintext"><body>
<trans-unit id="sparkJobSubmission_SparkJobSubmissionDialogInitializeError">
<trans-unit id="sparkJobSubmission.SparkJobSubmissionDialogInitializeError">
<source xml:lang="en">Parameters for SparkJobSubmissionDialog is illegal</source>
</trans-unit>
<trans-unit id="sparkJobSubmission_DialogTitleNewJob">
<trans-unit id="sparkJobSubmission.DialogTitleNewJob">
<source xml:lang="en">New Job</source>
</trans-unit>
<trans-unit id="sparkJobSubmission_DialogCancelButton">
<trans-unit id="sparkJobSubmission.DialogCancelButton">
<source xml:lang="en">Cancel</source>
</trans-unit>
<trans-unit id="sparkJobSubmission_DialogSubmitButton">
<trans-unit id="sparkJobSubmission.DialogSubmitButton">
<source xml:lang="en">Submit</source>
</trans-unit>
<trans-unit id="sparkJobSubmission_SubmitSparkJob">
<trans-unit id="sparkJobSubmission.SubmitSparkJob">
<source xml:lang="en">{0} Spark Job Submission:</source>
</trans-unit>
<trans-unit id="sparkJobSubmission_SubmissionStartMessage">
<trans-unit id="sparkJobSubmission.SubmissionStartMessage">
<source xml:lang="en">.......................... Submit Spark Job Start ..........................</source>
</trans-unit>
</body></file>
<file original="extensions/mssql/dist/sparkFeature/dialog/sparkJobSubmission/sparkJobSubmissionModel" source-language="en" datatype="plaintext"><body>
<trans-unit id="sparkJobSubmission_SparkJobSubmissionModelInitializeError">
<trans-unit id="sparkJobSubmission.SparkJobSubmissionModelInitializeError">
<source xml:lang="en">Parameters for SparkJobSubmissionModel is illegal</source>
</trans-unit>
<trans-unit id="sparkJobSubmission_submissionArgsIsInvalid">
<trans-unit id="sparkJobSubmission.submissionArgsIsInvalid">
<source xml:lang="en">submissionArgs is invalid. </source>
</trans-unit>
<trans-unit id="sparkJobSubmission_LivyBatchIdIsInvalid">
<trans-unit id="sparkJobSubmission.LivyBatchIdIsInvalid">
<source xml:lang="en">livyBatchId is invalid. </source>
</trans-unit>
<trans-unit id="sparkJobSubmission_GetApplicationIdTimeOut">
<trans-unit id="sparkJobSubmission.GetApplicationIdTimeOut">
<source xml:lang="en">Get Application Id time out. {0}[Log] {1}</source>
</trans-unit>
<trans-unit id="sparkJobSubmission_localFileOrFolderNotSpecified.">
<trans-unit id="sparkJobSubmission.localFileOrFolderNotSpecified.">
<source xml:lang="en">Property localFilePath or hdfsFolderPath is not specified. </source>
</trans-unit>
<trans-unit id="sparkJobSubmission_PathNotSpecified.">
<trans-unit id="sparkJobSubmission.PathNotSpecified.">
<source xml:lang="en">Property Path is not specified. </source>
</trans-unit>
</body></file>
<file original="extensions/mssql/dist/sparkFeature/dialog/sparkJobSubmission/sparkConfigurationTab" source-language="en" datatype="plaintext"><body>
<trans-unit id="sparkJobSubmission_GeneralTabName">
<trans-unit id="sparkJobSubmission.GeneralTabName">
<source xml:lang="en">GENERAL</source>
</trans-unit>
<trans-unit id="sparkJobSubmission_JobNamePlaceHolder">
<trans-unit id="sparkJobSubmission.JobNamePlaceHolder">
<source xml:lang="en">Enter a name ...</source>
</trans-unit>
<trans-unit id="sparkJobSubmission_JobName">
<trans-unit id="sparkJobSubmission.JobName">
<source xml:lang="en">Job Name</source>
</trans-unit>
<trans-unit id="sparkJobSubmission_SparkCluster">
<trans-unit id="sparkJobSubmission.SparkCluster">
<source xml:lang="en">Spark Cluster</source>
</trans-unit>
<trans-unit id="sparkJobSubmission_FilePathPlaceHolder">
<trans-unit id="sparkJobSubmission.FilePathPlaceHolder">
<source xml:lang="en">Path to a .jar or .py file</source>
</trans-unit>
<trans-unit id="sparkJobSubmission_LocalFileDestinationHintWithPath">
<trans-unit id="sparkJobSubmission.LocalFileDestinationHintWithPath">
<source xml:lang="en">The selected local file will be uploaded to HDFS: {0}</source>
</trans-unit>
<trans-unit id="sparkJobSubmission_MainFilePath">
<trans-unit id="sparkJobSubmission.MainFilePath">
<source xml:lang="en">JAR/py File</source>
</trans-unit>
<trans-unit id="sparkJobSubmission_MainClass">
<trans-unit id="sparkJobSubmission.MainClass">
<source xml:lang="en">Main Class</source>
</trans-unit>
<trans-unit id="sparkJobSubmission_Arguments">
<trans-unit id="sparkJobSubmission.Arguments">
<source xml:lang="en">Arguments</source>
</trans-unit>
<trans-unit id="sparkJobSubmission_ArgumentsTooltip">
<trans-unit id="sparkJobSubmission.ArgumentsTooltip">
<source xml:lang="en">Command line arguments used in your main class, multiple arguments should be split by space.</source>
</trans-unit>
<trans-unit id="sparkJobSubmission_NotSpecifyJobName">
<trans-unit id="sparkJobSubmission.NotSpecifyJobName">
<source xml:lang="en">Property Job Name is not specified.</source>
</trans-unit>
<trans-unit id="sparkJobSubmission_NotSpecifyJARPYPath">
<trans-unit id="sparkJobSubmission.NotSpecifyJARPYPath">
<source xml:lang="en">Property JAR/py File is not specified.</source>
</trans-unit>
<trans-unit id="sparkJobSubmission_NotSpecifyMainClass">
<trans-unit id="sparkJobSubmission.NotSpecifyMainClass">
<source xml:lang="en">Property Main Class is not specified.</source>
</trans-unit>
<trans-unit id="sparkJobSubmission_HDFSFileNotExistedWithPath">
<trans-unit id="sparkJobSubmission.HDFSFileNotExistedWithPath">
<source xml:lang="en">{0} does not exist in Cluster or exception thrown. </source>
</trans-unit>
<trans-unit id="sparkJobSubmission_HDFSFileNotExisted">
<trans-unit id="sparkJobSubmission.HDFSFileNotExisted">
<source xml:lang="en">The specified HDFS file does not exist. </source>
</trans-unit>
<trans-unit id="sparkSelectLocalFile">
<source xml:lang="en">Select</source>
</trans-unit>
<trans-unit id="sparkJobSubmission_SelectFileError">
<trans-unit id="sparkJobSubmission.SelectFileError">
<source xml:lang="en">Error in locating the file due to Error: {0}</source>
</trans-unit>
</body></file>
<file original="extensions/mssql/dist/sparkFeature/dialog/sparkJobSubmission/sparkAdvancedTab" source-language="en" datatype="plaintext"><body>
<trans-unit id="sparkJobSubmission_AdvancedTabName">
<trans-unit id="sparkJobSubmission.AdvancedTabName">
<source xml:lang="en">ADVANCED</source>
</trans-unit>
<trans-unit id="sparkJobSubmission_ReferenceJarList">
<trans-unit id="sparkJobSubmission.ReferenceJarList">
<source xml:lang="en">Reference Jars</source>
</trans-unit>
<trans-unit id="sparkJobSubmission_ReferenceJarListToolTip">
<trans-unit id="sparkJobSubmission.ReferenceJarListToolTip">
<source xml:lang="en">Jars to be placed in executor working directory. The Jar path needs to be an HDFS Path. Multiple paths should be split by semicolon (;)</source>
</trans-unit>
<trans-unit id="sparkJobSubmission_ReferencePyList">
<trans-unit id="sparkJobSubmission.ReferencePyList">
<source xml:lang="en">Reference py Files</source>
</trans-unit>
<trans-unit id="sparkJobSubmission_ReferencePyListTooltip">
<trans-unit id="sparkJobSubmission.ReferencePyListTooltip">
<source xml:lang="en">Py Files to be placed in executor working directory. The file path needs to be an HDFS Path. Multiple paths should be split by semicolon(;)</source>
</trans-unit>
<trans-unit id="sparkJobSubmission_ReferenceFilesList">
<trans-unit id="sparkJobSubmission.ReferenceFilesList">
<source xml:lang="en">Reference Files</source>
</trans-unit>
<trans-unit id="sparkJobSubmission_ReferenceFilesListTooltip">
<trans-unit id="sparkJobSubmission.ReferenceFilesListTooltip">
<source xml:lang="en">Files to be placed in executor working directory. The file path needs to be an HDFS Path. Multiple paths should be split by semicolon(;)</source>
</trans-unit>
<trans-unit id="sparkJobSubmission.driverMemory">
<source xml:lang="en">Driver Memory</source>
</trans-unit>
<trans-unit id="sparkJobSubmission.driverMemoryTooltip">
<source xml:lang="en">Amount of memory to allocate to the driver. Specify units as part of value. Example 512M or 2G.</source>
</trans-unit>
<trans-unit id="sparkJobSubmission.driverCores">
<source xml:lang="en">Driver Cores</source>
</trans-unit>
<trans-unit id="sparkJobSubmission.driverCoresTooltip">
<source xml:lang="en">Amount of CPU cores to allocate to the driver.</source>
</trans-unit>
<trans-unit id="sparkJobSubmission.executorMemory">
<source xml:lang="en">Executor Memory</source>
</trans-unit>
<trans-unit id="sparkJobSubmission.executorMemoryTooltip">
<source xml:lang="en">Amount of memory to allocate to the executor. Specify units as part of value. Example 512M or 2G.</source>
</trans-unit>
<trans-unit id="sparkJobSubmission.executorCores">
<source xml:lang="en">Executor Cores</source>
</trans-unit>
<trans-unit id="sparkJobSubmission.executorCoresTooltip">
<source xml:lang="en">Amount of CPU cores to allocate to the executor.</source>
</trans-unit>
<trans-unit id="sparkJobSubmission.executorCount">
<source xml:lang="en">Executor Count</source>
</trans-unit>
<trans-unit id="sparkJobSubmission.executorCountTooltip">
<source xml:lang="en">Number of instances of the executor to run.</source>
</trans-unit>
<trans-unit id="sparkJobSubmission.queueName">
<source xml:lang="en">Queue Name</source>
</trans-unit>
<trans-unit id="sparkJobSubmission.queueNameTooltip">
<source xml:lang="en">Name of the Spark queue to execute the session in.</source>
</trans-unit>
<trans-unit id="sparkJobSubmission.configValues">
<source xml:lang="en">Configuration Values</source>
</trans-unit>
<trans-unit id="sparkJobSubmission.configValuesTooltip">
<source xml:lang="en">List of name value pairs containing Spark configuration values. Encoded as JSON dictionary. Example: '{"name":"value", "name2":"value2"}'.</source>
</trans-unit>
</body></file>
<file original="extensions/mssql/dist/objectExplorerNodeProvider/objectExplorerNodeProvider" source-language="en" datatype="plaintext"><body>
<trans-unit id="promptUsername">
<source xml:lang="en">Please provide the username to connect to HDFS:</source>
</trans-unit>
<trans-unit id="prmptPwd">
<source xml:lang="en">Please provide the password to connect to HDFS:</source>
</trans-unit>
@@ -901,5 +1117,22 @@
<trans-unit id="downloadServiceDoneChannelMsg">
<source xml:lang="en">Done installing {0}</source>
</trans-unit>
<trans-unit id="entryExtractedChannelMsg">
<source xml:lang="en">Extracted {0} ({1}/{2})</source>
</trans-unit>
</body></file>
</xliff>
<file original="extensions/mssql/dist/features" source-language="en" datatype="plaintext"><body>
<trans-unit id="mssql.missingLinkedAzureAccount">
<source xml:lang="en">Azure Data Studio needs to contact Azure Key Vault to access a column master key for Always Encrypted, but no linked Azure account is available. Please add a linked Azure account and retry the query.</source>
</trans-unit>
<trans-unit id="mssql.chooseLinkedAzureAccount">
<source xml:lang="en">Please select a linked Azure account:</source>
</trans-unit>
<trans-unit id="mssql.canceledLinkedAzureAccountSelection">
<source xml:lang="en">Azure Data Studio needs to contact Azure Key Vault to access a column master key for Always Encrypted, but no linked Azure account was selected. Please retry the query and select a linked Azure account when prompted.</source>
</trans-unit>
<trans-unit id="mssql.insufficientlyPrivelagedAzureAccount">
<source xml:lang="en">The configured Azure account for {0} does not have sufficient permissions for Azure Key Vault to access a column master key for Always Encrypted.</source>
</trans-unit>
</body></file>
</xliff>