{
	"": [
		"--------------------------------------------------------------------------------------------",
		"Copyright (c) Microsoft Corporation. All rights reserved.",
		"Licensed under the Source EULA. See License.txt in the project root for license information.",
		"--------------------------------------------------------------------------------------------",
		"Do not edit this file. It is machine generated."
	],
	"version": "1.0.0",
	"contents": {
		"package": {
			"json.schemas.desc": "将架构关联到当前项目中的 JSON 文件",
			"json.schemas.url.desc": "架构的 URL 或当前目录中架构的相对路径",
			"json.schemas.fileMatch.desc": "将 JSON 文件解析到架构时用于匹配的一组文件模式。",
			"json.schemas.fileMatch.item.desc": "将 JSON 文件解析到架构时用于匹配的可以包含 \"*\" 的文件模式。",
			"json.schemas.schema.desc": "给定 URL 的架构定义。仅当要避免访问架构 URL 时需要提供架构。",
			"json.format.enable.desc": "启用/禁用默认 JSON 格式化程序(需要重启)",
			"mssqlCluster.uploadFiles": "上传文件",
			"mssqlCluster.mkdir": "新建目录",
			"mssqlCluster.deleteFiles": "删除",
			"mssqlCluster.previewFile": "预览",
			"mssqlCluster.saveFile": "保存",
			"mssqlCluster.copyPath": "复制路径",
			"mssqlCluster.manageAccess": "管理访问权限",
			"notebook.command.new": "新建笔记本",
			"notebook.command.open": "打开笔记本",
			"tab.bigDataClusterDescription": "有关 SQL Server 大数据群集的任务和信息",
			"title.bigDataCluster": "SQL Server 大数据群集",
			"title.submitSparkJob": "提交 Spark 作业",
			"title.newSparkJob": "新建 Spark 作业",
			"title.openSparkHistory": "查看 Spark 历史记录",
			"title.openYarnHistory": "查看 Yarn 历史记录",
			"title.tasks": "任务",
			"title.installPackages": "安装包",
			"title.configurePython": "为笔记本配置 Python",
			"title.openClusterDashboard": "群集\r\n仪表板",
			"title.searchServers": "搜索: 服务器",
			"title.clearSearchServerResult": "搜索: 清除搜索服务器结果",
			"title.endpoints": "服务终结点",
			"title.books": "笔记本",
			"title.showLogFile": "显示日志文件",
			"mssql.disabled": "已禁用",
			"mssql.enabled": "已启用",
			"mssql.exportNotebookToSql": "将笔记本导出为 SQL",
			"mssql.exportSqlAsNotebook": "将 SQL 导出为笔记本",
			"mssql.configuration.title": "MSSQL 配置",
			"mssql.query.displayBitAsNumber": "是否应将 BIT 列显示为数字(1 或 0)? 若否,则 BIT 列将显示为 \"true\" 或 \"false\"",
			"mssql.query.maxXmlCharsToStore": "运行查询后要存储的 XML 字符数",
			"mssql.format.alignColumnDefinitionsInColumns": "列定义是否应对齐?",
			"mssql.format.datatypeCasing": "数据类型应格式化为大写、小写还是无(不格式化)",
			"mssql.format.keywordCasing": "关键字应格式化为大写、小写还是无(不格式化)",
			"mssql.format.placeCommasBeforeNextStatement": "是否应将逗号置于列表中每个语句的开头(例如 \", mycolumn2\")而不是结尾(例如 \"mycolumn1,\")",
			"mssql.format.placeSelectStatementReferencesOnNewLine": "select 语句中的对象引用是否应分到不同行中? 例如,在 \"SELECT C1, C2 FROM T1\" 中,C1 和 C2 是否将位于不同行",
			"mssql.logDebugInfo": "[可选] 将调试输出记录到控制台(“查看”->“输出”),然后从下拉列表中选择相应的输出通道",
			"mssql.tracingLevel": "[可选] 后端服务的日志级别。Azure Data Studio 在每次启动时都会生成文件名;如果文件已存在,日志条目将追加到该文件。有关旧日志文件的清理,请参阅 logRetentionMinutes 和 logFilesRemovalLimit 设置。默认 tracingLevel 记录的内容很少。更改详细级别可能提升日志的记录和磁盘空间要求。“错误”包含“严重”,“警告”包含“错误”,“信息”包含“警告”,“详细级别”包含“信息”",
			"mssql.logRetentionMinutes": "将后端服务的日志文件保留的时长(分钟数)。默认为 1 周。",
			"mssql.logFilesRemovalLimit": "启动时要删除的已超过 mssql.logRetentionMinutes 的旧文件的最大数量。将在下次启动 Azure Data Studio 时清理由于此限制而未清理的文件。",
			"mssql.intelliSense.enableIntelliSense": "是否应启用 IntelliSense",
			"mssql.intelliSense.enableErrorChecking": "是否应启用 IntelliSense 错误检查",
			"mssql.intelliSense.enableSuggestions": "是否应启用 IntelliSense 建议",
			"mssql.intelliSense.enableQuickInfo": "是否应启用 IntelliSense 快速信息",
			"mssql.intelliSense.lowerCaseSuggestions": "是否小写显示 IntelliSense 建议",
			"mssql.query.setRowCount": "在服务器停止处理查询之前要返回的最大行数。",
			"mssql.query.textSize": "从 SELECT 语句返回的 text 和 ntext 数据的大小上限",
			"mssql.query.executionTimeout": "如果执行超时为 0,则表示无限等待(不会超时)",
			"mssql.query.noCount": "启用 SET NOCOUNT 选项",
			"mssql.query.noExec": "启用 SET NOEXEC 选项",
			"mssql.query.parseOnly": "启用 SET PARSEONLY 选项",
			"mssql.query.arithAbort": "启用 SET ARITHABORT 选项",
			"mssql.query.statisticsTime": "启用 SET STATISTICS TIME 选项",
			"mssql.query.statisticsIO": "启用 SET STATISTICS IO 选项",
			"mssql.query.xactAbortOn": "启用 SET XACT_ABORT ON 选项",
			"mssql.query.transactionIsolationLevel": "启用 SET TRANSACTION ISOLATION LEVEL 选项",
			"mssql.query.deadlockPriority": "启用 SET DEADLOCK_PRIORITY 选项",
			"mssql.query.lockTimeout": "启用 SET LOCK TIMEOUT 选项(毫秒)",
			"mssql.query.queryGovernorCostLimit": "启用 SET QUERY_GOVERNOR_COST_LIMIT",
			"mssql.query.ansiDefaults": "启用 SET ANSI_DEFAULTS",
			"mssql.query.quotedIdentifier": "启用 SET QUOTED_IDENTIFIER",
			"mssql.query.ansiNullDefaultOn": "启用 SET ANSI_NULL_DFLT_ON",
			"mssql.query.implicitTransactions": "启用 SET IMPLICIT_TRANSACTIONS",
			"mssql.query.cursorCloseOnCommit": "启用 SET CURSOR_CLOSE_ON_COMMIT",
			"mssql.query.ansiPadding": "启用 SET ANSI_PADDING",
			"mssql.query.ansiWarnings": "启用 SET ANSI_WARNINGS",
			"mssql.query.ansiNulls": "启用 SET ANSI_NULLS",
			"mssql.query.alwaysEncryptedParameterization": "启用 Always Encrypted 参数化",
			"mssql.ignorePlatformWarning": "[可选] 不显示不受支持的平台警告",
			"onprem.databaseProperties.recoveryModel": "恢复模式",
			"onprem.databaseProperties.lastBackupDate": "上次数据库备份",
			"onprem.databaseProperties.lastLogBackupDate": "上次日志备份",
			"onprem.databaseProperties.compatibilityLevel": "兼容级别",
			"onprem.databaseProperties.owner": "所有者",
			"onprem.serverProperties.serverVersion": "版本",
			"onprem.serverProperties.serverEdition": "版本",
			"onprem.serverProperties.machineName": "计算机名",
			"onprem.serverProperties.osVersion": "OS 版本",
			"cloud.databaseProperties.azureEdition": "版本",
			"cloud.databaseProperties.serviceLevelObjective": "定价层",
			"cloud.databaseProperties.compatibilityLevel": "兼容级别",
			"cloud.databaseProperties.owner": "所有者",
			"cloud.serverProperties.serverVersion": "版本",
			"cloud.serverProperties.serverEdition": "类型",
			"mssql.provider.displayName": "Microsoft SQL Server",
			"mssql.connectionOptions.connectionName.displayName": "名称(可选)",
			"mssql.connectionOptions.connectionName.description": "连接的自定义名称",
			"mssql.connectionOptions.serverName.displayName": "服务器",
			"mssql.connectionOptions.serverName.description": "SQL Server 实例的名称",
			"mssql.connectionOptions.databaseName.displayName": "数据库",
			"mssql.connectionOptions.databaseName.description": "数据源中初始目录或数据库的名称",
			"mssql.connectionOptions.authType.displayName": "身份验证类型",
			"mssql.connectionOptions.authType.description": "指定使用 SQL Server 进行身份验证的方法",
			"mssql.connectionOptions.authType.categoryValues.sqlLogin": "SQL 登录",
			"mssql.connectionOptions.authType.categoryValues.integrated": "Windows 身份验证",
			"mssql.connectionOptions.authType.categoryValues.azureMFA": "Azure Active Directory - 支持 MFA 的通用目录",
			"mssql.connectionOptions.userName.displayName": "用户名",
			"mssql.connectionOptions.userName.description": "指示连接到数据源时使用的用户 ID",
			"mssql.connectionOptions.password.displayName": "密码",
			"mssql.connectionOptions.password.description": "指示连接到数据源时使用的密码",
			"mssql.connectionOptions.applicationIntent.displayName": "应用意图",
			"mssql.connectionOptions.applicationIntent.description": "声明连接到服务器时的应用程序工作负荷类型",
			"mssql.connectionOptions.asynchronousProcessing.displayName": "异步处理",
			"mssql.connectionOptions.asynchronousProcessing.description": "如果为 true,则允许在 .Net Framework 数据提供程序中使用异步功能",
			"mssql.connectionOptions.connectTimeout.displayName": "连接超时",
			"mssql.connectionOptions.connectTimeout.description": "在终止尝试并生成错误之前等待连接到服务器的时长(秒)",
			"mssql.connectionOptions.currentLanguage.displayName": "当前语言",
			"mssql.connectionOptions.currentLanguage.description": "SQL Server 语言记录名称",
			"mssql.connectionOptions.columnEncryptionSetting.displayName": "Always Encrypted",
			"mssql.connectionOptions.columnEncryptionSetting.description": "启用或禁用连接的 Always Encrypted 功能",
			"mssql.connectionOptions.enclaveAttestationProtocol.displayName": "证明协议",
			"mssql.connectionOptions.enclaveAttestationProtocol.description": "指定用于证明与具有安全 enclave 的 Always Encrypted 结合使用的服务器端 enclave 的协议",
			"mssql.connectionOptions.enclaveAttestationProtocol.categoryValues.AAS": "Azure 证明",
			"mssql.connectionOptions.enclaveAttestationProtocol.categoryValues.HGS": "主机保护者服务",
			"mssql.connectionOptions.enclaveAttestationUrl.displayName": "Enclave 证明 URL",
			"mssql.connectionOptions.enclaveAttestationUrl.description": "指定用于证明与具有安全 enclave 的 Always Encrypted 结合使用的服务器端 enclave 的终结点",
			"mssql.connectionOptions.encrypt.displayName": "加密",
			"mssql.connectionOptions.encrypt.description": "当为 true 时,如果服务器安装了证书,则 SQL Server 对客户端与服务器之间发送的所有数据使用 SSL 加密",
			"mssql.connectionOptions.persistSecurityInfo.displayName": "持久安全信息",
			"mssql.connectionOptions.persistSecurityInfo.description": "为 false 时,安全敏感信息(如密码)不作为连接的一部分返回",
			"mssql.connectionOptions.trustServerCertificate.displayName": "信任服务器证书",
			"mssql.connectionOptions.trustServerCertificate.description": "为 true(且 encrypt=true)时,SQL Server 对客户端与服务器之间发送的所有数据使用 SSL 加密,而无需验证服务器证书",
			"mssql.connectionOptions.attachedDBFileName.displayName": "附加的 DB 文件名",
			"mssql.connectionOptions.attachedDBFileName.description": "可附加数据库的主文件的名称(包括完整路径名称)",
			"mssql.connectionOptions.contextConnection.displayName": "上下文连接",
			"mssql.connectionOptions.contextConnection.description": "为 true 时,指示连接应来自 SQL Server 上下文。仅在 SQL Server 进程中运行时可用",
			"mssql.connectionOptions.port.displayName": "端口",
			"mssql.connectionOptions.connectRetryCount.displayName": "连接重试次数",
			"mssql.connectionOptions.connectRetryCount.description": "尝试还原连接的次数",
			"mssql.connectionOptions.connectRetryInterval.displayName": "连接重试间隔",
			"mssql.connectionOptions.connectRetryInterval.description": "两次尝试恢复连接之间的延迟",
			"mssql.connectionOptions.applicationName.displayName": "应用程序名称",
			"mssql.connectionOptions.applicationName.description": "应用程序的名称",
			"mssql.connectionOptions.workstationId.displayName": "工作站 ID",
			"mssql.connectionOptions.workstationId.description": "连接到 SQL Server 的工作站的名称",
			"mssql.connectionOptions.pooling.displayName": "池",
			"mssql.connectionOptions.pooling.description": "为 true 时,从相应的池中取出连接对象,或根据需要创建连接对象并将其添加到相应的池中",
			"mssql.connectionOptions.maxPoolSize.displayName": "最大池大小",
			"mssql.connectionOptions.maxPoolSize.description": "池中允许的最大连接数",
			"mssql.connectionOptions.minPoolSize.displayName": "最小池大小",
			"mssql.connectionOptions.minPoolSize.description": "池中允许的最小连接数",
			"mssql.connectionOptions.loadBalanceTimeout.displayName": "负载均衡超时",
			"mssql.connectionOptions.loadBalanceTimeout.description": "此连接在销毁前在池中生存的最短时间(秒)",
			"mssql.connectionOptions.replication.displayName": "副本",
			"mssql.connectionOptions.replication.description": "由 SQL Server 在复制中使用",
			"mssql.connectionOptions.attachDbFilename.displayName": "附加 DB 文件名",
			"mssql.connectionOptions.failoverPartner.displayName": "故障转移伙伴",
			"mssql.connectionOptions.failoverPartner.description": "充当故障转移伙伴的 SQL Server 实例的名称或网络地址",
			"mssql.connectionOptions.multiSubnetFailover.displayName": "多子网故障转移",
			"mssql.connectionOptions.multipleActiveResultSets.displayName": "多重活动结果集",
			"mssql.connectionOptions.multipleActiveResultSets.description": "为 true 时,可从一个连接返回和读取多个结果集",
			"mssql.connectionOptions.packetSize.displayName": "数据包大小",
			"mssql.connectionOptions.packetSize.description": "用于与 SQL Server 实例通信的网络数据包的大小(字节数)",
			"mssql.connectionOptions.typeSystemVersion.displayName": "类型系统版本",
			"mssql.connectionOptions.typeSystemVersion.description": "指示提供程序将通过 DataReader 公开的服务器类型系统",
			"databasesListProperties.name": "名称",
			"databasesListProperties.status": "状态",
			"databasesListProperties.size": "大小(MB)",
			"databasesListProperties.lastBackup": "上次备份",
			"objectsListProperties.name": "名称"
		},
		"dist/localizedConstants": {
			"msgMissingNodeContext": "调用了节点命令但未传递任何节点",
			"mssql.manageAccessTitle": "管理访问权限",
			"mssql.locationTitle": "位置:",
			"mssql.permissionsTitle": "权限",
			"mssql.ownerPostfix": " - 所有者",
			"mssql.owner": "所有者",
			"mssql.group": "组",
			"mssql.owningGroupPostfix": " - 责任组",
			"mssql.everyone": "其他所有人",
			"mssql.userLabel": "用户",
			"mssql.groupLabel": "组",
			"mssql.accessHeader": "访问",
			"mssql.defaultHeader": "默认",
			"mssql.delete": "删除",
			"mssql.stickyHeader": "粘滞位",
			"mssql.inheritDefaultsLabel": "继承默认值",
			"mssql.readHeader": "读取",
			"mssql.writeHeader": "写入",
			"mssql.executeHeader": "执行",
			"mssql.addUserOrGroup": "添加用户或组",
			"mssql.enterNamePlaceholder": "输入名称",
			"mssql.addLabel": "添加",
			"mssql.namedUsersAndGroups": "命名用户和组",
			"mssql.defaultUserAndGroups": "默认用户和组",
			"mssql.userOrGroupIcon": "用户或组图标",
			"mssql.apply": "应用",
			"mssql.applyRecursively": "递归应用",
			"mssql.errorApplyingAclChanges": "应用更改时发生意外错误: {0}",
			"sparkJobSubmission.LocalFileDestinationHint": "本地文件将上传到 HDFS。",
			"sparkJobSubmission.SubmissionEndMessage": "…………………….. 提交 Spark 作业结束 ……………………….",
			"sparkJobSubmission.PrepareUploadingFile": "正在将文件从本地 {0} 上传到 HDFS 文件夹: {1}",
			"sparkJobSubmission.UploadingFileSucceeded": "已将文件成功上传到群集!",
			"sparkJobSubmission.UploadingFileFailed": "将文件上传到群集失败。{0}",
			"sparkJobSubmission.PrepareSubmitJob": "正在提交作业 {0}…",
			"sparkJobSubmission.SubmitJobFinished": "已提交 Spark 作业。",
			"sparkJobSubmission.SubmitJobFailed": "Spark 作业提交失败。{0}",
			"sparkJobSubmission.YarnUIMessage": "YarnUI URL: {0}",
			"sparkJobSubmission.SparkHistoryLinkMessage": "Spark 历史记录 URL: {0}",
			"sparkJobSubmission.GetApplicationIdFailed": "获取应用程序 ID 失败。{0}",
			"sparkJobSubmission.LocalFileNotExisted": "本地文件 {0} 不存在。",
			"sparkJobSubmission.NoSqlBigDataClusterFound": "未找到 SQL Server 大数据群集。",
			"sparkConnectionRequired": "请先连接到 Spark 群集,然后才能查看 {0} 历史记录。"
		},
		"dist/objectExplorerNodeProvider/fileSources": {
			"maxSizeNotice": "注意: 此文件已在 {0} 处截断以供预览。",
			"maxSizeReached": "文件已在 {0} 处截断以供预览。"
		},
		"dist/objectExplorerNodeProvider/command": {
			"progress": "$(sync~spin) {0}…",
			"cancelTooltip": "取消",
			"cancel": "是否取消操作?",
			"mssql.searchServers": "搜索服务器名称"
		},
		"dist/sparkFeature/dialog/sparkJobSubmission/sparkJobSubmissionService": {
			"sparkJobSubmission.LivyNoBatchIdReturned": "未从响应中返回 Spark 作业批次 ID。{0}[错误] {1}",
			"sparkJobSubmission.LivyNoLogReturned": "未从响应中返回日志。{0}[错误] {1}"
		},
		"dist/sqlClusterLookUp": {
			"promptBDCUsername": "{0}请提供用户名以连接到 BDC 控制器:",
			"promptBDCPassword": "请提供密码以连接到 BDC 控制器",
			"bdcConnectError": "错误: {0}。",
			"usernameAndPasswordRequired": "用户名和密码是必填项"
		},
		"dist/objectExplorerNodeProvider/hdfsCommands": {
			"allFiles": "所有文件",
			"lblUploadFiles": "上传",
			"uploading": "正在将文件上传到 HDFS",
			"uploadCanceled": "上传操作已取消",
			"uploadError": "上传文件时出错: {0}",
			"makingDir": "正在创建目录",
			"mkdirCanceled": "操作已取消",
			"mkDirError": "创建目录时出错: {0}",
			"enterDirName": "输入目录名称",
			"deleteError": "删除文件时出错: {0}",
			"msgDeleteFolder": "确定要删除此文件夹及其内容吗?",
			"msgDeleteFile": "确定要删除此文件吗?",
			"saving": "正在保存 HDFS 文件",
			"saveCanceled": "保存操作已取消",
			"saveError": "保存文件时出错: {0}",
			"previewing": "正在生成预览",
			"previewError": "预览文件时出错: {0}",
			"copyPathError": "复制路径时出错: {0}",
			"manageAccessError": "打开“管理访问权限”对话框时发生意外错误: {0}"
		},
		"dist/hdfs/webhdfs": {
			"webhdfs.invalidDataStructure": "数据结构无效",
			"webhdfs.missingProperties": "由于缺少选项,无法创建 WebHDFS 客户端: ${0}",
			"webhdfs.undefinedArgument": "“${0}”未定义。",
			"webhdfs.httpError400": "错误的请求",
			"webhdfs.httpError401": "未经授权",
			"webhdfs.httpError403": "已禁止",
			"webhdfs.httpError404": "未找到",
			"webhdfs.httpError500": "内部服务器错误",
			"webhdfs.unknownError": "未知错误",
			"webhdfs.unexpectedRedirect": "意外的重定向"
		},
		"dist/objectExplorerNodeProvider/connection": {
			"connectionInfoUndefined": "未定义 ConnectionInfo。",
			"connectionInfoOptionsUndefined": "未定义 ConnectionInfo.options",
			"connectionInfoOptionsMissingProperties": "connectionInfo.options 中缺少一些属性: {0}"
		},
		"dist/telemetry": {
			"viewKnownIssuesText": "查看已知问题",
			"serviceCrashMessage": "{0} 个组件意外退出。请重启 Azure Data Studio。"
		},
		"dist/main": {
			"msgSampleCodeDataFrame": "此示例代码将文件加载到数据帧中,并显示前 10 个结果。",
			"mssql.errorConvertingToNotebook": "将 SQL 文档转换为笔记本时出错。错误: {0}",
			"mssql.errorConvertingToSQL": "将笔记本文档转换为 SQL 时出错。错误: {0}",
			"notebookFileType": "笔记本",
			"unsupportedFileType": "仅支持 .ipynb 笔记本",
			"noController": "找不到此实例的控制器终结点"
		},
		"dist/hdfs/hdfsModel": {
			"mssql.recursivePermissionOpStarted": "在“{0}”下以递归方式应用权限更改",
			"mssql.recursivePermissionOpSucceeded": "已成功应用权限更改。",
			"mssql.recursivePermissionOpProgress": "正在将权限更改应用于“{0}”。",
			"mssql.recursivePermissionOpError": "应用权限更改时出错: {0}"
		},
		"dist/prompts/confirm": {
			"msgYes": "是",
			"msgNo": "否"
		},
		"dist/sparkFeature/dialog/dialogCommands": {
			"selectOtherServer": "选择其他 SQL Server",
			"sparkJobSubmission.PleaseSelectSqlWithCluster": "请选择具有大数据群集的 SQL Server。",
			"sparkJobSubmission.NoSqlSelected": "未选择 SQL Server。",
			"errorNotSqlBigDataCluster": "所选服务器不属于 SQL Server 大数据群集",
			"sparkJobSubmission.GetFilePathFromSelectedNodeFailed": "获取文件路径时出错: {0}"
		},
		"dist/sparkFeature/dialog/sparkJobSubmission/sparkJobSubmissionDialog": {
			"sparkJobSubmission.SparkJobSubmissionDialogInitializeError": "不允许为 SparkJobSubmissionDialog 添加参数",
			"sparkJobSubmission.DialogTitleNewJob": "新建作业",
			"sparkJobSubmission.DialogCancelButton": "取消",
			"sparkJobSubmission.DialogSubmitButton": "提交",
			"sparkJobSubmission.SubmitSparkJob": "{0} Spark 作业提交:",
			"sparkJobSubmission.SubmissionStartMessage": "…………………….. 提交 Spark 作业开始 …………………….."
		},
		"dist/sparkFeature/dialog/sparkJobSubmission/sparkJobSubmissionModel": {
			"sparkJobSubmission.SparkJobSubmissionModelInitializeError": "不允许为 SparkJobSubmissionModel 添加参数",
			"sparkJobSubmission.submissionArgsIsInvalid": "submissionArgs 无效。",
			"sparkJobSubmission.LivyBatchIdIsInvalid": "livyBatchId 无效。",
			"sparkJobSubmission.GetApplicationIdTimeOut": "获取应用程序 ID 超时。{0}[日志] {1}",
			"sparkJobSubmission.localFileOrFolderNotSpecified.": "未指定 localFilePath 或 hdfsFolderPath 属性。",
			"sparkJobSubmission.PathNotSpecified.": "未指定 Path 属性。"
		},
		"dist/sparkFeature/dialog/sparkJobSubmission/sparkConfigurationTab": {
			"sparkJobSubmission.GeneralTabName": "常规",
			"sparkJobSubmission.JobNamePlaceHolder": "输入名称…",
			"sparkJobSubmission.JobName": "作业名称",
			"sparkJobSubmission.SparkCluster": "Spark 群集",
			"sparkJobSubmission.FilePathPlaceHolder": ".jar 或 .py 文件的路径",
			"sparkJobSubmission.LocalFileDestinationHintWithPath": "所选本地文件将上传到 HDFS: {0}",
			"sparkJobSubmission.MainFilePath": "JAR/py 文件",
			"sparkJobSubmission.MainClass": "主类",
			"sparkJobSubmission.Arguments": "参数",
			"sparkJobSubmission.ArgumentsTooltip": "在主类中使用的命令行参数,多个参数应用空格隔开。",
			"sparkJobSubmission.NotSpecifyJobName": "未指定属性作业名称。",
			"sparkJobSubmission.NotSpecifyJARPYPath": "未指定属性 JAR/py 文件。",
			"sparkJobSubmission.NotSpecifyMainClass": "未指定属性主类。",
			"sparkJobSubmission.HDFSFileNotExistedWithPath": "群集中不存在 {0} 或引发了异常。",
			"sparkJobSubmission.HDFSFileNotExisted": "指定的 HDFS 文件不存在。",
			"sparkSelectLocalFile": "选择",
			"sparkJobSubmission.SelectFileError": "由于错误导致查找文件时出错: {0}"
		},
		"dist/sparkFeature/dialog/sparkJobSubmission/sparkAdvancedTab": {
			"sparkJobSubmission.AdvancedTabName": "高级",
			"sparkJobSubmission.ReferenceJarList": "引用 Jar",
			"sparkJobSubmission.ReferenceJarListToolTip": "要放置在执行程序工作目录中的 Jar。Jar 路径必须是 HDFS 路径。多个路径应用分号(;)隔开",
			"sparkJobSubmission.ReferencePyList": "引用 py 文件",
			"sparkJobSubmission.ReferencePyListTooltip": "要放置在执行程序工作目录中的 Py 文件。文件路径必须是 HDFS 路径。多个路径应按分号(;)隔开",
			"sparkJobSubmission.ReferenceFilesList": "引用文件",
			"sparkJobSubmission.ReferenceFilesListTooltip": "要放置在执行程序工作目录中的文件。文件路径必须是 HDFS 路径。多个路径应按分号(;)隔开",
			"sparkJobSubmission.driverMemory": "驱动程序内存",
			"sparkJobSubmission.driverMemoryTooltip": "要分配给驱动程序的内存量。指定单位作为值的一部分。示例: 512M 或 2G。",
			"sparkJobSubmission.driverCores": "驱动程序核心数",
			"sparkJobSubmission.driverCoresTooltip": "要分配给驱动程序的 CPU 核心数。",
			"sparkJobSubmission.executorMemory": "执行程序内存",
			"sparkJobSubmission.executorMemoryTooltip": "要分配给执行程序的内存量。指定单位作为值的一部分。示例: 512M 或 2G。",
			"sparkJobSubmission.executorCores": "执行程序核心数",
			"sparkJobSubmission.executorCoresTooltip": "要分配给执行程序的 CPU 核心数。",
			"sparkJobSubmission.executorCount": "执行程序计数",
			"sparkJobSubmission.executorCountTooltip": "要运行的执行程序实例数。",
			"sparkJobSubmission.queueName": "队列名称",
			"sparkJobSubmission.queueNameTooltip": "要在其中执行会话的 Spark 队列的名称。",
			"sparkJobSubmission.configValues": "配置值",
			"sparkJobSubmission.configValuesTooltip": "包含 Spark 配置值的名称值对列表。编码为 JSON 字典。示例: \"{\"name\":\"value\", \"name2\":\"value2\"}\"。"
		},
		"dist/objectExplorerNodeProvider/objectExplorerNodeProvider": {
			"promptUsername": "请提供用户名以连接到 HDFS:",
			"prmptPwd": "请提供密码以连接到 HDFS:",
			"sessionNotFound": "节点 {0} 的会话不存在",
			"notifyError": "通知节点更改时出错: {0}",
			"hdfsFolder": "HDFS",
			"rootLabel": "根"
		},
		"dist/objectExplorerNodeProvider/hdfsProvider": {
			"errorExpanding": "错误: {0}",
			"errDeleteConnectionNode": "无法删除连接。只能删除子文件夹和文件。"
		},
		"dist/objectExplorerNodeProvider/cancelableStream": {
			"streamCanceled": "用户已取消流操作"
		},
		"dist/dashboard/serviceEndpoints": {
			"grafana": "指标仪表板",
			"kibana": "日志搜索仪表板",
			"sparkHistory": "Spark 作业管理和监视仪表板",
			"yarnHistory": "Spark 诊断和监视仪表板",
			"copyText": "复制",
			"endpoint.appproxy": "应用程序代理",
			"endpoint.controller": "群集管理服务",
			"endpoint.gateway": "访问 HDFS 文件的网关,Spark",
			"endpoint.managementproxy": "管理代理",
			"endpoint.mgmtproxy": "管理代理",
			"endpoint.sqlServerEndpoint": "SQL Server 主实例前端",
			"endpoint.grafana": "指标仪表板",
			"endpoint.kibana": "日志搜索仪表板",
			"endpoint.yarnHistory": "Spark 诊断和监视仪表板",
			"endpoint.sparkHistory": "Spark 作业管理和监视仪表板",
			"endpoint.webhdfs": "HDFS 文件系统代理",
			"endpoint.livy": "用于运行 Spark 语句、作业和应用程序的代理"
		},
		"dist/sqlToolsServer": {
			"serviceStartedStatusMsg": "{0} 已启动",
			"startingServiceStatusMsg": "正在启动 {0}",
			"failedToStartServiceErrorMsg": "未能启动 {0}",
			"installingServiceChannelMsg": "正在将 {0} 安装到 {1}",
			"installingServiceStatusMsg": "正在安装 {0}",
			"installedServiceChannelMsg": "安装于 {0}",
			"downloadingServiceChannelMsg": "正在下载 {0}",
			"downloadingServiceSizeChannelMsg": "({0} KB)",
			"downloadingServiceStatusMsg": "正在下载 {0}",
			"downloadServiceDoneChannelMsg": "已安装 {0}",
			"entryExtractedChannelMsg": "已提取 {0} ({1}/{2})"
		},
		"dist/features": {
			"mssql.missingLinkedAzureAccount": "Azure Data Studio 需要连接 Azure Key Vault 才能访问 Always Encrypted 的列主密钥,但没有可用的链接 Azure 帐户。请添加链接的 Azure 帐户,然后重试查询。",
			"mssql.chooseLinkedAzureAccount": "请选择链接的 Azure 帐户:",
			"mssql.canceledLinkedAzureAccountSelection": "Azure Data Studio 需要连接 Azure Key Vault 才能访问 Always Encrypted 的列主密钥,但未选择任何链接的 Azure 帐户。请重试查询,并在出现提示时选择链接的 Azure 帐户。",
			"mssql.insufficientlyPrivelagedAzureAccount": "为 {0} 配置的 Azure 帐户没有足够的权限让 Azure Key Vault 访问 Always Encrypted 的列主密钥。"
		}
	}
}