mirror of
https://github.com/ckaczor/azuredatastudio.git
synced 2026-02-16 18:46:40 -05:00
Merge vscode source through release 1.79.2 (#23482)
* log when an editor action doesn't run because of enablement * notebooks create/dispose editors. this means controllers must be created eagerly (😢) and that notebooks need a custom way of plugging comparision keys for session. works unless creating another session for the same cell of a duplicated editor * Set offSide to sql lang configuration to true (#183461) * Fixes #181764 (#183550) * fix typo * Always scroll down and focus the input (#183557) * Fixes #180386 (#183561) * cli: ensure ordering of rpc server messages (#183558) * cli: ensure ordering of rpc server messages Sending lots of messages to a stream would block them around the async tokio mutex, which is "fair" so doesn't preserve ordering. Instead, use the write_loop approach I introduced to the server_multiplexer for the same reason some time ago. * fix clippy * update for May endgame * testing: allow invalidateTestResults to take an array (#183569) * Document `ShareProvider` API proposal (#183568) * Document `ShareProvider` API proposal * Remove mention of VS Code from JSDoc * Add support for rendering svg and md in welcome message (#183580) * Remove toggle setting more eagerly (#183584) * rm message abt macOS * Change text (#183589) * Change text * Accidentally changed the wrong file * cli: improve output for code tunnel status (#183571) * testing: allow invalidateTestResults to take an array * cli: improve output for code tunnel status Fixes #183570 * [json/css/html] update services (#183595) * Add experimental setting to enable this dialog * Fix exporting chat model to JSON before it is initialized (#183597) * minimum scrolling to reveal the next cell on shift+enter (#183600) do minimum scrolling to reveal the next cell on Execute cell and select next * Fixing Jupyter notebook issue 13263 (#183527) fix for the issue, still need to understand why there is strange focusing * Tweak proposed API JSDoc (#183590) * Tweak proposed API JSDoc * workbench -> workspace * fix ? 
operator * Use active editor and show progress when sharing (#183603) Use active editor and show progress * use scroll setting variable correctly * Schedule welcome widget to show once between typing. (#183606) * Schedule dialog to show once between typing * Don't re-render if already displayed once * Add F10 keybinding for debugger step, even on Web. (#183510) Fixes #181792. Previously, for Web the keyboard shortcut was Alt-F10, because it was believed that F10 could not be bound on browsers. This turned out to be incorrect, so we make the shortcut consistent (F10) with desktop VSCode which is also what many other debuggers use. We keep Alt-F10 on web as a secondary keybinding to keep the experience some web users may have gotten used to by now. * Also pass process.env * Restore missing chat clear commands (#183651) * chore: update electron@22.5.4 (#183716) * Show remote indicator in web when remoteAuthority is set (#183728) * feat: .vuerc as json file (#153017) Co-authored-by: Martin Aeschlimann <martinae@microsoft.com> * Delete --compatibility=1.63 code from the server (#183738) * Copy vscode.dev link to tunnel generates an invalid link when an untitled workspace is open (#183739) * Recent devcontainer display string corrupted on Get Started page (#183740) * Improve "next codeblock" navigation (#183744) * Improve "next codeblock" navigation Operate on the current focused response, or the last one, and scroll to the selected item * Normalize command title * Git - run git status if similarityThreshold changes (#183762) * fix aria-label issue in kb editor fixes A11y_GradeB_VSCode_Keyboard shortcut reads words together - Blind: Arrow key navigation to row Find the binding keys and "when" cell data are read together resulting in a word " CTRL + FeditorFocus instead of CTRL + F editorFocus" #182490 * Status - fix compact padding (#183768) * Remove angle brackets from VB brackets (#183782) Fixes #183359 * Update language config schema with more details about brackets. 
(#183779) * fix comment (#183812) * Support for `Notebook` CodeAction Kind (#183457) * nb kind support -- wip * allow notebook codeactions around single cell edit check * move notebook code action type out of editor --------- Co-authored-by: rebornix <penn.lv@gmail.com> * cli: fix connection default being applied (#183827) * cli: bump to openssl 1.1.1u (#183828) * Implement "delete" action for chat history (#183609) * Use desired file name when generating new md pasted file paths (#183861) Fixes #183851 * Default to filename for markdown new file if empty (#183864) Fixes #183848 * Fix small typo (#183865) Fixes #183819 * Noop when moving a symbol into the file it is already in (#183866) Fixes #183793 * Adjust codeAction validation to account for notebook kind (#183859) * Make JS/TS `go to configuration` commands work on non-`file:` file systems (#183688) Make `go to project` commands work on non-`file:` file systems Fixes #183685 * Can't do regex search after opening notebook (#183884) Fixes #183858 * Default to current dir for `move to file` select (#183875) Fixes #183870 `showOpenDialog` seems to ignore `defaultUri` if the file doesn't exist * Use `<...>` style markdown links when needed (#183876) Fixes #183849 * Remove check for context keys * Update xterm package * Enable updating a chat model without triggering incremental typing (#183894) * Enable chat "move" commands on empty sessions (#183895) * Enable chat "move" commands on empty sessions and also imported sessions * Fix command name * Fix some chat keybindings on windows (#183896) * "Revert File" on inactive editors are ignored (fix #177557) (#183903) * Empty reason while switching profile (fix #183775) (#183904) * fix https://github.com/microsoft/vscode-internalbacklog/issues/4278 (#183910) * fix https://github.com/microsoft/vscode/issues/183770 (#183914) * code --status displays a lot of errors before actual status output (fix #183787) (#183915) * joh/icy manatee (#183917) * Use idle value for widget 
of interactive editor controller https://github.com/microsoft/vscode/issues/183820 * also make preview editors idle values https://github.com/microsoft/vscode/issues/183820 * Fix #183777 (#183929) * Fix #182309 (#183925) * Tree checkbox item -> items (#183931) Fixes #183826 * Fixes #183909 (#183940) * Fix #183837 (#183943) fix #183837 * Git - fix #183941 (#183944) * Update xterm.css Fixes #181242 * chore: add @ulugbekna and @aiday-mar to my-endgame notebook (#183946) * Revert "When snippet mode is active, make `Tab` not accept suggestion but advance placeholder" This reverts commit 50a80cdb61511343996ff1d41d0b676c3d329f48. * revert not focusing completion list when quick suggest happens during snippet * change `snippetsPreventQuickSuggestions` default to false * Fix #181446 (#183956) * fix https://github.com/microsoft/vscode-internalbacklog/issues/4298 (#183957) * fix: remove extraneous incorrect context keys (#183959) These were actually getting added in getTestItemContextOverlay, and the test ID was using the extended ID which extensions do not know about. Fixes #183612 * Fixes https://github.com/microsoft/monaco-editor/issues/3920 (#183960) * fix https://github.com/microsoft/vscode-internalbacklog/issues/4324 (#183961) * fix #183030 * fix #180826 (#183962) * make message more generic for interactive editor help * . * fix #183968 * Keep codeblock toolbar visible when focused * Fix when clause on "Run in terminal" command * add important info to help menu * fix #183970 * Set `isRefactoring` for all TS refactoring edits (#183982) * consolidate * Disable move to file in TS versions < 5.2 (#183992) There are still a few key bugs with refactoring. We will ship this as a preview for TS 5.2+ instead of for 5.1 * Polish query accepting (#183995) We shouldn't send the same request to Copilot if the query hasn't changed. So if the query is the same, we short circut. 
Fixes https://github.com/microsoft/vscode-internalbacklog/issues/4286 Also, when we open in chat, we should use the last accepted query, not what's in the input box. Fixes https://github.com/microsoft/vscode-internalbacklog/issues/4280 * Allow widget to have focus (#184000) So that selecting non-code text works. Fixes https://github.com/microsoft/vscode-internalbacklog/issues/4294 * Fix microsoft/vscode-internalbacklog#4257. Mitigate zindex for zone widgets. (#184001) * Change welcome dialog contribution to Eventually * Misc fixes * Workspace folder picker entry descriptions are suboptimal for some filesystems (fix #183418) (#184018) * cli - ignore std error unless verbose (#183787) (#184031) * joh/inquisitive meerkat (#184034) * only stash sessions that are none empty https://github.com/microsoft/vscode-internalbacklog/issues/4281 * only unstash a session once - unless new exchanges are made, https://github.com/microsoft/vscode-internalbacklog/issues/4281 * account for all exchange types * Improve declared components (#184039) * make sure to read setting (#184040) d'oh, related to https://github.com/microsoft/vscode/issues/173387#issuecomment-1571696644 * [html] update service (#184049) [html] update service. FIxes #181176 * reset context keys on reset/hide (#184042) fixes https://github.com/microsoft/vscode-internalbacklog/issues/4330 * use `Lazy`, not `IdleValue` for the IE widget held by the eager controller (#184048) https://github.com/microsoft/vscode/issues/183820 * fix https://github.com/microsoft/vscode-internalbacklog/issues/4333 (#184067) * use undo-loop instead of undo-edit when discarding chat session (#184063) * use undo-loop instead of undo-edit when discarding chat session fixes https://github.com/microsoft/vscode-internalbacklog/issues/4118 * fix tests, wait for correct state * Add logging to node download (#184070) Add logging to node download. 
For #182951 * re-enable default zone widget revealing when showing (#184072) fixes https://github.com/microsoft/vscode-internalbacklog/issues/4332, also fixes https://github.com/microsoft/vscode-internalbacklog/issues/3784 * fix #178202 * Allow APIs in stable (#184062) * Fix microsoft/vscode-internalbacklog#4206. Override List view whitespace css for monaco editor (#184087) * Fix JSDoc grammatical error (#184090) * Pick up TS 5.1.3 (#184091) Fixes #182931 * Misc fixes * update distro (#184097) * chore: update electron@22.5.5 (#184116) * Extension host veto is registered multiple times on restart (fix #183778) (#184127) Extension host veto is registered multiple times on restart (#183778) * Do not auto start the local web worker extension host (#184137) * Allow embedders to intercept trustedTypes.createPolicy calls (#184136) Allow embedders to intercept trustedTypes.createPolicy calls (#184100) * fix: reading from console output for --status on windows and linux (#184138) fix: reading from console output for --status on windows and linux (#184118) * Misc fixes * code --status displays a lot of errors before actual status output (fix #183787) (#184200) fix 183787 * (cherry-pick to 1.79 from main) Handle galleryExtension failure in featuredExtensionService (#184205) Handle galleryExtension failure in featuredExtensionService (#184198) Handle galleryExtension failure * Fix #184183. Multiple output height updates are skipped. 
(#184188) * Post merge init fixes * Misc build issues * disable toggle inline diff of `alt` down https://github.com/microsoft/vscode-internalbacklog/issues/4342 * Take into account already activated extensions when computing running locations (#184303) Take into account already activated extensions when computing running locations (fixes #184180) * Avoid `extensionService.getExtension` and use `ActivationKind.Immediate` to allow that URI handling works while resolving (#184310) Avoid `extensionService.getExtension` and use `ActivationKind.Immediate` to allow that URI handling works while resolving (fixes #182217) * WIP * rm fish auto injection * More breaks * Fix Port Attributes constructor (#184412) * WIP * WIP * Allow extensions to get at the exports of other extensions during resolving (#184487) Allow extensions to get at the exports of other extensions during resolving (fixes #184472) * do not auto finish session when inline chat widgets have focus re https://github.com/microsoft/vscode-internalbacklog/issues/4354 * fix compile errors caused by new base method * WIP * WIP * WIP * WIP * Build errors * unc - fix path traversal bypass * Bump version * cherry-pick prod changes from main * Disable sandbox * Build break from merge * bump version * Merge pull request #184739 from max06/max06/issue184659 Restore ShellIntegration for fish (#184659) * Git - only add --find-renames if the value is not the default one (#185053) Git - only add --find-renames if the value is not the default one (#184992) * Cherry-pick: Revert changes to render featured extensions when available (#184747) Revert changes to render featured extensions when available. 
(#184573) * Lower timeouts for experimentation and gallery service * Revert changes to render extensions when available * Add audio cues * fix: disable app sandbox when --no-sandbox is present (#184913) * fix: disable app sandbox when --no-sandbox is present (#184897) * fix: loading minimist in packaged builds * Runtime errors * UNC allow list checks cannot be disabled in extension host (fix #184989) (#185085) * UNC allow list checks cannot be disabled in extension host (#184989) * Update src/vs/base/node/unc.js Co-authored-by: Robo <hop2deep@gmail.com> --------- Co-authored-by: Robo <hop2deep@gmail.com> * Add notebook extension * Fix mangling issues * Fix mangling issues * npm install * npm install * Issues blocking bundle * Fix build folder compile errors * Fix windows bundle build * Linting fixes * Fix sqllint issues * Update yarn.lock files * Fix unit tests * Fix a couple breaks from test fixes * Bump distro * redo the checkbox style * Update linux build container dockerfile * Bump build image tag * Bump native watch dog package * Bump node-pty * Bump distro * Fix documnetation error * Update distro * redo the button styles * Update datasource TS * Add missing yarn.lock files * Windows setup fix * Turn off extension unit tests while investigating * color box style * Remove appx * Turn off test log upload * update dropdownlist style * fix universal app build error (#23488) * Skip flaky bufferContext vscode test --------- Co-authored-by: Johannes <johannes.rieken@gmail.com> Co-authored-by: Henning Dieterichs <hdieterichs@microsoft.com> Co-authored-by: Julien Richard <jairbubbles@hotmail.com> Co-authored-by: Charles Gagnon <chgagnon@microsoft.com> Co-authored-by: Megan Rogge <merogge@microsoft.com> Co-authored-by: meganrogge <megan.rogge@microsoft.com> Co-authored-by: Rob Lourens <roblourens@gmail.com> Co-authored-by: Connor Peet <connor@peet.io> Co-authored-by: Joyce Er <joyce.er@microsoft.com> Co-authored-by: Bhavya U <bhavyau@microsoft.com> Co-authored-by: 
Raymond Zhao <7199958+rzhao271@users.noreply.github.com> Co-authored-by: Martin Aeschlimann <martinae@microsoft.com> Co-authored-by: Aaron Munger <aamunger@microsoft.com> Co-authored-by: Aiday Marlen Kyzy <amarlenkyzy@microsoft.com> Co-authored-by: rebornix <penn.lv@gmail.com> Co-authored-by: Ole <oler@google.com> Co-authored-by: Jean Pierre <jeanp413@hotmail.com> Co-authored-by: Robo <hop2deep@gmail.com> Co-authored-by: Yash Singh <saiansh2525@gmail.com> Co-authored-by: Ladislau Szomoru <3372902+lszomoru@users.noreply.github.com> Co-authored-by: Ulugbek Abdullaev <ulugbekna@gmail.com> Co-authored-by: Alex Ross <alros@microsoft.com> Co-authored-by: Michael Lively <milively@microsoft.com> Co-authored-by: Matt Bierner <matb@microsoft.com> Co-authored-by: Andrea Mah <31675041+andreamah@users.noreply.github.com> Co-authored-by: Benjamin Pasero <benjamin.pasero@microsoft.com> Co-authored-by: Sandeep Somavarapu <sasomava@microsoft.com> Co-authored-by: Daniel Imms <2193314+Tyriar@users.noreply.github.com> Co-authored-by: Tyler James Leonhardt <me@tylerleonhardt.com> Co-authored-by: Alexandru Dima <alexdima@microsoft.com> Co-authored-by: Joao Moreno <Joao.Moreno@microsoft.com> Co-authored-by: Alan Ren <alanren@microsoft.com>
This commit is contained in:
@@ -6,17 +6,16 @@
|
||||
import * as path from 'path';
|
||||
import * as fs from 'fs';
|
||||
|
||||
import { through, readable, ThroughStream } from 'event-stream';
|
||||
import { map, merge, through, ThroughStream } from 'event-stream';
|
||||
import * as jsonMerge from 'gulp-merge-json';
|
||||
import * as File from 'vinyl';
|
||||
import * as Is from 'is';
|
||||
import * as xml2js from 'xml2js';
|
||||
import * as https from 'https';
|
||||
import * as gulp from 'gulp';
|
||||
import * as fancyLog from 'fancy-log';
|
||||
import * as ansiColors from 'ansi-colors';
|
||||
import * as iconv from '@vscode/iconv-lite-umd';
|
||||
|
||||
const NUMBER_OF_CONCURRENT_DOWNLOADS = 4;
|
||||
import { l10nJsonFormat, getL10nXlf, l10nJsonDetails, getL10nFilesFromXlf, getL10nJson } from '@vscode/l10n-dev';
|
||||
|
||||
function log(message: any, ...rest: any[]): void {
|
||||
fancyLog(ansiColors.green('[i18n]'), message, ...rest);
|
||||
@@ -51,18 +50,15 @@ export const extraLanguages: Language[] = [
|
||||
{ id: 'tr', folderName: 'trk' }
|
||||
];
|
||||
|
||||
// non built-in extensions also that are transifex and need to be part of the language packs
|
||||
export const externalExtensionsWithTranslations = {
|
||||
'vscode-chrome-debug': 'msjsdiag.debugger-for-chrome',
|
||||
'vscode-node-debug': 'ms-vscode.node-debug',
|
||||
'vscode-node-debug2': 'ms-vscode.node-debug2'
|
||||
};
|
||||
|
||||
|
||||
export interface Map<V> { // {{SQL CARBON EDIT}} Needed in locfunc.
|
||||
export interface StringMap<V> { // {{SQL CARBON EDIT}} Needed in locfunc.
|
||||
[key: string]: V;
|
||||
}
|
||||
|
||||
export interface ParsedXLF { // {{SQL CARBON EDIT}} Needed in locfunc.
|
||||
messages: StringMap<string>;
|
||||
originalFilePath: string;
|
||||
language: string;
|
||||
}
|
||||
interface Item {
|
||||
id: string;
|
||||
message: string;
|
||||
@@ -74,12 +70,6 @@ export interface Resource {
|
||||
project: string;
|
||||
}
|
||||
|
||||
export interface ParsedXLF { // {{SQL CARBON EDIT}} Needed in locfunc.
|
||||
messages: Map<string>;
|
||||
originalFilePath: string;
|
||||
language: string;
|
||||
}
|
||||
|
||||
interface LocalizeInfo {
|
||||
key: string;
|
||||
comment: string[];
|
||||
@@ -93,9 +83,9 @@ module LocalizeInfo {
|
||||
}
|
||||
|
||||
interface BundledFormat {
|
||||
keys: Map<(string | LocalizeInfo)[]>;
|
||||
messages: Map<string[]>;
|
||||
bundles: Map<string[]>;
|
||||
keys: Record<string, (string | LocalizeInfo)[]>;
|
||||
messages: Record<string, string[]>;
|
||||
bundles: Record<string, string[]>;
|
||||
}
|
||||
|
||||
module BundledFormat {
|
||||
@@ -111,27 +101,6 @@ module BundledFormat {
|
||||
}
|
||||
}
|
||||
|
||||
interface ValueFormat {
|
||||
message: string;
|
||||
comment: string[];
|
||||
}
|
||||
|
||||
interface PackageJsonFormat {
|
||||
[key: string]: string | ValueFormat;
|
||||
}
|
||||
|
||||
module PackageJsonFormat {
|
||||
export function is(value: any): value is PackageJsonFormat {
|
||||
if (Is.undef(value) || !Is.object(value)) {
|
||||
return false;
|
||||
}
|
||||
return Object.keys(value).every(key => {
|
||||
const element = value[key];
|
||||
return Is.string(element) || (Is.object(element) && Is.defined(element.message) && Is.defined(element.comment));
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
interface BundledExtensionFormat {
|
||||
[key: string]: {
|
||||
messages: string[];
|
||||
@@ -181,7 +150,7 @@ class TextModel {
|
||||
|
||||
export class XLF {
|
||||
private buffer: string[];
|
||||
private files: Map<Item[]>;
|
||||
private files: Record<string, Item[]>;
|
||||
public numberOfMessages: number;
|
||||
|
||||
constructor(public project: string) {
|
||||
@@ -277,12 +246,12 @@ export class XLF {
|
||||
static parsePseudo = function (xlfString: string): Promise<ParsedXLF[]> {
|
||||
return new Promise((resolve) => {
|
||||
const parser = new xml2js.Parser();
|
||||
const files: { messages: Map<string>; originalFilePath: string; language: string }[] = [];
|
||||
const files: { messages: StringMap<string>; originalFilePath: string; language: string }[] = [];
|
||||
parser.parseString(xlfString, function (_err: any, result: any) {
|
||||
const fileNodes: any[] = result['xliff']['file'];
|
||||
fileNodes.forEach(file => {
|
||||
const originalFilePath = file.$.original;
|
||||
const messages: Map<string> = {};
|
||||
const messages: StringMap<string> = {};
|
||||
const transUnits = file.body[0]['trans-unit'];
|
||||
if (transUnits) {
|
||||
transUnits.forEach((unit: any) => {
|
||||
@@ -300,11 +269,12 @@ export class XLF {
|
||||
});
|
||||
};
|
||||
|
||||
static parse = function (xlfString: string): Promise<ParsedXLF[]> {
|
||||
|
||||
static org_parse = function (xlfString: string): Promise<ParsedXLF[]> {
|
||||
return new Promise((resolve, reject) => {
|
||||
const parser = new xml2js.Parser();
|
||||
|
||||
const files: { messages: Map<string>; originalFilePath: string; language: string }[] = [];
|
||||
const files: { messages: StringMap<string>; originalFilePath: string; language: string }[] = [];
|
||||
|
||||
parser.parseString(xlfString, function (err: any, result: any) {
|
||||
if (err) {
|
||||
@@ -325,7 +295,7 @@ export class XLF {
|
||||
if (!language) {
|
||||
reject(new Error(`XLF parsing error: XLIFF file node does not contain target-language attribute to determine translated language.`));
|
||||
}
|
||||
const messages: Map<string> = {};
|
||||
const messages: StringMap<string> = {};
|
||||
|
||||
const transUnits = file.body[0]['trans-unit'];
|
||||
if (transUnits) {
|
||||
@@ -354,49 +324,61 @@ export class XLF {
|
||||
});
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
export interface ITask<T> {
|
||||
(): T;
|
||||
}
|
||||
static parse = function (xlfString: string): Promise<l10nJsonDetails[]> {
|
||||
return new Promise((resolve, reject) => {
|
||||
const parser = new xml2js.Parser();
|
||||
|
||||
interface ILimitedTaskFactory<T> {
|
||||
factory: ITask<Promise<T>>;
|
||||
c: (value?: T | Promise<T>) => void;
|
||||
e: (error?: any) => void;
|
||||
}
|
||||
const files: { messages: Record<string, string>; name: string; language: string }[] = [];
|
||||
|
||||
export class Limiter<T> {
|
||||
private runningPromises: number;
|
||||
private outstandingPromises: ILimitedTaskFactory<any>[];
|
||||
parser.parseString(xlfString, function (err: any, result: any) {
|
||||
if (err) {
|
||||
reject(new Error(`XLF parsing error: Failed to parse XLIFF string. ${err}`));
|
||||
}
|
||||
|
||||
constructor(private maxDegreeOfParalellism: number) {
|
||||
this.outstandingPromises = [];
|
||||
this.runningPromises = 0;
|
||||
}
|
||||
const fileNodes: any[] = result['xliff']['file'];
|
||||
if (!fileNodes) {
|
||||
reject(new Error(`XLF parsing error: XLIFF file does not contain "xliff" or "file" node(s) required for parsing.`));
|
||||
}
|
||||
|
||||
queue(factory: ITask<Promise<T>>): Promise<T> {
|
||||
return new Promise<T>((c, e) => {
|
||||
this.outstandingPromises.push({ factory, c, e });
|
||||
this.consume();
|
||||
fileNodes.forEach((file) => {
|
||||
const name = file.$.original;
|
||||
if (!name) {
|
||||
reject(new Error(`XLF parsing error: XLIFF file node does not contain original attribute to determine the original location of the resource file.`));
|
||||
}
|
||||
const language = file.$['target-language'];
|
||||
if (!language) {
|
||||
reject(new Error(`XLF parsing error: XLIFF file node does not contain target-language attribute to determine translated language.`));
|
||||
}
|
||||
const messages: Record<string, string> = {};
|
||||
|
||||
const transUnits = file.body[0]['trans-unit'];
|
||||
if (transUnits) {
|
||||
transUnits.forEach((unit: any) => {
|
||||
const key = unit.$.id;
|
||||
if (!unit.target) {
|
||||
return; // No translation available
|
||||
}
|
||||
|
||||
let val = unit.target[0];
|
||||
if (typeof val !== 'string') {
|
||||
// We allow empty source values so support them for translations as well.
|
||||
val = val._ ? val._ : '';
|
||||
}
|
||||
if (!key) {
|
||||
reject(new Error(`XLF parsing error: trans-unit ${JSON.stringify(unit, undefined, 0)} defined in file ${name} is missing the ID attribute.`));
|
||||
return;
|
||||
}
|
||||
messages[key] = decodeEntities(val);
|
||||
});
|
||||
files.push({ messages, name, language: language.toLowerCase() });
|
||||
}
|
||||
});
|
||||
|
||||
resolve(files);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
private consume(): void {
|
||||
while (this.outstandingPromises.length && this.runningPromises < this.maxDegreeOfParalellism) {
|
||||
const iLimitedTask = this.outstandingPromises.shift()!;
|
||||
this.runningPromises++;
|
||||
|
||||
const promise = iLimitedTask.factory();
|
||||
promise.then(iLimitedTask.c).catch(iLimitedTask.e);
|
||||
promise.then(() => this.consumed()).catch(() => this.consumed());
|
||||
}
|
||||
}
|
||||
|
||||
private consumed(): void {
|
||||
this.runningPromises--;
|
||||
this.consume();
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
function sortLanguages(languages: Language[]): Language[] {
|
||||
@@ -480,9 +462,9 @@ function processCoreBundleFormat(fileHeader: string, languages: Language[], json
|
||||
const messageSection = json.messages;
|
||||
const bundleSection = json.bundles;
|
||||
|
||||
const statistics: Map<number> = Object.create(null);
|
||||
const statistics: Record<string, number> = Object.create(null);
|
||||
|
||||
const defaultMessages: Map<Map<string>> = Object.create(null);
|
||||
const defaultMessages: Record<string, Record<string, string>> = Object.create(null);
|
||||
const modules = Object.keys(keysSection);
|
||||
modules.forEach((module) => {
|
||||
const keys = keysSection[module];
|
||||
@@ -491,7 +473,7 @@ function processCoreBundleFormat(fileHeader: string, languages: Language[], json
|
||||
emitter.emit('error', `Message for module ${module} corrupted. Mismatch in number of keys and messages.`);
|
||||
return;
|
||||
}
|
||||
const messageMap: Map<string> = Object.create(null);
|
||||
const messageMap: Record<string, string> = Object.create(null);
|
||||
defaultMessages[module] = messageMap;
|
||||
keys.map((key, i) => {
|
||||
if (typeof key === 'string') {
|
||||
@@ -514,7 +496,7 @@ function processCoreBundleFormat(fileHeader: string, languages: Language[], json
|
||||
}
|
||||
|
||||
statistics[language.id] = 0;
|
||||
const localizedModules: Map<string[]> = Object.create(null);
|
||||
const localizedModules: Record<string, string[]> = Object.create(null);
|
||||
const languageFolderName = language.translationId || language.id;
|
||||
const i18nFile = path.join(languageDirectory, `vscode-language-pack-${languageFolderName}`, 'translations', 'main.i18n.json');
|
||||
let allMessages: I18nFormat | undefined;
|
||||
@@ -611,7 +593,8 @@ export function processNlsFiles(opts: { fileHeader: string; languages: Language[
|
||||
const editorProject: string = 'vscode-editor',
|
||||
workbenchProject: string = 'vscode-workbench',
|
||||
extensionsProject: string = 'vscode-extensions',
|
||||
setupProject: string = 'vscode-setup';
|
||||
setupProject: string = 'vscode-setup',
|
||||
serverProject: string = 'vscode-server';
|
||||
|
||||
// {{SQL CARBON EDIT}}
|
||||
const adsProject: string = 'ads-core';
|
||||
@@ -629,6 +612,8 @@ export function getResource(sourceFile: string): Resource {
|
||||
return { name: 'vs/base', project: editorProject };
|
||||
} else if (/^vs\/code/.test(sourceFile)) {
|
||||
return { name: 'vs/code', project: workbenchProject };
|
||||
} else if (/^vs\/server/.test(sourceFile)) {
|
||||
return { name: 'vs/server', project: serverProject };
|
||||
} else if (/^vs\/workbench\/contrib/.test(sourceFile)) {
|
||||
resource = sourceFile.split('/', 4).join('/');
|
||||
return { name: resource, project: workbenchProject };
|
||||
@@ -653,7 +638,7 @@ export function createXlfFilesForCoreBundle(): ThroughStream {
|
||||
const basename = path.basename(file.path);
|
||||
if (basename === 'nls.metadata.json') {
|
||||
if (file.isBuffer()) {
|
||||
const xlfs: Map<XLF> = Object.create(null);
|
||||
const xlfs: Record<string, XLF> = Object.create(null);
|
||||
const json: BundledFormat = JSON.parse((file.contents as Buffer).toString('utf8'));
|
||||
// {{SQL CARBON EDIT}} - Must sort the keys for easier translation.
|
||||
let sortedKeys = Object.keys(json.keys).sort();
|
||||
@@ -698,6 +683,76 @@ export function createXlfFilesForCoreBundle(): ThroughStream {
|
||||
});
|
||||
}
|
||||
|
||||
function createL10nBundleForExtension(extensionFolderName: string, prefixWithBuildFolder: boolean): NodeJS.ReadWriteStream {
|
||||
const prefix = prefixWithBuildFolder ? '.build/' : '';
|
||||
return gulp
|
||||
.src([
|
||||
// For source code of extensions
|
||||
`${prefix}extensions/${extensionFolderName}/{src,client,server}/**/*.{ts,tsx}`,
|
||||
// // For any dependencies pulled in (think vscode-css-languageservice or @vscode/emmet-helper)
|
||||
`${prefix}extensions/${extensionFolderName}/**/node_modules/{@vscode,vscode-*}/**/*.{js,jsx}`,
|
||||
// // For any dependencies pulled in that bundle @vscode/l10n. They needed to export the bundle
|
||||
`${prefix}extensions/${extensionFolderName}/**/bundle.l10n.json`,
|
||||
])
|
||||
.pipe(map(function (data, callback) {
|
||||
const file = data as File;
|
||||
if (!file.isBuffer()) {
|
||||
// Not a buffer so we drop it
|
||||
callback();
|
||||
return;
|
||||
}
|
||||
const extension = path.extname(file.relative);
|
||||
if (extension !== '.json') {
|
||||
const contents = file.contents.toString('utf8');
|
||||
getL10nJson([{ contents, extension }])
|
||||
.then((json) => {
|
||||
callback(undefined, new File({
|
||||
path: `extensions/${extensionFolderName}/bundle.l10n.json`,
|
||||
contents: Buffer.from(JSON.stringify(json), 'utf8')
|
||||
}));
|
||||
})
|
||||
.catch((err) => {
|
||||
callback(new Error(`File ${file.relative} threw an error when parsing: ${err}`));
|
||||
});
|
||||
// signal pause?
|
||||
return false;
|
||||
}
|
||||
|
||||
// for bundle.l10n.jsons
|
||||
let bundleJson;
|
||||
try {
|
||||
bundleJson = JSON.parse(file.contents.toString('utf8'));
|
||||
} catch (err) {
|
||||
callback(new Error(`File ${file.relative} threw an error when parsing: ${err}`));
|
||||
return;
|
||||
}
|
||||
|
||||
// some validation of the bundle.l10n.json format
|
||||
for (const key in bundleJson) {
|
||||
if (
|
||||
typeof bundleJson[key] !== 'string' &&
|
||||
(typeof bundleJson[key].message !== 'string' || !Array.isArray(bundleJson[key].comment))
|
||||
) {
|
||||
callback(new Error(`Invalid bundle.l10n.json file. The value for key ${key} is not in the expected format.`));
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
callback(undefined, file);
|
||||
}))
|
||||
.pipe(jsonMerge({
|
||||
fileName: `extensions/${extensionFolderName}/bundle.l10n.json`,
|
||||
jsonSpace: '',
|
||||
concatArrays: true
|
||||
}));
|
||||
}
|
||||
|
||||
export const EXTERNAL_EXTENSIONS = [
|
||||
'ms-vscode.js-debug',
|
||||
'ms-vscode.js-debug-companion',
|
||||
'ms-vscode.vscode-js-profile-table',
|
||||
];
|
||||
|
||||
export function createXlfFilesForExtensions(): ThroughStream {
|
||||
let counter: number = 0;
|
||||
let folderStreamEnded: boolean = false;
|
||||
@@ -708,60 +763,62 @@ export function createXlfFilesForExtensions(): ThroughStream {
|
||||
if (!stat.isDirectory()) {
|
||||
return;
|
||||
}
|
||||
const extensionName = path.basename(extensionFolder.path);
|
||||
if (extensionName === 'node_modules') {
|
||||
const extensionFolderName = path.basename(extensionFolder.path);
|
||||
if (extensionFolderName === 'node_modules') {
|
||||
return;
|
||||
}
|
||||
// Get extension id and use that as the id
|
||||
const manifest = fs.readFileSync(path.join(extensionFolder.path, 'package.json'), 'utf-8');
|
||||
const manifestJson = JSON.parse(manifest);
|
||||
const extensionId = manifestJson.publisher + '.' + manifestJson.name;
|
||||
|
||||
counter++;
|
||||
let _xlf: XLF;
|
||||
function getXlf() {
|
||||
if (!_xlf) {
|
||||
_xlf = new XLF(extensionsProject);
|
||||
let _l10nMap: Map<string, l10nJsonFormat>;
|
||||
function getL10nMap() {
|
||||
if (!_l10nMap) {
|
||||
_l10nMap = new Map();
|
||||
}
|
||||
return _xlf;
|
||||
return _l10nMap;
|
||||
}
|
||||
gulp.src([`.build/extensions/${extensionName}/package.nls.json`, `.build/extensions/${extensionName}/**/nls.metadata.json`], { allowEmpty: true }).pipe(through(function (file: File) {
|
||||
merge(
|
||||
gulp.src([`.build/extensions/${extensionFolderName}/package.nls.json`, `.build/extensions/${extensionFolderName}/**/nls.metadata.json`], { allowEmpty: true }),
|
||||
createL10nBundleForExtension(extensionFolderName, EXTERNAL_EXTENSIONS.includes(extensionId))
|
||||
).pipe(through(function (file: File) {
|
||||
if (file.isBuffer()) {
|
||||
const buffer: Buffer = file.contents as Buffer;
|
||||
const basename = path.basename(file.path);
|
||||
if (basename === 'package.nls.json') {
|
||||
const json: PackageJsonFormat = JSON.parse(buffer.toString('utf8'));
|
||||
const keys: Array<string | LocalizeInfo> = [];
|
||||
const messages: string[] = [];
|
||||
Object.keys(json).forEach((key) => {
|
||||
const value = json[key];
|
||||
if (Is.string(value)) {
|
||||
keys.push(key);
|
||||
messages.push(value);
|
||||
} else if (value) {
|
||||
keys.push({
|
||||
key,
|
||||
comment: value.comment
|
||||
});
|
||||
messages.push(value.message);
|
||||
} else {
|
||||
keys.push(key);
|
||||
messages.push(`Unknown message for key: ${key}`);
|
||||
}
|
||||
});
|
||||
getXlf().addFile(`extensions/${extensionName}/package`, keys, messages);
|
||||
const json: l10nJsonFormat = JSON.parse(buffer.toString('utf8'));
|
||||
getL10nMap().set(`extensions/${extensionId}/package`, json);
|
||||
} else if (basename === 'nls.metadata.json') {
|
||||
const json: BundledExtensionFormat = JSON.parse(buffer.toString('utf8'));
|
||||
const relPath = path.relative(`.build/extensions/${extensionName}`, path.dirname(file.path));
|
||||
const relPath = path.relative(`.build/extensions/${extensionFolderName}`, path.dirname(file.path));
|
||||
for (const file in json) {
|
||||
const fileContent = json[file];
|
||||
getXlf().addFile(`extensions/${extensionName}/${relPath}/${file}`, fileContent.keys, fileContent.messages);
|
||||
const info: l10nJsonFormat = Object.create(null);
|
||||
for (let i = 0; i < fileContent.messages.length; i++) {
|
||||
const message = fileContent.messages[i];
|
||||
const { key, comment } = LocalizeInfo.is(fileContent.keys[i])
|
||||
? fileContent.keys[i] as LocalizeInfo
|
||||
: { key: fileContent.keys[i] as string, comment: undefined };
|
||||
|
||||
info[key] = comment ? { message, comment } : message;
|
||||
}
|
||||
getL10nMap().set(`extensions/${extensionId}/${relPath}/${file}`, info);
|
||||
}
|
||||
} else if (basename === 'bundle.l10n.json') {
|
||||
const json: l10nJsonFormat = JSON.parse(buffer.toString('utf8'));
|
||||
getL10nMap().set(`extensions/${extensionId}/bundle`, json);
|
||||
} else {
|
||||
this.emit('error', new Error(`${file.path} is not a valid extension nls file`));
|
||||
return;
|
||||
}
|
||||
}
|
||||
}, function () {
|
||||
if (_xlf) {
|
||||
if (_l10nMap?.size > 0) {
|
||||
const xlfFile = new File({
|
||||
path: path.join(extensionsProject, extensionName + '.xlf'),
|
||||
contents: Buffer.from(_xlf.toString(), 'utf8')
|
||||
path: path.join(extensionsProject, extensionId + '.xlf'),
|
||||
contents: Buffer.from(getL10nXlf(_l10nMap), 'utf8')
|
||||
});
|
||||
folderStream.queue(xlfFile);
|
||||
}
|
||||
@@ -837,322 +894,8 @@ export function createXlfFilesForIsl(): ThroughStream {
|
||||
});
|
||||
}
|
||||
|
||||
/**
 * Returns a stream that uploads each incoming .xlf file to a Transifex-style
 * API. The project is derived from the file's directory and the resource slug
 * from its base name. An existing resource is updated, a missing one created.
 * The stream is only ended after every API call has settled.
 */
export function pushXlfFiles(apiHostname: string, username: string, password: string): ThroughStream {
	const tryGetPromises: Array<Promise<boolean>> = [];
	const updateCreatePromises: Array<Promise<boolean>> = [];

	return through(function (this: ThroughStream, file: File) {
		const project = path.dirname(file.relative);
		const fileName = path.basename(file.path);
		const slug = fileName.substr(0, fileName.length - '.xlf'.length);
		const credentials = `${username}:${password}`;

		// Check if resource already exists, if not, then create it.
		let promise = tryGetResource(project, slug, apiHostname, credentials);
		tryGetPromises.push(promise);
		promise.then(exists => {
			if (exists) {
				promise = updateResource(project, slug, file, apiHostname, credentials);
			} else {
				promise = createResource(project, slug, file, apiHostname, credentials);
			}
			// NOTE(review): this push happens inside a .then, i.e. only after the
			// corresponding tryGet promise resolves. The flush below waits on all
			// tryGetPromises first, so by the time it inspects updateCreatePromises
			// every entry has been pushed — this ordering is load-bearing.
			updateCreatePromises.push(promise);
		});

	}, function () {
		// End the pipe only after all the communication with Transifex API happened
		Promise.all(tryGetPromises).then(() => {
			Promise.all(updateCreatePromises).then(() => {
				this.queue(null);
			}).catch((reason) => { throw new Error(reason); });
		}).catch((reason) => { throw new Error(reason); });
	});
}
|
||||
|
||||
function getAllResources(project: string, apiHostname: string, username: string, password: string): Promise<string[]> {
|
||||
return new Promise((resolve, reject) => {
|
||||
const credentials = `${username}:${password}`;
|
||||
const options = {
|
||||
hostname: apiHostname,
|
||||
path: `/api/2/project/${project}/resources`,
|
||||
auth: credentials,
|
||||
method: 'GET'
|
||||
};
|
||||
|
||||
const request = https.request(options, (res) => {
|
||||
const buffer: Buffer[] = [];
|
||||
res.on('data', (chunk: Buffer) => buffer.push(chunk));
|
||||
res.on('end', () => {
|
||||
if (res.statusCode === 200) {
|
||||
const json = JSON.parse(Buffer.concat(buffer).toString());
|
||||
if (Array.isArray(json)) {
|
||||
resolve(json.map(o => o.slug));
|
||||
return;
|
||||
}
|
||||
reject(`Unexpected data format. Response code: ${res.statusCode}.`);
|
||||
} else {
|
||||
reject(`No resources in ${project} returned no data. Response code: ${res.statusCode}.`);
|
||||
}
|
||||
});
|
||||
});
|
||||
request.on('error', (err) => {
|
||||
reject(`Failed to query resources in ${project} with the following error: ${err}. ${options.path}`);
|
||||
});
|
||||
request.end();
|
||||
});
|
||||
}
|
||||
|
||||
export function findObsoleteResources(apiHostname: string, username: string, password: string): ThroughStream {
|
||||
const resourcesByProject: Map<string[]> = Object.create(null);
|
||||
resourcesByProject[extensionsProject] = ([] as any[]).concat(externalExtensionsWithTranslations); // clone
|
||||
|
||||
return through(function (this: ThroughStream, file: File) {
|
||||
const project = path.dirname(file.relative);
|
||||
const fileName = path.basename(file.path);
|
||||
const slug = fileName.substr(0, fileName.length - '.xlf'.length);
|
||||
|
||||
let slugs = resourcesByProject[project];
|
||||
if (!slugs) {
|
||||
resourcesByProject[project] = slugs = [];
|
||||
}
|
||||
slugs.push(slug);
|
||||
this.push(file);
|
||||
}, function () {
|
||||
|
||||
const json = JSON.parse(fs.readFileSync('./build/lib/i18n.resources.json', 'utf8'));
|
||||
const i18Resources = [...json.editor, ...json.workbench].map((r: Resource) => r.project + '/' + r.name.replace(/\//g, '_'));
|
||||
const extractedResources: string[] = [];
|
||||
for (const project of [workbenchProject, editorProject]) {
|
||||
for (const resource of resourcesByProject[project]) {
|
||||
if (resource !== 'setup_messages') {
|
||||
extractedResources.push(project + '/' + resource);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (i18Resources.length !== extractedResources.length) {
|
||||
console.log(`[i18n] Obsolete resources in file 'build/lib/i18n.resources.json': JSON.stringify(${i18Resources.filter(p => extractedResources.indexOf(p) === -1)})`);
|
||||
console.log(`[i18n] Missing resources in file 'build/lib/i18n.resources.json': JSON.stringify(${extractedResources.filter(p => i18Resources.indexOf(p) === -1)})`);
|
||||
}
|
||||
|
||||
const promises: Array<Promise<void>> = [];
|
||||
for (const project in resourcesByProject) {
|
||||
promises.push(
|
||||
getAllResources(project, apiHostname, username, password).then(resources => {
|
||||
const expectedResources = resourcesByProject[project];
|
||||
const unusedResources = resources.filter(resource => resource && expectedResources.indexOf(resource) === -1);
|
||||
if (unusedResources.length) {
|
||||
console.log(`[transifex] Obsolete resources in project '${project}': ${unusedResources.join(', ')}`);
|
||||
}
|
||||
})
|
||||
);
|
||||
}
|
||||
return Promise.all(promises).then(_ => {
|
||||
this.push(null);
|
||||
}).catch((reason) => { throw new Error(reason); });
|
||||
});
|
||||
}
|
||||
|
||||
function tryGetResource(project: string, slug: string, apiHostname: string, credentials: string): Promise<boolean> {
|
||||
return new Promise((resolve, reject) => {
|
||||
const options = {
|
||||
hostname: apiHostname,
|
||||
path: `/api/2/project/${project}/resource/${slug}/?details`,
|
||||
auth: credentials,
|
||||
method: 'GET'
|
||||
};
|
||||
|
||||
const request = https.request(options, (response) => {
|
||||
if (response.statusCode === 404) {
|
||||
resolve(false);
|
||||
} else if (response.statusCode === 200) {
|
||||
resolve(true);
|
||||
} else {
|
||||
reject(`Failed to query resource ${project}/${slug}. Response: ${response.statusCode} ${response.statusMessage}`);
|
||||
}
|
||||
});
|
||||
request.on('error', (err) => {
|
||||
reject(`Failed to get ${project}/${slug} on Transifex: ${err}`);
|
||||
});
|
||||
|
||||
request.end();
|
||||
});
|
||||
}
|
||||
|
||||
function createResource(project: string, slug: string, xlfFile: File, apiHostname: string, credentials: any): Promise<any> {
|
||||
return new Promise((_resolve, reject) => {
|
||||
const data = JSON.stringify({
|
||||
'content': xlfFile.contents.toString(),
|
||||
'name': slug,
|
||||
'slug': slug,
|
||||
'i18n_type': 'XLIFF'
|
||||
});
|
||||
const options = {
|
||||
hostname: apiHostname,
|
||||
path: `/api/2/project/${project}/resources`,
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'Content-Length': Buffer.byteLength(data)
|
||||
},
|
||||
auth: credentials,
|
||||
method: 'POST'
|
||||
};
|
||||
|
||||
const request = https.request(options, (res) => {
|
||||
if (res.statusCode === 201) {
|
||||
log(`Resource ${project}/${slug} successfully created on Transifex.`);
|
||||
} else {
|
||||
reject(`Something went wrong in the request creating ${slug} in ${project}. ${res.statusCode}`);
|
||||
}
|
||||
});
|
||||
request.on('error', (err) => {
|
||||
reject(`Failed to create ${project}/${slug} on Transifex: ${err}`);
|
||||
});
|
||||
|
||||
request.write(data);
|
||||
request.end();
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* The following link provides information about how Transifex handles updates of a resource file:
|
||||
* https://dev.befoolish.co/tx-docs/public/projects/updating-content#what-happens-when-you-update-files
|
||||
*/
|
||||
function updateResource(project: string, slug: string, xlfFile: File, apiHostname: string, credentials: string): Promise<any> {
|
||||
return new Promise<void>((resolve, reject) => {
|
||||
const data = JSON.stringify({ content: xlfFile.contents.toString() });
|
||||
const options = {
|
||||
hostname: apiHostname,
|
||||
path: `/api/2/project/${project}/resource/${slug}/content`,
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'Content-Length': Buffer.byteLength(data)
|
||||
},
|
||||
auth: credentials,
|
||||
method: 'PUT'
|
||||
};
|
||||
|
||||
const request = https.request(options, (res) => {
|
||||
if (res.statusCode === 200) {
|
||||
res.setEncoding('utf8');
|
||||
|
||||
let responseBuffer: string = '';
|
||||
res.on('data', function (chunk) {
|
||||
responseBuffer += chunk;
|
||||
});
|
||||
res.on('end', () => {
|
||||
const response = JSON.parse(responseBuffer);
|
||||
log(`Resource ${project}/${slug} successfully updated on Transifex. Strings added: ${response.strings_added}, updated: ${response.strings_added}, deleted: ${response.strings_added}`);
|
||||
resolve();
|
||||
});
|
||||
} else {
|
||||
reject(`Something went wrong in the request updating ${slug} in ${project}. ${res.statusCode}`);
|
||||
}
|
||||
});
|
||||
request.on('error', (err) => {
|
||||
reject(`Failed to update ${project}/${slug} on Transifex: ${err}`);
|
||||
});
|
||||
|
||||
request.write(data);
|
||||
request.end();
|
||||
});
|
||||
}
|
||||
|
||||
export function pullSetupXlfFiles(apiHostname: string, username: string, password: string, language: Language, includeDefault: boolean): NodeJS.ReadableStream {
|
||||
const setupResources = [{ name: 'setup_messages', project: workbenchProject }];
|
||||
if (includeDefault) {
|
||||
setupResources.push({ name: 'setup_default', project: setupProject });
|
||||
}
|
||||
return pullXlfFiles(apiHostname, username, password, language, setupResources);
|
||||
}
|
||||
|
||||
/**
 * Creates a readable stream that downloads the given resources for one
 * language from a Transifex-style API and emits each translation as a File.
 * The stream ends once every resource has been accounted for.
 */
function pullXlfFiles(apiHostname: string, username: string, password: string, language: Language, resources: Resource[]): NodeJS.ReadableStream {
	const credentials = `${username}:${password}`;
	const expectedTranslationsCount = resources.length;
	// `called` ensures the downloads are kicked off exactly once, on the first read.
	let translationsRetrieved = 0, called = false;

	return readable(function (_count: any, callback: any) {
		// Mark end of stream when all resources were retrieved
		if (translationsRetrieved === expectedTranslationsCount) {
			return this.emit('end');
		}

		if (!called) {
			called = true;
			const stream = this;
			resources.map(function (resource) {
				retrieveResource(language, resource, apiHostname, credentials).then((file: File | null) => {
					// A null file means the resource had no data (404); it still counts
					// toward completion so the stream can end.
					if (file) {
						stream.emit('data', file);
					}
					translationsRetrieved++;
				// NOTE(review): throwing inside .catch surfaces as an unhandled
				// rejection and translationsRetrieved is never incremented, so a
				// failed download stalls the stream — confirm this is intended.
				}).catch(error => { throw new Error(error); });
			});
		}

		callback();
	});
}
|
||||
// Shared throttle so concurrent Transifex downloads stay bounded.
const limiter = new Limiter<File | null>(NUMBER_OF_CONCURRENT_DOWNLOADS);
|
||||
|
||||
function retrieveResource(language: Language, resource: Resource, apiHostname: string, credentials: string): Promise<File | null> {
|
||||
return limiter.queue(() => new Promise<File | null>((resolve, reject) => {
|
||||
const slug = resource.name.replace(/\//g, '_');
|
||||
const project = resource.project;
|
||||
const transifexLanguageId = language.id === 'ps' ? 'en' : language.translationId || language.id;
|
||||
const options = {
|
||||
hostname: apiHostname,
|
||||
path: `/api/2/project/${project}/resource/${slug}/translation/${transifexLanguageId}?file&mode=onlyreviewed`,
|
||||
auth: credentials,
|
||||
port: 443,
|
||||
method: 'GET'
|
||||
};
|
||||
console.log('[transifex] Fetching ' + options.path);
|
||||
|
||||
const request = https.request(options, (res) => {
|
||||
const xlfBuffer: Buffer[] = [];
|
||||
res.on('data', (chunk: Buffer) => xlfBuffer.push(chunk));
|
||||
res.on('end', () => {
|
||||
if (res.statusCode === 200) {
|
||||
resolve(new File({ contents: Buffer.concat(xlfBuffer), path: `${project}/${slug}.xlf` }));
|
||||
} else if (res.statusCode === 404) {
|
||||
console.log(`[transifex] ${slug} in ${project} returned no data.`);
|
||||
resolve(null);
|
||||
} else {
|
||||
reject(`${slug} in ${project} returned no data. Response code: ${res.statusCode}.`);
|
||||
}
|
||||
});
|
||||
});
|
||||
request.on('error', (err) => {
|
||||
reject(`Failed to query resource ${slug} with the following error: ${err}. ${options.path}`);
|
||||
});
|
||||
request.end();
|
||||
}));
|
||||
}
|
||||
|
||||
export function prepareI18nFiles(): ThroughStream {
|
||||
const parsePromises: Promise<ParsedXLF[]>[] = [];
|
||||
|
||||
return through(function (this: ThroughStream, xlf: File) {
|
||||
const stream = this;
|
||||
const parsePromise = XLF.parse(xlf.contents.toString());
|
||||
parsePromises.push(parsePromise);
|
||||
parsePromise.then(
|
||||
resolvedFiles => {
|
||||
resolvedFiles.forEach(file => {
|
||||
const translatedFile = createI18nFile(file.originalFilePath, file.messages);
|
||||
stream.queue(translatedFile);
|
||||
});
|
||||
}
|
||||
);
|
||||
}, function () {
|
||||
Promise.all(parsePromises)
|
||||
.then(() => { this.queue(null); })
|
||||
.catch(reason => { throw new Error(reason); });
|
||||
});
|
||||
}
|
||||
|
||||
export function createI18nFile(originalFilePath: string, messages: any): File { // {{SQL CARBON EDIT}} Needed for locfunc.
|
||||
let result = Object.create(null);
|
||||
export function createI18nFile(name: string, messages: any): File { // {{SQL CARBON EDIT}} Needed for locfunc.
|
||||
const result = Object.create(null);
|
||||
result[''] = [
|
||||
'--------------------------------------------------------------------------------------------',
|
||||
'Copyright (c) Microsoft Corporation. All rights reserved.',
|
||||
@@ -1169,7 +912,7 @@ export function createI18nFile(originalFilePath: string, messages: any): File {
|
||||
content = content.replace(/\n/g, '\r\n');
|
||||
}
|
||||
return new File({
|
||||
path: path.join(originalFilePath + '.i18n.json'),
|
||||
path: path.join(name + '.i18n.json'),
|
||||
contents: Buffer.from(content, 'utf8')
|
||||
});
|
||||
}
|
||||
@@ -1177,7 +920,7 @@ export function createI18nFile(originalFilePath: string, messages: any): File {
|
||||
export interface I18nPack { // {{SQL CARBON EDIT}} Needed in locfunc.
|
||||
version: string;
|
||||
contents: {
|
||||
[path: string]: Map<string>;
|
||||
[path: string]: Record<string, string>;
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1188,38 +931,48 @@ export interface TranslationPath {
|
||||
resourceName: string;
|
||||
}
|
||||
|
||||
export function prepareI18nPackFiles(externalExtensions: Map<string>, resultingTranslationPaths: TranslationPath[], pseudo = false): NodeJS.ReadWriteStream {
|
||||
const parsePromises: Promise<ParsedXLF[]>[] = [];
|
||||
function getRecordFromL10nJsonFormat(l10nJsonFormat: l10nJsonFormat): Record<string, string> {
|
||||
const record: Record<string, string> = {};
|
||||
for (const key of Object.keys(l10nJsonFormat).sort()) {
|
||||
const value = l10nJsonFormat[key];
|
||||
record[key] = typeof value === 'string' ? value : value.message;
|
||||
}
|
||||
return record;
|
||||
}
|
||||
|
||||
export function prepareI18nPackFiles(resultingTranslationPaths: TranslationPath[]): NodeJS.ReadWriteStream {
|
||||
const parsePromises: Promise<l10nJsonDetails[]>[] = [];
|
||||
const mainPack: I18nPack = { version: i18nPackVersion, contents: {} };
|
||||
const extensionsPacks: Map<I18nPack> = {};
|
||||
const extensionsPacks: Record<string, I18nPack> = {};
|
||||
const errors: any[] = [];
|
||||
return through(function (this: ThroughStream, xlf: File) {
|
||||
const project = path.basename(path.dirname(path.dirname(xlf.relative)));
|
||||
const resource = path.basename(xlf.relative, '.xlf');
|
||||
let project = path.basename(path.dirname(path.dirname(xlf.relative)));
|
||||
// strip `-new` since vscode-extensions-loc uses the `-new` suffix to indicate that it's from the new loc pipeline
|
||||
const resource = path.basename(path.basename(xlf.relative, '.xlf'), '-new');
|
||||
if (EXTERNAL_EXTENSIONS.find(e => e === resource)) {
|
||||
project = extensionsProject;
|
||||
}
|
||||
const contents = xlf.contents.toString();
|
||||
log(`Found ${project}: ${resource}`);
|
||||
const parsePromise = pseudo ? XLF.parsePseudo(contents) : XLF.parse(contents);
|
||||
const parsePromise = getL10nFilesFromXlf(contents);
|
||||
parsePromises.push(parsePromise);
|
||||
parsePromise.then(
|
||||
resolvedFiles => {
|
||||
resolvedFiles.forEach(file => {
|
||||
const path = file.originalFilePath;
|
||||
const path = file.name;
|
||||
const firstSlash = path.indexOf('/');
|
||||
|
||||
if (project === extensionsProject) {
|
||||
// resource will be the extension id
|
||||
let extPack = extensionsPacks[resource];
|
||||
if (!extPack) {
|
||||
extPack = extensionsPacks[resource] = { version: i18nPackVersion, contents: {} };
|
||||
}
|
||||
const externalId = externalExtensions[resource];
|
||||
if (!externalId) { // internal extension: remove 'extensions/extensionId/' segnent
|
||||
const secondSlash = path.indexOf('/', firstSlash + 1);
|
||||
extPack.contents[path.substr(secondSlash + 1)] = file.messages;
|
||||
} else {
|
||||
extPack.contents[path] = file.messages;
|
||||
}
|
||||
// remove 'extensions/extensionId/' segment
|
||||
const secondSlash = path.indexOf('/', firstSlash + 1);
|
||||
extPack.contents[path.substring(secondSlash + 1)] = getRecordFromL10nJsonFormat(file.messages);
|
||||
} else {
|
||||
mainPack.contents[path.substr(firstSlash + 1)] = file.messages;
|
||||
mainPack.contents[path.substring(firstSlash + 1)] = getRecordFromL10nJsonFormat(file.messages);
|
||||
}
|
||||
});
|
||||
}
|
||||
@@ -1236,17 +989,11 @@ export function prepareI18nPackFiles(externalExtensions: Map<string>, resultingT
|
||||
resultingTranslationPaths.push({ id: 'vscode', resourceName: 'main.i18n.json' });
|
||||
|
||||
this.queue(translatedMainFile);
|
||||
for (const extension in extensionsPacks) {
|
||||
const translatedExtFile = createI18nFile(`extensions/${extension}`, extensionsPacks[extension]);
|
||||
for (const extensionId in extensionsPacks) {
|
||||
const translatedExtFile = createI18nFile(`extensions/${extensionId}`, extensionsPacks[extensionId]);
|
||||
this.queue(translatedExtFile);
|
||||
|
||||
const externalExtensionId = externalExtensions[extension];
|
||||
if (externalExtensionId) {
|
||||
resultingTranslationPaths.push({ id: externalExtensionId, resourceName: `extensions/${extension}.i18n.json` });
|
||||
} else {
|
||||
resultingTranslationPaths.push({ id: `vscode.${extension}`, resourceName: `extensions/${extension}.i18n.json` });
|
||||
}
|
||||
|
||||
resultingTranslationPaths.push({ id: extensionId, resourceName: `extensions/${extensionId}.i18n.json` });
|
||||
}
|
||||
this.queue(null);
|
||||
})
|
||||
@@ -1257,7 +1004,7 @@ export function prepareI18nPackFiles(externalExtensions: Map<string>, resultingT
|
||||
}
|
||||
|
||||
export function prepareIslFiles(language: Language, innoSetupConfig: InnoSetup): ThroughStream {
|
||||
const parsePromises: Promise<ParsedXLF[]>[] = [];
|
||||
const parsePromises: Promise<l10nJsonDetails[]>[] = [];
|
||||
|
||||
return through(function (this: ThroughStream, xlf: File) {
|
||||
const stream = this;
|
||||
@@ -1266,7 +1013,7 @@ export function prepareIslFiles(language: Language, innoSetupConfig: InnoSetup):
|
||||
parsePromise.then(
|
||||
resolvedFiles => {
|
||||
resolvedFiles.forEach(file => {
|
||||
const translatedFile = createIslFile(file.originalFilePath, file.messages, language, innoSetupConfig);
|
||||
const translatedFile = createIslFile(file.name, file.messages, language, innoSetupConfig);
|
||||
stream.queue(translatedFile);
|
||||
});
|
||||
}
|
||||
@@ -1282,13 +1029,13 @@ export function prepareIslFiles(language: Language, innoSetupConfig: InnoSetup):
|
||||
});
|
||||
}
|
||||
|
||||
function createIslFile(originalFilePath: string, messages: Map<string>, language: Language, innoSetup: InnoSetup): File {
|
||||
function createIslFile(name: string, messages: l10nJsonFormat, language: Language, innoSetup: InnoSetup): File {
|
||||
const content: string[] = [];
|
||||
let originalContent: TextModel;
|
||||
if (path.basename(originalFilePath) === 'Default') {
|
||||
originalContent = new TextModel(fs.readFileSync(originalFilePath + '.isl', 'utf8'));
|
||||
if (path.basename(name) === 'Default') {
|
||||
originalContent = new TextModel(fs.readFileSync(name + '.isl', 'utf8'));
|
||||
} else {
|
||||
originalContent = new TextModel(fs.readFileSync(originalFilePath + '.en.isl', 'utf8'));
|
||||
originalContent = new TextModel(fs.readFileSync(name + '.en.isl', 'utf8'));
|
||||
}
|
||||
originalContent.lines.forEach(line => {
|
||||
if (line.length > 0) {
|
||||
@@ -1311,7 +1058,7 @@ function createIslFile(originalFilePath: string, messages: Map<string>, language
|
||||
}
|
||||
});
|
||||
|
||||
const basename = path.basename(originalFilePath);
|
||||
const basename = path.basename(name);
|
||||
const filePath = `${basename}.${language.id}.isl`;
|
||||
const encoded = iconv.encode(Buffer.from(content.join('\r\n'), 'utf8').toString(), innoSetup.codePage);
|
||||
|
||||
|
||||
Reference in New Issue
Block a user