Mirror of https://github.com/Ylianst/MeshCentral.git, synced 2024-12-23 21:55:52 -05:00
Translation tools improvements.

This commit is contained in:
parent bcf5cfbb5e
commit d9595f4bf2

common.js | 15
@@ -243,3 +243,18 @@ module.exports.createTaskLimiterQueue = function (maxTasks, maxTaskTime, cleanin
     return obj;
 }
+
+// Convert string translations to a standardized JSON we can use in GitHub
+// Strings are sorted by English source and object keys are sorted
+module.exports.translationsToJson = function(t) {
+    var arr2 = [], arr = t.strings;
+    for (var i in arr) {
+        var names = [], el = arr[i], el2 = {};
+        for (var j in el) { names.push(j); }
+        names.sort();
+        for (var j in names) { el2[names[j]] = el[names[j]]; }
+        arr2.push(el2);
+    }
+    arr2.sort(function (a, b) { if (a.en > b.en) return 1; if (a.en < b.en) return -1; return 0; });
+    return JSON.stringify({ strings: arr2 }, null, ' ');
+}
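For context, a minimal sketch of what the new common.js helper produces; the sample strings and the require() path are illustrative assumptions, not taken from the repository. Object keys inside each entry come out alphabetically and the entries themselves are ordered by their English ('en') source string, so repeated runs yield byte-identical output:

// Illustrative usage sketch (assumes common.js can be loaded on its own):
var common = require('./common.js');

var sample = {
    strings: [
        { nl: 'Wereld', en: 'World' },
        { nl: 'Hallo', en: 'Hello' }
    ]
};

// Prints a JSON document with "Hello" before "World" and with the keys of
// every entry in alphabetical order (en, nl), independent of input order.
console.log(common.translationsToJson(sample));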
@@ -90,8 +90,14 @@ function decode_utf8(s) { return decodeURIComponent(escape(s)); }
 function data2blob(data) {
     var bytes = new Array(data.length);
     for (var i = 0; i < data.length; i++) bytes[i] = data.charCodeAt(i);
-    var blob = new Blob([new Uint8Array(bytes)]);
-    return blob;
+    return new Blob([new Uint8Array(bytes)]);
 }
 
+// Convert a UTF8 string into a blob
+function utf2blob(str) {
+    var bytes = [], utf8 = unescape(encodeURIComponent(str));
+    for (var i = 0; i < utf8.length; i++) { bytes.push(utf8.charCodeAt(i)); }
+    return new Blob([new Uint8Array(bytes)]);
+}
+
 // Generate random numbers
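The new utf2blob() exists because data2blob() writes one byte per UTF-16 code unit, which only round-trips for characters below U+0100 and silently truncates everything else, while utf2blob() emits the real UTF-8 byte sequence. A small browser-console sketch with made-up strings:

// 'é' (U+00E9) fits in one byte, but that byte is Latin-1, not UTF-8:
data2blob('café').size;   // 4 - a UTF-8 reader will misread the last byte
utf2blob('café').size;    // 5 - 'é' becomes the two bytes 0xC3 0xA9

// Characters above U+00FF are truncated outright by Uint8Array:
data2blob('翻译').size;   // 2 - each code unit is reduced to its low byte
utf2blob('翻译').size;    // 6 - each character becomes three UTF-8 bytes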
@@ -383,8 +383,23 @@
     xdr.send(JSON.stringify({ 'action': 'setTranslations', strings: translations }));
 }
 
+// Convert the translations to a standardized JSON we can use in GitHub
+// Strings are sorted by English source and object keys are sorted
+function translationsToJson(t) {
+    var arr2 = [], arr = t.strings;
+    for (var i in arr) {
+        var names = [], el = arr[i], el2 = {};
+        for (var j in el) { names.push(j); }
+        names.sort();
+        for (var j in names) { el2[names[j]] = el[names[j]]; }
+        arr2.push(el2);
+    }
+    arr2.sort(function (a, b) { if (a.en > b.en) return 1; if (a.en < b.en) return -1; return 0; });
+    return JSON.stringify({ strings: arr2 }, null, ' ');
+}
+
 function saveToFile() {
-    saveAs(data2blob(JSON.stringify({ strings: translations })), 'translate.json');
+    saveAs(utf2blob(translationsToJson({ strings: translations })), 'translate.json');
 }
 
 function setTranslation() {
Binary file not shown.
@@ -297,7 +297,7 @@ function fromtext(source, target, lang) {
         }
     }
 
-    fs.writeFileSync(source + '-new', JSON.stringify(sourceLangFileData), { flag: 'w+' });
+    fs.writeFileSync(source + '-new', translationsToJson(sourceLangFileData), { flag: 'w+' });
     console.log('Done.');
 }
 
@@ -332,7 +332,7 @@ function merge(source, target, lang) {
     for (var i in index) { targetData.strings.push(index[i]); }
 
     // Save the target back
-    fs.writeFileSync(target, JSON.stringify(targetData, null, ' '), { flag: 'w+' });
+    fs.writeFileSync(target, translationsToJson(targetData), { flag: 'w+' });
     console.log('Done.');
 }
 
@@ -387,7 +387,7 @@ function extract(langFile, sources) {
         //if ((sourceStrings[i].xloc != null) && (sourceStrings[i].xloc.length > 0)) { output.push(sourceStrings[i]); } // Only save results that have a source location.
         output.push(sourceStrings[i]); // Save all results
     }
-    fs.writeFileSync(langFile, JSON.stringify({ 'strings': output }, null, ' '), { flag: 'w+' });
+    fs.writeFileSync(langFile, translationsToJson({ strings: output }), { flag: 'w+' });
     console.log(format("{0} strings in output file.", count));
     process.exit();
     return;
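The three changes above (fromtext, merge, extract) all route the tool's output through the same helper, so the files it writes no longer depend on the order in which language keys happened to be added. A quick sketch, with illustrative strings, of the property this buys and why re-generated files now diff cleanly on GitHub:

// Same data, keys and entries supplied in different orders:
var a = { strings: [{ en: 'Save', fr: 'Enregistrer' }, { en: 'Cancel', fr: 'Annuler' }] };
var b = { strings: [{ fr: 'Annuler', en: 'Cancel' }, { fr: 'Enregistrer', en: 'Save' }] };

console.log(translationsToJson(a) === translationsToJson(b));               // true
console.log(JSON.stringify(a, null, ' ') === JSON.stringify(b, null, ' ')); // false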
@@ -653,4 +653,19 @@ function InstallModule(modulename, func, tag1, tag2) {
         func(tag1, tag2);
         return;
     });
 }
+
+// Convert the translations to a standardized JSON we can use in GitHub
+// Strings are sorted by English source and object keys are sorted
+function translationsToJson(t) {
+    var arr2 = [], arr = t.strings;
+    for (var i in arr) {
+        var names = [], el = arr[i], el2 = {};
+        for (var j in el) { names.push(j); }
+        names.sort();
+        for (var j in names) { el2[names[j]] = el[names[j]]; }
+        arr2.push(el2);
+    }
+    arr2.sort(function (a, b) { if (a.en > b.en) return 1; if (a.en < b.en) return -1; return 0; });
+    return JSON.stringify({ strings: arr2 }, null, ' ');
+}
translate/translate.json | 16552

File diff suppressed because it is too large.
@@ -1989,7 +1989,7 @@ module.exports.CreateWebServer = function (parent, db, args, certificates) {
                 try { res.sendFile(obj.path.join(__dirname, 'translate', 'translate.json')); } catch (ex) { res.sendStatus(404); }
             } else { res.sendStatus(404); }
         } else if (data.action == 'setTranslations') {
-            obj.fs.writeFile(obj.path.join(obj.parent.datapath, 'translate.json'), JSON.stringify({ strings: data.strings }), function (err) { if (err == null) { res.send(JSON.stringify({ response: 'ok' })); } else { res.send(JSON.stringify({ response: err })); } });
+            obj.fs.writeFile(obj.path.join(obj.parent.datapath, 'translate.json'), obj.common.translationsToJson({ strings: data.strings }), function (err) { if (err == null) { res.send(JSON.stringify({ response: 'ok' })); } else { res.send(JSON.stringify({ response: err })); } });
         } else if (data.action == 'translateServer') {
             if (obj.pendingTranslation === true) { res.send(JSON.stringify({ response: 'Server is already performing a translation.' })); return; }
             const nodeVersion = Number(process.version.match(/^v(\d+\.\d+)/)[1]);
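With obj.common.translationsToJson(), the copy saved under the server's data path is normalized the same way as the file produced by the translation tool. Below is a minimal standalone sketch of that write-and-reply pattern, not the actual MeshCentral handler; the helper name and the assumption that common.js is loaded with require() are illustrative:

// Hypothetical helper illustrating the write-and-reply pattern used above.
var fs = require('fs'), path = require('path');
var common = require('./common.js');

function saveTranslations(datapath, strings, res) {
    fs.writeFile(path.join(datapath, 'translate.json'),
        common.translationsToJson({ strings: strings }),
        function (err) {
            // Reply 'ok' on success, or echo the error back to the caller.
            res.send(JSON.stringify({ response: (err == null) ? 'ok' : err }));
        });
}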