Merge branch 'release-v1.0.2'
commit 8543c7b9ff
29 changed files with 316 additions and 123 deletions
@@ -1,5 +1,7 @@
 #!/usr/bin/env node
 
+const loaderUtils = require("loader-utils");
+
 // copies the resources into the webapp directory.
 //
 
@@ -61,12 +63,6 @@ const COPY_LIST = [
     ["./config.json", "webapp", { directwatch: 1 }],
 ];
 
-INCLUDE_LANGS.forEach(function(l) {
-    COPY_LIST.push([
-        l.value, "webapp/i18n/", { lang: 1 },
-    ]);
-});
-
 const parseArgs = require('minimist');
 const Cpx = require('cpx');
 const chokidar = require('chokidar');
@@ -77,8 +73,8 @@ const argv = parseArgs(
     process.argv.slice(2), {}
 );
 
-var watch = argv.w;
-var verbose = argv.v;
+const watch = argv.w;
+const verbose = argv.v;
 
 function errCheck(err) {
     if (err) {
@@ -136,39 +132,11 @@ function next(i, err) {
                 .on('change', copy)
                 .on('ready', cb)
                 .on('error', errCheck);
-        } else if (opts.lang) {
-            const reactSdkFile = 'node_modules/matrix-react-sdk/src/i18n/strings/' + source + '.json';
-            const riotWebFile = 'src/i18n/strings/' + source + '.json';
-
-            // XXX: Use a debounce because for some reason if we read the language
-            // file immediately after the FS event is received, the file contents
-            // appears empty. Possibly https://github.com/nodejs/node/issues/6112
-            let makeLangDebouncer;
-            const makeLang = () => {
-                if (makeLangDebouncer) {
-                    clearTimeout(makeLangDebouncer);
-                }
-                makeLangDebouncer = setTimeout(() => {
-                    genLangFile(source, dest);
-                }, 500);
-            };
-
-            [reactSdkFile, riotWebFile].forEach(function(f) {
-                chokidar.watch(f)
-                    .on('add', makeLang)
-                    .on('change', makeLang)
-                    //.on('ready', cb) We'd have to do this when both files are ready
-                    .on('error', errCheck);
-            });
-            next(i + 1, err);
         } else {
             cpx.on('watch-ready', cb);
            cpx.on("watch-error", cb);
             cpx.watch();
         }
-    } else if (opts.lang) {
-        genLangFile(source, dest);
-        next(i + 1, err);
     } else {
         cpx.copy(cb);
     }
@@ -195,21 +163,28 @@ function genLangFile(lang, dest) {
 
     translations = weblateToCounterpart(translations);
 
-    fs.writeFileSync(dest + lang + '.json', JSON.stringify(translations, null, 4));
+    const json = JSON.stringify(translations, null, 4);
+    const jsonBuffer = Buffer.from(json);
+    const digest = loaderUtils.getHashDigest(jsonBuffer, null, null, 7);
+    const filename = `${lang}.${digest}.json`;
+
+    fs.writeFileSync(dest + filename, json);
     if (verbose) {
-        console.log("Generated language file: " + lang);
+        console.log("Generated language file: " + filename);
     }
+
+    return filename;
 }
 
-function genLangList() {
+function genLangList(langFileMap) {
     const languages = {};
     INCLUDE_LANGS.forEach(function(lang) {
         const normalizedLanguage = lang.value.toLowerCase().replace("_", "-");
         const languageParts = normalizedLanguage.split('-');
         if (languageParts.length == 2 && languageParts[0] == languageParts[1]) {
-            languages[languageParts[0]] = {'fileName': lang.value + '.json', 'label': lang.label};
+            languages[languageParts[0]] = {'fileName': langFileMap[lang.value], 'label': lang.label};
         } else {
-            languages[normalizedLanguage] = {'fileName': lang.value + '.json', 'label': lang.label};
+            languages[normalizedLanguage] = {'fileName': langFileMap[lang.value], 'label': lang.label};
         }
     });
     fs.writeFile('webapp/i18n/languages.json', JSON.stringify(languages, null, 4), function(err) {
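Note on the hunk above: genLangFile() now writes each generated translation file under a content-hashed name instead of a plain <lang>.json, so a changed translation gets a new filename (cache busting). A minimal sketch of the naming step under the same assumptions as the diff (loader-utils installed; the translations object here is a stand-in for the merged react-sdk/riot-web strings):

    const loaderUtils = require("loader-utils");

    // Stand-in for the merged translations the script builds for one language.
    const translations = { "Sign In": "Sign In" };

    const json = JSON.stringify(translations, null, 4);
    // 7-character digest of the serialised file, mirroring the call in the diff
    // (hash type and digest type are left as loader-utils defaults).
    const digest = loaderUtils.getHashDigest(Buffer.from(json), null, null, 7);
    const filename = `en_EN.${digest}.json`; // digest value varies with content

Because the filename is no longer predictable, genLangList() now takes a lang-to-filename map rather than assuming <lang>.json, and languages.json is regenerated from that map.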
|
@@ -257,5 +232,50 @@ function weblateToCounterpart(inTrs) {
     return outTrs;
 }
 
-genLangList();
+/**
+ watch the input files for a given language,
+ regenerate the file, adding its content-hashed filename to langFileMap
+ and regenerating languages.json with the new filename
+ */
+function watchLanguage(lang, dest, langFileMap) {
+    const reactSdkFile = 'node_modules/matrix-react-sdk/src/i18n/strings/' + lang + '.json';
+    const riotWebFile = 'src/i18n/strings/' + lang + '.json';
+
+    // XXX: Use a debounce because for some reason if we read the language
+    // file immediately after the FS event is received, the file contents
+    // appears empty. Possibly https://github.com/nodejs/node/issues/6112
+    let makeLangDebouncer;
+    const makeLang = () => {
+        if (makeLangDebouncer) {
+            clearTimeout(makeLangDebouncer);
+        }
+        makeLangDebouncer = setTimeout(() => {
+            const filename = genLangFile(lang, dest);
+            langFileMap[lang]=filename;
+            genLangList(langFileMap);
+        }, 500);
+    };
+
+    [reactSdkFile, riotWebFile].forEach(function(f) {
+        chokidar.watch(f)
+            .on('add', makeLang)
+            .on('change', makeLang)
+            .on('error', errCheck);
+    });
+}
+
+// language resources
+const I18N_DEST = "webapp/i18n/";
+const I18N_FILENAME_MAP = INCLUDE_LANGS.reduce((m, l) => {
+    const filename = genLangFile(l.value, I18N_DEST);
+    m[l.value] = filename;
+    return m;
+}, {});
+genLangList(I18N_FILENAME_MAP);
+
+if (watch) {
+    INCLUDE_LANGS.forEach(l => watchLanguage(l.value, I18N_DEST, I18N_FILENAME_MAP));
+}
+
+// non-language resources
 next(0);
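Note on the two hunks above: language files are no longer copied via COPY_LIST/next(); instead the script generates them up front (the INCLUDE_LANGS.reduce building I18N_FILENAME_MAP), writes languages.json from that map, and, when run with -w, attaches a debounced chokidar watcher per language via watchLanguage(). A rough standalone sketch of that debounce-and-regenerate pattern (watchOneLanguage and regenerate are illustrative names, not from the script):

    const chokidar = require("chokidar");

    // Stand-in for genLangFile() + genLangList() in the script above.
    function regenerate(lang) {
        console.log("regenerating " + lang);
    }

    // Debounce because the file can appear empty if read immediately after
    // the FS event (see the XXX comment in the diff): wait 500ms of quiet.
    function watchOneLanguage(lang, files) {
        let timer;
        const onEvent = () => {
            if (timer) clearTimeout(timer);
            timer = setTimeout(() => regenerate(lang), 500);
        };
        files.forEach((f) => {
            chokidar.watch(f)
                .on("add", onEvent)
                .on("change", onEvent)
                .on("error", (err) => { throw err; });
        });
    }

    // Hypothetical usage; the real script watches the matrix-react-sdk and
    // riot-web string files for every entry in INCLUDE_LANGS.
    watchOneLanguage("en_EN", ["src/i18n/strings/en_EN.json"]);

Each watcher re-runs genLangFile() for its language, records the new hashed filename in the shared map, and rewrites languages.json so the webapp always points at the current files.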
@@ -1,4 +1,10 @@
 #!/bin/bash
+#
+# Converts an svg logo into the various image resources required by
+# the various platforms deployments.
+#
+# On debian-based systems you need these deps:
+# apt-get install xmlstarlet python3-cairosvg icnsutils
 
 if [ $# != 1 ]
 then
@@ -52,7 +58,23 @@ cp "$tmpdir/256.png" "$tmpdir/Riot.iconset/icon_256x256.png"
 cp "$tmpdir/512.png" "$tmpdir/Riot.iconset/icon_256x256@2x.png"
 cp "$tmpdir/512.png" "$tmpdir/Riot.iconset/icon_512x512.png"
 cp "$tmpdir/1024.png" "$tmpdir/Riot.iconset/icon_512x512@2x.png"
-iconutil -c icns -o electron_app/build/icon.icns "$tmpdir/Riot.iconset"
+
+if [ -x "$(command -v iconutil)" ]; then
+    # available on macos
+    iconutil -c icns -o electron_app/build/icon.icns "$tmpdir/Riot.iconset"
+elif [ -x "$(command -v png2icns)" ]; then
+    # available on linux
+    # png2icns is more finicky about its input than iconutil
+    # 1. it doesn't support a 64x64 (aka 32x32@2x)
+    # 2. it doesn't like duplicates (128x128@2x == 256x256)
+    rm "$tmpdir/Riot.iconset/icon_128x128@2x.png"
+    rm "$tmpdir/Riot.iconset/icon_256x256@2x.png"
+    rm "$tmpdir/Riot.iconset/icon_16x16@2x.png"
+    rm "$tmpdir/Riot.iconset/icon_32x32@2x.png"
+    png2icns electron_app/build/icon.icns "$tmpdir"/Riot.iconset/*png
+else
+    echo "WARNING: Unsupported platform. Skipping icns build"
+fi
 
 cp "$tmpdir/36.png" "res/vector-icons/android-chrome-36x36.png"
 cp "$tmpdir/48.png" "res/vector-icons/android-chrome-48x48.png"