Files
requestarr/src/i18n/extractMessages.ts
Gauthier 989af67c0a refactor: update Next.js, React.js and Node.js (#815)
* refactor: update Next.js and React.js

* refactor: update Next.js images

* refactor: update ESLint rules and fix warnings/errors

* fix: remove old intl polyfill

* fix: add proper size to next/image components

* fix: adjust full-size for next/image components

* fix: temporarily allow all domains for image optimization

* build: fix an issue where the dev environment could lead to a JavaScript heap out-of-memory error

* fix: resolve webpack cache issue with country-flag-icons

* refactor: switch compiler from Babel to SWC

* fix: resize logo in sidebar

* fix: break word on long path to avoid text overflow

* chore: added sharp for production image optimisation

* fix: change extract script for i18n to a custom script

* fix: resolve GitHub CodeQL alert

* chore: temporarily remove builds for ARMv7

* fix: resize avatar images

* refactor: update Node.js to v20

* fix: resolve various UI issues

* build: migrate yarn to pnpm and restrict engine to node@^20.0.0

* ci: specify the pnpm version to use in workflow actions

* ci: fix typo in pnpm action-setup for cypress workflow

* test(cypress): use pnpm instead of yarn

* style: ran prettier on pnpm-lock

* ci(cypress): setup nodejs v20 in cypress workflow

* ci: pnpm cache to reduce install time

* ci: use sh shell to get pnpm store directory

* build(dockerfile): migrate to pnpm from yarn in docker builds

* build(dockerfile): copy the proper pnpm lockfile

* build: install pnpm for all platforms

* build(dockerfile): remove unnecessary `&&` on apk installation steps

* build: migrate pnpm 8 to 9

* build(dockerfile): add node-gyp back in

* build(dockerfile): install node-gyp through npm

* build(dockerfile): ignore scripts to not run husky install when devdependencies are pruned

* build: migrate to pnpm from yarn

* chore: remove a section that is no longer relevant

---------

Co-authored-by: fallenbagel <98979876+Fallenbagel@users.noreply.github.com>
2024-06-23 23:43:54 +02:00

114 lines
3.5 KiB
TypeScript

import { promises as fs } from 'fs';
import { join } from 'path';
// get all file content recursively
async function getFiles(dir: string): Promise<string[]> {
const dirents = await fs.readdir(dir, { withFileTypes: true });
const files = await Promise.all(
dirents.map((dirent) => {
const res = join(dir, dirent.name);
return dirent.isDirectory() ? getFiles(res) : res;
})
);
return Array.prototype.concat(...files);
}
// extract the i18n messages from the file
async function extractMessages(
filePath: string
): Promise<{ namespace: string; messages: Record<string, string> } | null> {
const content = await fs.readFile(filePath, 'utf8');
const regex = /defineMessages\(\n?\s*'(.+?)',\n?\s*\{([\s\S]+?)\}\n?\);/;
const match = content.match(regex);
if (match) {
const [, namespace, messages] = match;
try {
const formattedMessages = messages
.trim()
.replace(/^\s*(['"])?([a-zA-Z0-9_-]+)(['"])?:/gm, '"$2":')
.replace(
/'.*'/g,
(match) =>
`"${match
.match(/'(.*)'/)?.[1]
.replace(/\\/g, '\\\\')
.replace(/"/g, '\\"')}"`
)
.replace(/,$/, '');
const messagesJson = JSON.parse(`{${formattedMessages}}`);
return { namespace: namespace.trim(), messages: messagesJson };
} catch (e) {
return null;
}
}
return null;
}
async function processMessages(dir: string): Promise<string> {
// get messages from all files and sort them by namespace
const files = await getFiles(dir);
const extractedMessagesGroups = await Promise.all(files.map(extractMessages));
// group messages by namespace
const messagesByNamespace: {
namespace: string;
messages: Record<string, string>;
}[] = [];
const namespaces = [
...new Set(extractedMessagesGroups.map((msg) => msg?.namespace)),
];
for (const namespace of namespaces) {
if (!namespace) continue;
const filteredMessagesGroups = extractedMessagesGroups
.filter((msg) => msg?.namespace === namespace)
.map((msg) => msg?.messages);
for (const extractedMessages of filteredMessagesGroups) {
if (!extractedMessages) continue;
const previousNamespaceMessages = messagesByNamespace.find(
(msg) => msg.namespace === namespace
);
if (previousNamespaceMessages) {
Object.assign(previousNamespaceMessages.messages, extractedMessages);
} else {
messagesByNamespace.push({ namespace, messages: extractedMessages });
}
}
}
messagesByNamespace.sort((a, b) => {
if (!a || !b) return 0;
if (
a.namespace.startsWith(b.namespace) ||
b.namespace.startsWith(a.namespace)
) {
const aLevel = a.namespace.match(/\./g)?.length || 0;
const bLevel = b.namespace.match(/\./g)?.length || 0;
return bLevel - aLevel;
}
return a.namespace.localeCompare(b.namespace);
});
// add every messages from every namespace to an object
const result: Record<string, string> = {};
for (const extractedMessages of messagesByNamespace) {
const { namespace, messages } = extractedMessages;
for (const key of Object.keys(messages).sort()) {
result[`${namespace}.${key}`] = messages[key];
}
}
return JSON.stringify(result, null, ' ') + '\n';
}
async function saveMessages() {
const directoryPath = './src/';
const resultPath = './src/i18n/locale/en.json';
const result = await processMessages(directoryPath);
await fs.writeFile(resultPath, result);
}
saveMessages();
export {};