mirror of
https://github.com/appium/appium.git
synced 2026-01-06 02:09:59 -06:00
docs(appium): Enable the documentation to be translated via Crowdin (#20800)
This commit is contained in:
38
.github/workflows/crowdin-sync-docs-translations.yml
vendored
Normal file
38
.github/workflows/crowdin-sync-docs-translations.yml
vendored
Normal file
@@ -0,0 +1,38 @@
|
||||
# Retrieves non-English translations from Crowdin and creates a PR with new changes

name: Sync Crowdin Docs Translations

on:
  workflow_dispatch:
  # TODO: Uncomment the scheduler as soon as we have existing translations
  # imported to Crowdin
  # schedule:
  #   - cron: 0 0 * * 0

jobs:
  build:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v4
      - name: Use Node.js LTS
        uses: actions/setup-node@v4
        with:
          node-version: lts/*
          cache: 'npm'
      - name: Install Dependencies
        run: npm ci
      - name: Crowdin Sync
        run: npm run crowdin-sync-docs-translations
        env:
          # appium-documentation
          CROWDIN_PROJECT_ID: ${{ vars.CROWDIN_DOCS_PROJECT_ID }}
          CROWDIN_TOKEN: ${{ secrets.CROWDIN_DOCS_TOKEN }}
      # Opens a PR containing whatever the sync script changed in the working tree
      - name: Create Pull Request
        uses: peter-evans/create-pull-request@v7.0.5
        with:
          token: ${{ github.token }}
          commit-message: 'docs(appium): Update documentation translations'
          title: 'docs(appium): Update documentation translations'
          branch: crowdin-sync-${{ github.run_id }}
          body: 'Automated Update of Documentation Translations: https://crowdin.com/project/appium-documentation'
||||
31
.github/workflows/crowdin-update-docs-resources.yml
vendored
Normal file
31
.github/workflows/crowdin-update-docs-resources.yml
vendored
Normal file
@@ -0,0 +1,31 @@
|
||||
# Updates Crowdin with any changes in document files in English

name: Update Crowdin English Docs

on:
  push:
    branches: [master]
    # Only run when translatable sources (or this workflow itself) change
    paths:
      - 'packages/appium/docs/en/**.md'
      - 'packages/appium/docs/mkdocs-en.yml'
      - '.github/workflows/crowdin-update-docs-resources.yml' # this file

jobs:
  build:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v4
      - name: Use Node.js LTS
        uses: actions/setup-node@v4
        with:
          node-version: lts/*
          cache: 'npm'
      - name: Install Dependencies
        run: npm ci
      - name: Crowdin Update
        run: npm run crowdin-update-docs
        env:
          # appium-documentation
          CROWDIN_PROJECT_ID: ${{ vars.CROWDIN_DOCS_PROJECT_ID }}
          CROWDIN_TOKEN: ${{ secrets.CROWDIN_DOCS_TOKEN }}
||||
@@ -38,6 +38,8 @@
|
||||
"clean:interactive": "git clean -d -x -i",
|
||||
"clean:force": "git clean -d -x -f",
|
||||
"clean:workspaces": "lerna run clean || true",
|
||||
"crowdin-sync-docs-translations": "node ./scripts/crowdin-sync-docs-translations.mjs",
|
||||
"crowdin-update-docs": "node ./scripts/crowdin-update-docs-resources.mjs",
|
||||
"dev": "run-s build:compile \"build:workspaces\" \"build:compile -- --watch\"",
|
||||
"dev:docs": "npm run -w packages/appium dev:docs",
|
||||
"docs": "npm run -w packages/appium build:docs",
|
||||
|
||||
@@ -121,3 +121,30 @@ npm run dev:docs
|
||||
```
|
||||
|
||||
You can then view the documentation at `http://127.0.0.1:8000/docs/en`.
|
||||
|
||||
## Translating Appium Documentation
|
||||
|
||||
The process of localizing Appium documentation into languages other than English is automated and is done via
|
||||
the [Crowdin Translations Management System](https://crowdin.com). Do not edit any translated documents
|
||||
directly in the GitHub Appium repository as they are going to be replaced with ones exported from Crowdin
|
||||
during an upcoming sync.
|
||||
|
||||
### Where To Start
|
||||
|
||||
If you would like to contribute to the translation of Appium documents into your language then simply join
|
||||
the translators group for the [Appium Documentation](https://crowdin.com/project/appium-documentation)
|
||||
Crowdin project, and start translating documents there. If you see that your language is missing from
|
||||
the list of available Crowdin languages then simply let us know by creating an
|
||||
[issue](https://github.com/appium/appium/issues).
|
||||
|
||||
### Source Language Updates
|
||||
|
||||
Changes in documents are synchronized to Crowdin automatically via the `Update Crowdin English Docs` GitHub action.
|
||||
This action is triggered automatically as soon as there are any changes under `packages/appium/docs/en/**.md`
|
||||
or `packages/appium/docs/mkdocs-en.yml`.
|
||||
|
||||
### Fetching Translated Documents
|
||||
|
||||
In order to fetch translated files from Crowdin to the GitHub repository it is necessary to trigger
|
||||
the `Sync Crowdin Docs Translations` action. This action should also automatically create a PR with
|
||||
corresponding translated resources included.
|
||||
|
||||
86
scripts/crowdin-common.mjs
Normal file
86
scripts/crowdin-common.mjs
Normal file
@@ -0,0 +1,86 @@
|
||||
import path from 'node:path';

import {logger, fs} from '@appium/support';
import axios from 'axios';
import _ from 'lodash';

// Shared logger for all Crowdin-related scripts
export const log = logger.getLogger('CROWDIN');

// https://developer.crowdin.com/api/v2/
const PROJECT_ID = process.env.CROWDIN_PROJECT_ID;
const API_TOKEN = process.env.CROWDIN_TOKEN;
// Fail fast at import time: nothing below can work without credentials
if (!PROJECT_ID || !API_TOKEN) {
  throw new Error(`Both CROWDIN_PROJECT_ID and CROWDIN_TOKEN environment variables must be set`);
}
// Root folder of the documentation sources in this repository
export const RESOURCES_ROOT = path.resolve('packages', 'appium', 'docs');
// Language the documentation is originally authored in
export const ORIGINAL_LANGUAGE = 'en';
// Only files with this extension are treated as translatable documents
export const DOCUMENTS_EXT = '.md';
// Builds the MkDocs config file name for the given language, e.g. `mkdocs-en.yml`
export const MKDOCS_YAML = (langName) => `mkdocs-${langName}.yml`;
const USER_AGENT = 'Appium CI';
const API_ROOT = 'https://api.crowdin.com/api/v2';
|
||||
|
||||
/**
 * Recursively collects files under `dir` whose names end with `ext`.
 * Entries that cannot be stat-ed (e.g. broken symlinks) are skipped silently.
 *
 * @param {string} dir Root folder to scan
 * @param {string} ext File extension to match, including the leading dot
 * @returns {Promise<string[]>} Paths of all matching files
 */
export async function walk(dir, ext) {
  const matches = [];
  for (const entryName of await fs.readdir(dir)) {
    const entryPath = path.join(dir, entryName);
    let entryStats;
    try {
      entryStats = await fs.stat(entryPath);
    } catch (ign) {
      // best-effort: unreadable entries are ignored
      continue;
    }
    if (entryStats.isDirectory()) {
      const nested = await walk(entryPath, ext);
      matches.push(...nested);
    } else if (entryName.endsWith(ext)) {
      matches.push(entryPath);
    }
  }
  return matches;
}
|
||||
|
||||
/**
 * Sends a single request to the Crowdin v2 REST API.
 * See https://developer.crowdin.com/api/v2/ for endpoint details.
 *
 * @param {string} [suffix=''] URL path appended after the API (or project) root
 * @param {ApiRequestOptions} [opts={}]
 * @returns {Promise<any>} The parsed response body
 */
export async function performApiRequest(suffix = '', opts = {}) {
  const {method = 'GET', payload, headers, params, isProjectSpecific = true} = opts;
  // Most endpoints live under /projects/{id}; storage APIs are global
  const url = isProjectSpecific
    ? `${API_ROOT}/projects/${PROJECT_ID}${suffix}`
    : `${API_ROOT}${suffix}`;
  log.debug(`Sending ${method} request to ${url}`);
  if (_.isPlainObject(payload)) {
    log.debug(`Request payload: ${JSON.stringify(payload)}`);
  }
  const response = await axios({
    method,
    headers: {
      Authorization: `Bearer ${API_TOKEN}`,
      'Content-Type': 'application/json',
      'User-Agent': USER_AGENT,
      ...(headers || {}),
    },
    url,
    params,
    data: payload,
  });
  return response.data;
}

/**
 * @typedef {Object} ApiRequestOptions
 * @property {string} [method='GET'] HTTP verb
 * @property {any} [payload] Request body; logged when it is a plain object
 * @property {axios.AxiosRequestHeaders} headers Extra headers merged over the defaults
 * @property {Record<string, any>} [params] Query string parameters
 * @property {boolean} [isProjectSpecific=true] Whether the URL is scoped to the configured project
 */
|
||||
150
scripts/crowdin-sync-docs-translations.mjs
Normal file
150
scripts/crowdin-sync-docs-translations.mjs
Normal file
@@ -0,0 +1,150 @@
|
||||
import path from 'node:path';
import {fs, net, tempDir, zip} from '@appium/support';
import {waitForCondition} from 'asyncbox';
import {
  log,
  walk,
  ORIGINAL_LANGUAGE,
  performApiRequest,
  RESOURCES_ROOT,
  DOCUMENTS_EXT,
  MKDOCS_YAML,
} from './crowdin-common.mjs';

// Maximum time to wait for a Crowdin translations build to complete
const BUILD_TIMEOUT_MS = 1000 * 60 * 10;
// Possible values of the `status` field reported by the Crowdin builds API
const BUILD_STATUS = {
  finished: 'finished',
  created: 'created',
  inProgress: 'inProgress',
  canceled: 'canceled',
  failed: 'failed',
};
// Maps Crowdin language identifiers to folder names under packages/appium/docs.
// Add new languages here whenever needed
const CROWDIN_TO_FS_LANGUAGES_MAP = {
  ja: 'ja',
  'zh-CN': 'zh',
  // de,es-ES,fr,it,ja,pt-BR,uk
};
|
||||
|
||||
/**
 * Kicks off a fresh translations build on Crowdin.
 *
 * @returns {Promise<number>} Identifier of the newly started build
 */
async function buildTranslations() {
  log.info('Building project translations');
  const response = await performApiRequest('/translations/builds', {method: 'POST'});
  return response.data.id;
}
|
||||
|
||||
/**
 * Waits for the given Crowdin translations build to finish and downloads
 * the resulting archive.
 *
 * @param {number} buildId Identifier returned by the builds API
 * @param {string} dstPath Where the downloaded .zip archive should be stored
 * @returns {Promise<void>}
 * @throws {Error} If the build reports an unexpected status (e.g. canceled or
 * failed) or does not finish within BUILD_TIMEOUT_MS
 */
async function downloadTranslations(buildId, dstPath) {
  log.info(`Waiting up to ${BUILD_TIMEOUT_MS / 1000}s for the build #${buildId} to finish`);
  await waitForCondition(
    async () => {
      const {data: buildData} = await performApiRequest(`/translations/builds/${buildId}`);
      switch (buildData.status) {
        case BUILD_STATUS.finished:
          return true;
        case BUILD_STATUS.inProgress:
        case BUILD_STATUS.created:
          // Still running: keep polling
          return false;
        default:
          // canceled/failed or any unknown status: give up immediately
          throw new Error(`The translations build got an unexpected status '${buildData.status}'`);
      }
    },
    {
      waitMs: BUILD_TIMEOUT_MS,
      intervalMs: 1000,
    },
  );
  // The build is done: fetch the download URL and retrieve the archive
  const {data: downloadData} = await performApiRequest(`/translations/builds/${buildId}/download`);
  log.info(`Downloading translations to '${dstPath}'`);
  await net.downloadFile(downloadData.url, dstPath);
}
|
||||
|
||||
/**
 * Moves all translated documents from the extracted Crowdin archive into
 * the repository folder of the corresponding language.
 *
 * @param {string} srcDir Folder with translated documents for one language
 * @param {string} dstDir Destination language folder in this repository
 * @returns {Promise<void>}
 */
async function syncTranslatedDocuments(srcDir, dstDir) {
  const translatedDocs = await walk(srcDir, DOCUMENTS_EXT);
  if (translatedDocs.length === 0) {
    return;
  }

  let processed = 0;
  for (const translatedDocPath of translatedDocs) {
    const docRelativePath = path.relative(srcDir, translatedDocPath);
    const destinationPath = path.join(dstDir, docRelativePath);
    ++processed;
    log.info(`Synchronizing '${destinationPath}' (${processed} of ${translatedDocs.length})`);
    // mkdirp creates any missing intermediate folders on the destination side
    await fs.mv(translatedDocPath, destinationPath, {mkdirp: true});
  }
}
|
||||
|
||||
/**
 * Moves the translated MkDocs config exported from Crowdin into the repository,
 * renaming it from the source-language name (Crowdin keeps the original file
 * name) to `mkdocs-<dstLanguage>.yml`.
 *
 * @param {string} srcDir Folder with the extracted translations for one language
 * @param {string} dstDir Folder where MkDocs configs live in this repository
 * @param {string} dstLanguage Target language identifier used in the file name
 * @returns {Promise<void>}
 * @throws {Error} If the expected config file is not present in `srcDir`
 */
async function syncTranslatedConfig(srcDir, dstDir, dstLanguage) {
  const configPath = path.join(srcDir, MKDOCS_YAML(ORIGINAL_LANGUAGE));
  const configExists = await fs.exists(configPath);
  if (!configExists) {
    throw new Error(`Did not find the translated MkDocs config at '${configPath}'`);
  }

  const dstPath = path.join(dstDir, MKDOCS_YAML(dstLanguage));
  log.info(`Synchronizing '${dstPath}'`);
  await fs.mv(configPath, dstPath);
}
|
||||
|
||||
/**
 * Entry point: builds translations on Crowdin, downloads the resulting
 * archive and synchronizes translated documents and MkDocs configs for all
 * supported languages into this repository.
 */
async function main() {
  const buildId = await buildTranslations();
  const zipPath = await tempDir.path({prefix: 'translations', suffix: '.zip'});
  try {
    await downloadTranslations(buildId, zipPath);
    const tmpRoot = await tempDir.openDir();
    try {
      await zip.extractAllTo(zipPath, tmpRoot);
      // The archive contains one top-level folder per Crowdin language
      const srcLanguageNames = await fs.readdir(tmpRoot);
      log.info(`Available Crowdin languages: ${srcLanguageNames}`);
      log.info(`Supported languages map: ${JSON.stringify(CROWDIN_TO_FS_LANGUAGES_MAP)}`);
      let count = 0;
      for (const name of srcLanguageNames) {
        const currentPath = path.join(tmpRoot, name);
        // Skip stray files and the original (English) language folder
        if (!(await fs.stat(currentPath)).isDirectory() || name === ORIGINAL_LANGUAGE) {
          continue;
        }

        const dstLanguageName = CROWDIN_TO_FS_LANGUAGES_MAP[name];
        if (!dstLanguageName) {
          // If the target language is not present in the map we ignore it
          continue;
        }

        await syncTranslatedDocuments(currentPath, path.join(RESOURCES_ROOT, dstLanguageName));
        await syncTranslatedConfig(currentPath, RESOURCES_ROOT, dstLanguageName);
        log.info(
          `Successfully updated resources for the '${dstLanguageName}' ` +
          `('${name}' in Crowdin) language (${++count} of ${Object.keys(CROWDIN_TO_FS_LANGUAGES_MAP).length})`
        );
      }
    } finally {
      // Always clean up the extracted archive contents
      await fs.rimraf(tmpRoot);
    }
  } finally {
    // Always clean up the downloaded archive itself
    await fs.rimraf(zipPath);
  }
}

(async () => await main())();
|
||||
320
scripts/crowdin-update-docs-resources.mjs
Normal file
320
scripts/crowdin-update-docs-resources.mjs
Normal file
@@ -0,0 +1,320 @@
|
||||
import path from 'node:path';
import crypto from 'node:crypto';
import {
  log,
  walk,
  ORIGINAL_LANGUAGE,
  performApiRequest,
  RESOURCES_ROOT,
  DOCUMENTS_EXT,
  MKDOCS_YAML,
} from './crowdin-common.mjs';
import {fs} from '@appium/support';

// Folder containing the English (source-language) documents
const LANGUAGE_ROOT = path.resolve(RESOURCES_ROOT, ORIGINAL_LANGUAGE);
// Page size used for Crowdin list requests.
// NOTE(review): only the first page is ever fetched — verify the project
// never contains more than this many files/directories.
const MAX_ITEMS_PER_PAGE = 300;
// Media types used when uploading files to Crowdin storage
const DOCUMENT_CONTENT_TYPE = 'text/markdown';
const MKDOCS_CONTENT_TYPE = 'application/yaml';
||||
|
||||
/**
 * Produces a hex-encoded MD5 digest of the given string.
 * Used to derive stable storage names from file paths.
 *
 * @param {string} str
 * @returns {string} 32-character lowercase hex digest
 */
function toHash(str) {
  const md5 = crypto.createHash('md5');
  md5.update(str);
  return md5.digest('hex');
}
|
||||
|
||||
/**
 * Converts a local file path into the corresponding path inside the Crowdin
 * project (rooted at '/', relative to the English docs folder). Paths outside
 * LANGUAGE_ROOT (e.g. the MkDocs config living one level up) are flattened to
 * just their base name at the Crowdin root.
 *
 * NOTE(review): assumes '/' path separators; on Windows `path.relative` would
 * produce backslashes — confirm this only ever runs on the Linux CI runner.
 *
 * @param {string} fullPath
 * @returns {string}
 */
function toCrowdinPath(fullPath) {
  let result = `/${path.relative(LANGUAGE_ROOT, fullPath)}`;
  // A '..' component means fullPath is not under LANGUAGE_ROOT
  if (result.includes('..')) {
    result = `/${path.basename(fullPath)}`;
  }
  return result;
}
|
||||
|
||||
|
||||
/**
 * Creates a new directory in the Crowdin project.
 *
 * @param {string} name Directory base name
 * @param {string|null|undefined} [parentId] Identifier of the parent directory;
 * omit for a root-level directory
 * @returns {Promise<number>} Identifier of the created directory
 */
async function addDirectory(name, parentId) {
  const response = await performApiRequest('/directories', {
    method: 'POST',
    payload: {name, directoryId: parentId},
  });
  return response.data.id;
}
|
||||
|
||||
/**
 * Registers a previously uploaded storage object as a file in the Crowdin project.
 *
 * @param {string} name File name in Crowdin
 * @param {number} storageId Identifier of the uploaded storage object
 * @param {string|null|undefined} [parentDirectoryId] Directory to place the file in;
 * omit for the project root
 * @returns {Promise<number>} Identifier of the created file
 */
async function addFile(name, storageId, parentDirectoryId) {
  const response = await performApiRequest('/files', {
    method: 'POST',
    payload: {name, storageId, directoryId: parentDirectoryId},
  });
  return response.data.id;
}
|
||||
|
||||
/**
 * Lists files registered in the Crowdin project.
 * NOTE(review): only the first page (up to MAX_ITEMS_PER_PAGE entries) is
 * fetched — confirm the project never exceeds this count.
 *
 * @returns {Promise<Record<string, any>[]>} Raw file descriptors
 */
async function listFiles() {
  const response = await performApiRequest('/files', {
    method: 'GET',
    params: {limit: MAX_ITEMS_PER_PAGE},
  });
  return response.data.map((item) => item.data);
}
|
||||
|
||||
/**
 * Permanently removes a file from the Crowdin project.
 *
 * @param {number} fileId
 * @returns {Promise<void>}
 */
async function deleteFile(fileId) {
  await performApiRequest(`/files/${fileId}`, {method: 'DELETE'});
}
|
||||
|
||||
/**
 * Lists directories in the Crowdin project. When `parentDirectoryId` is given,
 * its descendants are listed recursively (up to 10 levels deep); otherwise only
 * root-level directories are returned.
 *
 * @param {number} [parentDirectoryId] Identifier of the directory to list from
 * @return {Promise<Record<string, any>[]>} Raw directory descriptors
 */
async function listDirectories(parentDirectoryId) {
  const {data: directoriesData} = await performApiRequest('/directories', {
    method: 'GET',
    params: {
      limit: MAX_ITEMS_PER_PAGE,
      directoryId: parentDirectoryId,
      // Fetch the whole subtree in one request when a parent is specified
      recursion: parentDirectoryId ? 10 : undefined,
    },
  });
  return directoriesData.map(({data}) => data);
}
|
||||
|
||||
/**
 * Uploads the contents of a local file into Crowdin storage. Storage objects
 * are global (not project-scoped) and are later referenced by id when adding
 * or updating project files.
 *
 * @param {string} name Should be properly url-encoded
 * @param {string} fullPath Local path of the file to upload
 * @param {string} contentType Should be one of https://www.iana.org/assignments/media-types/media-types.xhtml
 * @returns {Promise<Record<string, any>>} The created storage descriptor
 */
async function addStorage(name, fullPath, contentType) {
  const response = await performApiRequest('/storages', {
    method: 'POST',
    headers: {
      'Crowdin-API-FileName': name,
      'Content-Type': contentType,
    },
    // Stream the file body instead of buffering it in memory
    payload: fs.createReadStream(fullPath),
    isProjectSpecific: false,
  });
  return response.data;
}
|
||||
|
||||
/**
 * Uploads every matched document into Crowdin storage.
 *
 * @param {string[]} matchedFiles Local paths of documents to upload
 * @returns {Promise<Record<string, string>>} Mapping of local file path to storage id
 */
async function uploadDocumentsToStorage(matchedFiles) {
  const storageIdsByPath = {};
  let uploaded = 0;
  for (const filePath of matchedFiles) {
    const crowdinPath = toCrowdinPath(filePath);
    // Hashing is used to make sure we always create the same storage for the same file path in Crowdin
    const storageName = toHash(crowdinPath);
    ++uploaded;
    log.info(`Uploading '${crowdinPath}' to Crowdin storage (${uploaded} of ${matchedFiles.length})`);
    const storageData = await addStorage(storageName, filePath, DOCUMENT_CONTENT_TYPE);
    storageIdsByPath[filePath] = storageData.id;
  }
  return storageIdsByPath;
}
|
||||
|
||||
/**
 * Makes sure every folder containing a matched document exists in Crowdin,
 * creating missing directories level by level (parents before children).
 *
 * @param {string[]} matchedFiles Local paths of documents to be synchronized
 * @returns {Promise<Record<string, number>>} Mapping of Crowdin directory path
 * (e.g. '/guides/advanced') to Crowdin directory id
 */
async function ensureDirectoryStructure(matchedFiles) {
  const uniqueFolderPaths = new Set();
  for (const matchedFile of matchedFiles) {
    const relativePath = path.relative(LANGUAGE_ROOT, path.dirname(matchedFile));
    if (relativePath) {
      uniqueFolderPaths.add(relativePath);
    }
  }
  if (uniqueFolderPaths.size === 0) {
    return {};
  }

  // Process shallower paths first so parent ids are known before children
  const splitPaths = Array.from(uniqueFolderPaths)
    .map((p) => p.split(path.sep))
    .sort((a, b) => a.length - b.length);
  const result = {};
  const topDirectories = await listDirectories();
  for (const splitPath of splitPaths) {
    // BUGFIX: this loop previously used `for (const level in splitPath)`,
    // which yields STRING indices. `level + 1` then performed string
    // concatenation ('1' + 1 === '11'), so `slice(0, level + 1)` produced
    // wrong intermediate paths for folders deeper than two levels, and the
    // `level === 0` checks below never matched.
    for (let level = 0; level < splitPath.length; level++) {
      const pathInCrowdin = `/${splitPath.slice(0, level + 1).join('/')}`;
      const parentPathInCrowdin = path.dirname(pathInCrowdin);
      const parentDirectoryId = level === 0 ? undefined : result[parentPathInCrowdin];
      const subDirectories = level === 0 ? topDirectories : await listDirectories(parentDirectoryId);
      const existingDirectoryData = subDirectories.find((data) => data.path === pathInCrowdin);
      if (existingDirectoryData) {
        log.info(`Crowdin directory '${pathInCrowdin}' already exists`);
        result[pathInCrowdin] = existingDirectoryData.id;
      } else {
        log.info(`Crowdin directory '${pathInCrowdin}' does not exist yet. Adding it`);
        result[pathInCrowdin] = await addDirectory(splitPath[level], parentDirectoryId);
      }
    }
  }
  return result;
}
|
||||
|
||||
/**
 * Makes sure every uploaded storage object has a corresponding file entry
 * in the Crowdin project, creating missing files.
 *
 * @param {Record<string, number>} storageMapping Local file path -> storage id
 * @param {Record<string, number>} directoriesMapping Crowdin dir path -> dir id
 * @param {Record<string, number>[]} existingFilesData Descriptors returned by listFiles()
 * @returns {Promise<Record<string, number>>} Local file path -> Crowdin file id;
 * files that could not be added are omitted
 */
async function ensureFileStructure(storageMapping, directoriesMapping, existingFilesData) {
  const result = {};
  let count = 0;
  for (const [fullPath, storageId] of Object.entries(storageMapping)) {
    const pathInCrowdin = toCrowdinPath(fullPath);
    log.info(`Synchronizing '${pathInCrowdin}' (${++count} of ${Object.keys(storageMapping).length})`);
    const fileData = existingFilesData.find((data) => data.path === pathInCrowdin);
    if (fileData) {
      // The file already exists in Crowdin; only remember its id
      result[fullPath] = fileData.id;
    } else {
      const parentFolderId = directoriesMapping[path.dirname(pathInCrowdin)];
      try {
        const fileId = await addFile(encodeURIComponent(path.basename(pathInCrowdin)), storageId, parentFolderId);
        result[fullPath] = fileId;
      } catch (e) {
        // Best-effort: one failed file should not abort the whole sync
        log.info(`Cannot add '${pathInCrowdin}'. Skipping it`);
        log.warn(e);
        continue;
      }
    }
  }
  return result;
}
|
||||
|
||||
/**
 * Removes Crowdin documents that no longer have a matching source file in the
 * repository. Non-document files (e.g. the MkDocs config) are never deleted.
 *
 * @returns {Promise<void>}
 */
async function cleanupObsoleteDocuments() {
  const [existingFilesData, matchedFiles] = await Promise.all([
    listFiles(),
    walk(LANGUAGE_ROOT, DOCUMENTS_EXT)
  ]);
  const matchedFilePaths = new Set(matchedFiles.map(toCrowdinPath));
  let deletedCount = 0;
  for (const fileData of existingFilesData) {
    const isStillPresent = matchedFilePaths.has(fileData.path);
    const isDocument = fileData.name.endsWith(DOCUMENTS_EXT);
    if (isStillPresent || !isDocument) {
      continue;
    }
    log.info(`Deleting the obsolete document '${fileData.path}'`);
    await deleteFile(fileData.id);
    ++deletedCount;
  }
  if (deletedCount > 0) {
    log.info(`Deleted ${deletedCount} obsolete documents`);
  }
}
|
||||
|
||||
/**
 * Points every known Crowdin file at its freshly uploaded storage object,
 * which effectively replaces the file contents.
 *
 * @param {Record<string, number>} filesMapping Local file path -> Crowdin file id
 * @param {Record<string, number>} storageMapping Local file path -> storage id
 * @returns {Promise<void>}
 */
async function updateFiles(filesMapping, storageMapping) {
  const total = Object.keys(filesMapping).length;
  let updated = 0;
  for (const [fullPath, fileId] of Object.entries(filesMapping)) {
    ++updated;
    log.info(`Updating '${toCrowdinPath(fullPath)}' (${updated} of ${total})`);
    await performApiRequest(`/files/${fileId}`, {
      method: 'PUT',
      payload: {storageId: storageMapping[fullPath]},
    });
  }
}
|
||||
|
||||
/**
 * Synchronizes all English markdown documents to Crowdin: uploads contents
 * to storage, mirrors the folder structure, registers missing files and
 * points existing ones at the new storage objects.
 *
 * @returns {Promise<void>}
 * @throws {Error} If no documents are found under LANGUAGE_ROOT
 */
async function updateDocuments() {
  const matchedFiles = await walk(LANGUAGE_ROOT, DOCUMENTS_EXT);
  if (matchedFiles.length === 0) {
    throw new Error(`Did not find any files matching the '*${DOCUMENTS_EXT}' extension in '${LANGUAGE_ROOT}'`);
  }
  log.info(`Matched ${matchedFiles.length} files from '${LANGUAGE_ROOT}' for upload...`);

  // Storage uploads and directory creation are independent of each other
  const [storageMapping, directoriesMapping] = await Promise.all([
    uploadDocumentsToStorage(matchedFiles),
    ensureDirectoryStructure(matchedFiles),
  ]);
  const existingFilesData = await listFiles();
  const filesMapping = await ensureFileStructure(storageMapping, directoriesMapping, existingFilesData);
  await updateFiles(filesMapping, storageMapping);
}
|
||||
|
||||
/**
 * Uploads the English MkDocs config (mkdocs-en.yml) to Crowdin, registering
 * it as a project file if it is not there yet.
 *
 * @returns {Promise<void>}
 * @throws {Error} If the config file does not exist locally
 */
async function updateMkDocsConfig() {
  const configFileName = MKDOCS_YAML(ORIGINAL_LANGUAGE);
  const matchedFilePath = path.join(RESOURCES_ROOT, configFileName);
  const configExists = await fs.exists(matchedFilePath);
  if (!configExists) {
    throw new Error(`Did not find the MkDocs config at '${matchedFilePath}'`);
  }
  const storageData = await addStorage(encodeURIComponent(configFileName), matchedFilePath, MKDOCS_CONTENT_TYPE);
  const storageMapping = {[matchedFilePath]: storageData.id};
  const existingFilesData = await listFiles();
  // No directories are involved: the config lives at the Crowdin project root
  const filesMapping = await ensureFileStructure(storageMapping, {}, existingFilesData);
  await updateFiles(filesMapping, storageMapping);
}
|
||||
|
||||
/**
 * Entry point: pushes the current English documents and MkDocs config to
 * Crowdin and removes documents that no longer exist locally.
 */
async function main() {
  log.info('Updating documents');
  await updateDocuments();
  await cleanupObsoleteDocuments();
  log.info('Updating MkDocs config');
  await updateMkDocsConfig();

  log.info('All done');
}

(async () => await main())();
|
||||
Reference in New Issue
Block a user