refactor: migrate NPM to Yarn and add TypeScript and Prettier

pull/4042/head
Menci 2022-07-10 16:35:32 +08:00 committed by Wenzhuo Liu
parent 0bcb7c6519
commit 1afc0eb0f4
27 changed files with 7791 additions and 23115 deletions

4
.bashrc vendored
View File

@ -87,8 +87,8 @@ alias wiki-upd='export LC_ALL=C.UTF-8 && cd /OI-wiki && git pull origin master'
alias wiki-theme='export LC_ALL=C.UTF-8 && cd /OI-wiki && chmod +x ./scripts/build.sh && ./scripts/build.sh'
alias wiki-bld='export LC_ALL=C.UTF-8 && cd /OI-wiki && mkdocs build -v'
alias wiki-svr='export LC_ALL=C.UTF-8 && cd /OI-wiki && mkdocs serve -v'
alias wiki-bld-math='export LC_ALL=C.UTF-8 && cd /OI-wiki && mkdocs build -v && find ./site -type f -name "*.html" -exec node --max_old_space_size=512 ./scripts/render_math.js {} \;'
alias wiki-o='export LC_ALL=C.UTF-8 && cd /OI-wiki && remark ./docs -o --silent'
alias wiki-bld-math='export LC_ALL=C.UTF-8 && cd /OI-wiki && mkdocs build -v && env NODE_OPTIONS="--max_old_space_size=3072" yarn ts-node-esm ./scripts/render_math.ts'
alias wiki-o='export LC_ALL=C.UTF-8 && cd /OI-wiki && yarn remark ./docs -o --silent'
# Alias definitions.
# You may want to put all your additions into a separate file like

5
.github/CODEOWNERS vendored
View File

@ -6,6 +6,7 @@
.github/pull_request_template.md @Ir1d
.github/workflows/build-pdf.yml @Enter-tainer
.github/workflows/build.yml @billchenchina
.github/workflows/build-authors-cache.yml @Menci
.github/workflows/celebration.yml @ouuan
.github/workflows/pr-stale.yml @CoelacanthusHex
.github/ISSUE_TEMPLATE/ @Ir1d
@ -14,12 +15,14 @@
Dockerfile @CoelacanthusHex
docs/intro/docker-deploy.md @CoelacanthusHex
scripts/ @Ir1d @billchenchina
.prettierrc @Menci
.clang-format @Ir1d
.remarkignore @Enter-tainer
.remarkrc @Ir1d @Enter-tainer
CODE_OF_CONDUCT.md @Ir1d
README.md @Ir1d
gulpfile.js @Ir1d @Enter-tainer
gulpfile.cjs @Ir1d @Enter-tainer
package.json @Ir1d @Enter-tainer
requirements.txt @Ir1d
runtime.txt @Ir1d
tsconfig.json @Menci

View File

@ -22,17 +22,17 @@ jobs:
id: cache
with:
path: node_modules
key: ${{ runner.os }}-node-${{ hashFiles('package-lock.json') }}
key: ${{ runner.os }}-node-${{ hashFiles('yarn.lock') }}
restore-keys: ${{ runner.os }}-node-
- name: Install Dependencies
if: steps.cache.outputs.cache-hit != 'true'
run: npm install
run: yarn --frozen-lockfile --production
- name: Fetch Authors
run: |
rm -rf authors.json
# Fetch authors list with 10 concurrency requests
node scripts/fetch-authors.js 10
yarn ts-node-esm scripts/update-authors-cache.ts 10
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Commit Authors Cache

View File

@ -31,23 +31,25 @@ jobs:
id: cache-node
with:
path: node_modules
key: ${{ runner.os }}-node-${{ hashFiles('package-lock.json') }}
key: ${{ runner.os }}-node-${{ hashFiles('yarn.lock') }}
restore-keys: ${{ runner.os }}-node-
- name: Install Node.js dependencies
if: steps.cache-node.outputs.cache-hit != 'true'
run: npm install
run: yarn --frozen-lockfile --production
- name: Page Build
run: |
chmod +x ./scripts/build.sh && ./scripts/build.sh
mkdocs build -v
- name: Render git history info
run: ./scripts/render_git_history_info.sh
- name: Render commits info
run: ./scripts/render-commits-info.sh
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Render math
run: node --max_old_space_size=3072 ./scripts/render_math.js
run: yarn ts-node-esm ./scripts/render_math.ts
env:
NODE_OPTIONS: --max_old_space_size=3072
- name: Gulp minify
run: npx gulp minify
run: yarn gulp -f gulpfile.cjs minify
- name: Generate redirects
run: python scripts/gen_redirect.py
- name: Deploy to gh-pages

38
.github/workflows/check-scripts.yml vendored Normal file
View File

@ -0,0 +1,38 @@
# CI workflow: validate the TypeScript build scripts (formatting + type check)
# on pull requests that touch scripts/ or the tooling configuration.
name: Check Scripts
on:
  pull_request:
    branches:
      - master
    paths:
      - scripts/**
      - package.json
      - yarn.lock
      - .prettierrc
      - tsconfig.json
  # Allow manual runs from the Actions tab.
  workflow_dispatch:
jobs:
  build:
    name: Check Scripts
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-node@v2
        with:
          node-version: '18.x'
          check-latest: true
      # Reuse node_modules across runs, keyed on the Yarn lockfile.
      - name: Cache Node.js dependencies
        uses: actions/cache@v2
        id: cache-node
        with:
          path: node_modules
          key: ${{ runner.os }}-node-${{ hashFiles('yarn.lock') }}
          restore-keys: ${{ runner.os }}-node-
      - name: Install Node.js dependencies
        if: steps.cache-node.outputs.cache-hit != 'true'
        run: yarn --frozen-lockfile
      # Prettier check only (no writes); see package.json "scripts:format:check".
      - name: Check Prettier Format
        run: yarn scripts:format:check
      # Type-check only; tsconfig.json sets "noEmit": true.
      - name: Check TypeScript Compile
        run: yarn tsc

6
.prettierrc Normal file
View File

@ -0,0 +1,6 @@
{
"endOfLine": "lf",
"printWidth": 120,
"arrowParens": "avoid",
"trailingComma": "none"
}

View File

@ -67,7 +67,7 @@ members of the project's leadership.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
available at <https://www.contributor-covenant.org/version/1/4/code-of-conduct.html>
[homepage]: https://www.contributor-covenant.org

4
Dockerfile vendored
View File

@ -6,14 +6,14 @@ WORKDIR /
RUN apt-get update \
&& apt-get install -y git wget curl python3 python3-pip gcc g++ make \
&& curl https://bootstrap.pypa.io/get-pip.py | python3 \
&& curl -sL https://deb.nodesource.com/setup_10.x | bash - \
&& curl -sL https://deb.nodesource.com/setup_18.x | bash - \
&& apt-get install -y nodejs
# If you can't connect to GitHub, set WIKI_REPO to any mirror repo.
RUN git clone ${WIKI_REPO:-https://github.com/OI-wiki/OI-wiki.git} --depth=1 \
&& cd OI-wiki \
&& pip install -U -r requirements.txt \
&& npm install
&& yarn --frozen-lockfile
ADD .bashrc /root/

22731
package-lock.json generated vendored

File diff suppressed because it is too large Load Diff

21
package.json vendored
View File

@ -3,11 +3,14 @@
"version": "1.0.0",
"description": "wiki for OI / ACM-ICPC",
"main": "index.js",
"type": "module",
"directories": {
"doc": "docs"
},
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
"scripts:format": "prettier --write \"scripts/**/*.ts\"",
"scripts:format:check": "prettier --check \"scripts/**/*.ts\"",
"docs:format:remark": "remark . -o"
},
"repository": {
"type": "git",
@ -25,8 +28,7 @@
},
"homepage": "https://github.com/OI-wiki/OI-wiki#readme",
"dependencies": {
"@types/cheerio": "^0.22.31",
"@types/klaw": "^3.0.3",
"@swc/core": "^1.2.210",
"bluebird": "^3.7.2",
"chalk": "^4.1.0",
"cheerio": "^1.0.0-rc.12",
@ -41,13 +43,22 @@
"octokit": "^1.8.1",
"remark-clang-format": "^1.9.3",
"remark-cli": "^8.0.0",
"remark-copywriting-correct": "^0.5.0",
"remark-copywriting-correct": "0.6.0",
"remark-details": "^3.0.0",
"remark-footnotes": "^1.0.0",
"remark-lint-final-newline": "^1.0.2",
"remark-lint-no-tabs": "^1.0.2",
"remark-math": "^1.0.5",
"remark-math-space": "^2.1",
"remark-preset-lint-markdown-style-guide": "^4.0.0"
"remark-preset-lint-markdown-style-guide": "^4.0.0",
"ts-node": "^10.8.2"
},
"devDependencies": {
"@types/bluebird": "^3.5.36",
"@types/cheerio": "^0.22.31",
"@types/hexo-fs": "^0.2.10",
"@types/klaw": "^3.0.3",
"prettier": "^2.7.1",
"typescript": "^4.7.4"
}
}

6
scripts/authors-cache.ts Normal file
View File

@ -0,0 +1,6 @@
/**
 * Map from a git author email (lower-cased) to the author's display name and,
 * when the commit author is linked to a GitHub account, their GitHub username.
 *
 * `githubUsername` is optional: update-authors-cache.ts stores `undefined` for
 * authors whose email is not linked to a GitHub account, and
 * render-commits-info.ts already guards reads with a truthiness check.
 */
export type AuthorUserMap = Record<string, { name: string; githubUsername?: string }>;

/** Shape of the cached `authors.json` file. */
export interface AuthorsCache {
  // ISO-8601 timestamp of the newest commit already covered by the cache;
  // used to fetch only newer commits on the next incremental update.
  latestCommitTime: string;
  // Accumulated author data, keyed by lower-cased email.
  userMap: AuthorUserMap;
}

View File

@ -1,88 +0,0 @@
const fs = require("fs");
const child_process = require("child_process");
const { Octokit } = require("octokit");
const AUTHORS_FILE = "authors.json";
// Increase this value carefully since it should cause API token limit exceeded
const fetchConcurrency = Number(process.argv[2]) || 1;
// Get current GitHub repo path
const repo = child_process.execSync("git remote get-url origin | perl -ne '/([^:\\/]+\\/[^:\\/]+?)(?:\\.git)?$/s and print $1'", { stdio: 'pipe' }).toString().trim()
/**
* @return {Promise<{
* latestCommitTime: string;
* userMap: Record<string, { name: string; githubUsername: string; }>;
* }>} map of email to { name, githubUsername }
*/
async function fetchUserMap(since) {
console.log(`Fetching commits newer than: ${since || "(fetch all)"}`);
const octokit = new Octokit({
auth: process.env.GITHUB_TOKEN
});
/**
* @type {Record<string, { name: string; githubUsername: string; }>}
*/
const result = {};
let latestCommitTime = 0;
for (let i = 1; ; i += fetchConcurrency) {
const responses = await Promise.all(
Array(fetchConcurrency).fill().map((_, j) => i + j).map(page =>
octokit.request(`GET /repos/${repo}/commits`, {
per_page: 100,
page,
...since ? { since } : {}
})
)
);
const data = responses.flatMap(response => response.data);
if (data.length === 0) break;
for (const item of data) {
const commitTime = +new Date(item.commit.committer.date);
if (latestCommitTime < commitTime) {
latestCommitTime = commitTime;
}
const email = item.commit.author.email.toLowerCase();
const name = item.commit.author.name;
if (name.includes("[bot]")) continue;
if (!(email in result)) result[email] = {};
result[email].name = name;
if (item.author && item.author.login)
result[email].githubUsername = item.author.login;
}
}
return {
latestCommitTime: new Date(latestCommitTime).toISOString(),
userMap: result
};
}
(async () => {
let lastLastestCommitTime = "";
let oldUserMap = {};
try {
const oldData = JSON.parse(fs.readFileSync(AUTHORS_FILE, "utf-8"));
lastLastestCommitTime = !Number.isNaN(+new Date(oldData.latestCommitTime)) ? oldData.latestCommitTime : "";
if (lastLastestCommitTime)
oldUserMap = Object.prototype.toString.call(oldData.userMap) === "[object Object]" ? oldData.userMap : {};
} catch {}
// Fetch data from newer commits only
const result = await fetchUserMap(lastLastestCommitTime);
// Merge old data
result.userMap = { ...oldUserMap, ...result.userMap };
// Sort by emails
result.userMap = Object.fromEntries(
Object.keys(result.userMap).sort().map(key => [key, result.userMap[key]])
);
fs.writeFileSync(AUTHORS_FILE, JSON.stringify(result, null, 2));
})();

View File

@ -1,156 +0,0 @@
const klaw = require("klaw");
const util = require("util");
const fs = require("fs");
const child_process = require("child_process");
const cheerio = require("cheerio");
const GITHUB_REPO = "OI-wiki/OI-wiki";
const AUTHORS_FILE = "authors.json";
const AUTHORS_EXCLUDED = [
"24OI-Bot",
"OI-wiki"
];
const execFileAsync = util.promisify(child_process.execFile);
/**
* @type {Record<string, { name: string; githubUsername: string; }>}
*/
const authorsMap = JSON.parse(fs.readFileSync(AUTHORS_FILE, "utf-8")).userMap;
// Fetch ALL git histories
child_process.execSync("(git rev-parse --is-shallow-repository | grep false >/dev/null) || git fetch --unshallow", { stdio: "inherit" });
klaw(process.argv[2]).on("data", item => {
if (item.stats.isFile() && item.path.toLowerCase().endsWith(".html"))
processFile(item.path);
});
/**
* @param {string} sourceFilePath
*/
async function readCommitsLog(sourceFilePath) {
const SEPARATOR = "\001";
const { stdout: commitsLog } = await execFileAsync(
"git", ["log", `--pretty=format:%cD${SEPARATOR}%aE`, `docs${sourceFilePath}`]
);
return commitsLog
.trim()
.split("\n")
.map(line => line.split(SEPARATOR))
.map(([commitDate, authorEmail]) => ({ commitDate, authorEmail }));
}
/**
* @param {string} sourceFilePath
*/
async function readCoAuthorLog(sourceFilePath) {
const { stdout: coAuthorLog } = await execFileAsync(
"bash", ["-c", `git log --pretty=format:%B "$FILENAME" | sed -nE 's/^Co-Authored-By: .+?<(.+)>/\\1/pi'`],
{
env: {
...process.env,
FILENAME: `docs${sourceFilePath}`
}
}
);
return coAuthorLog
.trim()
.split("\n")
.map(s => s.trim());
}
/**
* @param {string} htmlFilePath
*/
async function processFile(htmlFilePath) {
const $ = cheerio.load(await fs.promises.readFile(htmlFilePath, "utf-8"));
$("html").attr("lang", "zh-Hans");
// The path of .md file relative to /docs, starting with a leading "/"
const sourceFilePath = ($(".page_edit_url").attr("href") || "").split("?ref=")[1];
if (sourceFilePath) {
// Set link to git history
$(".edit_history").attr("href", `https://github.com/${GITHUB_REPO}/commits/master/docs${sourceFilePath}`);
const [commitsLog, coAuthorLog] = await Promise.all([
readCommitsLog(sourceFilePath),
readCoAuthorLog(sourceFilePath)
]);
// "本页面最近更新"
const latestDate = new Date(
commitsLog
.map(l => +new Date(l.commitDate))
.reduce(
(latest, current) => Math.max(latest, current)
)
);
$(".facts_modified").html(
latestDate.toLocaleDateString("zh-CN", { timeZone: "Asia/Shanghai", hour12: false }) + " " +
latestDate.toLocaleTimeString("zh-CN", { timeZone: "Asia/Shanghai", hour12: false })
);
// "本页面贡献者"
const authors = Object
.entries(
// Commit count by author
[
// From markdown front-matter
...$(".page_contributors")
.text()
.trim()
.split(",")
.map(username => `${username.trim()}\ngithub`),
// From git history
...[
...coAuthorLog,
...commitsLog.map(l => l.authorEmail)
]
.map(email => email.toLowerCase())
.filter(email => email in authorsMap)
.map(email => (
authorsMap[email].githubUsername
? `${authorsMap[email].githubUsername}\ngithub` // GitHub username
: `${authorsMap[email].name}\ngit\n${email}` // Git name (when email not linked with GitHub)
))
].reduce((count, author) => {
if (AUTHORS_EXCLUDED.some(
excluded => `${excluded.toLowerCase()}\ngithub` === author.toLowerCase()
))
return count;
count[author] = (count[author] || 0) + 1;
return count;
}, {})
)
.sort(([author1, count1], [author2, count2]) => {
// Sort DESC by commit count
if (count1 !== count2)
return count2 - count1;
else
return author1.toLowerCase() < author2.toLowerCase() ? -1 : 1;
})
.map(([author]) => author);
$(".page_contributors").html(
authors.map(author => {
const [name, type, email] = author.split("\n");
return type === "github"
? `<a href="https://github.com/${name}" target="_blank">${name}</a>`
: `<a href="mailto:${email}" target="_blank">${name}</a>`
}).join(", ")
);
} else {
// Pages without source
$(".edit_history").attr("href", `https://github.com/${GITHUB_REPO}/commits/master`);
$(".facts_modified").html("无更新");
$(".page_contributors").html("(自动生成)");
$(".page_edit_url").attr("href", "#");
}
console.log(`Processed: ${htmlFilePath} (${sourceFilePath || "no source file"})`);
await fs.promises.writeFile(htmlFilePath, $.html());
}

View File

@ -16,7 +16,7 @@ git rev-parse --short HEAD | xargs -I % sed -i "s/githash: ''/githash: '%'/g" mk
mkdocs build -v
./scripts/render_git_history_info.sh
./scripts/render-commits-info.sh
# find ./site -type f -name '*.html' -exec node --max_old_space_size=512 ./scripts/render_math.js {} \;

11
scripts/render-commits-info.sh vendored Executable file
View File

@ -0,0 +1,11 @@
#!/usr/bin/env bash
# Render per-page commit/contributor info into the built site under ./site.
# Abort on the first failing command.
set -e
REPO="OI-wiki/OI-wiki"
# Directory containing this script, so companion .ts scripts resolve from any CWD.
DIRNAME="$(dirname -- "${BASH_SOURCE[0]}")"
# Seed the authors cache from the repo's "authors-cache" branch (written to CWD).
wget "https://raw.githubusercontent.com/$REPO/authors-cache/authors.json" -O "authors.json"
yarn ts-node-esm "$DIRNAME/update-authors-cache.ts" # Update authors cache (incrementally)
yarn ts-node-esm "$DIRNAME/render-commits-info.ts" site # Post-process HTML files under ./site

View File

@ -0,0 +1,142 @@
import util from "util";
import fs from "fs";
import child_process from "child_process";
import klaw from "klaw";
import cheerio from "cheerio";
import type { AuthorsCache } from "./authors-cache.js";
// Repository whose commit history backs the per-page contributor info.
const GITHUB_REPO = "OI-wiki/OI-wiki";
// Cache file produced by update-authors-cache.ts (email -> author info).
const AUTHORS_FILE = "authors.json";
// Bot/org accounts excluded from the contributor list.
const AUTHORS_EXCLUDED = ["24OI-Bot", "OI-wiki"];
const execFileAsync = util.promisify(child_process.execFile);
const authorsMap = (JSON.parse(fs.readFileSync(AUTHORS_FILE, "utf-8")) as AuthorsCache).userMap;
// Fetch ALL git histories (un-shallow the clone) so `git log` sees every commit.
child_process.execSync("(git rev-parse --is-shallow-repository | grep false >/dev/null) || git fetch --unshallow", {
  stdio: "inherit"
});
// Walk the directory given as argv[2] and post-process every .html file.
// NOTE(review): processFile is async but its promise is not awaited here, so
// files are processed concurrently and a failure surfaces as an unhandled
// rejection (which crashes Node 18 by default) — confirm this is intended.
klaw(process.argv[2]).on("data", item => {
  if (item.stats.isFile() && item.path.toLowerCase().endsWith(".html")) processFile(item.path);
});
/**
 * Read the git commit log for one markdown source file.
 *
 * @param sourceFilePath - Path of the .md file relative to /docs, with a
 *   leading "/" (so `docs${sourceFilePath}` is repo-root relative).
 * @returns One entry per commit touching the file, newest first (git log
 *   order), each holding the committer date (%cD) and author email (%aE).
 */
async function readCommitsLog(sourceFilePath: string) {
  // \x01 cannot appear in dates or emails, so it is a safe field separator.
  const SEPARATOR = "\x01";
  const { stdout: commitsLog } = await execFileAsync("git", [
    "log",
    `--pretty=format:%cD${SEPARATOR}%aE`,
    `docs${sourceFilePath}`
  ]);
  return commitsLog
    .trim()
    .split("\n")
    .map(line => line.split(SEPARATOR))
    .map(([commitDate, authorEmail]) => ({ commitDate, authorEmail }));
}
/**
 * Extract "Co-Authored-By:" emails from the commit messages of one source file.
 *
 * @param sourceFilePath - Path of the .md file relative to /docs, with a
 *   leading "/".
 * @returns The co-author email addresses, one per matched trailer line.
 *   NOTE(review): when no trailer matches, this returns [""] (split of an
 *   empty string), not [] — downstream filters on `email in authorsMap`,
 *   which drops the empty entry.
 */
async function readCoAuthorLog(sourceFilePath: string) {
  const { stdout: coAuthorLog } = await execFileAsync(
    "bash",
    // The filename is passed via the environment (not interpolated into the
    // shell command), which avoids quoting/injection issues.
    ["-c", `git log --pretty=format:%B "$FILENAME" | sed -nE 's/^Co-Authored-By: .+?<(.+)>/\\1/pi'`],
    {
      env: {
        ...process.env,
        FILENAME: `docs${sourceFilePath}`
      }
    }
  );
  return coAuthorLog
    .trim()
    .split("\n")
    .map(s => s.trim());
}
/**
 * Rewrite one built HTML page in place: fix the lang attribute, point the
 * edit-history link at GitHub, and fill in the last-modified time and the
 * contributor list derived from git history plus markdown front-matter.
 *
 * @param htmlFilePath - Absolute or CWD-relative path to a built .html file.
 */
async function processFile(htmlFilePath: string) {
  const $ = cheerio.load(await fs.promises.readFile(htmlFilePath, "utf-8"));
  $("html").attr("lang", "zh-Hans");
  // The path of .md file relative to /docs, starting with a leading "/".
  // Recovered from the "?ref=" query of the page's edit link; undefined for
  // generated pages without a markdown source.
  const sourceFilePath = ($(".page_edit_url").attr("href") || "").split("?ref=")[1];
  if (sourceFilePath) {
    // Set link to git history
    $(".edit_history").attr("href", `https://github.com/${GITHUB_REPO}/commits/master/docs${sourceFilePath}`);
    const [commitsLog, coAuthorLog] = await Promise.all([
      readCommitsLog(sourceFilePath),
      readCoAuthorLog(sourceFilePath)
    ]);
    // "本页面最近更新" — the page's "last updated" timestamp, taken from the
    // newest commit date. NOTE(review): reduce() without an initial value
    // throws if commitsLog is empty — assumes every sourced page has at
    // least one commit; confirm.
    const latestDate = new Date(
      commitsLog.map(l => +new Date(l.commitDate)).reduce((latest, current) => Math.max(latest, current))
    );
    $(".facts_modified").html(
      latestDate.toLocaleDateString("zh-CN", { timeZone: "Asia/Shanghai", hour12: false }) +
        " " +
        latestDate.toLocaleTimeString("zh-CN", { timeZone: "Asia/Shanghai", hour12: false })
    );
    // "本页面贡献者" — the page's contributor list. Each contributor is
    // encoded as a "\n"-joined key: either "<username>\ngithub" or
    // "<name>\ngit\n<email>", counted, filtered, then sorted.
    const authors = Object.entries(
      // Commit count by author
      [
        // From markdown front-matter
        ...$(".page_contributors")
          .text()
          .trim()
          .split(",")
          .map(username => `${username.trim()}\ngithub`),
        // From git history
        ...[...coAuthorLog, ...commitsLog.map(l => l.authorEmail)]
          .map(email => email.toLowerCase())
          .filter(email => email in authorsMap)
          .map(
            email =>
              authorsMap[email].githubUsername
                ? `${authorsMap[email].githubUsername}\ngithub` // GitHub username
                : `${authorsMap[email].name}\ngit\n${email}` // Git name (when email not linked with GitHub)
          )
      ].reduce<Record<string, number>>((count, author) => {
        // Drop excluded bot/org accounts; tally everyone else.
        if (AUTHORS_EXCLUDED.some(excluded => `${excluded.toLowerCase()}\ngithub` === author.toLowerCase()))
          return count;
        count[author] = (count[author] || 0) + 1;
        return count;
      }, {})
    )
      .sort(([author1, count1], [author2, count2]) => {
        // Sort DESC by commit count, then case-insensitively by name ASC.
        if (count1 !== count2) return count2 - count1;
        else return author1.toLowerCase() < author2.toLowerCase() ? -1 : 1;
      })
      .map(([author]) => author);
    // Render each contributor as a GitHub-profile or mailto link.
    $(".page_contributors").html(
      authors
        .map(author => {
          const [name, type, email] = author.split("\n");
          return type === "github"
            ? `<a href="https://github.com/${name}" target="_blank">${name}</a>`
            : `<a href="mailto:${email}" target="_blank">${name}</a>`;
        })
        .join(", ")
    );
  } else {
    // Pages without source: point history at the repo root and show
    // placeholder text ("无更新" = no updates, "(自动生成)" = auto-generated).
    $(".edit_history").attr("href", `https://github.com/${GITHUB_REPO}/commits/master`);
    $(".facts_modified").html("无更新");
    $(".page_contributors").html("(自动生成)");
    $(".page_edit_url").attr("href", "#");
  }
  console.log(`Processed: ${htmlFilePath} (${sourceFilePath || "no source file"})`);
  await fs.promises.writeFile(htmlFilePath, $.html());
}

View File

@ -1,6 +0,0 @@
#!/bin/bash -e
wget https://raw.githubusercontent.com/OI-wiki/OI-wiki/authors-cache/authors.json
node scripts/fetch-authors.js # Update authors cache (incrementally)
node scripts/html-postprocess.js site

View File

@ -1,28 +0,0 @@
const Promise = require('bluebird');
const { join, dirname } = require('path');
const { green } = require('chalk');
const { listDir } = require('hexo-fs');
const { cpus } = require('os');
const WorkerPool = require('./worker-pool');
const distDir = join(dirname(__dirname) + '/site');
const workerPath = join(__dirname + '/render_math_worker.js');
// Maxmize CPU performance
const cpuNums = cpus().length;
console.log(`${green('INFO')} ${cpuNums} CPU Threads detected, using ${cpuNums} threads`);
const pool = new WorkerPool(workerPath, cpuNums);
const START_TIME = +new Date();
Promise.all(listDir(distDir).map(async item => {
if (item.endsWith('.html')) {
const filename = join(distDir, item);
await pool.run(filename);
}
})).then(() => {
pool.destroy();
const END_TIME = +new Date();
console.log(`${green('INFO')} MathJax rendered finished in ${(END_TIME - START_TIME) / 1000}s.`);
})

36
scripts/render_math.ts Normal file
View File

@ -0,0 +1,36 @@
// Render MathJax formulas in every built HTML page under ./site, distributing
// files across one worker thread per CPU core (see worker-pool.ts and
// render_math_worker.ts).
import bluebird from "bluebird";
import { join, dirname } from "path";
import chalk from "chalk";
import hexoFs from "hexo-fs";
import { cpus } from "os";
import WorkerPool from "./worker-pool.js";
import url from "url";

const { all } = bluebird;
const { green, red } = chalk;
const { listDir } = hexoFs;

// ESM has no __dirname; recover it from import.meta.url.
const __dirname = dirname(url.fileURLToPath(import.meta.url));
const distDir = join(process.cwd(), "site");
const workerPath = join(__dirname, "render_math_worker.ts");

// Maximize CPU utilization: one render worker per hardware thread.
const cpuNums = cpus().length;
console.log(`${green("INFO")} ${cpuNums} CPU Threads detected, using ${cpuNums} threads`);
const pool = new WorkerPool(workerPath, cpuNums);

const START_TIME = +new Date();
all(
  listDir(distDir).map(async item => {
    if (item.endsWith(".html")) {
      const filename = join(distDir, item);
      await pool.run(filename);
    }
  })
)
  .then(() => {
    const END_TIME = +new Date();
    console.log(`${green("INFO")} MathJax rendered finished in ${(END_TIME - START_TIME) / 1000}s.`);
  })
  .catch(error => {
    // Surface render failures instead of dying on an unhandled rejection.
    console.error(`${red("ERROR")} MathJax rendering failed:`, error);
    process.exitCode = 1;
  })
  .finally(() => {
    // Always tear the worker threads down, or the process never exits.
    pool.destroy();
  });

View File

@ -1,16 +1,19 @@
const { isMainThread, parentPort } = require('worker_threads');
import { isMainThread, parentPort } from "worker_threads";
if (isMainThread) {
throw new Error('Its not a worker');
throw new Error("Its not a worker");
}
import Promise from "bluebird";
import hexoFs from "hexo-fs";
import chalk from "chalk";
const Promise = require('bluebird');
const { readFile, writeFile } = require('hexo-fs');
const { green, red, yellow, bgRed } = require('chalk');
import mathjaxNodePage from "mathjax-node-page";
import { promisify } from "util";
const { mjpage } = require('mathjax-node-page');
const { promisify } = require('util');
const { readFile, writeFile } = hexoFs;
const { green, red, yellow, bgRed } = chalk;
const { mjpage } = mathjaxNodePage;
mjpage[promisify.custom] = input => {
return new Promise((resolve, reject) => {
@ -18,9 +21,9 @@ mjpage[promisify.custom] = input => {
mjpage(
input,
{
format: ["TeX"],
format: ["TeX"]
},
{ svg: true, ex: 8, cjkCharWidth: 18, linebreaks: true, },
{ svg: true, ex: 8, cjkCharWidth: 18, linebreaks: true },
resolve
);
} catch (e) {
@ -38,27 +41,27 @@ async function renderMathJax(filename) {
const preProcessed = content
.replace(/<span class="MathJax_Preview">.+?<\/span><script type="math\/tex">/gi, '<script type="math/tex">')
.replace(/<div class="MathJax_Preview">[\s\S]*?<\/div>/gi, '');
.replace(/<div class="MathJax_Preview">[\s\S]*?<\/div>/gi, "");
let result = null;
try {
result = await mathJaxRenderer(preProcessed);
} catch (e) {
console.error(`${bgRed('ERROR')} ${yellow(filename)} ${red(`rendered failed`)}, detailed error see below:`);
console.error(`${bgRed("ERROR")} ${yellow(filename)} ${red(`rendered failed`)}, detailed error see below:`);
console.error(e);
}
if (result) {
const END_TIME = +new Date();
console.log(`${green('INFO')} ${yellow(filename)} rendered finished (${(END_TIME - START_TIME) / 1000}s).`);
return writeFile(filename, result)
};
console.log(`${green("INFO")} ${yellow(filename)} rendered finished (${(END_TIME - START_TIME) / 1000}s).`);
return writeFile(filename, result);
}
return;
}
parentPort.on('message', async filename => {
parentPort.on("message", async filename => {
await renderMathJax(filename);
parentPort.postMessage('Done');
parentPort.postMessage("Done");
});

View File

@ -1,31 +0,0 @@
const Promise = require('bluebird');
const { join, dirname } = require('path');
const { green } = require('chalk');
const { listDir } = require('hexo-fs');
const { cpus } = require('os');
const WorkerPool = require('./worker-pool');
const distDir = join(dirname(__dirname) + '/site-cdn-pages');
const workerPath = join(__dirname + '/replace_cdn_worker.js');
// Maxmize CPU performance
const cpuNums = cpus().length;
console.log(`${green('INFO')} ${cpuNums} CPU Threads detected, using ${cpuNums} threads`);
const pool = new WorkerPool(workerPath, cpuNums);
let HTML_NUMS = 0;
const START_TIME = +new Date();
Promise.all(listDir(distDir).map(async item => {
if (item.endsWith('.html')) {
const filename = join(distDir, item);
HTML_NUMS++
await pool.run(filename);
}
})).then(() => {
pool.destroy();
const END_TIME = +new Date();
console.log(`${green('INFO')} CDN replacement for ${HTML_NUMS} html files finished in ${(END_TIME - START_TIME) / 1000}s.`);
})

View File

@ -1,22 +0,0 @@
const { isMainThread, parentPort } = require('worker_threads');
if (isMainThread) {
throw new Error('Its not a worker');
}
const { readFile, writeFile } = require('hexo-fs');
async function replaceStaticFilesCdn(filename) {
const content = await readFile(filename);
const result = content.replace(/[^"']*assets[^"']*/g, s => {
return 'https://cdn-for-oi-wiki.billchn.com/' + s;
});
return writeFile(filename, result);
}
parentPort.on('message', async filename => {
await replaceStaticFilesCdn(filename);
parentPort.postMessage('Done');
});

View File

@ -0,0 +1,83 @@
import fs from "fs";
import { Octokit } from "octokit";
import { AuthorsCache, AuthorUserMap } from "./authors-cache.js";
// Repository whose commit list is fetched from the GitHub API.
const GITHUB_REPO = "OI-wiki/OI-wiki";
// On-disk authors cache, read incrementally and rewritten at the end.
const AUTHORS_FILE = "authors.json";
// Number of commit-list pages fetched in parallel (argv[2], default 1).
// Increase this value carefully: higher concurrency can exhaust the
// GitHub API rate limit for the token.
const fetchConcurrency = Number(process.argv[2]) || 1;
/**
 * Fetch commits from the GitHub API and build an email -> author map.
 *
 * Pages through GET /repos/{repo}/commits (100 per page), `fetchConcurrency`
 * pages at a time, until an empty batch is returned.
 *
 * @param since - ISO timestamp; only commits newer than this are fetched.
 *   Empty string fetches the full history.
 * @returns The author map plus the newest commit time seen, for the next
 *   incremental run.
 */
async function fetchUserMap(since: string): Promise<AuthorsCache> {
  console.log(`Fetching commits newer than: ${since || "(fetch all)"}`);
  const octokit = new Octokit({
    auth: process.env.GITHUB_TOKEN
  });
  const result: AuthorUserMap = {};
  let latestCommitTime = 0;
  for (let i = 1; ; i += fetchConcurrency) {
    // Request `fetchConcurrency` consecutive pages in parallel.
    const responses = await Promise.all(
      Array(fetchConcurrency)
        .fill(null)
        .map((_, j) => i + j)
        .map(page =>
          octokit.request(`GET /repos/${GITHUB_REPO}/commits`, {
            per_page: 100,
            page,
            ...(since ? { since } : {})
          })
        )
    );
    const data = responses.flatMap(response => response.data);
    if (data.length === 0) break;
    for (const item of data) {
      // Track the newest committer date across all fetched commits.
      const commitTime = +new Date(item.commit.committer.date);
      if (latestCommitTime < commitTime) {
        latestCommitTime = commitTime;
      }
      const email = item.commit.author.email.toLowerCase();
      const name = item.commit.author.name;
      // Skip bot commits.
      if (name.includes("[bot]")) continue;
      // First sighting of an email wins; since commits come newest-first,
      // the most recent name/username for each email is kept.
      // NOTE(review): `githubUsername: undefined` conflicts with
      // AuthorUserMap declaring it as a required string — confirm the type
      // should be optional.
      if (!(email in result))
        result[email] = {
          name,
          githubUsername: item.author && item.author.login ? item.author.login : undefined
        };
    }
  }
  return {
    latestCommitTime: new Date(latestCommitTime).toISOString(),
    userMap: result
  };
}
// Read cached old data (if exists). Any parse/validation failure falls back
// to a full fetch with an empty old map.
let lastLastestCommitTime = "";
let oldUserMap = {};
try {
  const oldData = JSON.parse(fs.readFileSync(AUTHORS_FILE, "utf-8"));
  // Accept latestCommitTime only when it parses as a valid date.
  lastLastestCommitTime = !Number.isNaN(+new Date(oldData.latestCommitTime)) ? oldData.latestCommitTime : "";
  if (lastLastestCommitTime)
    oldUserMap = Object.prototype.toString.call(oldData.userMap) === "[object Object]" ? oldData.userMap : {};
} catch {}
// Fetch data from newer commits only (top-level await; requires ESM)
const result = await fetchUserMap(lastLastestCommitTime);
// Merge old data — freshly fetched entries take precedence over cached ones.
result.userMap = { ...oldUserMap, ...result.userMap };
// Sort by emails for a stable, diff-friendly authors.json.
result.userMap = Object.fromEntries(
  Object.keys(result.userMap)
    .sort()
    .map(key => [key, result.userMap[key]])
);
fs.writeFileSync(AUTHORS_FILE, JSON.stringify(result, null, 2));

View File

@ -1,7 +1,10 @@
const { Worker } = require('worker_threads');
const Promise = require('bluebird');
import { Worker } from "worker_threads";
import Promise from "bluebird";
export default class WorkerPool {
readonly workerPath: string;
readonly numOfThreads: number;
class WorkerPool {
_workers = [];
_activeWorkers = [];
_queue = [];
@ -14,11 +17,13 @@ class WorkerPool {
init() {
if (this.numOfThreads < 1) {
throw new Error('Number of threads should be at least 1');
throw new Error("Number of threads should be at least 1");
}
for (let i = 0; i < this.numOfThreads; i++) {
const worker = new Worker(this.workerPath);
const worker = new Worker(this.workerPath, {
execArgv: ["--loader", "ts-node/esm"]
});
this._workers[i] = worker;
this._activeWorkers[i] = false;
@ -57,7 +62,7 @@ class WorkerPool {
}
return resolve(result);
}
}
};
// No more idle workers
if (restWorkerId === -1) {
@ -67,26 +72,26 @@ class WorkerPool {
// Let idle workers run
this.runWorker(restWorkerId, queueItem);
})
});
}
async runWorker(workerId, queueItem) {
const worker = this._workers[workerId];
this._activeWorkers[workerId] = true;
const messageCallback = (result) => {
const messageCallback = result => {
queueItem.callback(null, result);
cleanUp();
};
const errorCallback = (error) => {
const errorCallback = error => {
queueItem.callback(error);
cleanUp();
};
// Clear up listeners
const cleanUp = () => {
worker.removeAllListeners('message');
worker.removeAllListeners('error');
worker.removeAllListeners("message");
worker.removeAllListeners("error");
this._activeWorkers[workerId] = false;
@ -95,14 +100,12 @@ class WorkerPool {
}
this.runWorker(workerId, this._queue.shift());
}
};
// create listeners
worker.once('message', messageCallback);
worker.once('error', errorCallback);
worker.once("message", messageCallback);
worker.once("error", errorCallback);
// Send data to other newly created workers
worker.postMessage(queueItem.getData);
}
}
module.exports = WorkerPool;

17
tsconfig.json vendored Normal file
View File

@ -0,0 +1,17 @@
{
"compilerOptions": {
"module": "ES2022",
"moduleResolution": "Node16",
"esModuleInterop": true,
"target": "ES2022",
"sourceMap": true,
"outDir": "./dist",
"baseUrl": "./scripts",
"noEmit": true,
"skipLibCheck": true
},
"exclude": ["node_modules"],
"ts-node": {
"swc": true
}
}

7377
yarn.lock Normal file

File diff suppressed because it is too large Load Diff