
Commit f07666b

feat(fusuma): improve output logs
1 parent d972d1a commit f07666b


9 files changed, +30828 -27958 lines changed


README.md

+5 -1

@@ -199,7 +199,11 @@ You can see your Note for each slide and the next slide on the Host screen.
 
 Fusuma supports improving performance, SEO, and so on as default.
 
-<img src="./site/docs/assets/lighthouse.png" width="600px">
+<img src="./site/docs/assets/lighthouse.png" width="500px">
+
+Fusuma analyzes the slide's performance, and outputs like below.
+
+<img src="./output-logs.png" width="500px">
 
 ---
 

output-logs.png

181 KB

packages/fusuma/package-lock.json

+30,523 -27,870

Some generated files are not rendered by default.

packages/fusuma/package.json

+1

@@ -65,6 +65,7 @@
     "regenerator-runtime": "^0.13.7",
     "rmfr": "^2.0.0",
     "style-loader": "^2.0.0",
+    "table": "^6.0.7",
     "terser-webpack-plugin": "^5.1.1",
     "webp-loader": "^0.6.0",
     "webpack": "^5.24.3",

packages/fusuma/src/server/dynamicRenderingServer.js

+24 -3

@@ -11,6 +11,10 @@ const fileServer = require('./fileServer');
 async function dynamicRenderingServer(outputDirPath, publicPath, spinner, isThumbnail = true) {
   spinner.setContent({ color: 'cyan', text: 'Rendering components to HTML...' });
 
+  const logs = {
+    network: [],
+    performance: {},
+  };
   const port = 5445;
   const browser = await puppeteer.launch({
     chromeWebSecurity: false,
@@ -26,10 +30,27 @@ async function dynamicRenderingServer(outputDirPath, publicPath, spinner, isThum
     width: 1200,
     height: 630,
   });
+
+  page.on('request', (request) => {
+    const url = request.url();
+
+    if (url.includes(`http://localhost:${port}${publicPath}`)) {
+      logs.network.push(url.split(`http://localhost:${port}${publicPath}`).pop());
+    } else {
+      logs.network.push(url);
+    }
+  });
+
   await page.goto(url, {
     waitUntil: ['load', 'networkidle2'],
   });
 
+  const performanceTimingJson = await page.evaluate(() =>
+    JSON.stringify(window.performance.timing)
+  );
+
+  logs.performance = JSON.parse(performanceTimingJson);
+
   try {
     await unlink(htmlPath);
     await writeFile(htmlPath, await page.content());
@@ -67,9 +88,9 @@ async function dynamicRenderingServer(outputDirPath, publicPath, spinner, isThum
     }
   }
 
-  await page.close();
-  await browser.close();
-  app.close();
+  await Promise.all([page.close(), browser.close(), new Promise((r) => app.close(r))]);
+
+  return logs;
 }
 
 module.exports = dynamicRenderingServer;
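For reference, a rough sketch of the shape of the object the function now returns (the entries are illustrative; network collects the URLs captured by the page.on('request') handler with the local-server prefix stripped, and performance is the serialized window.performance.timing):

// Illustrative shape only — actual entries depend on the built slide deck.
const logs = {
  network: [
    'index.html', // served from http://localhost:5445 + publicPath, prefix stripped
    'main.js',
    'main.css',
    'https://fonts.googleapis.com/css?family=Roboto', // external requests keep the full URL
  ],
  performance: {
    // window.performance.timing fields (epoch milliseconds), for example:
    navigationStart: 1615000000000,
    requestStart: 1615000000020,
    responseStart: 1615000000040,
    responseEnd: 1615000000055,
    domInteractive: 1615000000300,
    // ...remaining PerformanceTiming fields
  },
};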

packages/fusuma/src/tasks/build.js

+11 -7

@@ -1,11 +1,12 @@
 'use strict';
 
 const Spinner = require('../cli/Spinner');
-const { info, warn } = require('../cli/log');
+const { warn } = require('../cli/log');
 const deleteDir = require('../utils/deleteDir');
 const getRemoteOriginUrl = require('../utils/getRemoteOriginUrl');
+const buildLogs = require('../utils/buildLogs');
 const { build: webpackBuild } = require('../webpack');
-const outputBuildInfo = require('../webpack/outputBuildInfo');
+const outputBuildInfo = require('../webpack/getChunks');
 const dynamicRenderingServer = require('../server/dynamicRenderingServer');
 
 async function build(config, isConsoleOutput = true) {
@@ -40,16 +41,19 @@ async function build(config, isConsoleOutput = true) {
     }
   }
 
-  await dynamicRenderingServer(outputDirPath, config.build.publicPath, spinner, neededThumbnail);
+  const logs = await dynamicRenderingServer(
+    outputDirPath,
+    config.build.publicPath,
+    spinner,
+    neededThumbnail
+  );
 
   spinner.stop();
 
   if (isConsoleOutput) {
-    const logs = outputBuildInfo(stats);
-    const last = logs.splice(-1);
+    const chunks = outputBuildInfo(stats);
 
-    console.info(logs.join('\n'));
-    info('build', last);
+    buildLogs({ ...logs, ...chunks });
   }
 }
 
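The argument passed to buildLogs is therefore the merge of the rendering-server logs with the webpack summary produced by getChunks. A sketch of that merged shape, with illustrative values:

// What `{ ...logs, ...chunks }` looks like by the time buildLogs receives it.
// All values are illustrative.
const merged = {
  // from dynamicRenderingServer
  network: ['index.html', 'main.js', '0.js'],
  performance: { /* window.performance.timing fields, as sketched above */ },
  // from getChunks (webpack stats)
  size: { total: 3200000, gz: 900000 },
  num: { total: 40, gz: 12 },
  chunks: {
    main: { hash: 'abc123', files: ['main.js', 'main.css'], deps: ['runtime.js', 'main.js', 'main.css'] },
  },
  assets: {
    'index.html': { size: 12000 },
    'main.js': { size: 1200000 },
    'main.js.gz': { size: 350000 },
  },
};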

packages/fusuma/src/utils/buildLogs.js

+194
@@ -0,0 +1,194 @@
+'use strict';
+
+const { table, getBorderCharacters } = require('table');
+const chalk = require('chalk');
+const prettyBytes = require('pretty-bytes');
+const { javascript } = require('webpack');
+
+const tableConfig = {
+  border: getBorderCharacters('void'),
+  columnDefault: {
+    paddingLeft: 0,
+    paddingRight: 1,
+  },
+  columns: {
+    3: {
+      alignment: 'right',
+      paddingLeft: 1,
+    },
+  },
+  drawHorizontalLine: () => false,
+};
+
+function buildLogs(logs) {
+  const outputStack = [];
+  const syncChunk = chalk.yellow('○');
+  const asyncChunk = chalk.cyan('○');
+
+  {
+    const outputStack = [];
+    const tti = logs.performance.domInteractive - logs.performance.navigationStart;
+    outputStack.push(['TTI (Time to Interactive)', `${chalk.yellow(tti)} ms`]);
+    const ttfb = logs.performance.responseStart - logs.performance.requestStart;
+    outputStack.push(['TTFB (Time to First Byte)', `${chalk.yellow(ttfb)} ms`]);
+    const ttlb = logs.performance.responseEnd - logs.performance.requestStart;
+    outputStack.push(['TTLB (Time to Last Byte)', `${chalk.yellow(ttlb)} ms`]);
+    console.log(table(outputStack, tableConfig));
+  }
+
+  // header
+  outputStack.push(['', '', 'size', 'gzip']);
+  outputStack.push([
+    ` ┌─`,
+    `${syncChunk} /`,
+    chalk.green(prettyBytes(logs.assets['index.html'].size)),
+    '',
+  ]);
+  outputStack.push([' │', '', '', '']);
+
+  let runtimeFileNum = 0;
+  let runtimeGzipFileNum = 0;
+  const assetsTree = {};
+
+  for (const asset of logs.network) {
+    // html, external assets
+    if (asset.includes('http')) {
+      // assetsTree['network'] = {};
+    } else {
+      const name = asset.split('.')[0];
+
+      runtimeFileNum++;
+
+      // assets
+      if (!asset.includes('.js') && !asset.includes('.css')) {
+        if (!assetsTree['assets']) {
+          assetsTree['assets'] = {
+            size: 0,
+            gzSize: 0,
+            files: {},
+          };
+        }
+
+        assetsTree['assets'].files[asset] = {
+          gzSize: 0,
+          ...logs.assets[asset],
+          isAsync: false,
+        };
+        assetsTree['assets'].size += logs.assets[asset].size;
+
+        const gz = logs.assets[`${asset}.gz`];
+
+        if (gz) {
+          assetsTree['assets'].gzSize += gz.size;
+          runtimeGzipFileNum++;
+        }
+      } else {
+        if (!Number(name)) {
+          // initial
+          if (!assetsTree[name]) {
+            assetsTree[name] = {
+              size: 0,
+              gzSize: 0,
+              files: {},
+            };
+          }
+
+          for (const file of logs.chunks[name].files) {
+            if (!assetsTree[name].files[file]) {
+              const info = logs.assets[file];
+              const gz = logs.assets[`${file}.gz`];
+
+              assetsTree[name].files[file] = {
+                ...info,
+                isAsync: false,
+              };
+
+              assetsTree[name].size += info.size;
+
+              if (gz) {
+                runtimeGzipFileNum++;
+                assetsTree[name].files[file].gzSize = gz.size;
+                assetsTree[name].gzSize += gz.size;
+              }
+            }
+          }
+        } else {
+          // async
+          for (const file of logs.chunks[name].files) {
+            const info = logs.assets[file];
+            const gz = logs.assets[`${file}.gz`];
+
+            assetsTree['runtime'].files[file] = {
+              ...info,
+              isAsync: true,
+            };
+            assetsTree['runtime'].size += info.size;
+            if (gz) {
+              runtimeGzipFileNum++;
+              assetsTree['runtime'].files[file].gzSize = gz.size;
+              assetsTree['runtime'].gzSize += gz.size;
+            }
+          }
+        }
+      }
+    }
+  }
+
+  const topFieldNum = Object.keys(assetsTree).length;
+  let runtimeFileSize = 0;
+  let runtimeGzipFileSize = 0;
+
+  Object.entries(assetsTree).forEach(([initialName, initialValue], topIndex) => {
+    runtimeFileSize += initialValue.size;
+    runtimeGzipFileSize += initialValue.gzSize;
+
+    outputStack.push([
+      topFieldNum - 1 !== topIndex ? ' ├─' : ' └─',
+      `[${initialName}]`,
+      chalk.green(prettyBytes(initialValue.size)),
+      initialValue.gzSize !== 0 ? chalk.yellow(prettyBytes(initialValue.gzSize)) : '',
+    ]);
+
+    Object.entries(initialValue.files).forEach(([name, { size, gzSize, isAsync }], index) => {
+      outputStack.push([
+        topFieldNum - 1 > topIndex ? ' │' : '',
+        `${Object.keys(initialValue.files).length - 1 !== index ? '├─' : '└─'} ${
+          isAsync ? asyncChunk : syncChunk
+        } ${name}`,
+        chalk.blue(prettyBytes(size)),
+        gzSize !== 0 ? chalk.blue(prettyBytes(gzSize)) : '',
+      ]);
+    });
+
+    if (topFieldNum - 1 !== topIndex) {
+      outputStack.push([' │', '', '', '']);
+    }
+  });
+
+  console.log(table(outputStack, tableConfig));
+  console.log(`${syncChunk} initial ${asyncChunk} async`);
+  console.log();
+
+  {
+    const outputStack = [];
+
+    outputStack.push([
+      'runtime file size totals',
+      chalk.green(prettyBytes(runtimeFileSize)),
+      `(${runtimeFileNum} files)`,
+      chalk.yellow(prettyBytes(runtimeGzipFileSize)),
+      `(${runtimeGzipFileNum} files)`,
+    ]);
+    outputStack.push([
+      'file size totals',
+      chalk.green(prettyBytes(logs.size.total)),
+      `(${logs.num.total} files)`,
+      chalk.yellow(prettyBytes(logs.size.gz)),
+      `(${logs.num.gz} files)`,
+    ]);
+
+    console.log(table(outputStack, tableConfig));
+  }
+}
+
+module.exports = buildLogs;
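The three timing rows at the top of the report are plain differences over window.performance.timing. A worked example with made-up timestamps:

// Illustrative PerformanceTiming values (epoch milliseconds).
const timing = {
  navigationStart: 1615000000000,
  requestStart: 1615000000020,
  responseStart: 1615000000045,
  responseEnd: 1615000000060,
  domInteractive: 1615000000320,
};

const tti = timing.domInteractive - timing.navigationStart; // 320 ms — Time to Interactive
const ttfb = timing.responseStart - timing.requestStart;    // 25 ms  — Time to First Byte
const ttlb = timing.responseEnd - timing.requestStart;      // 40 ms  — Time to Last Byte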
packages/fusuma/src/webpack/getChunks.js

+70
@@ -0,0 +1,70 @@
+'use strict';
+
+function getRequiredFilesFromParentChunks(parentChunks, allChunks) {
+  let requiredFiles = [];
+
+  for (let i = 0; i < parentChunks.length; i++) {
+    const parentChunkId = parentChunks[i];
+    const parentChunk = allChunks.find((chunk) => chunk.id === parentChunkId);
+
+    if (parentChunk) {
+      if (parentChunk.parents.length > 0) {
+        requiredFiles = getRequiredFilesFromParentChunks(parentChunk.parents, allChunks);
+      }
+      requiredFiles = requiredFiles.concat(parentChunk.files);
+    }
+  }
+
+  return requiredFiles;
+}
+
+function outputBuildInfo(res) {
+  const logs = {
+    size: {
+      total: 0,
+      gz: 0,
+    },
+    num: {
+      total: 0,
+      gz: 0,
+    },
+    chunks: {},
+    assets: {},
+  };
+  const stats = res.toJson();
+
+  for (const chunk of stats.chunks) {
+    const { names, parents, id, hash, files } = chunk;
+    const name = names[0] || id;
+    const requiredFiles = [
+      ...getRequiredFilesFromParentChunks(parents, stats.chunks),
+      ...chunk.files,
+    ];
+
+    logs.chunks[`${name}`] = {
+      hash,
+      files,
+      deps: requiredFiles,
+    };
+  }
+
+  Object.entries(res.compilation.assets).forEach(([name, asset]) => {
+    const size = asset.size();
+
+    logs.size.total += size;
+    logs.num.total++;
+
+    if (name.includes('.gz')) {
+      logs.size.gz += size;
+      logs.num.gz++;
+    }
+
+    logs.assets[name] = {
+      size,
+    };
+  });
+
+  return logs;
+}
+
+module.exports = outputBuildInfo;
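A possible usage sketch, assuming a plain webpack callback build; the config path is hypothetical, and inside Fusuma the Stats object actually comes from the project's own webpack wrapper, as seen in build.js above:

'use strict';

const webpack = require('webpack');
const outputBuildInfo = require('./getChunks');

// Hypothetical config path, for illustration only.
const webpackConfig = require('./webpack.config.js');

webpack(webpackConfig, (err, stats) => {
  if (err || stats.hasErrors()) {
    throw err || new Error('build failed');
  }

  const chunks = outputBuildInfo(stats);
  // chunks.size.total / chunks.size.gz  -> byte totals across all emitted assets
  // chunks.num.total  / chunks.num.gz   -> file counts
  // chunks.chunks[name].files / .deps   -> files per chunk plus their parent-chunk files
  // chunks.assets[name].size            -> size of each emitted asset
});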
