Practical takeaways
body {
  font-weight: 400;
  background-color: #fdfdfd;
  color: #212121;
}

h1, h2, h3, h4, h5, h6 {
  font-weight: 700;
  color: #1a1a1a;
}

/* Dark mode CSS */
@media (prefers-color-scheme: dark) {
  body {
    font-weight: 350;
    background-color: #12121f;
    color: #fbfbfb;
  }

  h1, h2, h3, h4, h5, h6 {
    font-weight: 600;
    color: #fff;
  }
}
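If anything outside of CSS needs to follow the same preference (chart themes, embedded players, map styles), the same media query can be observed from JavaScript; a minimal sketch:

// Minimal sketch: mirror prefers-color-scheme in JS so that non-CSS assets can
// follow the same light/dark preference as the styles above.
const darkQuery = window.matchMedia('(prefers-color-scheme: dark)');

function applyTheme(isDark: boolean) {
  document.documentElement.dataset.theme = isDark ? 'dark' : 'light';
}

applyTheme(darkQuery.matches);
darkQuery.addEventListener('change', (event) => applyTheme(event.matches));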
<!doctype html>
<html lang="zh-TW">
<head>
<title>國家兩廳院</title>
<meta charset="utf-8"/>
<meta name="viewport" content="initial-scale=1,maximum-scale=5" />
<meta name="description" content="兩廳院是臺灣最成熟的國際級藝術中心,也是亞洲具指標性的當代劇場,無論當代或傳統、原生或外來,都在兩廳院的舞臺上共生,且深受臺灣觀眾青睞。這片土地的自由與開放讓多元文化彼此對話,自由帶來空間,開放帶來思考,是兩廳院存在的立基,也讓兩廳院成為亞洲最自由與開放的文化場景。" />
<meta name="google-site-verification" content="8TNz1gjBqx7XLyTvkNXtez_4yh-4QOBiB9In-vy_jZk" />
<meta property="og:title" content="國家兩廳院 NTCH" />
<meta property="og:description" content="兩廳院是臺灣最成熟的國際級藝術中心,也是亞洲具指標性的當代劇場,無論當代或傳統、原生或外來,都在兩廳院的舞臺上共生,且深受臺灣觀眾青睞。這片土地的自由與開放讓多元文化彼此對話,自由帶來空間,開放帶來思考,是兩廳院存在的立基,也讓兩廳院成為亞洲最自由與開放的文化場景。" />
<meta property="og:type" content="website" />
<meta property="og:image" content="https://vfms-file-test.npac-ntch.org/b3fe4353497bfe07770adcc82546ca468335c10b.jpeg" />
</head>
<body>
<!-- .... -->
</body>
</html>
SSR (Server-Side Rendering)
# nginx.conf
upstream open_graph_upstream {
  server OpenGraph;
}

location ~* ^/programs/\d {
  proxy_set_header Host $host;
  proxy_http_version 1.1;
  proxy_set_header Upgrade $http_upgrade;
  proxy_set_header Connection "upgrade";

  if ($http_user_agent ~ (facebookexternalhit|line-poker|TelegramBot|TwitterBot|Slackbot)) {
    proxy_pass http://open_graph_upstream;
  }

  try_files $uri /$uri /index.html;
}
Requests whose User-Agent matches a known crawler (facebookexternalhit, line-poker, TelegramBot, TwitterBot, Slackbot) are proxied to the Open Graph upstream; every other request falls through to the SPA's index.html.
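A quick way to sanity-check this routing is to request the same URL with and without a crawler User-Agent and compare what comes back; the host and program id below are placeholders, not values from the real config:

// Sanity-check sketch: HOST and the program id are placeholders. Crawler UAs
// should receive the SSR'd Open Graph page; others should get the SPA shell.
const HOST = 'https://www.example.org'; // placeholder host

async function checkOgRouting(programId: string) {
  for (const userAgent of ['facebookexternalhit/1.1', 'Mozilla/5.0']) {
    const res = await fetch(`${HOST}/programs/${programId}`, {
      headers: { 'User-Agent': userAgent },
    });
    const html = await res.text();
    console.log(userAgent, '->', html.includes('property="og:title"') ? 'Open Graph page' : 'SPA shell');
  }
}

checkOgRouting('123'); // placeholder program id

The Open Graph upstream itself is a small server-side route that renders the program's meta tags: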
router.get('/programs/:programId', async (ctx) => {
  const { programId } = ctx.params;
  debugOpenGraph(`Fetched: Program ${programId}`);
  const language = ctx.headers['accept-language']?.match(/en(-US)?/) ? 'en-US' : 'zh-TW';
  const program = await getProgram(null, { id: programId }, { language });
  if (!program) {
    ctx.status = 404;
  } else {
    ctx.body = `<!DOCTYPE html>
<html lang="${language}">
  <head>
    <title>${program.title} ${program.engTitle}</title>
    <meta charset="utf-8" />
    <meta name="viewport" content="initial-scale=1, maximum-scale=1" />
    <meta name="description" content="${program.brief}" />
    <meta property="og:url" content="${OFFICIAL_SITE_HOST}/programs/${programId}" />
    <meta property="og:locale" content="${language === 'en-US' ? 'en_US' : 'zh_TW'}" />
    <meta property="og:title" content="${language === 'en-US' ? program.engTitle : program.title}" />
    <meta property="og:description" content="${program.brief}" />
    <meta property="og:image" content="${STATIC_HOST}/${program.cover}" />
    <meta property="og:type" content="article" />
    <meta property="fb:app_id" content="1606444052877313" />
  </head>
  <body>
    <h1>${language === 'en-US' ? program.engTitle : program.title}</h1>
    <p>${program.brief}</p>
    <article>
      ${program.content}
    </article>
    <p id="tags">
      ${[...program.series, ...program.tags].map((s) => s.name).join(', ')}
    </p>
    ${program.purchaseLink ? `<a href="${program.purchaseLink}" target="_blank" rel="noopener noreferrer">立即購買</a>` : ''}
  </body>
</html>
`;
  }
});
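One caveat with the template above: program.title, program.brief, and friends are interpolated straight into attributes and markup. If those fields can contain quotes or angle brackets, a small escaping helper (not part of the original code) keeps the generated HTML intact:

// Hypothetical helper: escape the characters that would break an HTML attribute
// or element, e.g. content="${escapeHtml(program.brief)}".
function escapeHtml(value: string): string {
  return value
    .replace(/&/g, '&amp;')
    .replace(/</g, '&lt;')
    .replace(/>/g, '&gt;')
    .replace(/"/g, '&quot;');
}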
Customizing the meta response for crawlers does not improve SEO ranking by itself. If you care about search ranking, use full SSR, a sitemap, and so on.
Exporting large sets of records or generating a webpage screenshot can take the server well over 30 seconds, which causes aggressive CDNs to cut the request off with a 502 response.
Options for delivering the result asynchronously instead of holding the request open: send an email when the file is ready, or use server push (Subscription / EventSource).
[Sequence diagram: the Client calls generateProgramPreviewPDF; the Resolver kicks off a Puppeteer job and immediately returns an asyncFileId; the Client then opens useSubscription asyncFileDownloadUrl(asyncFileId) and eventually receives the generated filename instead of relying on a static filename.]
The GraphQL resolver offloads the heavy work to a worker over gRPC, and the result is pushed back to the client through a GraphQL subscription over WebSocket.
export async function generatePreviewPDF(id, member) {
  const program = await models.Program.findById(id);
  if (!program) throw new ProgramNotFoundError();
  if (!await program.shouldMemberHasPermissionToChange(member)) throw new PermissionError();

  const m = await models.Member.findById(member.id);
  const refreshToken = await m.getRefreshToken();
  const asyncFileId = uuid();

  pdfGenerationClient.generateApplyPreviewPDF({
    programId: program.id,
    refreshToken,
  }, (err, response) => {
    if (err) {
      debugGenerateApplyPreviewPDF(`Generate Failed: ${err}`);
      return;
    }
    debugGenerateApplyPreviewPDF(`Preview (Program: ${program.id}) File Generated: ${response.filename}`);
    fileUploaderClient.updateAsyncFileUploaderUrl({
      id: asyncFileId,
      filename: response.filename,
    }, (updateErr) => {
      if (updateErr) {
        debugGenerateApplyPreviewPDF(`Update Async File Uploader URI: ${updateErr}`);
      }
    });
  });

  return asyncFileId;
}
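The worker on the other end of pdfGenerationClient is not shown here; assuming it renders an authenticated preview page with Puppeteer (the preview route, Authorization header, and output path below are assumptions), its gRPC handler could look roughly like this:

// Hypothetical worker-side handler for generateApplyPreviewPDF. PREVIEW_HOST,
// the preview route, and the header usage are assumptions; the real service only
// needs to render the program preview and reply with the generated filename.
import puppeteer from 'puppeteer';
import { v4 as uuid } from 'uuid';

const PREVIEW_HOST = process.env.PREVIEW_HOST || 'http://localhost:3000';

export async function generateApplyPreviewPDF(
  call: { request: { programId: string; refreshToken: string } },
  callback: (err: Error | null, response?: { filename: string }) => void,
) {
  const { programId, refreshToken } = call.request;
  const browser = await puppeteer.launch();
  try {
    const page = await browser.newPage();
    // Let the headless browser act as the requesting member.
    await page.setExtraHTTPHeaders({ Authorization: `Bearer ${refreshToken}` });
    await page.goto(`${PREVIEW_HOST}/programs/${programId}/preview`, { waitUntil: 'networkidle0' });
    const filename = `program-preview-${programId}-${uuid()}.pdf`;
    await page.pdf({ path: `/tmp/${filename}`, format: 'A4' });
    callback(null, { filename });
  } catch (err) {
    callback(err as Error);
  } finally {
    await browser.close();
  }
}

When the worker responds with the filename, the resolver above forwards it to the FileUploader service, whose contract looks like: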
service FileUploader {
  rpc updateAsyncFileUploaderUrl(UploadedFile) returns (EmptyResponse) {}
}

message UploadedFile {
  required string id = 1;
  required string filename = 2;
}

message EmptyResponse {
}
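On the client side, the GraphQL documents used by the component below (GENERATE_PROGRAM_PREVIEW_PDF and ASYNC_FILE_UPLOAD_SUBSCRIPTION) are not shown in the original; assuming Apollo Client, they presumably look something like this, with the mutation returning the asyncFileId and the subscription pushing the download URL:

import { gql } from '@apollo/client';

// Hypothetical documents for the hooks used below; the schema types are assumptions.
export const GENERATE_PROGRAM_PREVIEW_PDF = gql`
  mutation GenerateProgramPreviewPDF($id: ID!) {
    generateProgramPreviewPDF(id: $id)
  }
`;

export const ASYNC_FILE_UPLOAD_SUBSCRIPTION = gql`
  subscription AsyncFileDownloadUrl($fileId: ID!) {
    asyncFileDownloadUrl(fileId: $fileId)
  }
`;

The component triggers the mutation, stores the returned asyncFileId, and switches to the subscription: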
const { programId } = useParams();
const [watchAsyncFileId, setAsyncFileId] = useState(null);

const [fetchURL, { loading: urlLoading }] = useMutation(GENERATE_PROGRAM_PREVIEW_PDF, {
  variables: {
    id: programId,
  },
});

const { data, variables } = useSubscription(ASYNC_FILE_UPLOAD_SUBSCRIPTION, {
  variables: {
    fileId: watchAsyncFileId,
  },
  skip: !watchAsyncFileId,
});

const loading = useMemo(() => urlLoading, [urlLoading]);

const doAction = useCallback(async () => {
  const response = await fetchURL();
  if (response
    && response.data
    && response.data.generateProgramPreviewPDF) {
    const asyncFileId = response.data.generateProgramPreviewPDF;
    setAsyncFileId(asyncFileId);
  }
}, [fetchURL]);

if (watchAsyncFileId
  && watchAsyncFileId === variables.fileId
  && data?.asyncFileDownloadUrl) {
  return (
    <div style={styles.wrapper}>
      <a
        target="_blank"
        rel="noopener noreferrer"
        href={`${STATIC_HOST}/${data?.asyncFileDownloadUrl}`}
        css={styles.btn}>
        下載 PDF
      </a>
      <p style={styles.helper}>檔案製作完成</p>
    </div>
  );
}

return (
  <div style={styles.wrapper}>
    <button
      type="button"
      disabled={loading}
      onClick={doAction}
      css={[
        styles.btn,
        loading && styles.btnLoading,
      ]}>
      {loading || watchAsyncFileId ? (
        <LoadingSpinner />
      ) : '下載 PDF'}
    </button>
    {loading || watchAsyncFileId ? (
      <p style={styles.helper}>檔案製作中..</p>
    ) : null}
  </div>
);
Third-party scripts such as YouTube, Google Maps, and Facebook should only load when the user actually requests those services. For performance, we prefer to load each script lazily, at the moment it is used.
The YouTube iframe API is loaded from https://www.youtube.com/iframe_api and invokes the global onYouTubeIframeAPIReady callback once it is ready. Because the script loads asynchronously, that callback has to be coordinated with React's render cycle.
const YOUTUBE_API_SRC = 'https://www.youtube.com/iframe_api';
const youtubeOnLoadedTasks = [];

window.isYoutubeStartedLoading = false;
window.onYouTubeIframeAPIReady = function onYouTubeIframeAPIReady() {
  youtubeOnLoadedTasks.forEach((task) => task());
};

function useYoutubePlayer() {
  const loadPlayer = useCallback(() => {
    // do something that requires the YouTube iframe API
  }, []);

  useEffect(() => {
    let cancelled = false;

    // The API is already loaded: use it immediately.
    if (typeof YT !== 'undefined') {
      loadPlayer();
      return () => {};
    }

    // Otherwise queue the work until onYouTubeIframeAPIReady fires.
    youtubeOnLoadedTasks.push(() => {
      if (cancelled) return;
      loadPlayer();
    });

    // Inject the script tag only once, no matter how many hook instances mount.
    if (!window.isYoutubeStartedLoading) {
      const scriptTag = document.createElement('script');
      scriptTag.src = YOUTUBE_API_SRC;
      document.body.appendChild(scriptTag);
      window.isYoutubeStartedLoading = true;
    }

    return () => { cancelled = true; };
  }, []);
}
We need to record the article view count on every request to the server, but in a scalable system this quickly becomes a bottleneck. Updating a view counter in the database locks the data row (or, with a bad index configuration, the whole table), which degrades query performance.
[Architecture diagram: each view produces a Kafka message (stream processing) and updates a cached Views Counter in Redis (in-memory database) every time; a Consumer subscribes to the topic and consumes the messages, keeping full logs in a dedicated ArticleViewLogs table and periodically updating the aggregated ArticleViews table; reads fetch the cached view from Redis.]
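The ArticleViewLog and ArticleView entities referenced below are not defined in the snippets; assuming TypeORM (which the code already uses), they could look roughly like this, with column names taken from the consumer code:

// Hypothetical TypeORM entities for the two tables in the diagram above.
import {
  Entity, PrimaryColumn, PrimaryGeneratedColumn, Column,
  CreateDateColumn, UpdateDateColumn, Index,
} from 'typeorm';

// One row per view event (the "full logs" table).
@Entity('ArticleViewLogs')
export class ArticleViewLog {
  @PrimaryGeneratedColumn('uuid')
  id: string;

  @Index()
  @Column()
  ArticleId: string;

  @Column({ nullable: true })
  MemberId?: string;

  @CreateDateColumn()
  createdAt: Date;
}

// One row per article holding the periodically aggregated count.
@Entity('ArticleViews')
export class ArticleView {
  @PrimaryColumn()
  ArticleId: string;

  @Column({ default: 0 })
  views: number;

  @UpdateDateColumn()
  updatedAt: Date;
}

With those tables in mind, the read path checks Redis first: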
const REDIS_URI = process.env.REDIS_URI || 'redis://127.0.0.1:6379';
const redisClient = redis.createClient({ url: REDIS_URI });
const CACHE_EXPIRE_SECONDS = 4 * 60 * 60; // 4 hours

export async function getArticleViews(articleId: string): Promise<number> {
  return new Promise((resolve) => {
    redisClient.get(articleId, async (_, views) => {
      if (views) {
        resolve(Number(views));
      } else {
        const manager = await getManager();
        const articleViewRecord = await manager.findOne(ArticleView, { ArticleId: articleId });
        if (articleViewRecord) {
          redisClient.set(articleId, articleViewRecord.views.toString());
          redisClient.expire(articleId, CACHE_EXPIRE_SECONDS);
          resolve(articleViewRecord.views);
        } else {
          redisClient.set(articleId, '0');
          redisClient.expire(articleId, CACHE_EXPIRE_SECONDS);
          resolve(0);
        }
      }
    });
  });
}
On a cache miss, fall back to the aggregated table.
import { Kafka, logLevel } from 'kafkajs';

const KAFKA_CLIENT_ID = process.env.KAFKA_CLIENT_ID || 'wealth-usage-listener';
const KAFKA_BROKER_URI = process.env.KAFKA_BROKER_URI || 'localhost:7092';
const KAFKA_ARTICLE_VIEW_TOPIC = process.env.KAFKA_ARTICLE_VIEW_TOPIC || 'wealth-article-views';

const kafka = new Kafka({
  logLevel: logLevel.INFO,
  brokers: [KAFKA_BROKER_URI],
  clientId: KAFKA_CLIENT_ID,
});

const producer = kafka.producer();

interface QueueRecord {
  articleId: string;
  memberId?: string;
}

// Views recorded before the producer finishes connecting are queued here.
const initQueue: QueueRecord[] = [];
let isInitialing = false;
let isConnected = false;

export default async function init() {
  if (isInitialing || isConnected) {
    return;
  }
  debugKafkaProducer('Connecting Producer...');
  isInitialing = true;

  // Flush the queued records sequentially once the producer connects.
  producer.on('producer.connect', () => {
    initQueue.map((record) => async () => {
      await producer.send({
        topic: KAFKA_ARTICLE_VIEW_TOPIC,
        messages: [
          {
            key: 'id',
            value: record.articleId,
            ...(record.memberId ? {
              headers: {
                memberId: record.memberId,
              },
            } : {}),
          },
        ],
      });
    }).reduce((prev, next) => prev.then(next), Promise.resolve());
  });

  await producer.connect();
  isConnected = true;
}
export async function recordArticleView(
  articleId: string,
  memberId?: string | undefined,
): Promise<void> {
  redisClient.get(articleId, (_, views) => {
    if (views) {
      redisClient.set(articleId, (Number(views) + 1).toString());
    } else {
      redisClient.set(articleId, '1');
    }
    redisClient.expire(articleId, CACHE_EXPIRE_SECONDS);
  });

  if (!isConnected) {
    debugKafkaProducer('Kafka Producer is not connected.');
    initQueue.push({ articleId, memberId });
    if (!isInitialing) {
      init();
    }
    return;
  }

  await producer.send({
    topic: KAFKA_ARTICLE_VIEW_TOPIC,
    messages: [
      {
        key: 'id',
        value: articleId,
        ...(memberId ? {
          headers: {
            memberId,
          },
        } : {}),
      },
    ],
  });
}
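How the two helpers fit together at the API layer is not shown; a hypothetical resolver (the signature and context shape are assumptions) would record the view and return the cached count:

// Hypothetical article resolver: record the view (Redis + Kafka, fire-and-forget)
// and return the cached counter with the article payload.
export async function resolveArticle(
  _parent: unknown,
  { id }: { id: string },
  ctx: { memberId?: string },
) {
  await recordArticleView(id, ctx.memberId);
  const views = await getArticleViews(id);
  return { id, views };
}

On the other side of the Kafka topic, a consumer turns the stream of view events into log rows and periodic aggregates: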
const KAFKA_CONSUMER_GROUP_ID = process.env.KAFKA_CONSUMER_GROUP_ID || 'wealth-consumer';
const KAFKA_ARTICLE_VIEW_TOPIC = process.env.KAFKA_ARTICLE_VIEW_TOPIC || 'wealth-article-views';
const ARTICLE_VIEW_UPDATE_FEQ_IN_MS = Number(process.env.ARTICLE_VIEW_UPDATE_FEQ_IN_MS || '30000'); // 30 sec

const consumer = kafka.consumer({ groupId: KAFKA_CONSUMER_GROUP_ID });

export default async function run() {
  await consumer.connect();
  await consumer.subscribe({ topic: KAFKA_ARTICLE_VIEW_TOPIC });

  let lastSync = Date.now();

  await consumer.run({
    eachMessage: async ({ topic, message }) => {
      const articleId = message.value!.toString();

      // Every view event becomes one row in the full log table.
      if (topic === KAFKA_ARTICLE_VIEW_TOPIC && message?.key?.toString() === 'id') {
        const manager = await getManager();
        const log = manager.create(ArticleViewLog, {
          ArticleId: articleId,
          MemberId: message.headers?.memberId?.toString() ?? undefined,
        });
        await manager.save(log);
      }

      // Periodically fold the logs into the aggregated ArticleViews table.
      if (ARTICLE_VIEW_UPDATE_FEQ_IN_MS
        && (Date.now() - lastSync) > ARTICLE_VIEW_UPDATE_FEQ_IN_MS) {
        const manager = await getManager();
        const count = await manager.count(ArticleViewLog, { ArticleId: articleId });
        const viewRecord = await manager.findOne(ArticleView, articleId);
        if (viewRecord) {
          viewRecord.views = count;
          viewRecord.updatedAt = new Date();
          await manager.save(viewRecord);
        } else {
          const newRecord = manager.create(ArticleView, {
            ArticleId: articleId,
            views: count,
          });
          await manager.save(newRecord);
        }
        lastSync = Date.now();
        debugKafkaConsumer(`Article ${articleId} updated views to ${count}`);
      }
    },
  });
}
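The consumer process needs its TypeORM connection before it can write logs; a minimal bootstrap (the file path is an assumption) might be:

// Hypothetical bootstrap for the consumer process: open the database connection,
// then start consuming. './consumer' is a placeholder for the file defining run().
import { createConnection } from 'typeorm';
import run from './consumer';

createConnection()
  .then(() => run())
  .catch((err) => {
    console.error('Article view consumer failed to start', err);
    process.exit(1);
  });

Finally, a small Koa server exposes the cached count over HTTP: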
import debug from 'debug';
import Koa from 'koa';
import { createConnection } from 'typeorm';
import { getArticleViews } from './worker';

const debugServer = debug('Wealth:UsageListenerServer');
const USAGE_LISTENER_PORT = Number(process.env.USAGE_LISTENER_PORT || '6068');

const app = new Koa();

app.use(async (ctx) => {
  const articleId = ctx.url.replace(/^\//, '');
  ctx.body = await getArticleViews(articleId);
});

createConnection().then(() => {
  app.listen(USAGE_LISTENER_PORT, () => {
    debugServer(`Usage Listener Server listen on ${USAGE_LISTENER_PORT}`);
  });
});