diff --git a/index.js b/index.js
index e33e97f..2f935dd 100644
--- a/index.js
+++ b/index.js
@@ -67,8 +67,8 @@ async function fetchApi(token, query) {
 
 /**
  * Get the content path
- * @param {*} uniqid 
- * @returns 
+ * @param {*} uniqid
+ * @returns
  */
 function getContentPath(uniqid) {
   const subDir = 'content/' + uniqid.substring(0, 1);
@@ -116,19 +116,18 @@ function getMimeFromUrl(u) {
 
 /**
  * Generate a thumbnail and cache it
- * @param {*} reply 
- * @param {*} apiData 
- * @param {*} metaPath 
- * @param {*} contentPath 
- * @returns 
+ * @param {*} reply
+ * @param {*} apiData
+ * @param {*} contentPath
+ * @returns
  */
-async function generateThumbAndCache(reply, apiData, metaPath, contentPath) {
+async function generateThumbAndCache(reply, apiData, contentPath) {
   const srcPath = contentPath;
   const dir = path.dirname(srcPath);
   const base = path.basename(srcPath);
   const thumbFinal = path.join(dir, base.replace('.data', `.thumb`));
-  const metaThumbPath = metaPath.replace('.meta', '.thumb.meta');
+  const metaThumbPath = contentPath.replace('.data', '.thumb.meta');
   if (fs.existsSync(thumbFinal) && fs.existsSync(metaThumbPath)) {
     const st = fs.statSync(thumbFinal);
     if (st.size > 0) {
@@ -231,13 +230,13 @@ async function generateThumbAndCache(reply, apiData, metaPath, contentPath) {
 }
 
 // Serve existing file
-function serveCompletedCache(reply, apiData, metaPath, contentPath) {
+function serveCompletedCache(reply, apiData, contentPath) {
   const content = contentPath;
   const responseHeaders = { ...apiData.data.headers };
   if (!responseHeaders['Access-Control-Allow-Origin']) {
     responseHeaders['Access-Control-Allow-Origin'] = '*';
   }
-  // Fastify handles Range requests automatically if we send the stream? 
+  // Fastify handles Range requests automatically if we send the stream?
   // Actually, for full control over custom headers + Range, we often need manual handling or plugins.
   // But serving a raw stream in Fastify usually just pipes it.
   // For "High Performance", letting nginx handle static files is best, but here we do it in Node.
@@ -248,7 +247,7 @@ function serveCompletedCache(reply, apiData, metaPath, contentPath) {
 
   // For completed files, we can use fs.createReadStream.
 
   const range = reply.request.headers.range;
-  const stat = fs.statSync(content); // Sync is okay for startup/metadata, but Async preferred in high-perf. 
+  const stat = fs.statSync(content); // Sync is okay for startup/metadata, but Async preferred in high-perf.
   // In strict high-perf, use fs.promises.stat or cache stats.
   const totalSize = stat.size;
@@ -273,7 +272,7 @@ function serveCompletedCache(reply, apiData, metaPath, contentPath) {
 }
 
 // Download and Serve logic
-async function downloadAndServe(reply, apiData, metaPath, contentPath, key) {
+async function downloadAndServe(reply, apiData, contentPath, key) {
 
   let task = activeDownloads.get(key);
 
@@ -321,7 +320,6 @@ async function downloadAndServe(reply, apiData, metaPath, contentPath, key) {
       ws.end();
       return ws;
     }
-    fs.writeFileSync(metaPath, JSON.stringify(apiData));
     task.totalSize = parseInt(headers['content-length'] || '0', 10);
     const fileStream = fs.createWriteStream(task.path);
     fileStream.on('error', (err) => {
@@ -371,15 +369,15 @@ async function downloadAndServe(reply, apiData, metaPath, contentPath, key) {
   if (task.done && fs.existsSync(contentPath)) {
     console.log('Download completed:', key);
     if (isValidThumbSpec(apiData.data.thumb)) {
-      return generateThumbAndCache(reply, apiData, metaPath, contentPath).catch(() => { });
+      return generateThumbAndCache(reply, apiData, contentPath).catch(() => { });
     }
-    return serveCompletedCache(reply, apiData, metaPath, contentPath);//reply, apiData, metaPath, contentPath
+    return serveCompletedCache(reply, apiData, contentPath);//reply, apiData, contentPath
   }
 
   console.log('Downloading:', key);
 
   if (isValidThumbSpec(apiData.data.thumb)) {
-    return generateThumbAndCache(reply, apiData, metaPath, contentPath).catch(() => { });
+    return generateThumbAndCache(reply, apiData, contentPath).catch(() => { });
   }
 
   // Serve growing file
@@ -421,7 +419,7 @@ function serveGrowingFile(reply, task, apiData) {
   return new Readable({
     read(size) {
       const self = this;
-      let bytesSent = start; // State needs to be per-stream instance. 
+      let bytesSent = start; // State needs to be per-stream instance.
       // Wait, 'read' is called multiple times. We need to store state outside or on 'this'.
 
       if (this._bytesSent === undefined) this._bytesSent = start;
@@ -468,7 +466,7 @@ function serveGrowingFile(reply, task, apiData) {
           // Actually Node streams: if push returns true, we can push more.
           // But here we just push what we have and wait for next _read call or event?
           // Standard implementation: push until it returns false.
-          // But for "live" tailing, we might want to just push what we have and exit, 
+          // But for "live" tailing, we might want to just push what we have and exit,
           // expecting _read to be called again by consumer.
         } else {
           wait(stream);
@@ -509,7 +507,7 @@ function serveGrowingFile(reply, task, apiData) {
         task.emitter.once('done', onDone); // Check done state
         task.emitter.once('error', onError);
 
-        // If stream destroyed, remove listeners? 
+        // If stream destroyed, remove listeners?
         // Readable.read is active, so stream is active.
       }
     },
@@ -588,13 +586,13 @@ fastify.get('/*', async (request, reply) => {
     }
 
     const contentPath = getContentPath(apiData.data.uniqid || key);
-    if (fs.existsSync(contentPath) && fs.existsSync(metaPath) && !nocache) {
+    if (fs.existsSync(contentPath) && !nocache) {
       if (isValidThumbSpec(apiData.data.thumb)) {
-        return generateThumbAndCache(reply, apiData, metaPath, contentPath).catch(() => { });
+        return generateThumbAndCache(reply, apiData, contentPath).catch(() => { });
       }
-      return serveCompletedCache(reply, apiData, metaPath, contentPath);
+      return serveCompletedCache(reply, apiData, contentPath);
     }
-    return await downloadAndServe(reply, apiData, metaPath, contentPath, key);
+    return await downloadAndServe(reply, apiData, contentPath, key);
   } catch (err) {
     request.log.error(err);
     reply.code(502);
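
Note on the cache-path convention this diff relies on: every derived artifact is computed from the '.data' content path, which is why the separate metaPath argument could be dropped from the whole call chain. A minimal sketch of that derivation, assuming getContentPath() returns paths ending in '.data' (the derivedPaths helper below is hypothetical, for illustration only, not part of the diff):

const path = require('path');

// Hypothetical helper; mirrors the two replace expressions used in
// generateThumbAndCache above.
function derivedPaths(contentPath) {      // e.g. 'content/a/abc123.data'
  const dir = path.dirname(contentPath);
  const base = path.basename(contentPath);
  return {
    // 'content/a/abc123.thumb'
    thumb: path.join(dir, base.replace('.data', '.thumb')),
    // 'content/a/abc123.thumb.meta'
    thumbMeta: contentPath.replace('.data', '.thumb.meta'),
  };
}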