refactor: restructure the project and remove legacy code
Remove obfuscate.js and the old fastify implementation, merge the optimized new/fastify.js in as the main fastify.js, update the package.json dependencies (adding sharp and fluent-ffmpeg), and delete the no-longer-used index.php.
609
fastify.js
Normal file
@@ -0,0 +1,609 @@
|
||||
const fastify = require('fastify')({
|
||||
logger: false, // disable the default logger; greatly improves throughput
|
||||
disableRequestLogging: true, // disable per-request logging
|
||||
connectionTimeout: 30000, // release dead connections quickly
|
||||
keepAliveTimeout: 5000, // tune Keep-Alive
|
||||
});
|
||||
const ffmpeg = require('fluent-ffmpeg');
|
||||
|
||||
|
||||
|
||||
const { request } = require('undici'); // High-performance HTTP client
|
||||
const sharp = require('sharp');
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const crypto = require('crypto');
|
||||
const EventEmitter = require('events');
|
||||
|
||||
// Configuration
|
||||
const PORT = 9520;
|
||||
const API_BASE = 'http://127.0.0.1:9558/api';
|
||||
const CACHE_DIR = path.join(__dirname, '.cache');
|
||||
|
||||
// Ensure cache directory exists
|
||||
if (!fs.existsSync(CACHE_DIR)) {
|
||||
fs.mkdirSync(CACHE_DIR, { recursive: true });
|
||||
}
|
||||
|
||||
// Active downloads manager
|
||||
const activeDownloads = new Map();
|
||||
|
||||
// Global Error Handling to prevent crash
|
||||
process.on('uncaughtException', (err) => {
|
||||
console.error(`[${new Date().toISOString()}] Uncaught Exception:`, err);
|
||||
});
|
||||
|
||||
process.on('unhandledRejection', (reason, promise) => {
|
||||
console.error(`[${new Date().toISOString()}] Unhandled Rejection:`, reason);
|
||||
});
|
||||
|
||||
// Helper to fetch JSON from API using Undici (Faster than http.get)
|
||||
async function fetchApi(token) {
|
||||
const apiUrl = new URL(API_BASE);
|
||||
if (token) {
|
||||
apiUrl.searchParams.set('token', token);
|
||||
}
|
||||
|
||||
const { statusCode, body } = await request(apiUrl, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'Connection': 'keep-alive',
|
||||
'token': token
|
||||
},
|
||||
bodyTimeout: 5000,
|
||||
headersTimeout: 5000
|
||||
});
|
||||
|
||||
if (statusCode !== 200) {
|
||||
throw new Error(`API Status Code: ${statusCode}`);
|
||||
}
|
||||
|
||||
const data = await body.json();
|
||||
return data;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the content cache path for a uniqid
|
||||
* @param {*} uniqid
|
||||
* @returns
|
||||
*/
|
||||
function getContentPath(uniqid) {
|
||||
const subDir = 'content/' + uniqid.substring(0, 1);
|
||||
const dir = path.join(CACHE_DIR, subDir);
|
||||
if (!fs.existsSync(dir)) {
|
||||
fs.mkdirSync(dir, { recursive: true });
|
||||
}
|
||||
return path.join(dir, `${uniqid}.data`);
|
||||
}
|
||||
|
||||
function getMetaPath(key) {
|
||||
const subDir = 'meta/' + key.substring(0, 1);
|
||||
const dir = path.join(CACHE_DIR, subDir);
|
||||
if (!fs.existsSync(dir)) {
|
||||
fs.mkdirSync(dir, { recursive: true });
|
||||
}
|
||||
return path.join(dir, `${key}.meta`)
|
||||
}
|
||||
|
||||
function getTokenKey(token) {
|
||||
return crypto.createHash('md5').update(token).digest('hex');
|
||||
}
|
||||
|
||||
function normalizeApiData(metaJson) {
|
||||
if (metaJson && metaJson.api && metaJson.api.data) {
|
||||
return metaJson.api;
|
||||
}
|
||||
return metaJson;
|
||||
}
|
||||
|
||||
function isValidThumbSpec(thumb) {
|
||||
return thumb && typeof thumb === 'object' && !Array.isArray(thumb) && Number.isFinite(thumb.w) && thumb.w > 0;
|
||||
}
|
||||
|
||||
function getMimeFromUrl(u) {
|
||||
const ext = path.extname(new URL(u).pathname).toLowerCase();
|
||||
if (ext === '.png') return 'image/png';
|
||||
if (ext === '.webp') return 'image/webp';
|
||||
if (ext === '.gif') return 'image/gif';
|
||||
if (ext === '.jpg' || ext === '.jpeg' || ext === '.jfif') return 'image/jpeg';
|
||||
if (ext === '.mp4' || ext === '.m4v' || ext === '.mov') return 'video/mp4';
|
||||
if (ext === '.webm') return 'video/webm';
|
||||
return 'image/jpeg';
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate a thumbnail and cache it
|
||||
* @param {*} reply
|
||||
* @param {*} apiData
|
||||
* @param {*} metaPath
|
||||
* @param {*} contentPath
|
||||
* @returns
|
||||
*/
|
||||
async function generateThumbAndCache(reply, apiData, metaPath, contentPath) {
|
||||
|
||||
const srcPath = contentPath;
|
||||
const dir = path.dirname(srcPath);
|
||||
const base = path.basename(srcPath);
|
||||
const thumbFinal = path.join(dir, base.replace('.data', `.thumb`));
|
||||
const metaThumbPath = metaPath.replace('.meta', '.thumb.meta');
|
||||
if (fs.existsSync(thumbFinal) && fs.existsSync(metaThumbPath)) {
|
||||
const st = fs.statSync(thumbFinal);
|
||||
if (st.size > 0) {
|
||||
// read cached thumbnail metadata
|
||||
const metaJson = JSON.parse(fs.readFileSync(metaThumbPath, 'utf8'));
|
||||
// inputFormat
|
||||
const inputFormat = metaJson.inputFormat || 'webp';
|
||||
const responseHeaders = {
|
||||
...apiData.data.headers, ...{
|
||||
'Content-Type': `image/${inputFormat === 'jpg' ? 'jpeg' : inputFormat}`,
|
||||
'Content-Length': st.size,
|
||||
'Accept-Ranges': 'bytes',
|
||||
}
|
||||
};
|
||||
reply.headers(responseHeaders);
|
||||
return fs.createReadStream(thumbFinal);
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
console.log('Generating thumb:', srcPath);
|
||||
let attempts = 0;
|
||||
while (!fs.existsSync(srcPath) && attempts < 80) {
|
||||
await new Promise(r => setTimeout(r, 100));
|
||||
attempts++;
|
||||
}
|
||||
if (!fs.existsSync(srcPath)) {
|
||||
console.log('Thumb source file not found:', srcPath);
|
||||
reply.code(500);
|
||||
return 'Thumb source file not found';
|
||||
}
|
||||
let stat = fs.statSync(srcPath);
|
||||
attempts = 0;
|
||||
while (stat.size <= 0 && attempts < 80) {
|
||||
await new Promise(r => setTimeout(r, 100));
|
||||
attempts++;
|
||||
stat = fs.statSync(srcPath);
|
||||
}
|
||||
if (stat.size <= 0) {
|
||||
reply.code(500);
|
||||
return 'Thumb source file is empty';
|
||||
}
|
||||
const fit = (apiData.data.thumb && apiData.data.thumb.fit === 'max') ? 'inside' : 'cover';
|
||||
const width = (apiData.data.thumb && apiData.data.thumb.w) ? apiData.data.thumb.w : 100;
|
||||
const contentType = (apiData.data.headers && apiData.data.headers['content-type']) || getMimeFromUrl(apiData.data.url) || 'image/jpeg';
|
||||
const preferredFmt = contentType.includes('png') ? 'png' : contentType.includes('webp') ? 'webp' : 'jpeg';
|
||||
|
||||
if (contentType.includes('video/')) {
|
||||
console.log('Generating video thumb:', srcPath);
|
||||
const thumbTemp = path.join(dir, base.replace('.data', `.thumb.webp.tmp`));
|
||||
const { spawn } = require('child_process');
|
||||
const args = ['-ss', '1', '-i', srcPath, '-frames:v', '1', '-vf', `scale=${width}:-2`, '-y', thumbTemp];
|
||||
await new Promise((resolve, reject) => { const p = spawn('ffmpeg', args, { stdio: 'ignore' }); p.on('error', reject); p.on('close', c => c === 0 ? resolve() : reject(new Error(`ffmpeg exit ${c}`))); });
|
||||
try { fs.renameSync(thumbTemp, thumbFinal); } catch (e) { if (fs.existsSync(thumbFinal)) { try { fs.unlinkSync(thumbFinal); } catch (_) { } fs.renameSync(thumbTemp, thumbFinal); } else { throw e; } }
|
||||
await fs.promises.writeFile(metaThumbPath, JSON.stringify({ api: apiData, headers: apiData.data.headers || {}, srcSize: stat.size, inputFormat: 'video' }));
|
||||
const tstat = fs.statSync(thumbFinal);
|
||||
reply.headers({ 'Content-Type': 'image/webp', 'Content-Length': tstat.size, 'Accept-Ranges': 'bytes', 'Access-Control-Allow-Origin': '*' });
|
||||
return fs.createReadStream(thumbFinal);
|
||||
}
|
||||
const inputMeta = await sharp(srcPath).metadata();
|
||||
const outFmt = preferredFmt || inputMeta.format || 'jpeg';
|
||||
const thumbTemp = path.join(dir, base.replace('.data', `.thumb.tmp`));
|
||||
const pipeline = sharp(srcPath).resize({ width, fit });
|
||||
if (outFmt === 'jpeg') pipeline.jpeg({ quality: 85 });
|
||||
else if (outFmt === 'png') pipeline.png();
|
||||
else pipeline.webp({ quality: 80 });
|
||||
await pipeline.toFile(thumbTemp);
|
||||
try {
|
||||
fs.renameSync(thumbTemp, thumbFinal);
|
||||
} catch (e) {
|
||||
if (fs.existsSync(thumbFinal)) {
|
||||
try { fs.unlinkSync(thumbFinal); } catch (_) { }
|
||||
fs.renameSync(thumbTemp, thumbFinal);
|
||||
} else {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
await fs.promises.writeFile(metaThumbPath, JSON.stringify({ api: apiData, headers: apiData.data.headers || {}, srcSize: stat.size, inputFormat: inputMeta.format || null }));
|
||||
const tstat = fs.statSync(thumbFinal);
|
||||
const responseHeaders = {
|
||||
...apiData.data.headers, ...{
|
||||
'Content-Type': `image/${outFmt === 'jpeg' ? 'jpeg' : outFmt}`,
|
||||
'Content-Length': tstat.size,
|
||||
'Accept-Ranges': 'bytes',
|
||||
}
|
||||
};
|
||||
reply.headers(responseHeaders);
|
||||
return fs.createReadStream(thumbFinal);
|
||||
} catch (e) {
|
||||
reply.code(500);
|
||||
return 'Thumb generation failed: ' + e.message;
|
||||
} finally {
|
||||
// cleanup leftover temp if any
|
||||
const dir = path.dirname(srcPath);
|
||||
const base = path.basename(srcPath);
|
||||
const thumbTemp = path.join(dir, base.replace('.data', `.thumb.tmp`));
|
||||
try { if (fs.existsSync(thumbTemp)) fs.unlinkSync(thumbTemp); } catch (_) { }
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
// Serve existing file
|
||||
function serveCompletedCache(reply, apiData, metaPath, contentPath) {
|
||||
const content = contentPath;
|
||||
const responseHeaders = { ...apiData.data.headers };
|
||||
if (!responseHeaders['Access-Control-Allow-Origin']) {
|
||||
responseHeaders['Access-Control-Allow-Origin'] = '*';
|
||||
}
|
||||
// Fastify handles Range requests automatically if we send the stream?
|
||||
// Actually, for full control over custom headers + Range, we often need manual handling or plugins.
|
||||
// But serving a raw stream in Fastify usually just pipes it.
|
||||
// For "High Performance", letting nginx handle static files is best, but here we do it in Node.
|
||||
// We will stick to the manual Range logic for consistency with previous "growing file" support.
|
||||
|
||||
// To support Range properly with Fastify + Stream, we can set headers and return stream.
|
||||
// But for "growing" files, we need our custom pump logic.
|
||||
// For completed files, we can use fs.createReadStream.
|
||||
|
||||
const range = reply.request.headers.range;
|
||||
const stat = fs.statSync(content); // Sync is okay for startup/metadata, but Async preferred in high-perf.
|
||||
// In strict high-perf, use fs.promises.stat or cache stats.
|
||||
|
||||
const totalSize = stat.size;
|
||||
|
||||
if (range) {
|
||||
const parts = range.replace(/bytes=/, "").split("-");
|
||||
const start = parseInt(parts[0], 10);
|
||||
const end = parts[1] ? parseInt(parts[1], 10) : totalSize - 1;
|
||||
|
||||
responseHeaders['Content-Range'] = `bytes ${start}-${end}/${totalSize}`;
|
||||
responseHeaders['Accept-Ranges'] = 'bytes';
|
||||
responseHeaders['Content-Length'] = (end - start) + 1;
|
||||
|
||||
reply.code(206).headers(responseHeaders);
|
||||
return fs.createReadStream(content, { start, end });
|
||||
} else {
|
||||
responseHeaders['Content-Length'] = totalSize;
|
||||
responseHeaders['Accept-Ranges'] = 'bytes';
|
||||
reply.code(200).headers(responseHeaders);
|
||||
return fs.createReadStream(content);
|
||||
}
|
||||
}
|
||||
|
||||
// Download and Serve logic
|
||||
async function downloadAndServe(reply, apiData, metaPath, contentPath, key) {
|
||||
|
||||
|
||||
let task = activeDownloads.get(key);
|
||||
// If the task finished or errored but the final file is missing, reset it so the download can be retried
|
||||
if (task && (task.done || task.error)) {
|
||||
const hasFinal = fs.existsSync(contentPath);
|
||||
if (!hasFinal) {
|
||||
activeDownloads.delete(key);
|
||||
task = null;
|
||||
}
|
||||
}
|
||||
const isValidTask = task && typeof task === 'object' && task.emitter instanceof EventEmitter && !task.done;
|
||||
if (!isValidTask) {
|
||||
const finalPath = contentPath;
|
||||
const tempPath = path.join(path.dirname(finalPath), `${key}_${crypto.randomBytes(8).toString('hex')}.tmp`);
|
||||
task = {
|
||||
emitter: new EventEmitter(),
|
||||
currentSize: 0,
|
||||
totalSize: 0,
|
||||
path: tempPath,
|
||||
done: false,
|
||||
error: null
|
||||
};
|
||||
task.emitter.setMaxListeners(0);
|
||||
// Prevent crash if no listeners for error event
|
||||
task.emitter.on('error', (err) => {
|
||||
console.error(`[${new Date().toISOString()}] Download Error (Key: ${key}):`, err.message);
|
||||
});
|
||||
activeDownloads.set(key, task);
|
||||
|
||||
// Start Download
|
||||
const targetUrl = apiData.data.url;
|
||||
|
||||
// Use Undici stream for high performance download
|
||||
// stream() is efficient for piping
|
||||
const { stream } = require('undici');
|
||||
stream(targetUrl, { method: 'GET' }, (res) => {
|
||||
const { statusCode, headers } = res;
|
||||
if (statusCode !== 200) {
|
||||
const err = new Error(`Upstream ${statusCode}`);
|
||||
task.error = err;
|
||||
task.emitter.emit('error', err);
|
||||
activeDownloads.delete(key);
|
||||
const ws = fs.createWriteStream('/dev/null');
|
||||
ws.end();
|
||||
return ws;
|
||||
}
|
||||
fs.writeFileSync(metaPath, JSON.stringify(apiData));
|
||||
task.totalSize = parseInt(headers['content-length'] || '0', 10);
|
||||
const fileStream = fs.createWriteStream(task.path);
|
||||
fileStream.on('error', (err) => {
|
||||
task.emitter.emit('error', err);
|
||||
});
|
||||
const originalWrite = fileStream.write.bind(fileStream);
|
||||
fileStream.write = (chunk, encoding, cb) => {
|
||||
const ret = originalWrite(chunk, encoding, cb);
|
||||
task.currentSize += chunk.length;
|
||||
task.emitter.emit('progress', task.currentSize);
|
||||
return ret;
|
||||
};
|
||||
return fileStream;
|
||||
}).then(() => {
|
||||
try {
|
||||
const finalPath = contentPath;
|
||||
try {
|
||||
fs.renameSync(task.path, finalPath);
|
||||
} catch (err) {
|
||||
if (fs.existsSync(finalPath)) {
|
||||
try { fs.unlinkSync(finalPath); } catch (_) { }
|
||||
fs.renameSync(task.path, finalPath);
|
||||
} else {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
task.path = finalPath;
|
||||
task.done = true;
|
||||
task.emitter.emit('done');
|
||||
} catch (err) {
|
||||
task.error = err;
|
||||
task.emitter.emit('error', err);
|
||||
fs.unlink(task.path, () => { });
|
||||
} finally {
|
||||
activeDownloads.delete(key);
|
||||
}
|
||||
}).catch(err => {
|
||||
task.error = err;
|
||||
task.emitter.emit('error', err);
|
||||
activeDownloads.delete(key);
|
||||
fs.unlink(task.path, () => { });
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
|
||||
if (task.done && fs.existsSync(contentPath)) {
|
||||
console.log('Download completed:', key);
|
||||
if (isValidThumbSpec(apiData.data.thumb)) {
|
||||
return generateThumbAndCache(reply, apiData, metaPath, contentPath).catch(() => { });
|
||||
}
|
||||
return serveCompletedCache(reply, apiData, metaPath, contentPath);
|
||||
}
|
||||
|
||||
|
||||
console.log('Downloading:', key);
|
||||
if (isValidThumbSpec(apiData.data.thumb)) {
|
||||
return generateThumbAndCache(reply, apiData, metaPath, contentPath).catch(() => { });
|
||||
}
|
||||
|
||||
// Serve growing file
|
||||
return serveGrowingFile(reply, task, apiData);
|
||||
}
|
||||
|
||||
function serveGrowingFile(reply, task, apiData) {
|
||||
const responseHeaders = { ...apiData.data.headers };
|
||||
if (!responseHeaders['Access-Control-Allow-Origin']) responseHeaders['Access-Control-Allow-Origin'] = '*';
|
||||
// Drop any fixed Content-Length inherited from the cached meta headers, so the growing stream does not stall
|
||||
delete responseHeaders['Content-Length'];
|
||||
delete responseHeaders['content-length'];
|
||||
|
||||
const range = reply.request.headers.range;
|
||||
let start = 0;
|
||||
|
||||
if (range) {
|
||||
const parts = range.replace(/bytes=/, "").split("-");
|
||||
start = parseInt(parts[0], 10) || 0;
|
||||
responseHeaders['Accept-Ranges'] = 'bytes';
|
||||
// When the total size is unknown we must not return 206, otherwise the client waits on an invalid range and stalls
|
||||
if (task.totalSize) {
|
||||
responseHeaders['Content-Range'] = `bytes ${start}-${task.totalSize - 1}/${task.totalSize}`;
|
||||
responseHeaders['Content-Length'] = task.totalSize - start;
|
||||
reply.code(206);
|
||||
} else {
|
||||
reply.code(200);
|
||||
}
|
||||
} else {
|
||||
if (task.totalSize) responseHeaders['Content-Length'] = task.totalSize;
|
||||
reply.code(200);
|
||||
}
|
||||
|
||||
reply.headers(responseHeaders);
|
||||
|
||||
// Custom stream to pump data from file to response
|
||||
const { Readable } = require('stream');
|
||||
|
||||
return new Readable({
|
||||
read(size) {
|
||||
const self = this;
|
||||
let bytesSent = start; // State needs to be per-stream instance.
|
||||
// Wait, 'read' is called multiple times. We need to store state outside or on 'this'.
|
||||
if (this._bytesSent === undefined) this._bytesSent = start;
|
||||
|
||||
pump(this);
|
||||
|
||||
function pump(stream) {
|
||||
if (stream.destroyed) return;
|
||||
|
||||
if (task.error) {
|
||||
stream.destroy(task.error);
|
||||
return;
|
||||
}
|
||||
|
||||
// Open FD if needed
|
||||
if (!stream._fd) {
|
||||
fs.open(task.path, 'r', (err, fd) => {
|
||||
if (err) {
|
||||
if (err.code === 'ENOENT') {
|
||||
setTimeout(() => pump(stream), 100);
|
||||
} else {
|
||||
stream.destroy(err);
|
||||
}
|
||||
return;
|
||||
}
|
||||
stream._fd = fd;
|
||||
pump(stream);
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const available = task.currentSize - stream._bytesSent;
|
||||
|
||||
if (available > 0) {
|
||||
const buffer = Buffer.alloc(Math.min(available, 64 * 1024));
|
||||
fs.read(stream._fd, buffer, 0, buffer.length, stream._bytesSent, (err, bytesRead) => {
|
||||
if (err) {
|
||||
stream.destroy(err);
|
||||
return;
|
||||
}
|
||||
if (bytesRead > 0) {
|
||||
stream._bytesSent += bytesRead;
|
||||
const keepPushing = stream.push(buffer.slice(0, bytesRead));
|
||||
// If push returns false, we should stop and wait for _read again?
|
||||
// Actually Node streams: if push returns true, we can push more.
|
||||
// But here we just push what we have and wait for next _read call or event?
|
||||
// Standard implementation: push until it returns false.
|
||||
// But for "live" tailing, we might want to just push what we have and exit,
|
||||
// expecting _read to be called again by consumer.
|
||||
} else {
|
||||
wait(stream);
|
||||
}
|
||||
});
|
||||
} else {
|
||||
if (task.done) {
|
||||
fs.close(stream._fd, () => { });
|
||||
stream.push(null); // EOF
|
||||
} else {
|
||||
wait(stream);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function wait(stream) {
|
||||
// Wait for progress
|
||||
const onProgress = () => {
|
||||
cleanup();
|
||||
pump(stream);
|
||||
};
|
||||
const onDone = () => {
|
||||
cleanup();
|
||||
pump(stream);
|
||||
};
|
||||
const onError = (err) => {
|
||||
cleanup();
|
||||
stream.destroy(err);
|
||||
};
|
||||
|
||||
const cleanup = () => {
|
||||
task.emitter.off('progress', onProgress);
|
||||
task.emitter.off('done', onDone);
|
||||
task.emitter.off('error', onError);
|
||||
};
|
||||
|
||||
task.emitter.once('progress', onProgress);
|
||||
task.emitter.once('done', onDone); // Check done state
|
||||
task.emitter.once('error', onError);
|
||||
|
||||
// If stream destroyed, remove listeners?
|
||||
// Readable.read is active, so stream is active.
|
||||
}
|
||||
},
|
||||
destroy(err, cb) {
|
||||
if (this._fd) fs.close(this._fd, () => { });
|
||||
cb(err);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Global Fastify Error Handler
|
||||
fastify.setErrorHandler((error, request, reply) => {
|
||||
request.log.error(error);
|
||||
const statusCode = error.statusCode || 500;
|
||||
reply.status(statusCode).send({
|
||||
error: error.name,
|
||||
message: error.message,
|
||||
statusCode
|
||||
});
|
||||
});
|
||||
|
||||
// Routes
|
||||
fastify.get('/favicon.ico', async (request, reply) => {
|
||||
reply.code(204);
|
||||
return '';
|
||||
});
|
||||
|
||||
fastify.get('/*', async (request, reply) => {
|
||||
let token = (request.raw && request.raw.url ? request.raw.url : request.url).split('?')[0].substring(1);
|
||||
|
||||
if (!token) {
|
||||
reply.code(400);
|
||||
return 'Missing token';
|
||||
}
|
||||
|
||||
// Requests under .well-known are answered directly
|
||||
if (token.startsWith('.well-known')) {
|
||||
reply.code(200);
|
||||
return '';
|
||||
}
|
||||
|
||||
// Read the nocache query parameter
|
||||
const nocache = request.query.nocache ? true : false;
|
||||
|
||||
try {
|
||||
const key = getTokenKey(token);
|
||||
const metaPath = getMetaPath(key);
|
||||
const ONE_DAY_MS = 24 * 60 * 60 * 1000;
|
||||
let apiData = null;
|
||||
if (fs.existsSync(metaPath) && !nocache) {
|
||||
try {
|
||||
const stat = fs.statSync(metaPath);
|
||||
if ((Date.now() - stat.mtimeMs) < ONE_DAY_MS) {
|
||||
const parsed = JSON.parse(fs.readFileSync(metaPath, 'utf8'));
|
||||
apiData = normalizeApiData(parsed);
|
||||
}
|
||||
} catch (e) {
|
||||
apiData = null;
|
||||
}
|
||||
}
|
||||
// If the content file is missing, force an API refresh to avoid using an expired URL
|
||||
if (!apiData) {
|
||||
apiData = await fetchApi(token);
|
||||
if (apiData.code !== 200 || !apiData.data || !apiData.data.url) {
|
||||
console.log('Invalid API response:', apiData, token);
|
||||
reply.code(404);
|
||||
return 'Invalid API response';
|
||||
}
|
||||
fs.writeFileSync(metaPath, JSON.stringify(apiData));
|
||||
}
|
||||
|
||||
|
||||
if (apiData.code !== 200 || !apiData.data || !apiData.data.url) {
|
||||
reply.code(404);
|
||||
return 'Invalid API response';
|
||||
}
|
||||
|
||||
const contentPath = getContentPath(apiData.data.uniqid || key);
|
||||
if (fs.existsSync(contentPath) && fs.existsSync(metaPath)) {
|
||||
if (isValidThumbSpec(apiData.data.thumb)) {
|
||||
return generateThumbAndCache(reply, apiData, metaPath, contentPath).catch(() => { });
|
||||
}
|
||||
return serveCompletedCache(reply, apiData, metaPath, contentPath);
|
||||
}
|
||||
return await downloadAndServe(reply, apiData, metaPath, contentPath, key);
|
||||
} catch (err) {
|
||||
request.log.error(err);
|
||||
reply.code(502);
|
||||
return 'Gateway Error: ' + err.message;
|
||||
}
|
||||
});
|
||||
|
||||
// Run
|
||||
fastify.listen({ port: PORT, host: '0.0.0.0' }, (err, address) => {
|
||||
if (err) {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
}
|
||||
console.log(`Fastify Server running at ${address}`);
|
||||
});
|
||||
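As a quick smoke test of the route defined above, a minimal client sketch might look like the following (it assumes the service is running locally on the PORT constant above; the token value is only a placeholder, since real tokens come from the upstream API):

const { request } = require('undici');

async function smokeTest(token) {
    // Plain GET: everything after the leading slash is treated as the token.
    const full = await request(`http://127.0.0.1:9520/${token}`);
    console.log('status:', full.statusCode, 'type:', full.headers['content-type']);
    const buf = Buffer.from(await full.body.arrayBuffer());
    console.log('bytes received:', buf.length);

    // Ranged GET: completed cache files are served with Accept-Ranges / 206 support.
    const partial = await request(`http://127.0.0.1:9520/${token}`, {
        headers: { range: 'bytes=0-1023' }
    });
    console.log('range status:', partial.statusCode, partial.headers['content-range']);
    await partial.body.dump(); // discard the partial body
}

smokeTest('example-token').catch(console.error);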
542
index.php
@@ -1,542 +0,0 @@
|
||||
<?php
|
||||
|
||||
use Swoole\Coroutine\Http\Server;
|
||||
use Swoole\Coroutine\Http\Client;
|
||||
use function Swoole\Coroutine\run;
|
||||
|
||||
// 常量定义
|
||||
const CACHE_DIR_NAME = '.cache';
|
||||
const DEFAULT_PORT = 9001;
|
||||
const DEFAULT_API_ENDPOINT = 'http://183.6.121.121:9519/api';
|
||||
const CACHE_EXPIRY_MS = 24 * 60 * 60 * 1000; // 24小时
|
||||
const CACHE_CLEANUP_INTERVAL_MS = 60 * 60 * 1000; // 1小时
|
||||
const API_TIMEOUT_MS = 5000;
|
||||
const USER_AGENT = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.114 Safari/537.36';
|
||||
|
||||
// HTTP状态码
|
||||
const HTTP_STATUS = [
|
||||
'OK' => 200,
|
||||
'NO_CONTENT' => 204,
|
||||
'REDIRECT' => 302,
|
||||
'NOT_MODIFIED' => 304,
|
||||
'BAD_REQUEST' => 400,
|
||||
'NOT_FOUND' => 404,
|
||||
'INTERNAL_SERVER_ERROR' => 500,
|
||||
'BAD_GATEWAY' => 502,
|
||||
];
|
||||
|
||||
// 初始化变量
|
||||
$cacheDir = __DIR__ . '/' . CACHE_DIR_NAME;
|
||||
$pathIndex = [];
|
||||
$port = DEFAULT_PORT;
|
||||
$apiEndpoint = DEFAULT_API_ENDPOINT;
|
||||
|
||||
// 访问计数器
|
||||
$viewsInfo = [
|
||||
'request' => 0,
|
||||
'cacheHit' => 0,
|
||||
'apiCall' => 0,
|
||||
'cacheCall' => 0,
|
||||
'cacheReadError' => 0,
|
||||
'fetchApiError' => 0,
|
||||
'fetchApiWarning' => 0,
|
||||
];
|
||||
|
||||
// 解析命令行参数
|
||||
function parseArguments() {
|
||||
global $port, $apiEndpoint;
|
||||
|
||||
$options = getopt('', ['port:', 'api:']);
|
||||
|
||||
if (isset($options['port'])) {
|
||||
$parsedPort = intval($options['port']);
|
||||
if ($parsedPort > 0) {
|
||||
$port = $parsedPort;
|
||||
}
|
||||
}
|
||||
|
||||
if (isset($options['api'])) {
|
||||
$apiEndpoint = $options['api'];
|
||||
}
|
||||
}
|
||||
|
||||
// 初始化应用
|
||||
function initializeApp() {
|
||||
global $cacheDir;
|
||||
|
||||
parseArguments();
|
||||
|
||||
if (!file_exists($cacheDir)) {
|
||||
try {
|
||||
mkdir($cacheDir, 0777, true);
|
||||
echo "Cache directory created: {$cacheDir}\n";
|
||||
} catch (Exception $e) {
|
||||
echo "Error creating cache directory {$cacheDir}: " . $e->getMessage() . "\n";
|
||||
exit(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// 发送错误响应
|
||||
function sendErrorResponse($res, int $statusCode, string $message) {
|
||||
if (!$res->isWritable()) {
|
||||
return;
|
||||
}
|
||||
$res->status($statusCode);
|
||||
$res->header('Content-Type', 'text/plain;charset=UTF-8');
|
||||
$res->end($message);
|
||||
}
|
||||
|
||||
// 处理favicon请求
|
||||
function handleFavicon($req, $res) {
|
||||
$res->status(HTTP_STATUS['NO_CONTENT']);
|
||||
$res->end();
|
||||
}
|
||||
|
||||
// 处理endpoint请求
|
||||
function handleEndpoint($req, $res, array $queryParams) {
|
||||
global $apiEndpoint, $port, $cacheDir, $pathIndex, $viewsInfo;
|
||||
|
||||
if (isset($queryParams['api'])) {
|
||||
$urlRegex = '/^(https?:\/\/)?([\da-z.-]+)\.([a-z.]{2,6})([\/\w.-]*)*\/?$/';
|
||||
if (preg_match($urlRegex, $queryParams['api'])) {
|
||||
$apiEndpoint = $queryParams['api'];
|
||||
echo "API endpoint updated to: {$apiEndpoint}\n";
|
||||
}
|
||||
}
|
||||
|
||||
$res->status(HTTP_STATUS['OK']);
|
||||
$res->header('Content-Type', 'application/json; charset=utf-8');
|
||||
$res->end(json_encode([
|
||||
'code' => HTTP_STATUS['OK'],
|
||||
'data' => [
|
||||
'api' => $apiEndpoint,
|
||||
'port' => $port,
|
||||
'cacheDir' => $cacheDir,
|
||||
'pathIndexCount' => count($pathIndex),
|
||||
'viewsInfo' => $viewsInfo
|
||||
]
|
||||
]));
|
||||
}
|
||||
|
||||
// 处理API重定向
|
||||
function handleApiRedirect($res, array $apiData) {
|
||||
$res->status(HTTP_STATUS['REDIRECT']);
|
||||
$res->header('Location', $apiData['data']['url']);
|
||||
$res->end();
|
||||
}
|
||||
|
||||
// 检查缓存头并返回是否为304
|
||||
function checkCacheHeaders($req, string $cacheMetaFile) {
|
||||
try {
|
||||
$metaContent = file_get_contents($cacheMetaFile);
|
||||
$cacheData = json_decode($metaContent, true);
|
||||
$ifNoneMatch = isset($req->header['if-none-match']) ? $req->header['if-none-match'] : null;
|
||||
$ifModifiedSince = isset($req->header['if-modified-since']) ? $req->header['if-modified-since'] : null;
|
||||
|
||||
// 检查ETag
|
||||
if ($ifNoneMatch && isset($cacheData['uniqid']) && $ifNoneMatch === $cacheData['uniqid']) {
|
||||
return ['cacheData' => $cacheData, 'isNotModified' => true];
|
||||
}
|
||||
|
||||
// 检查If-Modified-Since
|
||||
if ($ifModifiedSince && isset($cacheData['headers']['last-modified'])) {
|
||||
try {
|
||||
$lastModifiedDate = strtotime($cacheData['headers']['last-modified']);
|
||||
$ifModifiedSinceDate = strtotime($ifModifiedSince);
|
||||
|
||||
if ($lastModifiedDate <= $ifModifiedSinceDate) {
|
||||
return ['cacheData' => $cacheData, 'isNotModified' => true];
|
||||
}
|
||||
} catch (Exception $e) {
|
||||
echo "Error parsing date for cache header check ({$cacheMetaFile}): " . $e->getMessage() . "\n";
|
||||
}
|
||||
}
|
||||
|
||||
return ['cacheData' => $cacheData, 'isNotModified' => false];
|
||||
} catch (Exception $e) {
|
||||
echo "Error reading or parsing cache meta file {$cacheMetaFile} in checkCacheHeaders: " . $e->getMessage() . "\n";
|
||||
return ['cacheData' => null, 'isNotModified' => false];
|
||||
}
|
||||
}
|
||||
|
||||
// 检查缓存是否有效
|
||||
function isCacheValid(string $cacheMetaFile, string $cacheContentFile) {
|
||||
if (!file_exists($cacheMetaFile) || !file_exists($cacheContentFile)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
try {
|
||||
$metaContent = file_get_contents($cacheMetaFile);
|
||||
$cacheData = json_decode($metaContent, true);
|
||||
|
||||
return isset($cacheData['expiration']) && is_numeric($cacheData['expiration']) && $cacheData['expiration'] > time() * 1000;
|
||||
} catch (Exception $e) {
|
||||
echo "Error reading or parsing cache meta file {$cacheMetaFile} for validation: " . $e->getMessage() . "\n";
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// 从API获取数据
|
||||
function fetchApiData(string $reqPath, string $token, string $sign) {
|
||||
global $apiEndpoint;
|
||||
|
||||
$queryParams = http_build_query([
|
||||
'type' => $reqPath,
|
||||
'sign' => $sign
|
||||
]);
|
||||
|
||||
$apiUrl = "{$apiEndpoint}?{$queryParams}";
|
||||
$parsedApiUrl = parse_url($apiUrl);
|
||||
|
||||
$client = new Client($parsedApiUrl['host'], $parsedApiUrl['port'] ?? ($parsedApiUrl['scheme'] === 'https' ? 443 : 80), $parsedApiUrl['scheme'] === 'https');
|
||||
$client->setHeaders([
|
||||
'Accept' => 'application/json; charset=utf-8',
|
||||
'User-Agent' => USER_AGENT,
|
||||
'token' => $token
|
||||
]);
|
||||
$client->set(['timeout' => API_TIMEOUT_MS / 1000]);
|
||||
|
||||
$path = isset($parsedApiUrl['path']) ? $parsedApiUrl['path'] : '/';
|
||||
if (isset($parsedApiUrl['query'])) {
|
||||
$path .= '?' . $parsedApiUrl['query'];
|
||||
}
|
||||
|
||||
$client->get($path);
|
||||
|
||||
if ($client->statusCode >= 400) {
|
||||
echo "API request to {$apiUrl} failed with status {$client->statusCode}: {$client->body}\n";
|
||||
$errorPayload = ['code' => $client->statusCode, 'message' => "API Error: {$client->statusCode}"];
|
||||
|
||||
try {
|
||||
$parsedError = json_decode($client->body, true);
|
||||
if ($parsedError && isset($parsedError['message'])) {
|
||||
$errorPayload['message'] = $parsedError['message'];
|
||||
}
|
||||
} catch (Exception $e) {
|
||||
// Ignore if response is not JSON
|
||||
}
|
||||
|
||||
return $errorPayload;
|
||||
}
|
||||
|
||||
try {
|
||||
return json_decode($client->body, true);
|
||||
} catch (Exception $e) {
|
||||
echo "Error parsing JSON response from {$apiUrl}: " . $e->getMessage() . ", {$client->body}\n";
|
||||
throw new Exception("Failed to parse API response: " . $e->getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
// 从缓存中读取数据并返回
|
||||
function serveFromCache(array $cacheData, string $cacheContentFile, string $cacheMetaFile, $res) {
|
||||
global $viewsInfo;
|
||||
|
||||
if (!$cacheData) {
|
||||
echo "serveFromCache called with null cacheData for {$cacheContentFile}\n";
|
||||
sendErrorResponse($res, HTTP_STATUS['INTERNAL_SERVER_ERROR'], 'Cache metadata unavailable.');
|
||||
return;
|
||||
}
|
||||
|
||||
$viewsInfo['cacheCall']++;
|
||||
|
||||
try {
|
||||
$fileContent = file_get_contents($cacheContentFile);
|
||||
|
||||
if ($fileContent === false) {
|
||||
throw new Exception("Failed to read cache file");
|
||||
}
|
||||
|
||||
$baseHeaders = [
|
||||
'Cloud-Type' => $cacheData['cloudtype'] ?? 'unknown',
|
||||
'Cloud-Expiration' => $cacheData['expiration'] ?? 0,
|
||||
'ETag' => $cacheData['uniqid'] ?? '',
|
||||
'Cache-Control' => 'public, max-age=31536000',
|
||||
'Expires' => gmdate('D, d M Y H:i:s', time() + 31536000) . ' GMT',
|
||||
'Accept-Ranges' => 'bytes',
|
||||
'Connection' => 'keep-alive',
|
||||
'Date' => gmdate('D, d M Y H:i:s') . ' GMT',
|
||||
'Last-Modified' => isset($cacheData['headers']['last-modified']) ? $cacheData['headers']['last-modified'] : gmdate('D, d M Y H:i:s', filemtime($cacheMetaFile)) . ' GMT',
|
||||
];
|
||||
|
||||
$isVideo = isset($cacheData['path']) && is_string($cacheData['path']) && strpos($cacheData['path'], '.mp4') !== false;
|
||||
$contentType = isset($cacheData['headers']['Content-Type']) ? $cacheData['headers']['Content-Type'] : ($isVideo ? 'video/mp4' : 'application/octet-stream');
|
||||
|
||||
$responseHeaders = array_merge($baseHeaders, [
|
||||
'Content-Type' => $contentType,
|
||||
]);
|
||||
|
||||
foreach ($responseHeaders as $key => $value) {
|
||||
$res->header($key, $value);
|
||||
}
|
||||
|
||||
$res->status(HTTP_STATUS['OK']);
|
||||
$res->end($fileContent);
|
||||
} catch (Exception $e) {
|
||||
$viewsInfo['cacheReadError']++;
|
||||
echo "Error reading from cache {$cacheContentFile}: " . $e->getMessage() . "\n";
|
||||
sendErrorResponse($res, HTTP_STATUS['INTERNAL_SERVER_ERROR'], "Failed to read from cache: " . $e->getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
// 从真实URL获取数据并写入缓存
|
||||
function fetchAndServe(array $data, string $tempCacheContentFile, string $cacheContentFile, string $cacheMetaFile, $res) {
|
||||
global $viewsInfo;
|
||||
|
||||
$parsedUrl = parse_url($data['realUrl']);
|
||||
$client = new Client($parsedUrl['host'], $parsedUrl['port'] ?? ($parsedUrl['scheme'] === 'https' ? 443 : 80), $parsedUrl['scheme'] === 'https');
|
||||
$client->setHeaders([
|
||||
'User-Agent' => USER_AGENT
|
||||
]);
|
||||
|
||||
$path = isset($parsedUrl['path']) ? $parsedUrl['path'] : '/';
|
||||
if (isset($parsedUrl['query'])) {
|
||||
$path .= '?' . $parsedUrl['query'];
|
||||
}
|
||||
|
||||
$client->get($path);
|
||||
|
||||
if ($client->statusCode !== 200) {
|
||||
echo "Error fetching from {$data['realUrl']}: HTTP status {$client->statusCode}\n";
|
||||
sendErrorResponse($res, HTTP_STATUS['BAD_GATEWAY'], "Bad Gateway: Failed to fetch content from source");
|
||||
return;
|
||||
}
|
||||
|
||||
$isVideo = isset($data['path']) && is_string($data['path']) && strpos($data['path'], '.mp4') !== false;
|
||||
|
||||
// 检查content-length
|
||||
$contentLength = isset($client->headers['content-length']) ? $client->headers['content-length'] : null;
|
||||
if ($contentLength) {
|
||||
// contentLength小于2KB且与缓存文件大小不一致时,重新获取
|
||||
if ($contentLength < 2048 && isset($data['headers']['content-length']) && $data['headers']['content-length'] !== $contentLength) {
|
||||
echo "Warning: content-length is different for the response from: {$data['realUrl']}\n";
|
||||
sendErrorResponse($res, HTTP_STATUS['BAD_GATEWAY'], "Bad Gateway: Content-Length mismatch for {$data['realUrl']}");
|
||||
return;
|
||||
}
|
||||
// 更新data到缓存cacheMetaFile
|
||||
file_put_contents($cacheMetaFile, json_encode($data));
|
||||
} else {
|
||||
echo "Warning: content-length is undefined for the response from: {$data['realUrl']}\n";
|
||||
}
|
||||
|
||||
// 写入临时缓存文件
|
||||
file_put_contents($tempCacheContentFile, $client->body);
|
||||
|
||||
// 重命名临时文件为正式缓存文件
|
||||
try {
|
||||
$targetDir = dirname($cacheContentFile);
|
||||
if (!file_exists($targetDir)) {
|
||||
mkdir($targetDir, 0777, true);
|
||||
}
|
||||
rename($tempCacheContentFile, $cacheContentFile);
|
||||
echo "Successfully cached: {$cacheContentFile}\n";
|
||||
} catch (Exception $e) {
|
||||
echo "Error renaming temp cache file {$tempCacheContentFile} to {$cacheContentFile}: " . $e->getMessage() . "\n";
|
||||
// 如果重命名失败,尝试删除临时文件以避免混乱
|
||||
if (file_exists($tempCacheContentFile)) {
|
||||
unlink($tempCacheContentFile);
|
||||
}
|
||||
}
|
||||
|
||||
$baseHeaders = [
|
||||
'Cloud-Type' => $data['cloudtype'] ?? 'unknown',
|
||||
'Cloud-Expiration' => $data['expiration'] ?? 0,
|
||||
'ETag' => $data['uniqid'] ?? '',
|
||||
'Cache-Control' => 'public, max-age=31536000',
|
||||
'Expires' => gmdate('D, d M Y H:i:s', time() + 31536000) . ' GMT',
|
||||
'Accept-Ranges' => 'bytes',
|
||||
'Connection' => 'keep-alive',
|
||||
'Date' => gmdate('D, d M Y H:i:s') . ' GMT',
|
||||
'Last-Modified' => isset($data['headers']['last-modified']) ?
|
||||
$data['headers']['last-modified'] :
|
||||
gmdate('D, d M Y H:i:s', filemtime($cacheMetaFile)) . ' GMT',
|
||||
];
|
||||
|
||||
$responseHeaders = array_merge($baseHeaders, [
|
||||
'Content-Type' => isset($client->headers['content-type']) ? $client->headers['content-type'] : ($isVideo ? 'video/mp4' : 'application/octet-stream'),
|
||||
], $data['headers']);
|
||||
|
||||
foreach ($responseHeaders as $key => $value) {
|
||||
$res->header($key, $value);
|
||||
}
|
||||
|
||||
$res->status(HTTP_STATUS['OK']);
|
||||
$res->end($client->body);
|
||||
}
|
||||
|
||||
// 尝试从过期缓存提供服务或返回错误
|
||||
function tryServeFromStaleCacheOrError(string $uniqidhex, $res, string $errorMessage = null) {
|
||||
global $pathIndex, $cacheDir;
|
||||
|
||||
if (isset($pathIndex[$uniqidhex])) {
|
||||
$cacheMetaFile = $cacheDir . '/' . $uniqidhex . '.meta';
|
||||
$cacheContentFile = $cacheDir . '/' . $pathIndex[$uniqidhex]['uniqid'] . '.content';
|
||||
|
||||
if (file_exists($cacheMetaFile) && file_exists($cacheContentFile)) {
|
||||
echo "API call failed or returned non-200. Serving stale cache for {$uniqidhex}\n";
|
||||
|
||||
try {
|
||||
$cacheData = json_decode(file_get_contents($cacheMetaFile), true);
|
||||
serveFromCache($cacheData, $cacheContentFile, $cacheMetaFile, $res);
|
||||
return;
|
||||
} catch (Exception $e) {
|
||||
echo "Error parsing stale meta file {$cacheMetaFile}: " . $e->getMessage() . "\n";
|
||||
// 如果过期缓存也损坏,则返回通用错误
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
sendErrorResponse($res, HTTP_STATUS['BAD_GATEWAY'], $errorMessage ?: 'Bad Gateway');
|
||||
}
|
||||
|
||||
// 处理主请求
|
||||
function handleMainRequest($req, $res) {
|
||||
global $pathIndex, $cacheDir, $viewsInfo;
|
||||
|
||||
$url = preg_replace('/\/{2,}/', '/', $req->server['request_uri']);
|
||||
$parsedUrl = parse_url($url);
|
||||
$queryParams = [];
|
||||
|
||||
if (isset($parsedUrl['query'])) {
|
||||
parse_str($parsedUrl['query'], $queryParams);
|
||||
}
|
||||
|
||||
$sign = $queryParams['sign'] ?? '';
|
||||
$pathParts = explode('/', trim($parsedUrl['path'], '/'));
|
||||
$reqPath = $pathParts[0] ?? '';
|
||||
$token = implode('/', array_slice($pathParts, 1));
|
||||
|
||||
if ($reqPath === 'favicon.ico') {
|
||||
return handleFavicon($req, $res);
|
||||
}
|
||||
|
||||
if ($reqPath === 'endpoint') {
|
||||
return handleEndpoint($req, $res, $queryParams);
|
||||
}
|
||||
|
||||
if (!$token && $reqPath) {
|
||||
$token = $reqPath;
|
||||
$reqPath = 'app'; // 默认为'app',如果只提供了一个路径段
|
||||
}
|
||||
|
||||
$allowedPaths = ['avatar', 'go', 'bbs', 'www', 'url', 'thumb', 'app'];
|
||||
if (!in_array($reqPath, $allowedPaths) || !$token) {
|
||||
return sendErrorResponse($res, HTTP_STATUS['BAD_REQUEST'], "Bad Request: Invalid path or missing token.");
|
||||
}
|
||||
|
||||
$viewsInfo['request']++;
|
||||
$uniqidhex = md5($reqPath . $token . $sign);
|
||||
$cacheMetaFile = '';
|
||||
$cacheContentFile = '';
|
||||
|
||||
if (isset($pathIndex[$uniqidhex])) {
|
||||
$cacheMetaFile = $cacheDir . '/' . $uniqidhex . '.meta';
|
||||
$cacheContentFile = $cacheDir . '/' . $pathIndex[$uniqidhex]['uniqid'] . '.content';
|
||||
}
|
||||
|
||||
if (isset($pathIndex[$uniqidhex]) && isCacheValid($cacheMetaFile, $cacheContentFile)) {
|
||||
$cacheResult = checkCacheHeaders($req, $cacheMetaFile);
|
||||
|
||||
if ($cacheResult['isNotModified']) {
|
||||
$res->status(HTTP_STATUS['NOT_MODIFIED']);
|
||||
$res->end();
|
||||
} else {
|
||||
$viewsInfo['cacheHit']++;
|
||||
serveFromCache($cacheResult['cacheData'], $cacheContentFile, $cacheMetaFile, $res);
|
||||
}
|
||||
} else {
|
||||
try {
|
||||
$viewsInfo['apiCall']++;
|
||||
$apiData = fetchApiData($reqPath, $token, $sign);
|
||||
|
||||
if (isset($apiData['code']) && ($apiData['code'] === HTTP_STATUS['REDIRECT'] || $apiData['code'] === 301)) {
|
||||
return handleApiRedirect($res, $apiData);
|
||||
}
|
||||
|
||||
if (isset($apiData['code']) && $apiData['code'] === HTTP_STATUS['OK'] && isset($apiData['data']) && isset($apiData['data']['url'])) {
|
||||
$data = [
|
||||
'realUrl' => $apiData['data']['url'],
|
||||
'cloudtype' => $apiData['data']['cloudtype'] ?? '',
|
||||
'expiration' => isset($apiData['data']['expiration']) ? $apiData['data']['expiration'] * 1000 : 0,
|
||||
'path' => $apiData['data']['path'] ?? '',
|
||||
'headers' => $apiData['data']['headers'] ?? [],
|
||||
'uniqid' => $apiData['data']['uniqid'] ?? '',
|
||||
'thumb' => $apiData['data']['thumb'] ?? ''
|
||||
];
|
||||
|
||||
$pathIndex[$uniqidhex] = ['uniqid' => $data['uniqid'], 'timestamp' => time() * 1000];
|
||||
$cacheMetaFile = $cacheDir . '/' . $uniqidhex . '.meta';
|
||||
$cacheContentFile = $cacheDir . '/' . $data['uniqid'] . '.content';
|
||||
$tempCacheContentFile = $cacheDir . '/' . $data['uniqid'] . '_' . bin2hex(random_bytes(16)) . '.temp';
|
||||
|
||||
try {
|
||||
file_put_contents($cacheMetaFile, json_encode($data));
|
||||
} catch (Exception $e) {
|
||||
echo "Error writing meta file {$cacheMetaFile}: " . $e->getMessage() . "\n";
|
||||
sendErrorResponse($res, HTTP_STATUS['INTERNAL_SERVER_ERROR'], 'Failed to write cache metadata.');
|
||||
return;
|
||||
}
|
||||
|
||||
if (file_exists($cacheContentFile)) {
|
||||
$contentLength = filesize($cacheContentFile);
|
||||
if ($contentLength < 2048 && isset($data['headers']['content-length']) && intval($data['headers']['content-length']) !== $contentLength) {
|
||||
echo "Content length mismatch for {$cacheContentFile}. API: {$data['headers']['content-length']}, Cache: {$contentLength}. Re-fetching.\n";
|
||||
fetchAndServe($data, $tempCacheContentFile, $cacheContentFile, $cacheMetaFile, $res);
|
||||
} else {
|
||||
serveFromCache($data, $cacheContentFile, $cacheMetaFile, $res);
|
||||
}
|
||||
} else {
|
||||
fetchAndServe($data, $tempCacheContentFile, $cacheContentFile, $cacheMetaFile, $res);
|
||||
}
|
||||
} else {
|
||||
$viewsInfo['fetchApiWarning']++;
|
||||
tryServeFromStaleCacheOrError($uniqidhex, $res, $apiData['message'] ?? null);
|
||||
}
|
||||
} catch (Exception $e) {
|
||||
$viewsInfo['fetchApiError']++;
|
||||
echo "Error in API call or processing: " . $e->getMessage() . "\n";
|
||||
tryServeFromStaleCacheOrError($uniqidhex, $res, "Bad Gateway: API request failed. " . $e->getMessage());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// 定时清理过期缓存数据
|
||||
function cleanupExpiredCache() {
|
||||
global $pathIndex;
|
||||
|
||||
$currentTime = time() * 1000;
|
||||
foreach ($pathIndex as $key => $value) {
|
||||
if ($currentTime - $value['timestamp'] > CACHE_EXPIRY_MS) {
|
||||
unset($pathIndex[$key]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// 主函数
|
||||
function main() {
|
||||
global $port;
|
||||
|
||||
initializeApp();
|
||||
|
||||
// 创建服务器
|
||||
$server = new Server('0.0.0.0', $port);
|
||||
|
||||
echo "Server started at http://0.0.0.0:{$port}\n";
|
||||
|
||||
// 设置定时器清理过期缓存
|
||||
Swoole\Timer::tick(CACHE_CLEANUP_INTERVAL_MS, function () {
|
||||
cleanupExpiredCache();
|
||||
});
|
||||
|
||||
// 处理请求
|
||||
$server->handle('/', function ($req, $res) {
|
||||
handleMainRequest($req, $res);
|
||||
});
|
||||
|
||||
$server->start();
|
||||
}
|
||||
|
||||
// 启动服务器
|
||||
run(function () {
|
||||
main();
|
||||
});
|
||||
@@ -8,7 +8,7 @@ const EventEmitter = require('events');
|
||||
|
||||
// Configuration
|
||||
const PORT = 9520;
|
||||
const API_BASE = 'http://183.6.121.121:9519/api';
|
||||
const API_BASE = 'http://127.0.0.1:9558/api';
|
||||
const CACHE_DIR = path.join(__dirname, '.cache');
|
||||
|
||||
// Ensure cache directory exists
|
||||
@@ -170,7 +170,8 @@ function downloadAndServe(req, res, apiData, cachePaths, key) {
|
||||
}
|
||||
|
||||
// Save meta
|
||||
fs.writeFileSync(cachePaths.meta, JSON.stringify(apiData));
|
||||
const metaPath = cachePaths.meta.replace('.meta', '.thumb.meta');
|
||||
fs.writeFileSync(metaPath, JSON.stringify(apiData));
|
||||
|
||||
const totalSize = parseInt(proxyRes.headers['content-length'] || '0', 10);
|
||||
task.totalSize = totalSize;
|
||||
363
new/fastify.js
@@ -1,363 +0,0 @@
|
||||
const fastify = require('fastify')({
|
||||
logger: false, // 关闭默认日志,极大提升吞吐量
|
||||
disableRequestLogging: true, // 关闭请求日志
|
||||
connectionTimeout: 30000, // 快速释放死连接
|
||||
keepAliveTimeout: 5000, // 调整 Keep-Alive
|
||||
});
|
||||
const { request } = require('undici'); // High-performance HTTP client
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const crypto = require('crypto');
|
||||
const EventEmitter = require('events');
|
||||
|
||||
// Configuration
|
||||
const PORT = 9520;
|
||||
const API_BASE = 'http://183.6.121.121:9519/api';
|
||||
const CACHE_DIR = path.join(__dirname, '.cache');
|
||||
|
||||
// Ensure cache directory exists
|
||||
if (!fs.existsSync(CACHE_DIR)) {
|
||||
fs.mkdirSync(CACHE_DIR, { recursive: true });
|
||||
}
|
||||
|
||||
// Active downloads manager
|
||||
const activeDownloads = new Map();
|
||||
|
||||
// Helper to fetch JSON from API using Undici (Faster than http.get)
|
||||
async function fetchApi(token) {
|
||||
const apiUrl = new URL(API_BASE);
|
||||
if (token) {
|
||||
apiUrl.searchParams.set('token', token);
|
||||
}
|
||||
|
||||
const { statusCode, body } = await request(apiUrl, {
|
||||
method: 'GET',
|
||||
headers: { 'Connection': 'keep-alive' },
|
||||
bodyTimeout: 5000,
|
||||
headersTimeout: 5000
|
||||
});
|
||||
|
||||
if (statusCode !== 200) {
|
||||
throw new Error(`API Status Code: ${statusCode}`);
|
||||
}
|
||||
|
||||
const data = await body.json();
|
||||
return data;
|
||||
}
|
||||
|
||||
function getCacheKey(apiData) {
|
||||
if (apiData.data && apiData.data.uniqid) {
|
||||
return apiData.data.uniqid;
|
||||
}
|
||||
if (apiData.data && apiData.data.url) {
|
||||
return crypto.createHash('md5').update(apiData.data.url).digest('hex');
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
function getCachePaths(key) {
|
||||
const subDir = key.substring(0, 1);
|
||||
const dir = path.join(CACHE_DIR, subDir);
|
||||
if (!fs.existsSync(dir)) {
|
||||
fs.mkdirSync(dir, { recursive: true });
|
||||
}
|
||||
return {
|
||||
content: path.join(dir, `${key}.data`),
|
||||
meta: path.join(dir, `${key}.meta`)
|
||||
};
|
||||
}
|
||||
|
||||
// Serve existing file
|
||||
function serveCompletedCache(reply, cachePaths, apiData) {
|
||||
const { content } = cachePaths;
|
||||
const responseHeaders = { ...apiData.data.headers };
|
||||
|
||||
if (!responseHeaders['Access-Control-Allow-Origin']) {
|
||||
responseHeaders['Access-Control-Allow-Origin'] = '*';
|
||||
}
|
||||
|
||||
// Fastify handles Range requests automatically if we send the stream?
|
||||
// Actually, for full control over custom headers + Range, we often need manual handling or plugins.
|
||||
// But serving a raw stream in Fastify usually just pipes it.
|
||||
// For "High Performance", letting nginx handle static files is best, but here we do it in Node.
|
||||
// We will stick to the manual Range logic for consistency with previous "growing file" support.
|
||||
|
||||
// To support Range properly with Fastify + Stream, we can set headers and return stream.
|
||||
// But for "growing" files, we need our custom pump logic.
|
||||
// For completed files, we can use fs.createReadStream.
|
||||
|
||||
const range = reply.request.headers.range;
|
||||
const stat = fs.statSync(content); // Sync is okay for startup/metadata, but Async preferred in high-perf.
|
||||
// In strict high-perf, use fs.promises.stat or cache stats.
|
||||
|
||||
const totalSize = stat.size;
|
||||
|
||||
if (range) {
|
||||
const parts = range.replace(/bytes=/, "").split("-");
|
||||
const start = parseInt(parts[0], 10);
|
||||
const end = parts[1] ? parseInt(parts[1], 10) : totalSize - 1;
|
||||
|
||||
responseHeaders['Content-Range'] = `bytes ${start}-${end}/${totalSize}`;
|
||||
responseHeaders['Accept-Ranges'] = 'bytes';
|
||||
responseHeaders['Content-Length'] = (end - start) + 1;
|
||||
|
||||
reply.code(206).headers(responseHeaders);
|
||||
return fs.createReadStream(content, { start, end });
|
||||
} else {
|
||||
responseHeaders['Content-Length'] = totalSize;
|
||||
responseHeaders['Accept-Ranges'] = 'bytes';
|
||||
reply.code(200).headers(responseHeaders);
|
||||
return fs.createReadStream(content);
|
||||
}
|
||||
}
|
||||
|
||||
// Download and Serve logic
|
||||
function downloadAndServe(reply, apiData, cachePaths, key) {
|
||||
let task = activeDownloads.get(key);
|
||||
|
||||
if (!task) {
|
||||
task = {
|
||||
emitter: new EventEmitter(),
|
||||
currentSize: 0,
|
||||
totalSize: 0,
|
||||
path: cachePaths.content,
|
||||
done: false,
|
||||
error: null
|
||||
};
|
||||
task.emitter.setMaxListeners(0);
|
||||
activeDownloads.set(key, task);
|
||||
|
||||
// Start Download
|
||||
const targetUrl = apiData.data.url;
|
||||
|
||||
// Use Undici stream for high performance download
|
||||
// stream() is efficient for piping
|
||||
const { stream } = require('undici');
|
||||
|
||||
stream(targetUrl, { method: 'GET', opaque: task }, ({ statusCode, headers }) => {
|
||||
if (statusCode !== 200) {
|
||||
// handle error
|
||||
const err = new Error(`Upstream ${statusCode}`);
|
||||
task.error = err;
|
||||
task.emitter.emit('error', err);
|
||||
activeDownloads.delete(key);
|
||||
// We need to return a Writable to undici
|
||||
// return new Writable(...) or simple dummy
|
||||
return fs.createWriteStream('/dev/null');
|
||||
}
|
||||
|
||||
// Save meta
|
||||
fs.writeFileSync(cachePaths.meta, JSON.stringify(apiData));
|
||||
|
||||
task.totalSize = parseInt(headers['content-length'] || '0', 10);
|
||||
|
||||
// Return the write stream to file
|
||||
const fileStream = fs.createWriteStream(task.path);
|
||||
|
||||
// Monitor writing
|
||||
// We need to intercept the stream to update currentSize
|
||||
// PassThrough stream adds overhead.
|
||||
// Better to wrap the write?
|
||||
// Or just fs.watchFile? (Slow).
|
||||
// Let's use a custom Writable wrapper or event listener on fileStream 'drain'/'finish' isn't granular enough.
|
||||
// Undici stream factory returns a Writable.
|
||||
|
||||
// Let's stick to a simple approach:
|
||||
// We can't easily hook into fs.WriteStream bytesWritten in real-time without polling or proxying.
|
||||
// Proxying:
|
||||
const originalWrite = fileStream.write.bind(fileStream);
|
||||
fileStream.write = (chunk, encoding, cb) => {
|
||||
const ret = originalWrite(chunk, encoding, cb);
|
||||
task.currentSize += chunk.length;
|
||||
task.emitter.emit('progress', task.currentSize);
|
||||
return ret;
|
||||
};
|
||||
|
||||
return fileStream;
|
||||
|
||||
}, ({ opaque }) => {
|
||||
// Finished
|
||||
opaque.done = true;
|
||||
opaque.emitter.emit('done');
|
||||
activeDownloads.delete(key);
|
||||
}, (err, { opaque }) => {
|
||||
// Error
|
||||
if (err) {
|
||||
opaque.error = err;
|
||||
opaque.emitter.emit('error', err);
|
||||
activeDownloads.delete(key);
|
||||
fs.unlink(opaque.path, () => { });
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Serve growing file
|
||||
return serveGrowingFile(reply, task, apiData);
|
||||
}
|
||||
|
||||
function serveGrowingFile(reply, task, apiData) {
|
||||
const responseHeaders = { ...apiData.data.headers };
|
||||
if (!responseHeaders['Access-Control-Allow-Origin']) responseHeaders['Access-Control-Allow-Origin'] = '*';
|
||||
|
||||
const range = reply.request.headers.range;
|
||||
let start = 0;
|
||||
|
||||
if (range) {
|
||||
const parts = range.replace(/bytes=/, "").split("-");
|
||||
start = parseInt(parts[0], 10) || 0;
|
||||
responseHeaders['Accept-Ranges'] = 'bytes';
|
||||
if (task.totalSize) {
|
||||
responseHeaders['Content-Range'] = `bytes ${start}-${task.totalSize - 1}/${task.totalSize}`;
|
||||
responseHeaders['Content-Length'] = task.totalSize - start;
|
||||
}
|
||||
reply.code(206);
|
||||
} else {
|
||||
if (task.totalSize) responseHeaders['Content-Length'] = task.totalSize;
|
||||
reply.code(200);
|
||||
}
|
||||
|
||||
reply.headers(responseHeaders);
|
||||
|
||||
// Custom stream to pump data from file to response
|
||||
const { Readable } = require('stream');
|
||||
|
||||
return new Readable({
|
||||
read(size) {
|
||||
const self = this;
|
||||
let bytesSent = start; // State needs to be per-stream instance.
|
||||
// Wait, 'read' is called multiple times. We need to store state outside or on 'this'.
|
||||
if (this._bytesSent === undefined) this._bytesSent = start;
|
||||
|
||||
pump(this);
|
||||
|
||||
function pump(stream) {
|
||||
if (stream.destroyed) return;
|
||||
|
||||
if (task.error) {
|
||||
stream.destroy(task.error);
|
||||
return;
|
||||
}
|
||||
|
||||
// Open FD if needed
|
||||
if (!stream._fd) {
|
||||
fs.open(task.path, 'r', (err, fd) => {
|
||||
if (err) {
|
||||
if (err.code === 'ENOENT' && !task.done) {
|
||||
setTimeout(() => pump(stream), 100);
|
||||
} else {
|
||||
stream.destroy(err);
|
||||
}
|
||||
return;
|
||||
}
|
||||
stream._fd = fd;
|
||||
pump(stream);
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const available = task.currentSize - stream._bytesSent;
|
||||
|
||||
if (available > 0) {
|
||||
const buffer = Buffer.alloc(Math.min(available, 64 * 1024));
|
||||
fs.read(stream._fd, buffer, 0, buffer.length, stream._bytesSent, (err, bytesRead) => {
|
||||
if (err) {
|
||||
stream.destroy(err);
|
||||
return;
|
||||
}
|
||||
if (bytesRead > 0) {
|
||||
stream._bytesSent += bytesRead;
|
||||
const keepPushing = stream.push(buffer.slice(0, bytesRead));
|
||||
// If push returns false, we should stop and wait for _read again?
|
||||
// Actually Node streams: if push returns true, we can push more.
|
||||
// But here we just push what we have and wait for next _read call or event?
|
||||
// Standard implementation: push until it returns false.
|
||||
// But for "live" tailing, we might want to just push what we have and exit,
|
||||
// expecting _read to be called again by consumer.
|
||||
} else {
|
||||
wait(stream);
|
||||
}
|
||||
});
|
||||
} else {
|
||||
if (task.done) {
|
||||
fs.close(stream._fd, () => { });
|
||||
stream.push(null); // EOF
|
||||
} else {
|
||||
wait(stream);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function wait(stream) {
|
||||
// Wait for progress
|
||||
const onProgress = () => {
|
||||
pump(stream);
|
||||
};
|
||||
task.emitter.once('progress', onProgress);
|
||||
task.emitter.once('done', onProgress); // Check done state
|
||||
|
||||
// If stream destroyed, remove listeners?
|
||||
// Readable.read is active, so stream is active.
|
||||
}
|
||||
},
|
||||
destroy(err, cb) {
|
||||
if (this._fd) fs.close(this._fd, () => { });
|
||||
cb(err);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Routes
|
||||
fastify.get('/favicon.ico', async (request, reply) => {
|
||||
reply.code(204);
|
||||
return '';
|
||||
});
|
||||
|
||||
fastify.get('/*', async (request, reply) => {
|
||||
const token = request.url.substring(1);
|
||||
if (!token) {
|
||||
reply.code(400);
|
||||
return 'Missing token';
|
||||
}
|
||||
|
||||
try {
|
||||
// 1. Fetch API
|
||||
// Note: fetchApi is async, so we await
|
||||
const apiData = await fetchApi(token);
|
||||
|
||||
if (apiData.code !== 200 || !apiData.data || !apiData.data.url) {
|
||||
reply.code(404);
|
||||
return 'Invalid API response';
|
||||
}
|
||||
|
||||
const key = getCacheKey(apiData);
|
||||
if (!key) {
|
||||
reply.code(500);
|
||||
return 'Key Error';
|
||||
}
|
||||
|
||||
const cachePaths = getCachePaths(key);
|
||||
|
||||
// 2. Serve
|
||||
if (activeDownloads.has(key)) {
|
||||
return downloadAndServe(reply, apiData, cachePaths, key);
|
||||
} else if (fs.existsSync(cachePaths.content) && fs.existsSync(cachePaths.meta)) {
|
||||
return serveCompletedCache(reply, cachePaths, apiData);
|
||||
} else {
|
||||
return downloadAndServe(reply, apiData, cachePaths, key);
|
||||
}
|
||||
|
||||
} catch (err) {
|
||||
request.log.error(err);
|
||||
reply.code(502);
|
||||
return 'Gateway Error: ' + err.message;
|
||||
}
|
||||
});
|
||||
|
||||
// Run
|
||||
fastify.listen({ port: PORT, host: '0.0.0.0' }, (err, address) => {
|
||||
if (err) {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
}
|
||||
console.log(`Fastify Server running at ${address}`);
|
||||
});
|
||||
21
obfuscate.js
@@ -1,21 +0,0 @@
|
||||
const fs = require('fs');
|
||||
const JavaScriptObfuscator = require('javascript-obfuscator');
|
||||
|
||||
const inputFilePath = './source.js'; // 替换为你的文件名
|
||||
const outputFilePath = './index.js';
|
||||
|
||||
const code = fs.readFileSync(inputFilePath, 'utf8');
|
||||
|
||||
const obfuscatedCode = JavaScriptObfuscator.obfuscate(code, {
|
||||
compact: true, // 压缩代码
|
||||
controlFlowFlattening: true, // 控制流混淆
|
||||
deadCodeInjection: true, // 死代码注入
|
||||
numbersToExpressions: true, // 将数字转换为表达式
|
||||
renameGlobals: true, // 重命名全局变量
|
||||
simplify: true, // 简化代码
|
||||
splitStrings: true, // 将字符串拆分为多个字符串
|
||||
stringArray: true, // 将字符串转换为数组
|
||||
}).getObfuscatedCode();
|
||||
|
||||
fs.writeFileSync(outputFilePath, obfuscatedCode);
|
||||
console.log('Code has been obfuscated and saved to', outputFilePath);
|
||||
@@ -2,7 +2,9 @@
|
||||
"dependencies": {
|
||||
"fastify": "^5.6.2",
|
||||
"javascript-obfuscator": "^4.1.1",
|
||||
"undici": "^7.16.0"
|
||||
"sharp": "^0.34.5",
|
||||
"undici": "^7.16.0",
|
||||
"fluent-ffmpeg": "^2.1.3"
|
||||
},
|
||||
"packageManager": "pnpm@10.27.0+sha512.72d699da16b1179c14ba9e64dc71c9a40988cbdc65c264cb0e489db7de917f20dcf4d64d8723625f2969ba52d4b7e2a1170682d9ac2a5dcaeaab732b7e16f04a"
|
||||
}
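The viewer above drops the diff's +/- markers; read together with the commit message (javascript-obfuscator removed, sharp and fluent-ffmpeg added), the dependencies block after this commit presumably ends up as:

"dependencies": {
    "fastify": "^5.6.2",
    "sharp": "^0.34.5",
    "undici": "^7.16.0",
    "fluent-ffmpeg": "^2.1.3"
},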
|
||||
|
||||
805
source.js
@@ -1,805 +0,0 @@
|
||||
const http = require('http');
|
||||
const https = require('https');
|
||||
const url = require('url');
|
||||
const querystring = require('querystring');
|
||||
const fs = require('fs');
|
||||
const pathModule = require('path');
|
||||
const crypto = require('crypto');
|
||||
|
||||
const CACHE_DIR_NAME = '.cache';
|
||||
const DEFAULT_PORT = 9001;
|
||||
const DEFAULT_API_ENDPOINT = 'http://183.6.121.121:9519/api';
|
||||
|
||||
const cacheDir = pathModule.join(__dirname, CACHE_DIR_NAME);
|
||||
const pathIndex = {};
|
||||
|
||||
// 访问计数器
|
||||
const viewsInfo = {
|
||||
request: 0,
|
||||
cacheHit: 0,
|
||||
apiCall: 0,
|
||||
cacheCall: 0,
|
||||
cacheReadError: 0,
|
||||
fetchApiError: 0,
|
||||
fetchApiWarning: 0,
|
||||
increment: function (key) {
|
||||
if (this.hasOwnProperty(key)) {
|
||||
this[key]++;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let port = DEFAULT_PORT;
|
||||
let apiEndpoint = DEFAULT_API_ENDPOINT;
|
||||
|
||||
// 解析命令行参数函数
|
||||
function parseArguments() {
|
||||
const args = process.argv.slice(2);
|
||||
args.forEach(arg => {
|
||||
const cleanArg = arg.startsWith('--') ? arg.substring(2) : arg;
|
||||
const [key, value] = cleanArg.split('=');
|
||||
if (key === 'port' && value) {
|
||||
const parsedPort = parseInt(value, 10);
|
||||
if (!isNaN(parsedPort)) {
|
||||
port = parsedPort;
|
||||
}
|
||||
} else if (key === 'api' && value) {
|
||||
apiEndpoint = value;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// 初始化函数,包含参数解析和目录创建
|
||||
function initializeApp() {
|
||||
parseArguments();
|
||||
if (!fs.existsSync(cacheDir)) {
|
||||
try {
|
||||
fs.mkdirSync(cacheDir, { recursive: true });
|
||||
console.log(`Cache directory created: ${cacheDir}`);
|
||||
} catch (err) {
|
||||
console.error(`Error creating cache directory ${cacheDir}:`, err);
|
||||
process.exit(1); // Exit if cache directory cannot be created
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
initializeApp();
|
||||
|
||||
const CACHE_EXPIRY_MS = 24 * 60 * 60 * 1000; // 24 hours
|
||||
const CACHE_CLEANUP_INTERVAL_MS = 60 * 60 * 1000; // 1 hour
|
||||
const HTTP_STATUS = {
|
||||
OK: 200,
|
||||
NO_CONTENT: 204,
|
||||
REDIRECT: 302,
|
||||
NOT_MODIFIED: 304,
|
||||
BAD_REQUEST: 400,
|
||||
NOT_FOUND: 404,
|
||||
INTERNAL_SERVER_ERROR: 500,
|
||||
BAD_GATEWAY: 502,
|
||||
};

// Periodically clean up expired cache entries
setInterval(async () => {
  const currentTime = Date.now();
  const keysToDelete = [];
  const filesToDelete = [];

  // Step 1: collect the keys and files to delete
  for (const key in pathIndex) {
    if (currentTime - pathIndex[key].timestamp > CACHE_EXPIRY_MS) {
      keysToDelete.push(key);
      const cacheMetaFile = pathModule.join(cacheDir, `${key}.meta`);
      const cacheContentFile = pathModule.join(cacheDir, `${pathIndex[key].uniqid}.content`);
      filesToDelete.push(cacheMetaFile, cacheContentFile);
    }
  }

  // Step 2: drop the expired index entries from memory
  keysToDelete.forEach(key => delete pathIndex[key]);

  // Step 3: asynchronously delete the cached files from disk
  if (filesToDelete.length > 0) {
    console.log(`Cleaning up ${keysToDelete.length} expired cache entries`);

    // Delete the files in parallel
    const deletePromises = filesToDelete.map(file =>
      fs.promises.unlink(file).catch(err => {
        if (err.code !== 'ENOENT') { // ignore missing-file errors
          console.warn(`Failed to delete cache file ${file}:`, err.message);
        }
      })
    );

    // Wait for all deletions
    await Promise.all(deletePromises);
  }
}, CACHE_CLEANUP_INTERVAL_MS);

// Send error responses in a consistent format
function sendErrorResponse(res, statusCode, message) {
  if (!res.headersSent) {
    res.writeHead(statusCode, { 'Content-Type': 'text/plain;charset=UTF-8' });
    res.end(message);
  }
}

// --- Request Handling Logic ---

async function handleFavicon(req, res) {
  res.writeHead(HTTP_STATUS.NO_CONTENT);
  res.end();
}

async function handleEndpoint(req, res, parsedUrl) {
  if (parsedUrl.query.api) {
    const urlRegex = /^(https?:\/\/)?([\da-z.-]+)\.([a-z.]{2,6})([\/\w.-]*)*\/?$/;
    if (urlRegex.test(parsedUrl.query.api)) {
      apiEndpoint = parsedUrl.query.api;
      console.log(`API endpoint updated to: ${apiEndpoint}`);
    }
  }
  res.writeHead(HTTP_STATUS.OK, { 'Content-Type': 'application/json; charset=utf-8' });
  res.end(JSON.stringify({
    code: HTTP_STATUS.OK,
    data: {
      api: apiEndpoint,
      port: port,
      cacheDir: cacheDir,
      pathIndexCount: Object.keys(pathIndex).length,
      viewsInfo: {
        request: viewsInfo.request,
        cacheHit: viewsInfo.cacheHit,
        apiCall: viewsInfo.apiCall,
        cacheCall: viewsInfo.cacheCall,
        cacheReadError: viewsInfo.cacheReadError,
        fetchApiError: viewsInfo.fetchApiError,
        fetchApiWarning: viewsInfo.fetchApiWarning,
      }
    }
  }));
}

async function handleApiRedirect(res, apiData) {
  res.writeHead(HTTP_STATUS.REDIRECT, { Location: apiData.data.url });
  res.end();
}

async function processSuccessfulApiData(apiData, uniqidhex, reqPath, token, sign, res, req) {
  const { url: realUrl, cloudtype, expiration, path: apiPath, headers, uniqid, thumb } = apiData.data;
  const data = { realUrl, cloudtype, expiration: expiration * 1000, path: apiPath, headers, uniqid, thumb };

  pathIndex[uniqidhex] = { uniqid: data.uniqid, timestamp: Date.now() };
  const cacheMetaFile = pathModule.join(cacheDir, `${uniqidhex}.meta`);
  const cacheContentFile = pathModule.join(cacheDir, `${data.uniqid}.content`);
  const tempCacheContentFile = pathModule.join(cacheDir, `${data.uniqid}_${crypto.randomBytes(16).toString('hex')}.temp`);

  try {
    fs.writeFileSync(cacheMetaFile, JSON.stringify(data));
  } catch (writeError) {
    console.error(`Error writing meta file ${cacheMetaFile}:`, writeError);
    sendErrorResponse(res, HTTP_STATUS.INTERNAL_SERVER_ERROR, 'Failed to write cache metadata.');
    return;
  }

  if (fs.existsSync(cacheContentFile)) {
    const stats = fs.statSync(cacheContentFile);
    const contentLength = stats.size;
    if (contentLength < 2048 && data.headers['content-length'] && parseInt(data.headers['content-length'], 10) !== contentLength) {
      console.warn(`Content length mismatch for ${cacheContentFile}. API: ${data.headers['content-length']}, Cache: ${contentLength}. Re-fetching.`);
      fetchAndServe(data, tempCacheContentFile, cacheContentFile, cacheMetaFile, res, req);
    } else {
      serveFromCache(data, cacheContentFile, cacheMetaFile, res, reqPath, token, sign, uniqidhex, req);
    }
  } else {
    fetchAndServe(data, tempCacheContentFile, cacheContentFile, cacheMetaFile, res, req);
  }
}

async function tryServeFromStaleCacheOrError(uniqidhex, res, errorMessage, req) {
  if (pathIndex[uniqidhex]) {
    const cacheMetaFile = pathModule.join(cacheDir, `${uniqidhex}.meta`);
    const cacheContentFile = pathModule.join(cacheDir, `${pathIndex[uniqidhex].uniqid}.content`);
    if (fs.existsSync(cacheMetaFile) && fs.existsSync(cacheContentFile)) {
      console.warn(`API call failed or returned non-200. Serving stale cache for ${uniqidhex}`);
      try {
        const cacheData = JSON.parse(fs.readFileSync(cacheMetaFile, 'utf8'));
        serveFromCache(cacheData, cacheContentFile, cacheMetaFile, res, null, null, null, uniqidhex, req);
        return;
      } catch (parseError) {
        console.error(`Error parsing stale meta file ${cacheMetaFile}:`, parseError);
        // Fall through to generic error if stale cache is also broken
      }
    }
  }
  sendErrorResponse(res, HTTP_STATUS.BAD_GATEWAY, errorMessage || 'Bad Gateway');
}

async function handleMainRequest(req, res) {
  // Handle OPTIONS requests to support CORS preflight
  if (req.method === 'OPTIONS') {
    res.writeHead(200, {
      'Access-Control-Allow-Origin': req.headers.origin || '*',
      'Access-Control-Allow-Methods': 'GET, OPTIONS',
      'Access-Control-Allow-Headers': 'Content-Type',
      'Access-Control-Max-Age': '86400'
    });
    res.end();
    return;
  }

  req.url = req.url.replace(/\/{2,}/g, '/');
  const parsedUrl = url.parse(req.url, true);
  const sign = parsedUrl.query.sign || '';
  let reqPath = parsedUrl.pathname.split('/')[1] || ''; // Ensure reqPath is not undefined
  let token = parsedUrl.pathname.split('/').slice(2).join('/');

  if (reqPath === 'favicon.ico') return handleFavicon(req, res);
  if (reqPath === 'endpoint') return handleEndpoint(req, res, parsedUrl);

  if (!token && reqPath) { // If token is empty but reqPath is not, assume reqPath is the token
    token = reqPath;
    reqPath = 'app'; // Default to 'app' if only one path segment is provided
  }

  const ALLOWED_PATHS = ['avatar', 'go', 'bbs', 'www', 'url', 'thumb', 'app', 'qrcode', 'school'];
  if (!ALLOWED_PATHS.includes(reqPath) || !token) {
    return sendErrorResponse(res, HTTP_STATUS.BAD_REQUEST, `Bad Request: Invalid path or missing token.`);
  }

  viewsInfo.increment('request');
  const uniqidhex = crypto.createHash('md5').update(reqPath + token + sign).digest('hex');
  let cacheMetaFile = '';
  let cacheContentFile = '';

  if (pathIndex[uniqidhex]) {
    cacheMetaFile = pathModule.join(cacheDir, `${uniqidhex}.meta`);
    cacheContentFile = pathModule.join(cacheDir, `${pathIndex[uniqidhex].uniqid}.content`);
  }

  if (pathIndex[uniqidhex] && isCacheValid(cacheMetaFile, cacheContentFile)) {
    const { cacheData, isNotModified } = await checkCacheHeaders(req, cacheMetaFile);
    if (isNotModified) {
      res.writeHead(HTTP_STATUS.NOT_MODIFIED);
      res.end();
    } else {
      viewsInfo.increment('cacheHit');
      serveFromCache(cacheData, cacheContentFile, cacheMetaFile, res, reqPath, token, sign, uniqidhex, req);
    }
  } else {
    try {
      viewsInfo.increment('apiCall');
      const apiData = await fetchApiData(reqPath, token, sign);

      if (apiData.code === HTTP_STATUS.REDIRECT || apiData.code === 301) {
        return handleApiRedirect(res, apiData);
      }

      if (apiData.code === HTTP_STATUS.OK && apiData.data && apiData.data.url) {
        await processSuccessfulApiData(apiData, uniqidhex, reqPath, token, sign, res, req);
      } else {
        viewsInfo.increment('fetchApiWarning');
        await tryServeFromStaleCacheOrError(uniqidhex, res, apiData.message, req);
      }
    } catch (error) {
      viewsInfo.increment('fetchApiError');
      console.error('Error in API call or processing:', error);
      await tryServeFromStaleCacheOrError(uniqidhex, res, `Bad Gateway: API request failed. ${error.message}`, req);
    }
  }
}

const server = http.createServer(handleMainRequest);

// Check conditional request headers and decide whether to answer with 304
async function checkCacheHeaders(req, cacheMetaFile) {
  try {
    const metaContent = await fs.promises.readFile(cacheMetaFile, 'utf8');
    const cacheData = JSON.parse(metaContent);
    const ifNoneMatch = req.headers['if-none-match'];
    const ifModifiedSince = req.headers['if-modified-since'];

    // Check ETag first
    if (ifNoneMatch && cacheData.uniqid && ifNoneMatch === cacheData.uniqid) {
      return { cacheData, isNotModified: true };
    }

    // Check If-Modified-Since
    if (ifModifiedSince && cacheData.headers && cacheData.headers['last-modified']) {
      try {
        const lastModifiedDate = new Date(cacheData.headers['last-modified']);
        const ifModifiedSinceDate = new Date(ifModifiedSince);
        // The time resolution of an HTTP date is one second.
        // If If-Modified-Since is at least as new as Last-Modified, send 304.
        if (lastModifiedDate.getTime() <= ifModifiedSinceDate.getTime()) {
          return { cacheData, isNotModified: true };
        }
      } catch (dateParseError) {
        console.warn(`Error parsing date for cache header check (${cacheMetaFile}):`, dateParseError);
        // Proceed as if not modified check failed if dates are invalid
      }
    }
    return { cacheData, isNotModified: false };
  } catch (error) {
    console.error(`Error reading or parsing cache meta file ${cacheMetaFile} in checkCacheHeaders:`, error);
    return { cacheData: null, isNotModified: false }; // Indicate failure to load cacheData
  }
}

// Check whether a cache entry is still valid
async function isCacheValid(cacheMetaFile, cacheContentFile) {
  try {
    // Check that both files exist, in parallel via Promise.all
    const [metaExists, contentExists] = await Promise.all([
      fs.promises.access(cacheMetaFile).then(() => true).catch(() => false),
      fs.promises.access(cacheContentFile).then(() => true).catch(() => false)
    ]);

    if (!metaExists || !contentExists) {
      return false;
    }

    const metaContent = await fs.promises.readFile(cacheMetaFile, 'utf8');
    const cacheData = JSON.parse(metaContent);
    // Ensure expiration is a number and in the future
    return typeof cacheData.expiration === 'number' && cacheData.expiration > Date.now();
  } catch (error) {
    console.warn(`Error reading or parsing cache meta file ${cacheMetaFile} for validation:`, error);
    return false; // If meta file is corrupt or unreadable, cache is not valid
  }
}

// Fetch data from the API
const API_TIMEOUT_MS = 5000;
const USER_AGENT = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.114 Safari/537.36';

async function fetchApiData(reqPath, token, sign) {
  const queryParams = querystring.stringify({
    type: reqPath,
    sign: sign
  });
  const apiUrl = `${apiEndpoint}?${queryParams}`;
  const parsedApiUrl = new URL(apiUrl);
  const protocol = parsedApiUrl.protocol === 'https:' ? https : http;

  const options = {
    method: 'GET',
    headers: {
      'Accept': 'application/json; charset=utf-8',
      'User-Agent': USER_AGENT,
      'token': token
    },
    timeout: API_TIMEOUT_MS,
    rejectUnauthorized: false, // Allow self-signed certificates, use with caution
  };

  return new Promise((resolve, reject) => {
    const apiReq = protocol.request(apiUrl, options, (apiRes) => {
      let responseData = '';
      apiRes.setEncoding('utf8');
      apiRes.on('data', chunk => responseData += chunk);
      apiRes.on('end', () => {
        try {
          if (apiRes.statusCode >= 400) {
            // Treat HTTP errors from API as rejections for easier handling
            console.error(`API request to ${apiUrl} failed with status ${apiRes.statusCode}: ${responseData}`);
            // Attempt to parse for a message, but prioritize status code for error type
            let errorPayload = { code: apiRes.statusCode, message: `API Error: ${apiRes.statusCode}` };
            try {
              const parsedError = JSON.parse(responseData);
              if (parsedError && parsedError.message) errorPayload.message = parsedError.message;
            } catch (e) { /* Ignore if response is not JSON */ }
            resolve(errorPayload); // Resolve with error structure for consistency
            return;
          }
          resolve(JSON.parse(responseData));
        } catch (parseError) {
          console.error(`Error parsing JSON response from ${apiUrl}:`, parseError, responseData);
          reject(new Error(`Failed to parse API response: ${parseError.message}`));
        }
      });
    });

    apiReq.on('timeout', () => {
      apiReq.destroy(); // Destroy the request to free up resources
      console.error(`API request to ${apiUrl} timed out after ${API_TIMEOUT_MS}ms`);
      reject(new Error('API request timed out'));
    });

    apiReq.on('error', (networkError) => {
      console.error(`API request to ${apiUrl} failed:`, networkError);
      reject(networkError);
    });

    apiReq.end();
  });
}

// Fetch the data from the real URL and write it into the cache
const REAL_URL_FETCH_TIMEOUT_MS = 0; // 0 means no timeout for the actual file download

const fetchAndServe = async (data, tempCacheContentFile, cacheContentFile, cacheMetaFile, res, req) => {
  const protocol = data.realUrl.startsWith('https:') ? https : http;

  protocol.get(data.realUrl, { timeout: REAL_URL_FETCH_TIMEOUT_MS, rejectUnauthorized: false }, (realRes) => {
    const cacheStream = fs.createWriteStream(tempCacheContentFile, { flags: 'w', highWaterMark: 64 * 1024 }); // 64KB write buffer

    let isVideo = data.path && typeof data.path === 'string' && data.path.includes('.mp4');
    // Make sure content-length is valid
    const contentLength = realRes.headers['content-length'];
    if (contentLength) {
      data.headers['content-length'] = contentLength;
      // Asynchronously persist the updated data to cacheMetaFile
      fs.promises.writeFile(cacheMetaFile, JSON.stringify(data))
        .catch(err => console.error(`Error writing meta file ${cacheMetaFile}:`, err));
    } else {
      console.warn('Warning: content-length is undefined for the response from:', data.realUrl);
    }

    const baseHeaders = {
      'Cloud-Type': data.cloudtype,
      'ETag': data.uniqid || '',
      'Cache-Control': 'public, max-age=31536000', // 1 year
      'Expires': new Date(Date.now() + 31536000000).toUTCString(),
      'Accept-Ranges': 'bytes',
      'Connection': 'keep-alive',
      'Date': new Date().toUTCString(),
      'Last-Modified': data.headers['last-modified'] || new Date().toUTCString(),
      'Access-Control-Allow-Origin': req.headers.origin || '*',
      'Access-Control-Allow-Methods': 'GET, OPTIONS',
      'Access-Control-Allow-Headers': 'Content-Type',
    };
    const responseHeaders = {
      ...baseHeaders,
      'Content-Type': realRes.headers['content-type'] || (isVideo ? 'video/mp4' : 'application/octet-stream'),
      ...data.headers,
    };

    res.writeHead(realRes.statusCode, responseHeaders);

    // Use pipeline to optimise the stream transfer
    const pipeline = require('stream').pipeline;

    // Tee the stream so it is written to the cache and to the response at the same time
    // (see the standalone sketch after this function)
    const { PassThrough } = require('stream');
    const passThrough = new PassThrough();

    passThrough.pipe(cacheStream);
    passThrough.pipe(res);

    // Let pipeline handle stream errors
    pipeline(
      realRes,
      passThrough,
      (err) => {
        if (err) {
          console.error(`Pipeline error for ${data.realUrl}:`, err);
          handleResponseError(res, tempCacheContentFile, data.realUrl);
          return;
        }

        // After the stream finishes, move the temp file into place
        fs.promises.access(tempCacheContentFile)
          .then(() => {
            // Make sure the target directory exists
            return fs.promises.mkdir(pathModule.dirname(cacheContentFile), { recursive: true })
              .then(() => fs.promises.rename(tempCacheContentFile, cacheContentFile))
              .then(() => console.log(`Successfully cached: ${cacheContentFile}`))
              .catch(renameError => {
                console.error(`Error renaming temp cache file ${tempCacheContentFile} to ${cacheContentFile}:`, renameError);
                return fs.promises.unlink(tempCacheContentFile).catch(() => {});
              });
          })
          .catch(() => {
            console.warn(`Temp cache file ${tempCacheContentFile} not found after stream end for ${data.realUrl}`);
          });
      }
    );
  }).on('error', (requestError) => {
    console.error(`Error making GET request to ${data.realUrl}:`, requestError);
    handleResponseError(res, tempCacheContentFile, data.realUrl);
  });
};
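
The tee used in fetchAndServe above, shown in isolation as a minimal sketch (the function and file names here are illustrative, not part of the original file): one source stream is piped both to a cache file and to the client response, with stream.pipeline reporting errors on the source-to-tee leg.

const fs = require('fs');
const { PassThrough, pipeline } = require('stream');

// Sketch: copy a source stream to a cache file while also piping it to a response-like writable.
function teeToCacheAndResponse(sourceStream, cachePath, res) {
  const cacheStream = fs.createWriteStream(cachePath);
  const tee = new PassThrough();

  tee.pipe(cacheStream); // one branch goes to disk
  tee.pipe(res);         // the other goes to the client

  // pipeline handles error propagation and cleanup for source -> tee
  pipeline(sourceStream, tee, (err) => {
    if (err) console.error('tee pipeline failed:', err);
  });
}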

// Read data from the cache and return it
async function serveFromCache(cacheData, cacheContentFile, cacheMetaFile, res, reqPath, token, sign, uniqidhex, req) {
  if (!cacheData) { // Cache metadata unavailable, try to refetch
    console.warn(`Cache metadata unavailable for ${cacheContentFile}, attempting to fetch fresh data`);

    // If request parameters were supplied, try to fetch fresh data
    if (reqPath && token) {
      try {
        viewsInfo.increment('apiCall');
        const apiData = await fetchApiData(reqPath, token, sign);

        if (apiData.code === HTTP_STATUS.REDIRECT || apiData.code === 301) {
          res.writeHead(HTTP_STATUS.REDIRECT, { Location: apiData.data.url });
          res.end();
          return;
        }

        if (apiData.code === HTTP_STATUS.OK && apiData.data && apiData.data.url) {
          const { url: realUrl, cloudtype, expiration, path: apiPath, headers, uniqid, thumb } = apiData.data;
          const data = { realUrl, cloudtype, expiration: expiration * 1000, path: apiPath, headers, uniqid, thumb };

          // Update the index
          pathIndex[uniqidhex] = { uniqid: data.uniqid, timestamp: Date.now() };

          // Write the new metadata
          await fs.promises.mkdir(pathModule.dirname(cacheMetaFile), { recursive: true });
          await fs.promises.writeFile(cacheMetaFile, JSON.stringify(data));

          // Fetch and serve the fresh data
          const tempCacheContentFile = pathModule.join(cacheDir, `${data.uniqid}_${crypto.randomBytes(16).toString('hex')}.temp`);
          fetchAndServe(data, tempCacheContentFile, cacheContentFile, cacheMetaFile, res, req);
          return;
        } else {
          viewsInfo.increment('fetchApiWarning');
          sendErrorResponse(res, HTTP_STATUS.BAD_GATEWAY, apiData.message || 'Failed to fetch data from API');
          return;
        }
      } catch (error) {
        viewsInfo.increment('fetchApiError');
        console.error('Error fetching fresh data:', error);
        sendErrorResponse(res, HTTP_STATUS.INTERNAL_SERVER_ERROR, `Failed to fetch fresh data: ${error.message}`);
        return;
      }
    } else {
      // Without request parameters we cannot refetch
      console.error(`serveFromCache called with null cacheData and insufficient request info for ${cacheContentFile}`);
      sendErrorResponse(res, HTTP_STATUS.INTERNAL_SERVER_ERROR, 'Cache metadata unavailable and cannot fetch fresh data.');
      return;
    }
  }

  // Derive ETag and Last-Modified asynchronously
  let etag = cacheData.uniqid;
  let lastModified = cacheData.headers && cacheData.headers['last-modified'];

  if (!etag || !lastModified) {
    try {
      const [fileStats, metaStats] = await Promise.all([
        fs.promises.stat(cacheContentFile).catch(() => null),
        fs.promises.stat(cacheMetaFile).catch(() => null)
      ]);

      if (!etag && fileStats) {
        // Build the ETag from file size and mtime instead of hashing the whole file
        etag = crypto.createHash('md5')
          .update(`${fileStats.size}-${fileStats.mtime.getTime()}`)
          .digest('hex');
      }

      if (!lastModified && metaStats) {
        lastModified = new Date(metaStats.mtime).toUTCString();
      }
    } catch (error) {
      console.warn(`Error getting file stats for cache: ${error.message}`);
    }
  }

  const baseHeaders = {
    'Cloud-Type': cacheData.cloudtype || 'unknown',
    'ETag': etag || '',
    'Cache-Control': 'public, max-age=31536000', // 1 year
    'Expires': new Date(Date.now() + 31536000000).toUTCString(),
    'Accept-Ranges': 'bytes',
    'Connection': 'keep-alive',
    'Date': new Date().toUTCString(),
    'Last-Modified': lastModified || new Date().toUTCString(),
    'Access-Control-Allow-Origin': req.headers.origin || '*',
    'Access-Control-Allow-Methods': 'GET, OPTIONS',
    'Access-Control-Allow-Headers': 'Content-Type',
  };

  viewsInfo.increment('cacheCall');

  // First check that the cache content file exists and is readable
  try {
    await fs.promises.access(cacheContentFile, fs.constants.R_OK);
  } catch (error) {
    console.warn(`Cache content file ${cacheContentFile} not accessible: ${error.message}`);

    // If request parameters were supplied, try to fetch fresh data
    if (reqPath && token) {
      console.log(`Attempting to fetch fresh data for ${cacheContentFile}`);
      try {
        viewsInfo.increment('apiCall');
        const apiData = await fetchApiData(reqPath, token, sign);

        if (apiData.code === HTTP_STATUS.OK && apiData.data && apiData.data.url) {
          const { url: realUrl, cloudtype, expiration, path: apiPath, headers, uniqid, thumb } = apiData.data;
          const data = { realUrl, cloudtype, expiration: expiration * 1000, path: apiPath, headers, uniqid, thumb };

          // Update the index
          pathIndex[uniqidhex] = { uniqid: data.uniqid, timestamp: Date.now() };

          // Fetch and serve the fresh data
          const tempCacheContentFile = pathModule.join(cacheDir, `${data.uniqid}_${crypto.randomBytes(16).toString('hex')}.temp`);
          fetchAndServe(data, tempCacheContentFile, cacheContentFile, cacheMetaFile, res, req);
          return;
        } else {
          sendErrorResponse(res, HTTP_STATUS.BAD_GATEWAY, apiData.message || 'Failed to fetch data from API');
          return;
        }
      } catch (fetchError) {
        console.error(`Error fetching fresh data: ${fetchError.message}`);
        sendErrorResponse(res, HTTP_STATUS.INTERNAL_SERVER_ERROR, `Failed to fetch fresh data: ${fetchError.message}`);
        return;
      }
    } else {
      sendErrorResponse(res, HTTP_STATUS.INTERNAL_SERVER_ERROR, 'Unable to read cache content file and cannot fetch fresh data.');
      return;
    }
  }

  const readStream = fs.createReadStream(cacheContentFile, { highWaterMark: 64 * 1024 }); // 64KB read buffer
  const isVideo = cacheData.path && typeof cacheData.path === 'string' && cacheData.path.includes('.mp4');

  let currentContentLength = cacheData.headers && cacheData.headers['content-length'] ? parseInt(cacheData.headers['content-length'], 10) : 0;

  if (!currentContentLength || currentContentLength === 0) {
    try {
      const stats = fs.statSync(cacheContentFile);
      currentContentLength = stats.size;
      if (currentContentLength > 0) {
        if (!cacheData.headers) cacheData.headers = {};
        cacheData.headers['content-length'] = currentContentLength.toString();
        // Update meta file if content-length was missing or zero
        fs.writeFileSync(cacheMetaFile, JSON.stringify(cacheData));
        console.log(`Updated content-length in ${cacheMetaFile} to ${currentContentLength}`);
      } else {
        console.warn(`Cached content file ${cacheContentFile} has size 0 or stat failed.`);
        // Potentially treat as an error or serve as is if 0 length is valid for some files
      }
    } catch (statError) {
      console.error(`Error stating cache content file ${cacheContentFile}:`, statError);
      handleCacheReadError(res, cacheContentFile, reqPath, token, sign, uniqidhex); // Treat stat error as read error
      return;
    }
  }

  readStream.on('open', () => {

    const responseHeaders = {
      ...baseHeaders,
      'Content-Type': (cacheData.headers && cacheData.headers['content-type']) || (isVideo ? 'video/mp4' : 'application/octet-stream'),
      // Merge other headers from cacheData.headers, letting them override base if necessary
      // but ensure our critical headers like Content-Length (if updated) are preserved.
      ...(cacheData.headers || {}),
    };

    res.writeHead(HTTP_STATUS.OK, responseHeaders);
    readStream.pipe(res);
  });

  readStream.on('error', (err) => {
    console.error(`Read stream error for ${cacheContentFile}:`, err);

    // If request parameters were supplied, refetch instead of failing outright
    if (reqPath && token) {
      console.log(`Read stream error, attempting to fetch fresh data for ${cacheContentFile}`);
      viewsInfo.increment('apiCall');

      fetchApiData(reqPath, token, sign)
        .then(apiData => {
          if (apiData.code === HTTP_STATUS.OK && apiData.data && apiData.data.url) {
            const { url: realUrl, cloudtype, expiration, path: apiPath, headers, uniqid, thumb } = apiData.data;
            const data = { realUrl, cloudtype, expiration: expiration * 1000, path: apiPath, headers, uniqid, thumb };

            // Update the index
            pathIndex[uniqidhex] = { uniqid: data.uniqid, timestamp: Date.now() };

            // Fetch and serve the fresh data
            const tempCacheContentFile = pathModule.join(cacheDir, `${data.uniqid}_${crypto.randomBytes(16).toString('hex')}.temp`);
            fetchAndServe(data, tempCacheContentFile, cacheContentFile, cacheMetaFile, res, req);
          } else {
            viewsInfo.increment('fetchApiWarning');
            sendErrorResponse(res, HTTP_STATUS.BAD_GATEWAY, apiData.message || 'Failed to fetch data from API');
          }
        })
        .catch(fetchError => {
          viewsInfo.increment('fetchApiError');
          console.error(`Error fetching fresh data after read stream error: ${fetchError.message}`);
          sendErrorResponse(res, HTTP_STATUS.INTERNAL_SERVER_ERROR, `Failed to fetch fresh data: ${fetchError.message}`);
        });
    } else {
      // Without request parameters, fall back to the original error handling
      handleCacheReadError(res, cacheContentFile, reqPath, token, sign, uniqidhex);
    }
  });

  // Handle cases where client closes connection prematurely
  res.on('close', () => {
    if (!res.writableEnded) {
      console.log(`Client closed connection prematurely for ${cacheContentFile}. Destroying read stream.`);
      readStream.destroy();
    }
  });
}

// Handle errors from the upstream response
const handleResponseError = (res, tempCacheContentFile, realUrl) => {
  viewsInfo.increment('fetchApiError');
  console.error(`Error fetching from real URL: ${realUrl}`);
  sendErrorResponse(res, HTTP_STATUS.BAD_GATEWAY, `Bad Gateway: Failed to fetch from ${realUrl}`);
  if (fs.existsSync(tempCacheContentFile)) {
    try {
      fs.unlinkSync(tempCacheContentFile);
    } catch (unlinkErr) {
      console.error(`Error unlinking temp file ${tempCacheContentFile}:`, unlinkErr);
    }
  }
};

// Handle cache read errors
const handleCacheReadError = (res, filePath, reqPath, token, sign, uniqidhex) => {
  viewsInfo.increment('cacheReadError');
  console.error(`Error reading cache file: ${filePath}`);

  // If request parameters were supplied, try to fetch fresh data
  if (reqPath && token) {
    console.log(`Cache read error, attempting to fetch fresh data for ${filePath}`);
    viewsInfo.increment('apiCall');

    fetchApiData(reqPath, token, sign)
      .then(apiData => {
        if (apiData.code === HTTP_STATUS.OK && apiData.data && apiData.data.url) {
          const { url: realUrl, cloudtype, expiration, path: apiPath, headers, uniqid, thumb } = apiData.data;
          const data = { realUrl, cloudtype, expiration: expiration * 1000, path: apiPath, headers, uniqid, thumb };

          // Update the index
          if (uniqidhex) {
            pathIndex[uniqidhex] = { uniqid: data.uniqid, timestamp: Date.now() };
          }

          // Fetch and serve the fresh data
          const cacheMetaFile = pathModule.join(cacheDir, `${uniqidhex}.meta`);
          const cacheContentFile = pathModule.join(cacheDir, `${data.uniqid}.content`);
          const tempCacheContentFile = pathModule.join(cacheDir, `${data.uniqid}_${crypto.randomBytes(16).toString('hex')}.temp`);

          // Write the new metadata
          fs.promises.mkdir(pathModule.dirname(cacheMetaFile), { recursive: true })
            .then(() => fs.promises.writeFile(cacheMetaFile, JSON.stringify(data)))
            .then(() => {
              fetchAndServe(data, tempCacheContentFile, cacheContentFile, cacheMetaFile, res, req);
            })
            .catch(writeError => {
              console.error(`Error writing meta file after cache read error: ${writeError.message}`);
              sendErrorResponse(res, HTTP_STATUS.INTERNAL_SERVER_ERROR, 'Failed to write cache metadata');
            });
        } else {
          viewsInfo.increment('fetchApiWarning');
          sendErrorResponse(res, HTTP_STATUS.BAD_GATEWAY, apiData.message || 'Failed to fetch data from API');
        }
      })
      .catch(fetchError => {
        viewsInfo.increment('fetchApiError');
        console.error(`Error fetching fresh data after cache read error: ${fetchError.message}`);
        sendErrorResponse(res, HTTP_STATUS.INTERNAL_SERVER_ERROR, `Failed to fetch fresh data: ${fetchError.message}`);
      });
  } else {
    // Without request parameters, return an error
    sendErrorResponse(res, HTTP_STATUS.INTERNAL_SERVER_ERROR, 'Internal Server Error: Unable to read cache content file');
  }
};

// Start the server
server.listen(port, () => {
  console.log(`Proxy server is running on http://localhost:${port}`);
});

// Handle SIGINT (Ctrl+C)
process.on('SIGINT', () => {
  console.log('Received SIGINT. Shutting down gracefully...');
  server.close(() => {
    console.log('Server closed.');
    process.exit(0);
  });

  setTimeout(() => {
    console.error('Forcing shutdown...');
    process.exit(1);
  }, 10000);
});