Commit 41ad8b7267 (parent: 24cda32cbb)

.gitignore (vendored): 1 line changed
@@ -1,6 +1,7 @@
 .DS_Store
 /node_modules
 /backup
+/.cache

 # local env files
 .env.local
index.js: 105 lines changed
@@ -2,9 +2,11 @@ const http = require('http');
 const https = require('https');
 const url = require('url');
 const querystring = require('querystring');
+const fs = require('fs');
+const path = require('path');

 const requestTimeout = 10000; // 10 seconds
-const cache = {};
+const cacheDir = path.join(__dirname, '.cache');
 const args = process.argv.slice(2);

 let port = 9001;
@@ -20,6 +22,11 @@ args.forEach(arg => {
   }
 });

+// Ensure the cache directory exists
+if (!fs.existsSync(cacheDir)) {
+  fs.mkdirSync(cacheDir);
+}
+
 const server = http.createServer(async (req, res) => {
   if (req.url === '/favicon.ico') {
     res.writeHead(204);
@@ -28,54 +35,61 @@ const server = http.createServer(async (req, res) => {
   }

   const parsedUrl = url.parse(req.url, true);
-  const path = parsedUrl.pathname;
+  const reqPath = parsedUrl.pathname;
   const sign = parsedUrl.query.sign || '';

-  if (!sign || path === '/') {
+  // Keep only the file name from reqPath, not the full path
+  const reqName = parsedUrl.pathname.split('/').pop();
+  const cacheMetaFile = path.join(cacheDir, `${reqName.replace(/\//g, '_')}.meta`);
+  const cacheContentFile = path.join(cacheDir, `${reqName.replace(/\//g, '_')}.content`);
+  const tempCacheContentFile = path.join(cacheDir, `${reqName.replace(/\//g, '_')}.temp`);
+
+  if (!sign || reqPath === '/') {
     res.writeHead(400, { 'Content-Type': 'text/plain' });
     res.end('Bad Request: Missing sign or path');
     return;
   }

-  if (isCacheValid(path)) {
-    cleanExpiredCache();
-    fetchAndServe(cache[path], res);
+  if (isCacheValid(cacheMetaFile, cacheContentFile)) {
+    serveFromCache(cacheMetaFile, cacheContentFile, res);
   } else {
-    delete cache[path]; // Remove expired cache entry if exists
     try {
-      const apiData = await fetchApiData(path, sign);
+      const apiData = await fetchApiData(reqPath, sign);
       if (apiData.code === 200 && apiData.data && apiData.data.url) {
         const { url: realUrl, cloudtype, expiration } = apiData.data;
         const data = { realUrl, cloudtype, expiration: expiration * 1000 };

         if (expiration > 0) {
-          cache[path] = data;
+          fs.writeFileSync(cacheMetaFile, JSON.stringify(data));
         }
-        fetchAndServe(data, res);
+
+        // If cacheContentFile already exists, serve it directly
+        if (fs.existsSync(cacheContentFile)) {
+          serveFromCache(cacheMetaFile, cacheContentFile, res);
+          return;
+        }
+        fetchAndServe(data, tempCacheContentFile, cacheContentFile, res);
       } else {
         res.writeHead(502, { 'Content-Type': 'text/plain' });
         res.end(apiData.message || 'Bad Gateway');
       }
     } catch (error) {
       res.writeHead(502, { 'Content-Type': 'text/plain' });
-      res.end('Bad Gateway: Failed to decode JSON');
+      res.end('Bad Gateway: Failed to decode JSON' + error);
     }
   }
 });

-const isCacheValid = (path) => cache[path] && cache[path].expiration > Date.now();
+const isCacheValid = (cacheMetaFile, cacheContentFile) => {
+  if (!fs.existsSync(cacheMetaFile) || !fs.existsSync(cacheContentFile)) return false;

-const cleanExpiredCache = () => {
-  Object.keys(cache).forEach(key => {
-    if (cache[key].expiration < Date.now()) {
-      delete cache[key];
-    }
-  });
+  const cacheData = JSON.parse(fs.readFileSync(cacheMetaFile, 'utf8'));
+  return cacheData.expiration > Date.now();
 };

-const fetchApiData = (path, sign) => {
+const fetchApiData = (reqPath, sign) => {
   return new Promise((resolve, reject) => {
-    const postData = querystring.stringify({ path, sign });
+    const postData = querystring.stringify({ path: reqPath, sign });

     const apiReq = https.request(apiEndpoint, {
       method: 'POST',
@@ -104,17 +118,60 @@ const fetchApiData = (path, sign) => {
   });
 };

-const fetchAndServe = (data, res) => {
+const fetchAndServe = (data, tempCacheContentFile, cacheContentFile, res) => {
   https.get(data.realUrl, { timeout: requestTimeout * 10 }, (realRes) => {
+    // Create a write stream for the temporary cache file
+    const cacheStream = fs.createWriteStream(tempCacheContentFile, { flags: 'w' });
+
     res.writeHead(realRes.statusCode, {
       ...realRes.headers,
       'Cloud-Type': data.cloudtype,
       'Cloud-Expiration': data.expiration,
     });
+
+    realRes.pipe(cacheStream);
     realRes.pipe(res);
+
+    realRes.on('end', () => {
+      // After the download completes, rename the temp file to the final cache file
+      fs.renameSync(tempCacheContentFile, cacheContentFile);
+      cacheStream.end();
+    });
+
+    realRes.on('error', (e) => {
+      if (!res.headersSent) {
+        res.writeHead(502, { 'Content-Type': 'text/plain' });
+        res.end(`Bad Gateway: ${data.realUrl}`);
+      }
+      fs.unlinkSync(tempCacheContentFile); // Delete the temp file
+    });
   }).on('error', (e) => {
-    res.writeHead(502, { 'Content-Type': 'text/plain' });
-    res.end(`Bad Gateway: ${data.realUrl}`);
+    if (!res.headersSent) {
+      res.writeHead(502, { 'Content-Type': 'text/plain' });
+      res.end(`Bad Gateway: ${data.realUrl}`);
+    }
+    fs.unlinkSync(tempCacheContentFile); // Delete the temp file
   });
 };

+const serveFromCache = (cacheMetaFile, cacheContentFile, res) => {
+  const cacheData = JSON.parse(fs.readFileSync(cacheMetaFile, 'utf8'));
+  const readStream = fs.createReadStream(cacheContentFile);
+
+  readStream.on('open', () => {
+    res.writeHead(200, {
+      'Content-Type': 'application/octet-stream',
+      'Cloud-Type': cacheData.cloudtype,
+      'Cloud-Expiration': cacheData.expiration,
+    });
+    readStream.pipe(res);
+  });
+
+  readStream.on('error', (err) => {
+    if (!res.headersSent) {
+      res.writeHead(500, { 'Content-Type': 'text/plain' });
+      res.end('Internal Server Error: Unable to read cache content file');
+    }
+  });
+};
@@ -135,4 +192,4 @@ process.on('SIGINT', () => {
     console.error('Forcing shutdown...');
     process.exit(1);
   }, 10000);
-});
+});
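Taken together, the index.js changes replace the in-memory cache object with a disk cache under .cache/: each entry is a <name>.meta JSON file (real URL, cloud type, expiration) plus a <name>.content body that is streamed through a <name>.temp file and renamed only after the download completes. A minimal sketch of exercising the new flow follows; the file name and sign value are placeholders, not values from this commit:

    // Sketch: request through the proxy twice; the second request within the
    // expiration window should be served from the .cache/ files.
    const http = require('http');

    const reqUrl = 'http://localhost:9001/example.bin?sign=PLACEHOLDER'; // hypothetical path and sign
    http.get(reqUrl, (res) => {
      // Cloud-Type and Cloud-Expiration are set by fetchAndServe/serveFromCache
      console.log(res.statusCode, res.headers['cloud-type'], res.headers['cloud-expiration']);
      res.resume(); // drain the body; the proxy also writes it to the temp cache file
    });

On the first request the proxy resolves the real URL via the API and pipes the response to both the client and the cache stream; repeats hit serveFromCache without touching the API.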
index.py: 112 lines deleted (file removed)
@@ -1,112 +0,0 @@
-import http.server
-import urllib.request
-import urllib.parse
-import json
-import ssl
-import time
-
-class ProxyHTTPRequestHandler(http.server.BaseHTTPRequestHandler):
-
-    api_endpoint = 'https://oss.x-php.com/alist/link'
-    request_timeout = 10  # 10 seconds
-    cache = {}
-
-    def do_GET(self):
-        self.proxy_request()
-
-    def do_POST(self):
-        self.proxy_request()
-
-    def proxy_request(self):
-        path = self.path
-
-        # Filter out favicon.ico requests
-        if path == '/favicon.ico':
-            self.send_response(204)
-            self.end_headers()
-            return
-
-        sign = self.headers.get('sign', '')
-
-        # Check if the data is in cache and not expired
-        cache_entry = self.cache.get(path)
-        if cache_entry and cache_entry['expiration'] > time.time():
-            self.serve_from_cache(cache_entry)
-            return
-        else:
-            self.cache.pop(path, None)  # Remove expired cache entry if exists
-
-        # Construct the POST data
-        post_data = urllib.parse.urlencode({'path': path, 'sign': sign}).encode('utf-8')
-
-        try:
-            # Request the real URL from the API
-            context = ssl._create_unverified_context()
-            req = urllib.request.Request(self.api_endpoint, data=post_data, method='POST')
-            req.add_header('Accept', 'application/json')
-            with urllib.request.urlopen(req, timeout=self.request_timeout, context=context) as response:
-                api_response = response.read().decode('utf-8')
-                # Ensure the response is JSON
-                try:
-                    api_data = json.loads(api_response)
-                except json.JSONDecodeError:
-                    self.send_error(502, 'Bad Gateway: Failed to decode JSON')
-                    return
-
-            if isinstance(api_data, dict) and api_data.get('code') == 200 and api_data.get('data') and api_data['data'].get('url'):
-                real_url = api_data['data']['url']
-                cloud_type = api_data['data']['cloudtype']
-                expiration = int(api_data['data'].get('expiration', 0))  # Convert expiration to int
-
-                # Cache the response if expiration is greater than 0
-                if expiration > 0:
-                    self.cache[path] = {
-                        'real_url': real_url,
-                        'cloud_type': cloud_type,
-                        'expiration': time.time() + expiration
-                    }
-
-                self.fetch_and_serve(real_url, cloud_type)
-            else:
-                self.send_error(502, api_data.get('message', 'Bad Gateway'))
-
-        except urllib.error.URLError as api_error:
-            if isinstance(api_error.reason, str) and 'timed out' in api_error.reason:
-                self.send_error(504, 'Gateway Timeout')
-            else:
-                self.send_error(500, 'Internal Server Error')
-
-    def fetch_and_serve(self, real_url, cloud_type):
-        try:
-            context = ssl._create_unverified_context()
-            with urllib.request.urlopen(real_url, timeout=self.request_timeout, context=context) as real_response:
-                self.send_response(real_response.status)
-                for key, value in real_response.getheaders():
-                    self.send_header(key, value)
-                self.send_header('cloudtype', cloud_type)
-                self.end_headers()
-                self.wfile.write(real_response.read())
-        except ConnectionResetError:
-            print(f"Connection reset by peer when fetching {real_url}")
-        except BrokenPipeError:
-            print(f"Broken pipe when serving {real_url}")
-        except Exception as real_error:
-            self.send_error(502, f'Bad Gateway: {real_url}')
-
-    def serve_from_cache(self, cache_entry):
-        real_url = cache_entry['real_url']
-        cloud_type = cache_entry['cloud_type']
-        self.fetch_and_serve(real_url, cloud_type)
-
-def run(server_class=http.server.HTTPServer, handler_class=ProxyHTTPRequestHandler, port=3000):
-    server_address = ('', port)
-    httpd = server_class(server_address, handler_class)
-    print(f"Proxy server is running on http://localhost:{port}")
-    try:
-        httpd.serve_forever()
-    except KeyboardInterrupt:
-        print("\nServer is shutting down...")
-        httpd.server_close()
-
-if __name__ == '__main__':
-    run()
oss.js: 79 lines changed
@@ -1,7 +1,8 @@
 const http = require('http');
 const https = require('https');
-const fs = require('fs');
+const fs = require('fs').promises;
 const path = require('path');
+const { URL } = require('url');

 const PORT = 3000;
 const CACHE_DIR = path.join(__dirname, '.cache');
@@ -9,68 +10,56 @@ const CACHE_EXPIRY = 30 * 24 * 60 * 60 * 1000; // 30 days in milliseconds
 const CLEAN_INTERVAL = 24 * 60 * 60 * 1000; // 1 day in milliseconds

 // Ensure the cache directory exists
-if (!fs.existsSync(CACHE_DIR)) {
-  fs.mkdirSync(CACHE_DIR);
-}
+fs.mkdir(CACHE_DIR, { recursive: true }).catch(console.error);

 // Helper function to get cache file path
 const getCacheFilePath = (requestUrl) => {
-  const sanitizedUrl = requestUrl.replace(/[^a-z0-9]/gi, '_').toLowerCase();
+  const urlObj = new URL(requestUrl);
+  const sanitizedUrl = (urlObj.host + urlObj.pathname).replace(/[^a-z0-9]/gi, '_').toLowerCase();
   return path.join(CACHE_DIR, sanitizedUrl);
 };

 // Function to clean up expired cache files
-const cleanUpCache = () => {
-  fs.readdir(CACHE_DIR, (err, files) => {
-    if (err) {
-      console.error('Error reading cache directory:', err);
-      return;
-    }
-
+const cleanUpCache = async () => {
+  try {
+    const files = await fs.readdir(CACHE_DIR);
     const now = Date.now();

-    files.forEach(file => {
+    for (const file of files) {
       const filePath = path.join(CACHE_DIR, file);
+      const stats = await fs.stat(filePath);

-      fs.stat(filePath, (err, stats) => {
-        if (err) {
-          console.error('Error getting file stats:', err);
-          return;
-        }
-
-        if (now - stats.mtimeMs > CACHE_EXPIRY) {
-          fs.unlink(filePath, (err) => {
-            if (err) {
-              console.error('Error deleting file:', err);
-            }
-          });
-        }
-      });
-    });
-  });
+      if (now - stats.mtimeMs > CACHE_EXPIRY) {
+        await fs.unlink(filePath);
+      }
+    }
+  } catch (err) {
+    console.error('Error cleaning up cache:', err);
+  }
 };

 // Schedule cache clean-up at regular intervals
 setInterval(cleanUpCache, CLEAN_INTERVAL);

 // Function to handle proxying and caching
-const handleRequest = (req, res) => {
-
+const handleRequest = async (req, res) => {
   const targetUrl = `https://oss.x-php.com${req.url}`;
   const cacheFilePath = getCacheFilePath(targetUrl);

-  // Check if the cache file exists and is still valid
-  if (fs.existsSync(cacheFilePath)) {
-    const stats = fs.statSync(cacheFilePath);
+  try {
+    // Check if the cache file exists and is still valid
+    const cacheStats = await fs.stat(cacheFilePath);
     const now = Date.now();

-    if (now - stats.mtimeMs < CACHE_EXPIRY) {
+    if (now - cacheStats.mtimeMs < CACHE_EXPIRY) {
       // Serve from cache
-      const cachedData = JSON.parse(fs.readFileSync(cacheFilePath, 'utf8'));
+      const cachedData = JSON.parse(await fs.readFile(cacheFilePath, 'utf8'));
       res.writeHead(cachedData.statusCode, cachedData.headers);
       res.end(Buffer.from(cachedData.body, 'base64'));
       return;
     }
+  } catch (err) {
+    // Cache file does not exist or is invalid, proceed to fetch from the target URL
+  }

   // Fetch from the target URL
@@ -81,16 +70,18 @@ const handleRequest = (req, res) => {
     data.push(chunk);
   });

-  proxyRes.on('end', () => {
+  proxyRes.on('end', async () => {
     const responseData = Buffer.concat(data);

-    // Save the response to cache
-    const cacheData = {
-      statusCode: proxyRes.statusCode,
-      headers: proxyRes.headers,
-      body: responseData.toString('base64')
-    };
-    fs.writeFileSync(cacheFilePath, JSON.stringify(cacheData));
+    if (proxyRes.statusCode === 200 && proxyRes.headers['content-type'] && proxyRes.headers['content-type'].startsWith('image/')) {
+      // Save the response to cache if it is an image
+      const cacheData = {
+        statusCode: proxyRes.statusCode,
+        headers: proxyRes.headers,
+        body: responseData.toString('base64')
+      };
+      await fs.writeFile(cacheFilePath, JSON.stringify(cacheData)).catch(console.error);
+    }

     // Serve the response
     res.writeHead(proxyRes.statusCode, proxyRes.headers);
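The oss.js rewrite swaps callback-style fs for fs.promises and narrows caching to image responses; each cache file is a JSON envelope holding the status code, headers, and a base64-encoded body. A sketch of decoding one back (the helper name and path are illustrative, not part of the commit):

    const fs = require('fs').promises;

    // Sketch: read a cache entry written by handleRequest and restore the body bytes.
    async function readCacheEntry(cacheFilePath) {
      const { statusCode, headers, body } = JSON.parse(await fs.readFile(cacheFilePath, 'utf8'));
      return { statusCode, headers, body: Buffer.from(body, 'base64') }; // base64 back to a Buffer
    }

Storing headers and body together in one JSON file keeps a cache entry atomic: a single writeFile either lands the whole response or nothing.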
torrents.js: 86 lines deleted (file removed)
@@ -1,86 +0,0 @@
-const http = require('http');
-const https = require('https');
-const url = require('url');
-
-const PORT = 3000;
-const MAX_RETRIES = 3;
-const INITIAL_TIMEOUT = 3000; // Initial timeout: 3 seconds
-const BACKOFF_FACTOR = 2; // Exponential backoff factor
-
-const server = http.createServer((req, res) => {
-  const reqUrl = url.parse(req.url, true);
-  const id = reqUrl.pathname.split('/').pop();
-
-  // Check that the ID exists and is well-formed
-  if (!id || isNaN(id)) {
-    res.writeHead(400, { 'Content-Type': 'text/plain' });
-    res.end('Invalid ID');
-    return;
-  }
-
-  const targetUrl = `https://xxxclub.to/torrents/details/${id}`;
-  console.log(`Target URL: ${targetUrl}`);
-
-  let responseSent = false;
-
-  const makeRequest = (retryCount = 0, timeout = INITIAL_TIMEOUT) => {
-    if (responseSent) return;
-
-    const options = url.parse(targetUrl);
-    options.method = 'GET';
-    options.timeout = timeout;
-
-    const proxyReq = https.request(options, (proxyRes) => {
-      let data = '';
-
-      proxyRes.on('data', (chunk) => {
-        data += chunk;
-      });
-
-      proxyRes.on('end', () => {
-        if (!responseSent) {
-          res.writeHead(proxyRes.statusCode, proxyRes.headers);
-          res.end(data);
-          responseSent = true;
-        }
-      });
-    });
-
-    proxyReq.on('timeout', () => {
-      console.error('Request timed out.');
-      proxyReq.abort();
-
-      if (retryCount < MAX_RETRIES) {
-        const newTimeout = timeout * BACKOFF_FACTOR;
-        console.log(`Retrying... (${retryCount + 1}/${MAX_RETRIES}) with timeout ${newTimeout}ms`);
-        makeRequest(retryCount + 1, newTimeout);
-      } else if (!responseSent) {
-        res.writeHead(504, { 'Content-Type': 'text/plain' });
-        res.end('Request timed out.');
-        responseSent = true;
-      }
-    });
-
-    proxyReq.on('error', (e) => {
-      console.error(`Problem with request: ${e.message}`);
-
-      if (retryCount < MAX_RETRIES) {
-        const newTimeout = timeout * BACKOFF_FACTOR;
-        console.log(`Retrying... (${retryCount + 1}/${MAX_RETRIES}) with timeout ${newTimeout}ms`);
-        makeRequest(retryCount + 1, newTimeout);
-      } else if (!responseSent) {
-        res.writeHead(500, { 'Content-Type': 'text/plain' });
-        res.end('Error occurred while fetching the data.');
-        responseSent = true;
-      }
-    });
-
-    proxyReq.end();
-  };
-
-  makeRequest();
-});
-
-server.listen(PORT, () => {
-  console.log(`Proxy server is running on http://localhost:${PORT}`);
-});