chore: initial commit - CloudSearch v0.0.2
This commit is contained in:
615
packages/backend/src/cloud/admin.routes.ts
Normal file
615
packages/backend/src/cloud/admin.routes.ts
Normal file
@@ -0,0 +1,615 @@
|
||||
import { Router, Request, Response } from 'express';
|
||||
// Native fetch available in Node 20+
|
||||
import fs from "fs";
|
||||
import { execSync } from 'child_process';
|
||||
import { adminLimiter, loginLimiter } from '../middleware/rate-limit';
|
||||
import { getSaveRecords } from '../cloud/cloud.service';
|
||||
import { getCloudConfigs, getCloudConfigById, saveCloudConfig, deleteCloudConfig, getCloudConfigByType, testCloudConnection, testCloudConnectionWithCookie } from '../cloud/credential.service';
|
||||
// Note: check-in routes were removed (sign-in feature removed)
|
||||
import { getAllCloudTypes } from '../cloud/cloud-types.service';
|
||||
import { login, authMiddleware, verifyToken, changePassword } from '../admin/auth.service';
|
||||
import { getStats } from '../admin/stats.service';
|
||||
import { getAllSystemConfigs, updateSystemConfig, updateSystemConfigs, getSystemConfig } from '../admin/system-config.service';
|
||||
import { testProxyConnection } from '../utils/proxy-agent';
|
||||
import { getDb } from '../database/database';
|
||||
import { reconnectRedis, testRedisConnection } from '../middleware/cache';
|
||||
import { startQrLogin, getQrLoginStatus, cancelQrLogin } from '../cloud/qr-login.service';
|
||||
import { BaiduDriver } from '../cloud/drivers/baidu.driver';
|
||||
|
||||
// Single Express router instance; all admin endpoints below are registered on
// it and it is exported as the module default (presumably mounted under /api
// by the app — confirm against the server bootstrap).
const router = Router();
|
||||
|
||||
// ═══════════════════════════════════════
|
||||
// Public routes (no auth required)
|
||||
// ═══════════════════════════════════════
|
||||
|
||||
/**
|
||||
* POST /api/admin/login
|
||||
* Admin login
|
||||
*/
|
||||
router.post('/admin/login', loginLimiter, (req: Request, res: Response) => {
|
||||
try {
|
||||
const { username, password } = req.body;
|
||||
if (!username || !password) {
|
||||
res.status(400).json({ error: 'Username and password are required' });
|
||||
return;
|
||||
}
|
||||
|
||||
const token = login(username, password);
|
||||
if (!token) {
|
||||
res.status(401).json({ error: 'Invalid credentials' });
|
||||
return;
|
||||
}
|
||||
|
||||
res.json({ token });
|
||||
} catch (err: any) {
|
||||
console.error('[Login] Error:', err);
|
||||
res.status(500).json({ error: err.message || 'Internal server error' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/admin/cloud-types
|
||||
* List all cloud types (public, read-only).
|
||||
*/
|
||||
router.get('/admin/cloud-types', (_req: Request, res: Response) => {
|
||||
try {
|
||||
const types = getAllCloudTypes();
|
||||
res.json({ types });
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ error: err.message || 'Internal server error' });
|
||||
}
|
||||
});
|
||||
|
||||
// ═══════════════════════════════════════
|
||||
// QR Login routes (no auth — user not logged in yet)
|
||||
// MUST be before authMiddleware!
|
||||
// ═══════════════════════════════════════
|
||||
|
||||
// ===== 夸克扫码登录 =====
|
||||
router.post('/admin/quark/qr-login/start', async (_req: Request, res: Response) => {
|
||||
try {
|
||||
const result = await startQrLogin();
|
||||
res.json({ ok: true, ...result });
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ ok: false, error: err.message });
|
||||
}
|
||||
});
|
||||
|
||||
router.get('/admin/quark/qr-login/:sessionId/status', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const sessionId = req.params.sessionId as string;
|
||||
const result = await getQrLoginStatus(sessionId);
|
||||
res.json({ ok: true, ...result });
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ ok: false, error: err.message });
|
||||
}
|
||||
});
|
||||
|
||||
router.post('/admin/quark/qr-login/:sessionId/cancel', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const sessionId = req.params.sessionId as string;
|
||||
await cancelQrLogin(sessionId);
|
||||
res.json({ ok: true });
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ ok: false, error: err.message });
|
||||
}
|
||||
});
|
||||
|
||||
// ===== 百度扫码登录 =====
|
||||
router.post("/admin/baidu/qr-login/start", async (_req: Request, res: Response) => {
|
||||
try {
|
||||
const result = await BaiduDriver.startQrLogin();
|
||||
res.json({ ok: true, ...result });
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ ok: false, error: err.message });
|
||||
}
|
||||
});
|
||||
|
||||
router.get("/admin/baidu/qr-login/:sessionId/status", async (req: Request, res: Response) => {
|
||||
try {
|
||||
const sessionId = req.params.sessionId as string;
|
||||
const result: any = await BaiduDriver.getQrLoginStatus(sessionId);
|
||||
// Map to frontend-expected format (frontend reads data.cookie)
|
||||
res.json({
|
||||
ok: true,
|
||||
status: result.status,
|
||||
cookie: result.cookie || result.access_token || "",
|
||||
nickname: result.nickname || "",
|
||||
storage_used: result.storage_used || "",
|
||||
storage_total: result.storage_total || "",
|
||||
});
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ ok: false, error: err.message });
|
||||
}
|
||||
});
|
||||
|
||||
router.post("/admin/baidu/qr-login/:sessionId/cancel", async (req: Request, res: Response) => {
|
||||
try {
|
||||
BaiduDriver.cancelQrLogin(req.params.sessionId as string);
|
||||
} catch {}
|
||||
res.json({ ok: true });
|
||||
});
|
||||
|
||||
// ═══════════════════════════════════════
|
||||
// Auth wall — all routes below require JWT
|
||||
// ═══════════════════════════════════════
|
||||
// Every /admin route registered AFTER this line requires a valid JWT;
// the QR-login and login routes above are intentionally exempt.
router.use('/admin', authMiddleware);
|
||||
|
||||
// ═══════════════════════════════════════
|
||||
// Cloud Configs CRUD
|
||||
// ═══════════════════════════════════════
|
||||
|
||||
/** GET /api/admin/cloud-configs — list all cloud configs */
|
||||
router.get('/admin/cloud-configs', (_req: Request, res: Response) => {
|
||||
try {
|
||||
const configs = getCloudConfigs();
|
||||
res.json(configs);
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ error: err.message || 'Failed to fetch cloud configs' });
|
||||
}
|
||||
});
|
||||
|
||||
/** POST /api/admin/cloud-configs — create or smart-replace a cloud config */
|
||||
router.post('/admin/cloud-configs', (req: Request, res: Response) => {
|
||||
try {
|
||||
const data = req.body;
|
||||
if (!data.cloud_type) {
|
||||
res.status(400).json({ error: 'cloud_type is required' });
|
||||
return;
|
||||
}
|
||||
// Normalize is_active: frontend sends boolean, SQLite needs 0/1
|
||||
if (typeof data.is_active === 'boolean') data.is_active = data.is_active ? 1 : 0;
|
||||
// Normalize is_transfer_enabled: frontend sends boolean, SQLite needs 0/1
|
||||
if (typeof data.is_transfer_enabled === 'boolean') data.is_transfer_enabled = data.is_transfer_enabled ? 1 : 0;
|
||||
const saved = saveCloudConfig(data);
|
||||
res.json(saved);
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ error: err.message || 'Failed to save cloud config' });
|
||||
}
|
||||
});
|
||||
|
||||
/** PUT /api/admin/cloud-configs/:id — update an existing cloud config */
|
||||
router.put('/admin/cloud-configs/:id', (req: Request, res: Response) => {
|
||||
try {
|
||||
const id = parseInt(req.params.id as string);
|
||||
const existing = getCloudConfigById(id);
|
||||
if (!existing) {
|
||||
res.status(404).json({ error: 'Cloud config not found' });
|
||||
return;
|
||||
}
|
||||
const saved = saveCloudConfig({ ...req.body, id });
|
||||
res.json(saved);
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ error: err.message || 'Failed to update cloud config' });
|
||||
}
|
||||
});
|
||||
|
||||
/** DELETE /api/admin/cloud-configs/:id */
|
||||
router.delete('/admin/cloud-configs/:id', (req: Request, res: Response) => {
|
||||
try {
|
||||
const id = parseInt(req.params.id as string);
|
||||
const ok = deleteCloudConfig(id);
|
||||
if (!ok) {
|
||||
res.status(404).json({ error: 'Cloud config not found' });
|
||||
return;
|
||||
}
|
||||
res.json({ success: true });
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ error: err.message || 'Failed to delete cloud config' });
|
||||
}
|
||||
});
|
||||
|
||||
/** POST /api/admin/cloud-configs/:type/test — test cloud connection (by type or id) */
|
||||
router.post('/admin/cloud-configs/:type/test', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const type = req.params.type as string;
|
||||
const { cookie, id } = req.body;
|
||||
|
||||
// If cookie is provided directly, test with it (for new configs not yet saved)
|
||||
if (cookie) {
|
||||
const result = await testCloudConnectionWithCookie(type, cookie);
|
||||
res.json(result);
|
||||
return;
|
||||
}
|
||||
|
||||
// Otherwise test by config id
|
||||
if (id) {
|
||||
const result = await testCloudConnection(parseInt(id));
|
||||
res.json(result);
|
||||
return;
|
||||
}
|
||||
|
||||
res.status(400).json({ success: false, message: 'Provide either cookie or id' });
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ success: false, message: err.message || 'Connection test failed' });
|
||||
}
|
||||
});
|
||||
|
||||
// ═══════════════════════════════════════
|
||||
// Stats
|
||||
// ═══════════════════════════════════════
|
||||
|
||||
/** GET /api/admin/stats */
|
||||
router.get('/admin/stats', (req: Request, res: Response) => {
|
||||
try {
|
||||
const days = req.query.days ? parseInt(req.query.days as string) : 7;
|
||||
const stats = getStats(days);
|
||||
res.json(stats);
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ error: err.message || 'Failed to get stats' });
|
||||
}
|
||||
});
|
||||
|
||||
// ═══════════════════════════════════════
|
||||
// Save Records (转存日志)
|
||||
// ═══════════════════════════════════════
|
||||
|
||||
/** GET /api/admin/save-records */
|
||||
router.get('/admin/save-records', (req: Request, res: Response) => {
|
||||
try {
|
||||
const page = parseInt(req.query.page as string) || 1;
|
||||
const pageSize = parseInt(req.query.pageSize as string) || 20;
|
||||
const startDate = req.query.startDate as string | undefined;
|
||||
const endDate = req.query.endDate as string | undefined;
|
||||
const status = req.query.status as string | undefined;
|
||||
const sourceType = req.query.sourceType as string | undefined;
|
||||
const keyword = req.query.keyword as string | undefined;
|
||||
const result = getSaveRecords(page, pageSize, startDate, endDate, status, sourceType, keyword);
|
||||
res.json(result);
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ error: err.message || 'Failed to get save records' });
|
||||
}
|
||||
});
|
||||
|
||||
// ═══════════════════════════════════════
|
||||
// System Configs
|
||||
// ═══════════════════════════════════════
|
||||
|
||||
/** GET /api/admin/system-configs */
|
||||
router.get('/admin/system-configs', (_req: Request, res: Response) => {
|
||||
try {
|
||||
const configs = getAllSystemConfigs();
|
||||
res.json(configs);
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ error: err.message || 'Failed to get system configs' });
|
||||
}
|
||||
});
|
||||
|
||||
/** PUT /api/admin/system-configs — batch update */
|
||||
router.put('/admin/system-configs', (req: Request, res: Response) => {
|
||||
try {
|
||||
const { entries } = req.body;
|
||||
if (!entries || !Array.isArray(entries)) {
|
||||
res.status(400).json({ error: 'entries array is required' });
|
||||
return;
|
||||
}
|
||||
updateSystemConfigs(entries);
|
||||
res.json({ success: true });
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ error: err.message || 'Failed to update system configs' });
|
||||
}
|
||||
});
|
||||
|
||||
// ═══════════════════════════════════════
|
||||
// Cloud Types Toggle
|
||||
// ═══════════════════════════════════════
|
||||
|
||||
/** PUT /api/admin/cloud-types — toggle cloud type enabled/disabled */
|
||||
router.put('/admin/cloud-types', (req: Request, res: Response) => {
|
||||
try {
|
||||
const { type, enabled } = req.body;
|
||||
if (!type) {
|
||||
res.status(400).json({ error: 'type is required' });
|
||||
return;
|
||||
}
|
||||
const db = getDb();
|
||||
db.prepare(
|
||||
`INSERT INTO system_configs (key, value, description) VALUES (?, ?, ?)
|
||||
ON CONFLICT(key) DO UPDATE SET value = excluded.value`
|
||||
).run(`cloud_type_${type}_enabled`, enabled ? '1' : '0', `Enable/disable ${type} cloud drive`);
|
||||
res.json({ success: true });
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ error: err.message || 'Failed to toggle cloud type' });
|
||||
}
|
||||
});
|
||||
|
||||
// ═══════════════════════════════════════
|
||||
// Change Password
|
||||
// ═══════════════════════════════════════
|
||||
|
||||
/** POST /api/admin/change-password */
|
||||
router.post('/admin/change-password', (req: Request, res: Response) => {
|
||||
try {
|
||||
const { oldPassword, newPassword } = req.body;
|
||||
if (!oldPassword || !newPassword) {
|
||||
res.status(400).json({ error: 'Both old and new passwords are required' });
|
||||
return;
|
||||
}
|
||||
// Get username from JWT
|
||||
const authHeader = req.headers.authorization || '';
|
||||
const token = authHeader.replace('Bearer ', '');
|
||||
const payload = verifyToken(token);
|
||||
if (!payload) {
|
||||
res.status(401).json({ error: 'Invalid token' });
|
||||
return;
|
||||
}
|
||||
const result = changePassword(payload.username, oldPassword, newPassword);
|
||||
res.json(result);
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ error: err.message || 'Failed to change password' });
|
||||
}
|
||||
});
|
||||
|
||||
// ═══════════════════════════════════════
|
||||
// DB Status
|
||||
// ═══════════════════════════════════════
|
||||
|
||||
/** GET /api/admin/db-status */
|
||||
router.get('/admin/db-status', async (_req: Request, res: Response) => {
|
||||
try {
|
||||
const dbFile = getSystemConfig('db_path') || '';
|
||||
let dbSize = 'N/A';
|
||||
if (dbFile) {
|
||||
try {
|
||||
const stats = fs.statSync(dbFile);
|
||||
dbSize = (stats.size / 1024 / 1024).toFixed(2) + ' MB';
|
||||
} catch {}
|
||||
}
|
||||
|
||||
const db = getDb();
|
||||
const counts = {
|
||||
save_records: (db.prepare('SELECT COUNT(*) as c FROM save_records').get() as any)?.c || 0,
|
||||
search_stats: (db.prepare('SELECT COUNT(*) as c FROM search_stats').get() as any)?.c || 0,
|
||||
system_configs: (db.prepare('SELECT COUNT(*) as c FROM system_configs').get() as any)?.c || 0,
|
||||
cloud_configs: (db.prepare('SELECT COUNT(*) as c FROM cloud_configs').get() as any)?.c || 0,
|
||||
content_cache: (db.prepare('SELECT COUNT(*) as c FROM content_cache').get() as any)?.c || 0,
|
||||
};
|
||||
|
||||
// Redis status
|
||||
let redis_status = 'disconnected';
|
||||
let redis_url = getSystemConfig('redis_url') || '';
|
||||
try {
|
||||
const testResult = await testRedisConnection(redis_url);
|
||||
redis_status = testResult.ok ? 'connected' : 'disconnected';
|
||||
} catch {
|
||||
redis_status = 'error';
|
||||
}
|
||||
|
||||
res.json({
|
||||
db_size: dbSize,
|
||||
db_path: dbFile,
|
||||
...counts,
|
||||
redis_status,
|
||||
redis_url,
|
||||
});
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ error: err.message || 'Failed to get DB status' });
|
||||
}
|
||||
});
|
||||
|
||||
// ═══════════════════════════════════════
|
||||
// Test Redis Connection
|
||||
// ═══════════════════════════════════════
|
||||
|
||||
/** POST /api/admin/test-redis */
|
||||
router.post('/admin/test-redis', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { url } = req.body;
|
||||
if (!url) {
|
||||
res.status(400).json({ ok: false, info: 'Redis URL is required' });
|
||||
return;
|
||||
}
|
||||
const result = await testRedisConnection(url);
|
||||
res.json(result);
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ ok: false, info: err.message || 'Redis test failed' });
|
||||
}
|
||||
});
|
||||
|
||||
// ═══════════════════════════════════════
|
||||
// Test External Service
|
||||
// ═══════════════════════════════════════
|
||||
|
||||
/**
 * POST /api/admin/test-external-service
 *
 * Probes one of the configured external services and reports reachability
 * plus round-trip latency. `type` selects the service; `url`/`token` from the
 * request body override the stored system config so the admin UI can test
 * values before saving them. Configuration problems are reported as 200 with
 * { ok: false }; only unexpected exceptions produce a 500.
 */
router.post('/admin/test-external-service', async (req: Request, res: Response) => {
  try {
    const { type, url, token } = req.body;
    // Latency is measured from here, so it includes any config lookups below.
    const start = Date.now();

    switch (type) {
      // PanSou search backend: health endpoint must return status === 'ok'.
      case 'pansou': {
        const pansouUrl = url || getSystemConfig('pansou_url') || '';
        if (!pansouUrl) {
          res.json({ ok: false, info: 'PanSou URL not configured' });
          return;
        }
        const response = await fetch(pansouUrl + '/api/health', { signal: AbortSignal.timeout(8000) });
        const data: any = await response.json();
        const latency = Date.now() - start;
        res.json({
          ok: response.ok && data?.status === 'ok',
          latency,
          info: response.ok ? `连接成功 (${data?.channels_count || 0} 频道, ${data?.plugin_count || 0} 插件)` : '连接失败',
        });
        break;
      }
      // Video parser: any 2xx from /health counts as connected.
      case 'video_parser': {
        const parserUrl = url || getSystemConfig('video_parser_url') || '';
        if (!parserUrl) {
          res.json({ ok: false, info: 'Video Parser URL not configured' });
          return;
        }
        const response = await fetch(parserUrl + '/health', { signal: AbortSignal.timeout(8000) });
        const latency = Date.now() - start;
        res.json({
          ok: response.ok,
          latency,
          info: response.ok ? '连接成功' : `HTTP ${response.status}`,
        });
        break;
      }
      // TMDB: validates the API key via the /configuration endpoint.
      case 'tmdb': {
        const tmdbToken = token || getSystemConfig('tmdb_api_key') || '';
        if (!tmdbToken) {
          res.json({ ok: false, info: 'TMDB API Key not configured' });
          return;
        }
        const response = await fetch('https://api.themoviedb.org/3/configuration', {
          headers: { Authorization: `Bearer ${tmdbToken}` },
          signal: AbortSignal.timeout(8000),
        });
        const latency = Date.now() - start;
        res.json({
          ok: response.ok,
          latency,
          info: response.ok ? '连接成功' : `HTTP ${response.status}`,
        });
        break;
      }
      // Search proxy: delegated to testProxyConnection (result shape assumed
      // compatible with { ok, latency?, info? } — confirm in proxy-agent).
      case 'proxy': {
        const proxyUrl = url || getSystemConfig('search_proxy_url') || '';
        if (!proxyUrl) {
          res.json({ ok: false, info: 'Proxy URL not configured' });
          return;
        }
        const result = await testProxyConnection(proxyUrl);
        res.json(result);
        break;
      }
      // IP geolocation API: the stored URL contains an '{ip}' placeholder,
      // tested here against 8.8.8.8; the response must expose at least one
      // of the common location fields to count as valid.
      case 'ip_geo': {
        const geoUrl = url || getSystemConfig('ip_geo_api_url') || '';
        if (!geoUrl) {
          res.json({ ok: false, info: '请先输入 IP 归属地查询 API 地址' });
          return;
        }
        const testUrl = geoUrl.replace('{ip}', '8.8.8.8');
        const response = await fetch(testUrl, { signal: AbortSignal.timeout(8000) });
        const data: any = await response.json();
        const latency = Date.now() - start;
        const valid = !!(data?.country || data?.region || data?.city || data?.countryCode);
        res.json({ ok: valid, latency, info: valid ? '连接成功' : '响应格式不符' });
        break;
      }
      default:
        res.json({ ok: false, info: `Unknown service type: ${type}` });
    }
  } catch (err: any) {
    res.status(500).json({ ok: false, info: err.message || 'External service test failed' });
  }
});
|
||||
|
||||
// ═══════════════════════════════════════
|
||||
// Pansou Info & Update
|
||||
// ═══════════════════════════════════════
|
||||
|
||||
/**
 * GET /api/admin/pansou-info — pansou health + version + update check.
 *
 * Flow:
 *  1. Hit the PanSou /api/health endpoint for channel/plugin counts.
 *  2. Derive a "disk count" heuristically from channel names.
 *  3. Read the local image build date from a docker label, then (at most once
 *     every 3 days, cached in /tmp) compare against the latest image on GHCR
 *     to decide whether an update is available.
 * All version/update failures are swallowed — the route still answers with
 * whatever health info it has. Errors from the health fetch itself return
 * 200 with status 'error' (the dashboard renders it, never sees a 5xx).
 */
router.get('/admin/pansou-info', async (_req: Request, res: Response) => {
  try {
    const baseUrl = getSystemConfig('pansou_url') || '';
    if (!baseUrl) {
      res.json({ status: 'disconnected', channelCount: 0, pluginCount: 0, diskCount: 0, version: '', hasUpdate: false, latestVersion: '' });
      return;
    }

    // Fetch PanSou health
    const healthUrl = baseUrl + '/api/health';
    const response = await fetch(healthUrl, { signal: AbortSignal.timeout(8000) });
    const healthData: any = await response.json();
    const channelCount = healthData.channels_count || 0;
    const pluginCount = healthData.plugin_count || 0;

    // Derive disk count from channel names: each keyword that appears in at
    // least one channel name counts as one supported drive.
    const driveKeywords = ['aliyun', 'baidu', 'quark', '115', 'pikpak', 'xunlei', 'uc', '123', '139', '189', 'tianyi', 'netease'];
    const drives = new Set<string>();
    for (const ch of (healthData.channels || [])) {
      for (const kw of driveKeywords) {
        if (ch.toLowerCase().includes(kw)) { drives.add(kw); break; }
      }
    }
    // Fallback of 5 when no keyword matched (e.g. empty channel list).
    const diskCount = drives.size || 5;

    // Get local version from docker label (OCI image creation date, first 10
    // chars = YYYY-MM-DD). Requires the backend to have access to the docker
    // CLI/socket — anything that fails here leaves version fields empty.
    let version = '';
    let hasUpdate = false;
    let latestVersion = '';
    try {
      const created = execSync(
        `docker inspect CloudSearch_PanSou --format '{{index .Config.Labels "org.opencontainers.image.created"}}'`,
        { timeout: 5000, encoding: 'utf8' }
      ).trim();
      version = created ? created.slice(0, 10) : '';

      // Check update cache (throttles the GHCR round-trips to once per 3 days)
      const cacheFile = '/tmp/pansou-update-cache.json';
      let cache: any = null;
      try { cache = JSON.parse(fs.readFileSync(cacheFile, 'utf8') || 'null'); } catch {}
      const threeDays = 3 * 24 * 3600 * 1000;

      if (!cache || (Date.now() - cache.checkedAt) > threeDays) {
        // Check GHCR for latest version: anonymous pull token → manifest
        // index → amd64/linux manifest → image config blob → created label.
        try {
          const tokenRes = await fetch(
            'https://ghcr.io/token?scope=repository:fish2018/pansou-web:pull&service=ghcr.io'
          );
          const ghcrToken = (await tokenRes.json() as any).token;
          const manifestRes = await fetch(
            'https://ghcr.io/v2/fish2018/pansou-web/manifests/latest',
            { headers: { Authorization: `Bearer ${ghcrToken}`, Accept: 'application/vnd.oci.image.index.v1+json, application/vnd.docker.distribution.manifest.list.v2+json' } }
          );
          const manifestList: any = await manifestRes.json();
          const amd64 = manifestList.manifests?.find((m: any) => m.platform?.architecture === 'amd64' && m.platform?.os === 'linux');
          if (amd64) {
            const blobRes = await fetch(
              `https://ghcr.io/v2/fish2018/pansou-web/manifests/${amd64.digest}`,
              { headers: { Authorization: `Bearer ${ghcrToken}`, Accept: 'application/vnd.oci.image.manifest.v1+json' } }
            );
            const blobData: any = await blobRes.json();
            const cfgDigest = blobData.config?.digest;
            if (cfgDigest) {
              const cfgRes = await fetch(
                `https://ghcr.io/v2/fish2018/pansou-web/blobs/${cfgDigest}`,
                { headers: { Authorization: `Bearer ${ghcrToken}` } }
              );
              const cfgData: any = await cfgRes.json();
              const remoteCreated = cfgData.config?.Labels?.['org.opencontainers.image.created'];
              if (remoteCreated) {
                latestVersion = remoteCreated.slice(0, 10);
                // Dates compared for inequality, not order: any difference
                // from the local build date is treated as an update.
                if (version && latestVersion !== version) hasUpdate = true;
              }
            }
          }
        } catch {}
        // Cache is written even when the GHCR check failed, so a broken
        // registry lookup is retried only after the cache window expires.
        fs.writeFileSync(cacheFile, JSON.stringify({ checkedAt: Date.now(), hasUpdate, latestVersion }));
      } else {
        hasUpdate = cache.hasUpdate;
        latestVersion = cache.latestVersion;
      }
    } catch {}

    res.json({
      status: response.ok ? 'connected' : 'disconnected',
      channelCount,
      pluginCount,
      diskCount,
      version,
      hasUpdate,
      latestVersion,
    });
  } catch (err: any) {
    res.json({ status: 'error', channelCount: 0, pluginCount: 0, diskCount: 0, version: '', hasUpdate: false, latestVersion: '', error: err.message });
  }
});
|
||||
|
||||
/** POST /api/admin/update-pansou — pull latest pansou image + recreate container */
|
||||
router.post('/admin/update-pansou', async (_req: Request, res: Response) => {
|
||||
try {
|
||||
execSync('docker pull ghcr.io/fish2018/pansou-web:latest', { timeout: 120000 });
|
||||
execSync('docker compose -p cloudsearch -f /app/docker-compose.yml up -d pansou', { timeout: 60000 });
|
||||
try { fs.unlinkSync('/tmp/pansou-update-cache.json'); } catch {}
|
||||
res.json({ success: true, message: 'PanSou 更新成功' });
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ success: false, error: err.message || 'PanSou 更新失败' });
|
||||
}
|
||||
});
|
||||
|
||||
export default router;
|
||||
254
packages/backend/src/cloud/cleanup.service.ts
Executable file
254
packages/backend/src/cloud/cleanup.service.ts
Executable file
@@ -0,0 +1,254 @@
|
||||
import { getDb } from '../database/database';
|
||||
import { getSystemConfig, updateSystemConfig } from '../admin/system-config.service';
|
||||
import { formatLocalDate, formatLocalDateTime } from '../utils/time';
|
||||
import { QuarkDriver } from './drivers/quark.driver';
|
||||
import { BaiduDriver } from './drivers/baidu.driver';
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
// CloudCleanupDriver — contract that each cloud driver must fulfill
|
||||
// to participate in the cleanup cycle.
|
||||
//
|
||||
// To add a new cloud type (e.g. Baidu, Aliyun), implement these three
|
||||
// methods in the driver and register it in getDriverForCleanup() below.
|
||||
// The controller (this file) handles WHEN and WITH WHAT parameters;
|
||||
// the driver handles HOW.
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
/** Each cleanup operation returns { trashed: number; errors: string[] } */
interface CleanupOpResult { trashed: number; errors: string[] }

/**
 * Contract a cloud driver must implement to participate in the cleanup
 * cycle (see getDriverForCleanup for registration). The controller decides
 * WHEN to call these and with WHAT parameters; the driver decides HOW.
 */
interface CloudCleanupDriver {
  /** Trash date folders (YYYY-MM-DD) older than `days`. */
  cleanupOldDateFolders(days: number): Promise<CleanupOpResult>;
  /**
   * If used space exceeds thresholdPercent% of TOTAL capacity,
   * delete oldest date folders until totalBytes * deletePercent/100
   * of TOTAL capacity is freed.
   * @param thresholdPercent — trigger when usage >= this % of total
   * @param deletePercent — free this % of total capacity
   */
  cleanupBySpaceThreshold(thresholdPercent: number, deletePercent: number): Promise<CleanupOpResult>;
  /** Permanently empty the recycle bin. Resolves true on success. */
  emptyTrash(): Promise<boolean>;
}
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
// Driver factory — create the right driver for a given cloud config.
|
||||
// When adding a new cloud type, add a case here.
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
function getDriverForCleanup(config: { cloud_type: string; cookie: string }): CloudCleanupDriver | null {
|
||||
switch (config.cloud_type) {
|
||||
case 'quark':
|
||||
return new QuarkDriver({ cookie: config.cookie });
|
||||
case 'baidu':
|
||||
return new BaiduDriver({ cookie: config.cookie });
|
||||
// case 'aliyun': return new AliyunDriver({ cookie: config.cookie });
|
||||
default:
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
// Cleanup controller — reads system configs and dispatches to each
|
||||
// active cloud driver. Every driver receives the same parameters;
|
||||
// the driver decides whether/how to act.
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
/** Aggregate result of one full cleanup cycle (see runFullCleanup). */
interface CleanupStats {
  // Count of files/folders moved to trash across all drivers.
  filesTrashed: number;
  // Count of save_records rows deleted.
  logsDeleted: number;
  // True when at least one driver's recycle bin was emptied.
  trashEmptied: boolean;
  // Human-readable error strings collected from every step (non-fatal).
  errors: string[];
}
|
||||
|
||||
/** Get all active cloud configs (any type). Used by the orchestrator. */
|
||||
function getActiveCleanupConfigs(): Array<{ id: number; cloud_type: string; cookie: string; nickname?: string }> {
|
||||
const db = getDb();
|
||||
return db.prepare(
|
||||
`SELECT id, cloud_type, cookie, nickname FROM cloud_configs
|
||||
WHERE is_active = 1 AND cookie IS NOT NULL AND cookie != ''`
|
||||
).all() as Array<{ id: number; cloud_type: string; cookie: string; nickname?: string }>;
|
||||
}
|
||||
|
||||
/**
|
||||
* Dispatch cleanupOldDateFolders to every active driver.
|
||||
* Each driver receives the same `days` parameter.
|
||||
*/
|
||||
async function cleanupCloudFiles(days: number): Promise<CleanupOpResult> {
|
||||
const configs = getActiveCleanupConfigs();
|
||||
const errors: string[] = [];
|
||||
let totalTrashed = 0;
|
||||
|
||||
for (const cfg of configs) {
|
||||
const driver = getDriverForCleanup(cfg);
|
||||
if (!driver) {
|
||||
console.log(`[Cleanup] No driver for cloud_type="${cfg.cloud_type}", skipping config #${cfg.id}`);
|
||||
continue;
|
||||
}
|
||||
try {
|
||||
const result = await driver.cleanupOldDateFolders(days);
|
||||
totalTrashed += result.trashed;
|
||||
errors.push(...result.errors.map(e => `[${cfg.cloud_type}#${cfg.id}] ${e}`));
|
||||
} catch (err: any) {
|
||||
errors.push(`[${cfg.cloud_type}#${cfg.id}] cleanupOldDateFolders: ${err.message}`);
|
||||
}
|
||||
await new Promise(r => setTimeout(r, 1000));
|
||||
}
|
||||
|
||||
return { trashed: totalTrashed, errors };
|
||||
}
|
||||
|
||||
/**
|
||||
* Dispatch cleanupBySpaceThreshold to every active driver.
|
||||
* Each driver receives the same threshold/delete percentages.
|
||||
*/
|
||||
async function cleanupAllBySpaceThreshold(
|
||||
thresholdPercent: number,
|
||||
deletePercent: number,
|
||||
): Promise<CleanupOpResult> {
|
||||
const configs = getActiveCleanupConfigs();
|
||||
const errors: string[] = [];
|
||||
let totalTrashed = 0;
|
||||
|
||||
for (const cfg of configs) {
|
||||
const driver = getDriverForCleanup(cfg);
|
||||
if (!driver) {
|
||||
console.log(`[Cleanup] No driver for cloud_type="${cfg.cloud_type}", skipping config #${cfg.id}`);
|
||||
continue;
|
||||
}
|
||||
try {
|
||||
const result = await driver.cleanupBySpaceThreshold(thresholdPercent, deletePercent);
|
||||
totalTrashed += result.trashed;
|
||||
errors.push(...result.errors.map(e => `[${cfg.cloud_type}#${cfg.id}] ${e}`));
|
||||
} catch (err: any) {
|
||||
errors.push(`[${cfg.cloud_type}#${cfg.id}] cleanupBySpaceThreshold: ${err.message}`);
|
||||
}
|
||||
await new Promise(r => setTimeout(r, 1000));
|
||||
}
|
||||
|
||||
return { trashed: totalTrashed, errors };
|
||||
}
|
||||
|
||||
/**
|
||||
* Dispatch emptyTrash to every active driver.
|
||||
*/
|
||||
export async function emptyAllTrash(): Promise<{ emptied: boolean; errors: string[] }> {
|
||||
const configs = getActiveCleanupConfigs();
|
||||
const errors: string[] = [];
|
||||
let emptied = false;
|
||||
|
||||
for (const cfg of configs) {
|
||||
const driver = getDriverForCleanup(cfg);
|
||||
if (!driver) continue;
|
||||
try {
|
||||
const ok = await driver.emptyTrash();
|
||||
if (ok) {
|
||||
emptied = true;
|
||||
console.log(`[Cleanup] ✅ Emptied trash for [${cfg.cloud_type}#${cfg.id}]`);
|
||||
} else {
|
||||
errors.push(`[${cfg.cloud_type}#${cfg.id}] empty trash failed`);
|
||||
}
|
||||
} catch (err: any) {
|
||||
errors.push(`[${cfg.cloud_type}#${cfg.id}]: ${err.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
return { emptied, errors };
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete save_records older than the specified number of days.
|
||||
*/
|
||||
function cleanupLogs(days: number): number {
|
||||
const db = getDb();
|
||||
const cutoffStr = formatLocalDateTime(new Date(Date.now() - days * 24 * 60 * 60 * 1000));
|
||||
|
||||
const result = db.prepare('DELETE FROM save_records WHERE created_at < ?').run(cutoffStr);
|
||||
console.log(`[Cleanup] Deleted ${result.changes} save records older than ${days} days (before ${cutoffStr})`);
|
||||
return result.changes;
|
||||
}
|
||||
|
||||
/**
 * Run full cleanup cycle:
 * 0. Force-clean by space threshold (if enabled & exceeded) — priority highest
 * 1. Delete old save_records
 * 2. Trash old date folders by retention days
 * 3. Empty recycle bin (permanently free space)
 *
 * Each step is individually wrapped in try/catch so a failure in one step
 * does not prevent the later steps from running; errors are accumulated
 * into the returned stats. The run timestamp and a summary are persisted
 * to system config at the end regardless of partial failures.
 *
 * @returns aggregated CleanupStats for this run
 */
export async function runFullCleanup(): Promise<CleanupStats> {
  // Retention windows come from system config, with defaults of 7 / 30 days.
  const fileDays = parseInt(getSystemConfig('cleanup_file_retention_days') || '7', 10);
  const logDays = parseInt(getSystemConfig('cleanup_log_retention_days') || '30', 10);
  // Emptying the trash defaults to ON; only an explicit 'false' disables it.
  const emptyTrashEnabled = getSystemConfig('cleanup_empty_trash') !== 'false';

  const stats: CleanupStats = { filesTrashed: 0, logsDeleted: 0, trashEmptied: false, errors: [] };

  // 0. Space threshold (highest priority)
  const thresholdEnabled = getSystemConfig('cleanup_space_threshold_enabled');
  if (thresholdEnabled === 'true') {
    // Trigger at N% usage (default 90), free roughly M% (default 10).
    const thresholdPercent = parseInt(getSystemConfig('cleanup_space_threshold_percent') || '90', 10);
    const deletePercent = parseInt(getSystemConfig('cleanup_space_threshold_delete_percent') || '10', 10);
    // Guard against nonsensical config values (<=0 or >=100).
    if (thresholdPercent > 0 && thresholdPercent < 100) {
      try {
        const result = await cleanupAllBySpaceThreshold(thresholdPercent, deletePercent);
        stats.filesTrashed += result.trashed;
        stats.errors.push(...result.errors);
      } catch (err: any) {
        stats.errors.push(`空间阈值清理失败: ${err.message}`);
      }
    }
  }

  // 1. Delete old save_records
  try {
    stats.logsDeleted = cleanupLogs(logDays);
  } catch (err: any) {
    stats.errors.push(`日志清理失败: ${err.message}`);
  }

  // 2. Trash old files from cloud drives
  try {
    const result = await cleanupCloudFiles(fileDays);
    stats.filesTrashed += result.trashed;
    stats.errors.push(...result.errors);
  } catch (err: any) {
    stats.errors.push(`文件清理失败: ${err.message}`);
  }

  // 3. Empty recycle bin (only if enabled, and only if we trashed something)
  if (emptyTrashEnabled && stats.filesTrashed > 0) {
    try {
      const result = await emptyAllTrash();
      stats.trashEmptied = result.emptied;
      stats.errors.push(...result.errors);
    } catch (err: any) {
      stats.errors.push(`清空回收站失败: ${err.message}`);
    }
  }

  // Save last run timestamp and stats
  updateSystemConfig('cleanup_last_run', formatLocalDateTime());
  // Only the error *count* is persisted, not the messages themselves.
  updateSystemConfig('cleanup_last_stats',
    JSON.stringify({ filesTrashed: stats.filesTrashed, logsDeleted: stats.logsDeleted, trashEmptied: stats.trashEmptied, errors: stats.errors.length })
  );

  return stats;
}
|
||||
|
||||
/**
|
||||
* Check if a daily cleanup is due and run it.
|
||||
* Called periodically by the scheduler (setInterval).
|
||||
*/
|
||||
export async function checkAndRunScheduledCleanup(): Promise<void> {
|
||||
const enabled = getSystemConfig('cleanup_enabled');
|
||||
if (enabled !== 'true') return;
|
||||
|
||||
const lastRun = getSystemConfig('cleanup_last_run');
|
||||
const todayStr = formatLocalDate();
|
||||
|
||||
if (lastRun && lastRun.startsWith(todayStr)) return;
|
||||
|
||||
console.log(`[Cleanup] Scheduled cleanup starting at ${new Date().toISOString()}...`);
|
||||
const stats = await runFullCleanup();
|
||||
console.log(`[Cleanup] Done: trashed ${stats.filesTrashed} folders, deleted ${stats.logsDeleted} logs, emptied trash: ${stats.trashEmptied}, errors: ${stats.errors.length}`);
|
||||
}
|
||||
69
packages/backend/src/cloud/cloud-types.service.ts
Executable file
69
packages/backend/src/cloud/cloud-types.service.ts
Executable file
@@ -0,0 +1,69 @@
|
||||
import { getSystemConfig } from '../admin/system-config.service';
|
||||
|
||||
export interface CloudTypeInfo {
|
||||
type: string;
|
||||
label: string;
|
||||
icon: string;
|
||||
enabled: boolean;
|
||||
}
|
||||
|
||||
/**
 * 网盘图标 — 内联 SVG data URI,无需外部文件
 * (Cloud-drive icons as inline SVG data URIs — no external icon files.
 * This replaces an earlier approach that shipped PNG files in the image
 * and served them via the Express static middleware.)
 */
|
||||
function makeSvgIcon(bg: string, letter: string): string {
|
||||
const c = encodeURIComponent(bg);
|
||||
const l = encodeURIComponent(letter);
|
||||
return `data:image/svg+xml,%3Csvg%20viewBox%3D%220%200%2024%2024%22%20xmlns%3D%22http%3A%2F%2Fwww.w3.org%2F2000%2Fsvg%22%3E%3Crect%20width%3D%2224%22%20height%3D%2224%22%20rx%3D%224%22%20fill%3D%22${c}%22%2F%3E%3Ctext%20x%3D%2212%22%20y%3D%2217%22%20font-size%3D%2213%22%20font-weight%3D%22bold%22%20fill%3D%22%23fff%22%20text-anchor%3D%22middle%22%20font-family%3D%22Arial%2Csans-serif%22%3E${l}%3C%2Ftext%3E%3C%2Fsvg%3E`;
|
||||
}
|
||||
|
||||
// Icon registry: cloud-type key → SVG data URI.
// Most entries are generated letter badges (makeSvgIcon); magnet/ed2k/others
// use hand-crafted inline SVGs (magnet glyph, "eD" text, cloud outline).
const ICONS: Record<string, string> = {
  baidu: makeSvgIcon('#4e6ef2', '百'),
  aliyun: makeSvgIcon('#ff6a00', '阿'),
  quark: makeSvgIcon('#07c160', '夸'),
  '115': makeSvgIcon('#9b59b6', '1'),
  tianyi: makeSvgIcon('#00a1d6', '天'),
  '123pan': makeSvgIcon('#e74c3c', '1'),
  uc: makeSvgIcon('#f39c12', 'U'),
  xunlei: makeSvgIcon('#2ecc71', '迅'),
  pikpak: makeSvgIcon('#8e44ad', 'P'),
  magnet: 'data:image/svg+xml,%3Csvg%20viewBox%3D%220%200%2024%2024%22%20xmlns%3D%22http%3A%2F%2Fwww.w3.org%2F2000%2Fsvg%22%3E%3Crect%20width%3D%2224%22%20height%3D%2224%22%20rx%3D%224%22%20fill%3D%22%236366F1%22%2F%3E%3Cpath%20d%3D%22M7%2016l5-5m-5%200l5%205m5-5l-5-5m5%200l-5%205%22%20stroke%3D%22%23fff%22%20stroke-width%3D%222%22%20stroke-linecap%3D%22round%22%20fill%3D%22none%22%2F%3E%3Ccircle%20cx%3D%2212%22%20cy%3D%2211%22%20r%3D%221%22%20fill%3D%22%23fff%22%2F%3E%3C%2Fsvg%3E',
  ed2k: 'data:image/svg+xml,%3Csvg%20viewBox%3D%220%200%2024%2024%22%20xmlns%3D%22http%3A%2F%2Fwww.w3.org%2F2000%2Fsvg%22%3E%3Crect%20width%3D%2224%22%20height%3D%2224%22%20rx%3D%224%22%20fill%3D%22%238B4513%22%2F%3E%3Ctext%20x%3D%2212%22%20y%3D%2217%22%20font-size%3D%2211%22%20font-weight%3D%22bold%22%20fill%3D%22%23fff%22%20text-anchor%3D%22middle%22%20font-family%3D%22Arial%2Csans-serif%22%3EeD%3C%2Ftext%3E%3C%2Fsvg%3E',
  others: 'data:image/svg+xml,%3Csvg%20viewBox%3D%220%200%2024%2024%22%20xmlns%3D%22http%3A%2F%2Fwww.w3.org%2F2000%2Fsvg%22%3E%3Crect%20width%3D%2224%22%20height%3D%2224%22%20rx%3D%224%22%20fill%3D%22%239CA3AF%22%2F%3E%3Cpath%20d%3D%22M6%2013c0-2.8%202.2-5%205-5a5%205%200%200%201%204.5%202.7A4%204%200%200%201%2020%2014a4%204%200%200%201-3%203.9h-8A4%204%200%200%201%206%2013z%22%20fill%3D%22none%22%20stroke%3D%22%23fff%22%20stroke-width%3D%221.5%22%20stroke-linejoin%3D%22round%22%2F%3E%3C%2Fsvg%3E',
};

// Master list of every supported cloud/link type with display label and icon.
// Array order is preserved by getAllCloudTypes(), so it determines listing order.
const ALL_CLOUD_TYPES: { type: string; label: string; icon: string }[] = [
  { type: 'quark', label: '夸克网盘', icon: ICONS.quark },
  { type: 'baidu', label: '百度网盘', icon: ICONS.baidu },
  { type: 'aliyun', label: '阿里云盘', icon: ICONS.aliyun },
  { type: '115', label: '115 网盘', icon: ICONS['115'] },
  { type: 'tianyi', label: '天翼云盘', icon: ICONS.tianyi },
  { type: '123pan', label: '123 云盘', icon: ICONS['123pan'] },
  { type: 'uc', label: 'UC 网盘', icon: ICONS.uc },
  { type: 'xunlei', label: '迅雷网盘', icon: ICONS.xunlei },
  { type: 'pikpak', label: 'PikPak', icon: ICONS.pikpak },
  { type: 'magnet', label: '磁力链接', icon: ICONS.magnet },
  { type: 'ed2k', label: '电驴链接', icon: ICONS.ed2k },
  { type: 'others', label: '其他', icon: ICONS.others },
];
|
||||
|
||||
export function isCloudTypeEnabled(type: string): boolean {
|
||||
const val = getSystemConfig(`cloud_type_${type}_enabled`);
|
||||
if (val === null) return type !== 'others';
|
||||
return val === "true" || val === "1";
|
||||
}
|
||||
|
||||
export function getAllCloudTypes(): CloudTypeInfo[] {
|
||||
return ALL_CLOUD_TYPES.map(ct => ({ ...ct, enabled: isCloudTypeEnabled(ct.type) }));
|
||||
}
|
||||
|
||||
export function getEnabledCloudTypeSet(): Set<string> {
|
||||
const enabled = new Set<string>();
|
||||
for (const ct of ALL_CLOUD_TYPES) {
|
||||
if (isCloudTypeEnabled(ct.type)) enabled.add(ct.type);
|
||||
}
|
||||
return enabled;
|
||||
}
|
||||
323
packages/backend/src/cloud/cloud.service.ts
Normal file
323
packages/backend/src/cloud/cloud.service.ts
Normal file
@@ -0,0 +1,323 @@
|
||||
import { getDb } from '../database/database';
|
||||
import { localTimestamp, formatLocalDateTime } from '../utils/time';
|
||||
import { getSystemConfig } from '../admin/system-config.service';
|
||||
import { QuarkDriver } from './drivers/quark.driver';
|
||||
import { BaiduDriver } from './drivers/baidu.driver';
|
||||
import { CloudConfig, getAndValidateCredential, getActiveCloudConfigs } from './credential.service';
|
||||
import { lookupIpLocation } from './ip-lookup';
|
||||
|
||||
/**
 * In-flight save dedup: prevents concurrent saves of the same URL (race condition fix).
 * Keyed by `${cloudType}:${shareUrl}` (see saveFromShare); entries are removed
 * in a finally block once the underlying save promise settles.
 */
const inFlightSaves = new Map<string, Promise<SaveResult>>();
|
||||
|
||||
/** Result of a save/transfer operation returned to API callers. */
export interface SaveResult {
  success: boolean;
  // shareUrl and share_url carry the same value; both camelCase and
  // snake_case are populated (see doSaveFromShare) — presumably for
  // compatibility with different API consumers. TODO confirm before removing one.
  shareUrl?: string;
  share_url?: string;
  sharePwd?: string;
  folderName?: string;
  // Human-readable status message (may contain localized text).
  message: string;
  file_count?: number;
  folder_count?: number;
  // Wall-clock time of the save in milliseconds; 0 for dedup/reuse hits.
  duration_ms?: number;
}
|
||||
|
||||
/** One row of the save_records audit table. */
export interface SaveRecord {
  id: number;
  source_type: string;
  source_title: string | null;
  // Original share link that was transferred.
  source_url: string;
  target_cloud: string;
  // New share link generated after the transfer (null when the save failed).
  share_url: string | null;
  share_pwd: string | null;
  file_size: string | null;
  file_count: number;
  folder_count: number;
  duration_ms: number;
  // 'success' | 'failed' | 'reused' (values observed in this file's inserts).
  status: string;
  error_message: string | null;
  folder_name: string | null;
  original_folder_name: string | null;
  ip_address: string | null;
  ip_location: string | null;
  created_at: string;
}
|
||||
|
||||
/** Core save logic extracted so inFlight dedup can wrap it */
|
||||
async function doSaveFromShare(shareUrl: string, cloudType: string, sourceTitle?: string, ipAddress?: string): Promise<SaveResult> {
|
||||
const db = getDb();
|
||||
const ipLocation = await lookupIpLocation(ipAddress || '');
|
||||
|
||||
// ── Short-term dedup: prevent duplicate saves of the same URL within 60 seconds ──
|
||||
const DEDUP_WINDOW_SEC = 60;
|
||||
let dedupCutoff = '';
|
||||
try {
|
||||
const recentCutoff = db.prepare(
|
||||
`SELECT datetime('now','localtime', '-${DEDUP_WINDOW_SEC} seconds') as cutoff`
|
||||
).get() as { cutoff: string };
|
||||
dedupCutoff = recentCutoff.cutoff;
|
||||
|
||||
const recentRecord = db.prepare(
|
||||
`SELECT share_url, share_pwd, status, error_message, folder_name, original_folder_name FROM save_records
|
||||
WHERE source_url = ? AND created_at >= ?
|
||||
ORDER BY created_at DESC LIMIT 1`
|
||||
).get(shareUrl, dedupCutoff) as {
|
||||
share_url: string | null; share_pwd: string | null; status: string;
|
||||
error_message: string | null; folder_name: string | null; original_folder_name: string | null;
|
||||
} | undefined;
|
||||
|
||||
if (recentRecord) {
|
||||
const alreadySaved = recentRecord.status === 'success' || recentRecord.status === 'reused';
|
||||
if (alreadySaved && recentRecord.share_url) {
|
||||
console.log(`[Share] 🛡️ Dedup: ${shareUrl} was saved ${DEDUP_WINDOW_SEC}s ago (status=${recentRecord.status}), returning existing share link`);
|
||||
db.prepare(
|
||||
`INSERT INTO save_records (source_type, source_title, source_url, target_cloud, share_url, share_pwd, file_size, file_count, folder_count, duration_ms, status, error_message, folder_name, original_folder_name, ip_address, ip_location, created_at)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`
|
||||
).run(
|
||||
cloudType, sourceTitle || null, shareUrl, cloudType,
|
||||
recentRecord.share_url, recentRecord.share_pwd || null,
|
||||
null, 0, 0, 0, 'reused', null,
|
||||
recentRecord.folder_name || null, recentRecord.original_folder_name || null,
|
||||
ipAddress || null, ipLocation, localTimestamp(),
|
||||
);
|
||||
return {
|
||||
success: true,
|
||||
message: `🛡️ 此资源刚在 ${DEDUP_WINDOW_SEC} 秒内转存过,直接返回已有分享链接`,
|
||||
share_url: recentRecord.share_url, shareUrl: recentRecord.share_url,
|
||||
sharePwd: recentRecord.share_pwd || '', folderName: '',
|
||||
file_count: 0, folder_count: 0, duration_ms: 0,
|
||||
};
|
||||
}
|
||||
}
|
||||
} catch (err: any) {
|
||||
console.log(`[Share] Dedup check failed: ${err.message}, proceeding with normal save`);
|
||||
}
|
||||
|
||||
// ── Share link reuse: if same source URL was already saved successfully, validate and reuse ──
|
||||
const reuseEnabled = getSystemConfig('save_reuse_enabled');
|
||||
if (reuseEnabled !== 'false') {
|
||||
try {
|
||||
const existing = db.prepare(
|
||||
`SELECT share_url, share_pwd, folder_name, original_folder_name FROM save_records
|
||||
WHERE source_url = ? AND status IN ('success', 'reused') AND share_url IS NOT NULL AND share_url != ''
|
||||
ORDER BY created_at DESC LIMIT 1`
|
||||
).get(shareUrl) as { share_url: string; share_pwd: string; folder_name: string | null; original_folder_name: string | null } | undefined;
|
||||
|
||||
if (existing?.share_url) {
|
||||
const { LinkValidator } = await import('../validation/link-validator.service');
|
||||
const validator = new LinkValidator();
|
||||
const validation = await validator.validate(existing.share_url, 'quark');
|
||||
if (validation.status === 'valid') {
|
||||
const isFirstReuse = dedupCutoff ? !db.prepare(
|
||||
`SELECT 1 FROM save_records WHERE source_url = ? AND created_at >= ? AND status = 'reused' LIMIT 1`
|
||||
).get(shareUrl, dedupCutoff) : true;
|
||||
const reuseStatus = isFirstReuse ? 'success' : 'reused';
|
||||
const reuseMsg = isFirstReuse
|
||||
? `♻️ 检测到此资源之前已转存过,直接复用已存在的分享链接`
|
||||
: `♻️ 短时间内重复请求,复用已有分享链接`;
|
||||
|
||||
console.log(`[Share] ♻️ Reusing existing share link for ${shareUrl}: ${existing.share_url} (firstReuse=${isFirstReuse})`);
|
||||
db.prepare(
|
||||
`INSERT INTO save_records (source_type, source_title, source_url, target_cloud, share_url, share_pwd, file_size, file_count, folder_count, duration_ms, status, error_message, folder_name, original_folder_name, ip_address, ip_location, created_at)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`
|
||||
).run(
|
||||
cloudType, sourceTitle || null, shareUrl, cloudType,
|
||||
existing.share_url, existing.share_pwd || null,
|
||||
null, 0, 0, 0, reuseStatus, null,
|
||||
existing.folder_name || null, existing.original_folder_name || null,
|
||||
ipAddress || null, ipLocation, localTimestamp(),
|
||||
);
|
||||
return {
|
||||
success: true, message: reuseMsg,
|
||||
share_url: existing.share_url, shareUrl: existing.share_url,
|
||||
sharePwd: existing.share_pwd || '', folderName: '',
|
||||
file_count: 0, folder_count: 0, duration_ms: 0,
|
||||
};
|
||||
}
|
||||
console.log(`[Share] Existing share link for ${shareUrl} is invalid/expired, will re-save`);
|
||||
}
|
||||
} catch (err: any) {
|
||||
console.log(`[Share] Link reuse check failed: ${err.message}, proceeding with normal save`);
|
||||
}
|
||||
}
|
||||
|
||||
// ── Unified credential validation ──
|
||||
const credential = await getAndValidateCredential(cloudType);
|
||||
if (!credential.valid || !credential.config) {
|
||||
return { success: false, message: credential.message };
|
||||
}
|
||||
const config = credential.config;
|
||||
|
||||
// ── Check transfer enabled ──
|
||||
if (config.is_transfer_enabled === 0) {
|
||||
return { success: false, message: `${config.nickname || cloudType} 的转存功能已关闭,请先在后台开启` };
|
||||
}
|
||||
|
||||
const startTime = Date.now();
|
||||
|
||||
try {
|
||||
let driverResult: { success: boolean; message: string; shareUrl?: string; sharePwd?: string; folderName?: string; fileCount?: number; folderCount?: number; originalFolderName?: string };
|
||||
|
||||
switch (cloudType) {
|
||||
case 'quark': {
|
||||
const driver = new QuarkDriver({ cookie: config.cookie!, nickname: config.nickname });
|
||||
driverResult = await driver.saveFromShare(shareUrl, sourceTitle);
|
||||
break;
|
||||
}
|
||||
case 'baidu': {
|
||||
const driver = new BaiduDriver({ cookie: config.cookie!, nickname: config.nickname });
|
||||
driverResult = await driver.saveFromShare(shareUrl, sourceTitle);
|
||||
break;
|
||||
}
|
||||
case 'aliyun':
|
||||
return { success: false, message: '阿里云盘保存功能暂未实现' };
|
||||
default:
|
||||
return { success: false, message: `暂不支持 ${cloudType} 的保存功能` };
|
||||
}
|
||||
|
||||
const durationMs = Date.now() - startTime;
|
||||
|
||||
if (driverResult.success) {
|
||||
db.prepare(
|
||||
`UPDATE cloud_configs SET last_used_at = datetime('now','localtime'), total_saves = total_saves + 1, consecutive_failures = 0 WHERE id = ?`
|
||||
).run(config.id);
|
||||
} else if ((driverResult as any).cookieExpired) {
|
||||
// Cookie expired — don't count as failure, user needs to re-login
|
||||
} else {
|
||||
db.prepare(
|
||||
`UPDATE cloud_configs SET consecutive_failures = consecutive_failures + 1 WHERE id = ?`
|
||||
).run(config.id);
|
||||
}
|
||||
|
||||
db.prepare(
|
||||
`INSERT INTO save_records (source_type, source_title, source_url, target_cloud, share_url, share_pwd, file_size, file_count, folder_count, duration_ms, status, error_message, folder_name, original_folder_name, ip_address, ip_location, created_at)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`
|
||||
).run(
|
||||
cloudType, sourceTitle || driverResult.folderName || null, shareUrl, cloudType,
|
||||
driverResult.shareUrl || null, driverResult.sharePwd || null,
|
||||
null, driverResult.fileCount || 0, driverResult.folderCount || 0,
|
||||
durationMs, driverResult.success ? 'success' : 'failed',
|
||||
driverResult.success ? null : driverResult.message,
|
||||
driverResult.folderName || null, driverResult.originalFolderName || null,
|
||||
ipAddress || null, ipLocation, localTimestamp(),
|
||||
);
|
||||
|
||||
return {
|
||||
success: driverResult.success,
|
||||
message: driverResult.message,
|
||||
share_url: driverResult.shareUrl || '',
|
||||
shareUrl: driverResult.shareUrl,
|
||||
sharePwd: (driverResult as any).sharePwd || '',
|
||||
folderName: driverResult.folderName || '',
|
||||
file_count: driverResult.fileCount || 0,
|
||||
folder_count: driverResult.folderCount || 0,
|
||||
duration_ms: durationMs,
|
||||
};
|
||||
} catch (err: any) {
|
||||
const durationMs = Date.now() - startTime;
|
||||
const errorMessage = err.message || 'Failed to save to cloud';
|
||||
|
||||
db.prepare(
|
||||
`UPDATE cloud_configs SET consecutive_failures = consecutive_failures + 1 WHERE id = ?`
|
||||
).run(config.id);
|
||||
|
||||
db.prepare(
|
||||
`INSERT INTO save_records (source_type, source_url, target_cloud, duration_ms, status, error_message, ip_address, ip_location, created_at)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`
|
||||
).run(cloudType, shareUrl, cloudType, durationMs, 'failed', errorMessage, ipAddress || null, ipLocation, localTimestamp());
|
||||
|
||||
return { success: false, message: errorMessage };
|
||||
}
|
||||
}
|
||||
|
||||
export async function saveFromShare(shareUrl: string, cloudType: string, sourceTitle?: string, ipAddress?: string): Promise<SaveResult> {
|
||||
const key = `${cloudType}:${shareUrl}`;
|
||||
|
||||
const inflight = inFlightSaves.get(key);
|
||||
if (inflight) {
|
||||
console.log(`[Share] ⏳ In-flight: ${shareUrl} — another save is already running, awaiting result`);
|
||||
return inflight;
|
||||
}
|
||||
|
||||
const promise = doSaveFromShare(shareUrl, cloudType, sourceTitle, ipAddress);
|
||||
inFlightSaves.set(key, promise);
|
||||
try {
|
||||
return await promise;
|
||||
} finally {
|
||||
inFlightSaves.delete(key);
|
||||
}
|
||||
}
|
||||
|
||||
// ── Save Records ──────────────────────────────────────────────────
|
||||
|
||||
/**
 * Paginated query over save_records with optional filters, plus a status
 * summary (total/success/failed/reused counts).
 *
 * NOTE(review): the summary intentionally ignores the `status` filter (it
 * breaks counts down *by* status), but `keyword` is also omitted from the
 * summary conditions — confirm whether that is deliberate or an oversight.
 *
 * @param page       1-based page number
 * @param pageSize   rows per page
 * @param startDate  inclusive lower bound on created_at
 * @param endDate    exclusive upper bound on created_at
 * @param status     exact status filter (records list only)
 * @param sourceType exact source_type filter
 * @param keyword    substring match on source_title (records list only)
 */
export function getSaveRecords(page: number = 1, pageSize: number = 20, startDate?: string, endDate?: string, status?: string, sourceType?: string, keyword?: string): { total: number; records: SaveRecord[]; summary?: { total: number; success: number; failed: number; reused: number } } {
  const db = getDb();
  const offset = (page - 1) * pageSize;
  // Two parallel WHERE builders: one for the record list, one for the summary.
  const conditions: string[] = [];
  const params: any[] = [];
  const summaryConditions: string[] = [];
  const summaryParams: any[] = [];
  if (startDate) {
    conditions.push('created_at >= ?'); params.push(startDate);
    summaryConditions.push('created_at >= ?'); summaryParams.push(startDate);
  }
  if (endDate) {
    conditions.push('created_at < ?'); params.push(endDate);
    summaryConditions.push('created_at < ?'); summaryParams.push(endDate);
  }
  // status applies only to the record list — the summary groups by status.
  if (status) { conditions.push('status = ?'); params.push(status); }
  if (sourceType) {
    conditions.push('source_type = ?'); params.push(sourceType);
    summaryConditions.push('source_type = ?'); summaryParams.push(sourceType);
  }
  if (keyword) { conditions.push('source_title LIKE ?'); params.push(`%${keyword}%`); }
  const where = conditions.length > 0 ? 'WHERE ' + conditions.join(' AND ') : '';
  const total = (db.prepare(`SELECT COUNT(*) as count FROM save_records ${where}`).get(...params) as any).count;
  const records = db.prepare(
    `SELECT * FROM save_records ${where} ORDER BY created_at DESC LIMIT ? OFFSET ?`
  ).all(...params, pageSize, offset) as SaveRecord[];

  // Summary: per-status counts over the (looser) summary filter set.
  const summaryWhere = summaryConditions.length > 0 ? 'WHERE ' + summaryConditions.join(' AND ') : '';
  const summaryRows = db.prepare(
    `SELECT status, COUNT(*) as cnt FROM save_records ${summaryWhere} GROUP BY status`
  ).all(...summaryParams) as { status: string; cnt: number }[];
  let sumTotal = 0, sumSuccess = 0, sumFailed = 0, sumReused = 0;
  for (const r of summaryRows) {
    sumTotal += r.cnt;
    if (r.status === 'success') sumSuccess = r.cnt;
    else if (r.status === 'failed') sumFailed = r.cnt;
    else if (r.status === 'reused') sumReused = r.cnt;
  }
  const summary = { total: sumTotal, success: sumSuccess, failed: sumFailed, reused: sumReused };

  return { total, records, summary };
}
|
||||
|
||||
export function cleanupOldSaveRecords(): void {
|
||||
const db = getDb();
|
||||
const cutoff = formatLocalDateTime(new Date(Date.now() - 60 * 24 * 60 * 60 * 1000));
|
||||
const deleted = db.prepare('DELETE FROM save_records WHERE created_at < ?').run(cutoff);
|
||||
console.log(`[Cleanup] Deleted ${deleted.changes} save records older than 60 days (before ${cutoff})`);
|
||||
}
|
||||
|
||||
// ── Storage Refresh ───────────────────────────────────────────────
|
||||
|
||||
export async function refreshAllStorageInfo(): Promise<void> {
|
||||
const configs = getActiveCloudConfigs().filter(c => c.cloud_type === 'quark' && c.cookie);
|
||||
if (configs.length === 0) return;
|
||||
|
||||
for (const cfg of configs) {
|
||||
try {
|
||||
const { QuarkDriver } = require('./drivers/quark.driver');
|
||||
const driver = new QuarkDriver({ cookie: cfg.cookie, nickname: cfg.nickname });
|
||||
const storage = await driver.getStorageInfo();
|
||||
if (storage.totalBytes > 0 || storage.usedBytes > 0) {
|
||||
const db = getDb();
|
||||
db.prepare(
|
||||
`UPDATE cloud_configs SET storage_used = ?, storage_total = ? WHERE id = ?`
|
||||
).run(storage.used, storage.total, cfg.id);
|
||||
}
|
||||
} catch (err: any) {
|
||||
console.error(`[Storage] Failed to refresh quark#${cfg.id}:`, err.message);
|
||||
}
|
||||
}
|
||||
}
|
||||
472
packages/backend/src/cloud/credential.service.ts
Normal file
472
packages/backend/src/cloud/credential.service.ts
Normal file
@@ -0,0 +1,472 @@
|
||||
import { getDb } from '../database/database';
|
||||
import { localTimestamp, formatLocalDate, formatLocalDateTime } from '../utils/time';
|
||||
import { encrypt, decrypt, isEncrypted } from '../utils/crypto';
|
||||
|
||||
// ── Background Used-Space Calculation ──────────────────────────
|
||||
|
||||
/**
|
||||
* Fire-and-forget: recursively calculate used space for a quark drive
|
||||
* and update the database when done.
|
||||
*/
|
||||
async function calculateUsedSpaceAsync(cookie: string, configId: number): Promise<void> {
|
||||
const { calculateUsedSpace } = require('./drivers/quark-cleanup');
|
||||
const usedBytes = await calculateUsedSpace(cookie);
|
||||
if (usedBytes > 0) {
|
||||
const usedFormatted = usedBytes >= 1024 ** 4
|
||||
? (usedBytes / 1024 ** 4).toFixed(1) + ' TB'
|
||||
: usedBytes >= 1024 ** 3
|
||||
? (usedBytes / 1024 ** 3).toFixed(1) + ' GB'
|
||||
: (usedBytes / 1024 ** 2).toFixed(1) + ' MB';
|
||||
const db = getDb();
|
||||
db.prepare(
|
||||
`UPDATE cloud_configs SET storage_used = ?, updated_at = ? WHERE id = ?`
|
||||
).run(usedFormatted, localTimestamp(), configId);
|
||||
console.log(`[UsedSpace] Updated config #${configId}: used=${usedFormatted}`);
|
||||
}
|
||||
}
|
||||
|
||||
/** One row of the cloud_configs table (credentials + health/usage counters). */
export interface CloudConfig {
  id: number;
  cloud_type: string;
  // Stored encrypted at rest; decrypted on read (see decryptCookie).
  cookie?: string;
  nickname?: string;
  // 1 = active, 0 = disabled.
  is_active: number;
  promotion_account?: string;
  // 1 = transfers allowed for this config, 0 = blocked.
  is_transfer_enabled: number;
  // Human-readable usage strings (e.g. "1.5 GB") — see calculateUsedSpaceAsync.
  storage_used?: string;
  storage_total?: string;
  checkin_status: string; // 'none'|'success'|'failed'|'pending'|'skipped'
  last_checkin_at?: string;
  checkin_message?: string;
  // Incremented on failed saves, reset to 0 on success/config update.
  consecutive_failures: number;
  last_used_at?: string;
  total_saves: number;
  created_at: string;
  updated_at: string;
  verification_status?: string;
  // Account identity within the cloud type (Quark __uid) used for dedup.
  cloud_type_uid?: string;
}
|
||||
|
||||
// ── Cookie Encryption Helper ──────────────────────────────────────
|
||||
/** Decrypt cookie. Handles legacy plaintext data transparently. */
|
||||
function decryptCookie(encrypted: string | null | undefined): string {
|
||||
if (!encrypted) return '';
|
||||
// If already plaintext (legacy data), return as-is
|
||||
if (!isEncrypted(encrypted)) return encrypted;
|
||||
return decrypt(encrypted);
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract Quark __uid from cookie string.
|
||||
* Used for dedup: same cloud_type + same __uid = same account.
|
||||
*/
|
||||
function extractQuarkUid(cookie: string): string | null {
|
||||
const match = cookie.match(/(?:^|;\s*)__uid=([^;]+)/);
|
||||
return match ? match[1] : null;
|
||||
}
|
||||
|
||||
// ── Config CRUD ──────────────────────────────────────────────────
|
||||
|
||||
/** All cloud configs (including inactive), with cookie column — admin use. */
export function getCloudConfigs(): CloudConfig[] {
  const db = getDb();
  return db.prepare(
    `SELECT id, cloud_type, cookie, nickname, is_active, promotion_account, is_transfer_enabled, storage_used, storage_total,
            cloud_type_uid,
            checkin_status, last_checkin_at, checkin_message, consecutive_failures,
            last_used_at, total_saves, created_at, updated_at, verification_status
     FROM cloud_configs ORDER BY id ASC`
  ).all() as CloudConfig[];
}
|
||||
|
||||
/**
 * Active cloud configs WITHOUT the cookie or verification_status columns —
 * presumably the credential-free shape intended for non-admin callers; confirm.
 */
export function getAvailableClouds(): CloudConfig[] {
  const db = getDb();
  return db.prepare(
    `SELECT id, cloud_type, nickname, is_active, promotion_account, is_transfer_enabled, storage_used, storage_total,
            cloud_type_uid,
            checkin_status, last_checkin_at, checkin_message, consecutive_failures,
            last_used_at, total_saves, created_at, updated_at
     FROM cloud_configs WHERE is_active = 1 ORDER BY id ASC`
  ).all() as CloudConfig[];
}
|
||||
|
||||
/** Returns the first active config matching the given cloud type. */
export function getCloudConfigByType(cloudType: string): CloudConfig | undefined {
  const db = getDb();
  // Lowest id wins when several active configs share a cloud type.
  return db.prepare(
    `SELECT id, cloud_type, cookie, nickname, is_active, promotion_account, is_transfer_enabled, storage_used, storage_total,
            cloud_type_uid,
            checkin_status, last_checkin_at, checkin_message, consecutive_failures,
            last_used_at, total_saves, created_at, updated_at, verification_status
     FROM cloud_configs WHERE cloud_type = ? AND is_active = 1
     ORDER BY id ASC LIMIT 1`
  ).get(cloudType) as CloudConfig | undefined;
}
|
||||
|
||||
/** Fetch a single config by primary key (active or not), cookie included. */
export function getCloudConfigById(id: number): CloudConfig | undefined {
  const db = getDb();
  return db.prepare(
    `SELECT id, cloud_type, cookie, nickname, is_active, promotion_account, is_transfer_enabled, storage_used, storage_total,
            cloud_type_uid,
            checkin_status, last_checkin_at, checkin_message, consecutive_failures,
            last_used_at, total_saves, created_at, updated_at, verification_status
     FROM cloud_configs WHERE id = ?`
  ).get(id) as CloudConfig | undefined;
}
|
||||
|
||||
/** Returns all active cloud configs (used by save flow for cloud type switching). */
export function getActiveCloudConfigs(): CloudConfig[] {
  const db = getDb();
  // Ordered by cloud_type then id so iteration order is deterministic.
  return db.prepare(
    `SELECT id, cloud_type, cookie, nickname, is_active, promotion_account, is_transfer_enabled, storage_used, storage_total,
            cloud_type_uid,
            checkin_status, last_checkin_at, checkin_message, consecutive_failures,
            last_used_at, total_saves, created_at, updated_at
     FROM cloud_configs WHERE is_active = 1
     ORDER BY cloud_type ASC, id ASC`
  ).all() as CloudConfig[];
}
|
||||
|
||||
/**
 * Upsert a cloud config.
 *
 * Resolution order:
 *  - data.id given            → update that row directly.
 *  - cloud_type+cloud_type_uid → update the matching account row.
 *  - cloud_type alone          → update the first active legacy row.
 *  - otherwise                 → insert a new row.
 * Every path resets consecutive_failures to 0 and returns the saved row.
 *
 * NOTE(review): the SQL uses COALESCE(?, col) to "keep existing value when
 * omitted", but the JS side substitutes defaults first (is_active ?? 1,
 * promotion_account ?? '', is_transfer_enabled ?? 1), so COALESCE never sees
 * NULL for those columns — omitting them forcibly resets is_active/
 * is_transfer_enabled to 1 and promotion_account to ''. Confirm whether that
 * reactivation-on-update is intended.
 */
export function saveCloudConfig(data: {
  id?: number;
  cloud_type: string;
  cookie?: string;
  nickname?: string;
  is_active?: number;
  promotion_account?: string;
  is_transfer_enabled?: number;
  storage_used?: string;
  storage_total?: string;
}): CloudConfig {
  const db = getDb();
  // Encrypt cookie before storing
  const encryptedCookie = data.cookie ? encrypt(data.cookie) : null;

  // Extract cloud_type_uid from cookie (Quark __uid)
  let cloudTypeUid: string | null = null;
  if (data.cookie) {
    cloudTypeUid = extractQuarkUid(data.cookie);
  }

  if (data.id) {
    // Update by ID — always succeeds
    db.prepare(
      `UPDATE cloud_configs SET
         cloud_type = COALESCE(?, cloud_type),
         cookie = COALESCE(?, cookie),
         nickname = COALESCE(?, nickname),
         is_active = COALESCE(?, is_active),
         promotion_account = COALESCE(?, promotion_account),
         is_transfer_enabled = COALESCE(?, is_transfer_enabled),
         storage_used = COALESCE(?, storage_used),
         storage_total = COALESCE(?, storage_total),
         cloud_type_uid = COALESCE(?, cloud_type_uid),
         consecutive_failures = 0,
         updated_at = ?
       WHERE id = ?`
    ).run(data.cloud_type, encryptedCookie || null, data.nickname || null, data.is_active ?? 1, data.promotion_account ?? '', data.is_transfer_enabled ?? 1, data.storage_used || null, data.storage_total || null, cloudTypeUid || null, localTimestamp(), data.id);
  } else {
    // Try to find existing config by cloud_type + cloud_type_uid
    let existing: any = null;
    if (cloudTypeUid) {
      existing = db.prepare(
        `SELECT id FROM cloud_configs WHERE cloud_type = ? AND cloud_type_uid = ? LIMIT 1`
      ).get(data.cloud_type, cloudTypeUid);
    }

    // Fallback: match by cloud_type alone (legacy records without cloud_type_uid)
    if (!existing) {
      existing = db.prepare(
        'SELECT id FROM cloud_configs WHERE cloud_type = ? AND is_active = 1 LIMIT 1'
      ).get(data.cloud_type) as any;
    }

    if (existing) {
      db.prepare(
        `UPDATE cloud_configs SET
           cookie = COALESCE(?, cookie),
           nickname = COALESCE(?, nickname),
           is_active = COALESCE(?, is_active),
           promotion_account = COALESCE(?, promotion_account),
           is_transfer_enabled = COALESCE(?, is_transfer_enabled),
           storage_used = COALESCE(?, storage_used),
           storage_total = COALESCE(?, storage_total),
           cloud_type_uid = COALESCE(?, cloud_type_uid),
           consecutive_failures = 0,
           updated_at = ?
         WHERE id = ?`
      ).run(encryptedCookie || null, data.nickname || null, data.is_active ?? 1, data.promotion_account ?? '', data.is_transfer_enabled ?? 1, data.storage_used || null, data.storage_total || null, cloudTypeUid || null, localTimestamp(), existing.id);

      // Re-read savedId for return
      const savedId = existing.id;
      // Early return: this branch re-reads and returns without hitting the
      // shared last_insert_rowid() path below.
      return db.prepare(
        `SELECT id, cloud_type, cookie, nickname, is_active, promotion_account, is_transfer_enabled, storage_used, storage_total,
                cloud_type_uid,
                checkin_status, last_checkin_at, checkin_message, consecutive_failures,
                last_used_at, total_saves, created_at, updated_at
         FROM cloud_configs WHERE id = ?`
      ).get(savedId) as CloudConfig;
    }

    // No existing config found — insert new
    db.prepare(
      'INSERT INTO cloud_configs (cloud_type, cookie, nickname, is_active, promotion_account, is_transfer_enabled, storage_used, storage_total, cloud_type_uid, consecutive_failures) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, 0)'
    ).run(data.cloud_type, encryptedCookie || null, data.nickname || null, data.is_active ?? 1, data.promotion_account ?? '', data.is_transfer_enabled ?? 1, data.storage_used || null, data.storage_total || null, cloudTypeUid || null);
  }

  // data.id for the update-by-id path; the fresh rowid for the insert path.
  const savedId = data.id || (db.prepare('SELECT last_insert_rowid() as id').get() as any).id;
  return db.prepare(
    `SELECT id, cloud_type, cookie, nickname, is_active, promotion_account, is_transfer_enabled, storage_used, storage_total,
            cloud_type_uid,
            checkin_status, last_checkin_at, checkin_message, consecutive_failures,
            last_used_at, total_saves, created_at, updated_at
     FROM cloud_configs WHERE id = ?`
  ).get(savedId) as CloudConfig;
}
|
||||
|
||||
export function deleteCloudConfig(id: number): boolean {
|
||||
const db = getDb();
|
||||
const result = db.prepare('DELETE FROM cloud_configs WHERE id = ?').run(id);
|
||||
return result.changes > 0;
|
||||
}
|
||||
|
||||
// ── Cookie Validation ────────────────────────────────────────────
|
||||
|
||||
async function fetchQuarkNickname(cookie: string): Promise<string | null> {
|
||||
const MAX_RETRIES = 2;
|
||||
for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
|
||||
try {
|
||||
const response = await fetch('https://pan.quark.cn/account/info?fr=pc&platform=pc', {
|
||||
headers: {
|
||||
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) quark-cloud-drive/3.14.2 Chrome/112.0.5615.165 Electron/24.1.3.8 Safari/537.36 Channel/pckk_other_ch',
|
||||
'Cookie': cookie,
|
||||
'Accept': 'application/json',
|
||||
'Referer': 'https://pan.quark.cn/',
|
||||
},
|
||||
signal: AbortSignal.timeout(15000),
|
||||
});
|
||||
if (!response.ok) return null;
|
||||
const data = await response.json() as any;
|
||||
if (data?.data?.nickname) return data.data.nickname;
|
||||
} catch {
|
||||
if (attempt < MAX_RETRIES) {
|
||||
await new Promise(r => setTimeout(r, 1500));
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
export async function testCloudConnection(id: number): Promise<{
|
||||
success: boolean;
|
||||
message: string;
|
||||
nickname?: string;
|
||||
storage_used?: string;
|
||||
storage_total?: string;
|
||||
}> {
|
||||
const config = getCloudConfigById(id);
|
||||
if (!config) {
|
||||
return { success: false, message: 'Cloud config not found' };
|
||||
}
|
||||
|
||||
if (!config.cookie) {
|
||||
return { success: false, message: 'Cookie not configured' };
|
||||
}
|
||||
|
||||
try {
|
||||
let valid = false;
|
||||
let nickname = '';
|
||||
let storageUsed = config.storage_used || '';
|
||||
let storageTotal = config.storage_total || '';
|
||||
|
||||
if (config.cloud_type === 'baidu') {
|
||||
const { BaiduDriver } = require('./drivers/baidu.driver');
|
||||
const driver = new BaiduDriver({ cookie: config.cookie, nickname: config.nickname });
|
||||
valid = await driver.validate();
|
||||
if (valid) {
|
||||
const info = await driver.getUserInfo();
|
||||
if (info) {
|
||||
nickname = config.nickname || info.nickname || '百度网盘';
|
||||
const fmt = (b: number) => b >= 1024**3 ? (b/1024**3).toFixed(2)+' GB' : (b/1024**2).toFixed(2)+' MB';
|
||||
storageUsed = fmt(info.usedBytes);
|
||||
storageTotal = fmt(info.totalBytes);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
const decodedCookie = decrypt(config.cookie);
|
||||
const { QuarkDriver } = require('./drivers/quark.driver');
|
||||
const driver = new QuarkDriver({ cookie: decodedCookie, nickname: config.nickname });
|
||||
valid = await driver.validate();
|
||||
if (valid) {
|
||||
nickname = config.nickname || (await fetchQuarkNickname(decodedCookie)) || '夸克网盘';
|
||||
const storage = await driver.getStorageInfoQuick(config.storage_total);
|
||||
storageTotal = (storage.total !== '-' && storage.total !== '0 B') ? storage.total : (config.storage_total || '');
|
||||
storageUsed = (storage.used && storage.used !== '-' && storage.used !== '0 B') ? storage.used : (config.storage_used || '');
|
||||
}
|
||||
}
|
||||
|
||||
const db = getDb();
|
||||
if (!valid) {
|
||||
db.prepare(
|
||||
`UPDATE cloud_configs SET verification_status = 'invalid', updated_at = ? WHERE id = ?`
|
||||
).run(localTimestamp(), id);
|
||||
return { success: false, message: '连接失败:Cookie 无效或已过期,或网络暂时异常' };
|
||||
}
|
||||
|
||||
db.prepare(
|
||||
`UPDATE cloud_configs SET nickname = ?, storage_total = ?, storage_used = ?, is_active = 1, verification_status = 'valid', updated_at = ? WHERE id = ?`
|
||||
).run(nickname, storageTotal, storageUsed, localTimestamp(), id);
|
||||
|
||||
// Fire-and-forget: recalculate used space in background (slow for big drives)
|
||||
if (config.cloud_type === 'quark') {
|
||||
calculateUsedSpaceAsync(decrypt(config.cookie), id).catch(err => console.error(`[UsedSpace] Background calc failed for #${id}:`, err.message));
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
message: '连接成功',
|
||||
nickname,
|
||||
storage_used: storageUsed,
|
||||
storage_total: storageTotal,
|
||||
};
|
||||
} catch (err: any) {
|
||||
try {
|
||||
const db = getDb();
|
||||
db.prepare(
|
||||
`UPDATE cloud_configs SET verification_status = 'invalid', updated_at = ? WHERE id = ?`
|
||||
).run(localTimestamp(), id);
|
||||
} catch {}
|
||||
return { success: false, message: `连接失败:${err.message || '未知错误'}` };
|
||||
}
|
||||
}
|
||||
|
||||
export async function testCloudConnectionWithCookie(cloudType: string, cookie: string): Promise<{
|
||||
success: boolean;
|
||||
message: string;
|
||||
nickname?: string;
|
||||
storage_used?: string;
|
||||
storage_total?: string;
|
||||
}> {
|
||||
try {
|
||||
const { QuarkDriver } = require('./drivers/quark.driver');
|
||||
const driver = new QuarkDriver({ cookie, nickname: '' });
|
||||
const valid = await driver.validate();
|
||||
if (!valid) {
|
||||
return { success: false, message: '连接失败:Cookie 无效或已过期' };
|
||||
}
|
||||
const nickname = (await fetchQuarkNickname(cookie)) || cloudType;
|
||||
// getStorageInfo may timeout from overseas servers, don't fail if it does
|
||||
let storage: { used: string; total: string } = { used: '-', total: '-' };
|
||||
try {
|
||||
const s = await driver.getStorageInfoQuick();
|
||||
if (s) {
|
||||
storage = { used: s.used || '-', total: s.total || '-' };
|
||||
}
|
||||
} catch {
|
||||
// storage info is optional
|
||||
}
|
||||
return {
|
||||
success: true,
|
||||
message: '连接成功',
|
||||
nickname,
|
||||
storage_used: storage.used,
|
||||
storage_total: storage.total,
|
||||
};
|
||||
} catch (err: any) {
|
||||
return { success: false, message: `连接失败:${err.message || '未知错误'}` };
|
||||
}
|
||||
}
|
||||
|
||||
// ── Unified Credential Validation ─────────────────────────────────
|
||||
|
||||
/** Outcome of getAndValidateCredential(). */
export interface CredentialValidationResult {
  valid: boolean;        // true only when a config with a working cookie was found
  config?: CloudConfig;  // set only when valid; its `cookie` field holds the DECRYPTED value
  errorCode?: string;    // machine-readable reason: NO_AVAILABLE_DRIVE | COOKIE_MISSING | COOKIE_EXPIRED | VALIDATION_ERROR
  message: string;       // human-readable detail (always set, 'ok' on success)
}
|
||||
|
||||
/**
|
||||
* Get and validate a credential for the given cloud type.
|
||||
*
|
||||
* This is the unified entry point for all save/transfer operations.
|
||||
* It handles:
|
||||
* 1. Finding an active config with < 5 consecutive failures (round-robin)
|
||||
* 2. Validating cookie freshness via driver.validate()
|
||||
* 3. Returning structured result with error codes
|
||||
*
|
||||
* Reference: search-ucmao get_and_validate_credential() pattern.
|
||||
*/
|
||||
export async function getAndValidateCredential(cloudType: string): Promise<CredentialValidationResult> {
  const db = getDb();

  // Round-robin pick: the least-recently-used active drive below the
  // 5-consecutive-failures cutoff. NULLS FIRST lets never-used drives win
  // (NULLS FIRST requires SQLite 3.30+ — TODO confirm bundled version).
  const config = db.prepare(
    `SELECT * FROM cloud_configs
    WHERE cloud_type = ? AND is_active = 1
    AND consecutive_failures < 5
    ORDER BY last_used_at ASC NULLS FIRST
    LIMIT 1`
  ).get(cloudType) as CloudConfig | undefined;

  if (!config) {
    return {
      valid: false,
      errorCode: 'NO_AVAILABLE_DRIVE',
      message: `Cloud type "${cloudType}" is not configured or no available drives`,
    };
  }

  if (!config.cookie) {
    return {
      valid: false,
      errorCode: 'COOKIE_MISSING',
      message: `Cookie not configured for ${cloudType} drive #${config.id}`,
    };
  }

  try {
    // Decrypt cookie before validation
    const decryptedCookie = decryptCookie(config.cookie);
    if (!decryptedCookie) {
      // Decryption produced nothing usable — reported the same as a missing cookie.
      return {
        valid: false,
        errorCode: 'COOKIE_MISSING',
        message: `Cookie not configured for ${cloudType} drive #${config.id}`,
      };
    }

    // Freshness probe via the matching driver (drivers are require()d lazily).
    let cookieValid = false;
    if (cloudType === 'baidu') {
      const { BaiduDriver } = require('./drivers/baidu.driver');
      const driver = new BaiduDriver({ cookie: decryptedCookie, nickname: config.nickname });
      cookieValid = await driver.validate();
    } else {
      const { QuarkDriver } = require('./drivers/quark.driver');
      const driver = new QuarkDriver({ cookie: decryptedCookie, nickname: config.nickname });
      cookieValid = await driver.validate();
    }

    if (!cookieValid) {
      // Persist the failure so the admin UI shows this drive as invalid.
      db.prepare(
        `UPDATE cloud_configs SET verification_status = 'invalid', updated_at = ? WHERE id = ?`
      ).run(localTimestamp(), config.id);
      return {
        valid: false,
        errorCode: 'COOKIE_EXPIRED',
        message: `Cookie expired or invalid for ${cloudType} drive #${config.id}`,
      };
    }

    // Hand callers the config with the DECRYPTED cookie, ready for driver use.
    return {
      valid: true,
      config: { ...config, cookie: decryptedCookie },
      message: 'ok',
    };
  } catch (err: any) {
    return {
      valid: false,
      errorCode: 'VALIDATION_ERROR',
      message: `Credential validation failed: ${err.message}`,
    };
  }
}
|
||||
327
packages/backend/src/cloud/database.ts
Executable file
327
packages/backend/src/cloud/database.ts
Executable file
@@ -0,0 +1,327 @@
|
||||
import fs from 'fs';
import path from 'path';

import Database from 'better-sqlite3';
import bcrypt from 'bcryptjs';

import config from '../config';
import { formatLocalDateTime } from '../utils/time';
|
||||
|
||||
let db: Database.Database | null = null;
|
||||
|
||||
export function getDb(): Database.Database {
|
||||
if (db) return db;
|
||||
|
||||
const dbDir = path.dirname(config.dbPath);
|
||||
const fs = require('fs');
|
||||
if (!fs.existsSync(dbDir)) {
|
||||
fs.mkdirSync(dbDir, { recursive: true });
|
||||
}
|
||||
|
||||
db = new Database(config.dbPath);
|
||||
db.pragma('journal_mode = WAL');
|
||||
db.pragma('foreign_keys = ON');
|
||||
|
||||
runMigrations(db);
|
||||
seedAdmin(db);
|
||||
|
||||
return db;
|
||||
}
|
||||
|
||||
/**
 * Create all tables (idempotent CREATE TABLE IF NOT EXISTS) and then apply
 * the incremental column/constraint migrations and seed data. Safe to run on
 * every startup.
 */
function runMigrations(db: Database.Database): void {
  db.exec(`
    CREATE TABLE IF NOT EXISTS admins (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      username TEXT UNIQUE NOT NULL,
      password_hash TEXT NOT NULL,
      created_at TEXT NOT NULL DEFAULT (datetime('now', 'localtime')),
      last_login TEXT
    );

    CREATE TABLE IF NOT EXISTS cloud_configs (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      cloud_type TEXT NOT NULL,
      cookie TEXT,
      nickname TEXT,
      is_active INTEGER NOT NULL DEFAULT 1,
      storage_used TEXT,
      storage_total TEXT,
      checkin_status TEXT NOT NULL DEFAULT 'none',
      last_checkin_at TEXT,
      checkin_message TEXT,
      consecutive_failures INTEGER DEFAULT 0,
      last_used_at TEXT,
      total_saves INTEGER DEFAULT 0,
      created_at TEXT NOT NULL DEFAULT (datetime('now', 'localtime')),
      updated_at TEXT NOT NULL DEFAULT (datetime('now', 'localtime'))
    );

    CREATE TABLE IF NOT EXISTS promotions (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      title TEXT NOT NULL,
      description TEXT,
      image_url TEXT,
      link_url TEXT,
      position TEXT,
      sort_order INTEGER NOT NULL DEFAULT 0,
      active INTEGER NOT NULL DEFAULT 1,
      click_count INTEGER NOT NULL DEFAULT 0,
      start_time TEXT,
      end_time TEXT,
      created_at TEXT NOT NULL DEFAULT (datetime('now', 'localtime')),
      updated_at TEXT NOT NULL DEFAULT (datetime('now', 'localtime'))
    );

    CREATE TABLE IF NOT EXISTS save_records (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      source_type TEXT,
      source_title TEXT,
      source_url TEXT,
      target_cloud TEXT,
      share_url TEXT,
      share_pwd TEXT,
      file_size TEXT,
      file_count INTEGER DEFAULT 0,
      duration_ms INTEGER DEFAULT 0,
      status TEXT NOT NULL DEFAULT '',
      error_message TEXT,
      ip_address TEXT,
      created_at TEXT NOT NULL DEFAULT (datetime('now', 'localtime'))
    );

    CREATE TABLE IF NOT EXISTS search_stats (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      keyword TEXT,
      intent TEXT,
      result_count INTEGER DEFAULT 0,
      ip_address TEXT,
      created_at TEXT NOT NULL DEFAULT (datetime('now', 'localtime'))
    );

    CREATE TABLE IF NOT EXISTS hot_keywords (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      keyword TEXT UNIQUE NOT NULL,
      search_count INTEGER NOT NULL DEFAULT 1,
      updated_at TEXT NOT NULL DEFAULT (datetime('now', 'localtime'))
    );

    CREATE TABLE IF NOT EXISTS system_configs (
      key TEXT PRIMARY KEY,
      value TEXT NOT NULL DEFAULT '',
      description TEXT,
      updated_at TEXT NOT NULL DEFAULT (datetime('now', 'localtime'))
    );

    CREATE TABLE IF NOT EXISTS content_cache (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      keyword TEXT UNIQUE NOT NULL,
      title TEXT,
      description TEXT,
      tags TEXT,
      cover TEXT,
      source TEXT,
      updated_at TEXT NOT NULL DEFAULT (datetime('now', 'localtime'))
    );
  `);
  seedSystemConfigs(db);      // default system_configs rows (INSERT OR IGNORE)
  migrateSaveRecords(db);     // add newer save_records columns
  migrateContentCache(db);    // add newer content_cache columns + source backfill
  migrateCloudConfigs(db);    // drop UNIQUE(cloud_type), add check-in/rotation columns
  cleanupOldSaveRecords(db);  // purge save_records older than 60 days
}
|
||||
|
||||
/** 迁移: 给已有 save_records 表补充新列 */
|
||||
function migrateSaveRecords(db: Database.Database): void {
|
||||
const newCols: { col: string; def: string }[] = [
|
||||
{ col: 'share_pwd', def: 'TEXT' },
|
||||
{ col: 'file_count', def: 'INTEGER DEFAULT 0' },
|
||||
{ col: 'folder_count', def: 'INTEGER DEFAULT 0' },
|
||||
{ col: 'duration_ms', def: 'INTEGER DEFAULT 0' },
|
||||
{ col: 'status', def: "TEXT NOT NULL DEFAULT ''" },
|
||||
{ col: 'error_message', def: 'TEXT' },
|
||||
{ col: 'folder_name', def: 'TEXT' },
|
||||
{ col: 'request_url', def: 'TEXT' },
|
||||
{ col: 'ip_location', def: 'TEXT' },
|
||||
{ col: 'original_folder_name', def: 'TEXT' },
|
||||
];
|
||||
for (const { col, def } of newCols) {
|
||||
try {
|
||||
db.exec(`ALTER TABLE save_records ADD COLUMN ${col} ${def}`);
|
||||
} catch {
|
||||
// Column already exists — ignore
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/** 迁移: 给 content_cache 表加 douban_url 列 */
|
||||
function migrateContentCache(db: Database.Database): void {
|
||||
const columns: { col: string; def: string }[] = [
|
||||
{ col: 'douban_url', def: 'TEXT' },
|
||||
{ col: 'rating', def: 'TEXT' },
|
||||
{ col: 'rating_count', def: 'TEXT' },
|
||||
{ col: 'year', def: 'TEXT' },
|
||||
{ col: 'genres', def: 'TEXT' },
|
||||
{ col: 'directors', def: 'TEXT' },
|
||||
{ col: 'actors', def: 'TEXT' },
|
||||
{ col: 'region', def: 'TEXT' },
|
||||
{ col: 'duration', def: 'TEXT' },
|
||||
];
|
||||
for (const { col, def } of columns) {
|
||||
try {
|
||||
db.exec(`ALTER TABLE content_cache ADD COLUMN ${col} ${def}`);
|
||||
} catch {
|
||||
// Column already exists — ignore
|
||||
}
|
||||
}
|
||||
// 修复旧记录:source 为 NULL 但实际有 TMDB 数据的,标记为 tmdb
|
||||
db.exec(`UPDATE content_cache SET source = 'tmdb' WHERE source IS NULL AND title IS NOT NULL AND title != ''`);
|
||||
}
|
||||
|
||||
/**
 * Migration: drop the legacy UNIQUE constraint on cloud_configs.cloud_type
 * (to allow multiple accounts per cloud type) and add the check-in /
 * round-robin columns introduced later. Each step is idempotent.
 */
function migrateCloudConfigs(db: Database.Database): void {
  // Add the newer columns (ALTER throws when a column exists; swallowed below).
  const newCols: { col: string; def: string }[] = [
    { col: 'checkin_status', def: "TEXT NOT NULL DEFAULT 'none'" },
    { col: 'last_checkin_at', def: 'TEXT' },
    { col: 'checkin_message', def: 'TEXT' },
    { col: 'consecutive_failures', def: 'INTEGER DEFAULT 0' },
    { col: 'last_used_at', def: 'TEXT' },
    { col: 'total_saves', def: 'INTEGER DEFAULT 0' },
  ];
  for (const { col, def } of newCols) {
    try { db.exec(`ALTER TABLE cloud_configs ADD COLUMN ${col} ${def}`); } catch {}
  }
  // If the old table still carries UNIQUE on cloud_type, rebuild it.
  // SQLite cannot drop a constraint in place: copy into a v2 table, drop the
  // original, rename v2 back.
  const row = db.prepare(`SELECT sql FROM sqlite_master WHERE type='table' AND name='cloud_configs'`).get() as any;
  if (row && row.sql && row.sql.includes('cloud_type TEXT UNIQUE')) {
    db.exec(`
      CREATE TABLE IF NOT EXISTS cloud_configs_v2 (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        cloud_type TEXT NOT NULL,
        cookie TEXT,
        nickname TEXT,
        is_active INTEGER NOT NULL DEFAULT 1,
        storage_used TEXT,
        storage_total TEXT,
        checkin_status TEXT NOT NULL DEFAULT 'none',
        last_checkin_at TEXT,
        checkin_message TEXT,
        consecutive_failures INTEGER DEFAULT 0,
        last_used_at TEXT,
        total_saves INTEGER DEFAULT 0,
        created_at TEXT NOT NULL DEFAULT (datetime('now', 'localtime')),
        updated_at TEXT NOT NULL DEFAULT (datetime('now', 'localtime'))
      );
      INSERT INTO cloud_configs_v2 (id, cloud_type, cookie, nickname, is_active, storage_used, storage_total, checkin_status, last_checkin_at, checkin_message, consecutive_failures, last_used_at, total_saves, created_at, updated_at)
      SELECT id, cloud_type, cookie, nickname, is_active, storage_used, storage_total, COALESCE(checkin_status,'none'), last_checkin_at, checkin_message, COALESCE(consecutive_failures,0), last_used_at, COALESCE(total_saves,0), created_at, updated_at FROM cloud_configs;
      DROP TABLE cloud_configs;
      ALTER TABLE cloud_configs_v2 RENAME TO cloud_configs;
    `);
    console.log('[DB] cloud_configs migration: UNIQUE constraint removed, new fields added');
  }

  // Migration 2: Add verification_status column
  const row2 = db.prepare("SELECT sql FROM sqlite_master WHERE name='cloud_configs' AND sql LIKE '%verification_status%'").get();
  if (!row2) {
    db.exec("ALTER TABLE cloud_configs ADD COLUMN verification_status TEXT DEFAULT NULL");
    console.log('[DB] cloud_configs migration: verification_status column added');
  }

  // Migration 3: Add cloud_type_uid column (for Quark __uid dedup)
  const row3 = db.prepare("SELECT sql FROM sqlite_master WHERE name='cloud_configs' AND sql LIKE '%cloud_type_uid%'").get();
  if (!row3) {
    db.exec("ALTER TABLE cloud_configs ADD COLUMN cloud_type_uid TEXT DEFAULT NULL");
    console.log('[DB] cloud_configs migration: cloud_type_uid column added');
  }

  // Migration 4: Add promotion_account column
  const row4 = db.prepare("SELECT sql FROM sqlite_master WHERE name='cloud_configs' AND sql LIKE '%promotion_account%'").get();
  if (!row4) {
    db.exec("ALTER TABLE cloud_configs ADD COLUMN promotion_account TEXT DEFAULT ''");
    console.log('[DB] cloud_configs migration: promotion_account column added');
  }

  // Migration 5: Add is_transfer_enabled column
  const row5 = db.prepare("SELECT sql FROM sqlite_master WHERE name='cloud_configs' AND sql LIKE '%is_transfer_enabled%'").get();
  if (!row5) {
    db.exec("ALTER TABLE cloud_configs ADD COLUMN is_transfer_enabled INTEGER DEFAULT 1");
    console.log('[DB] cloud_configs migration: is_transfer_enabled column added');
  }
}
|
||||
|
||||
function seedAdmin(db: Database.Database): void {
|
||||
const existing = db.prepare('SELECT id FROM admins WHERE username = ?').get(config.adminUsername);
|
||||
if (existing) return;
|
||||
|
||||
const salt = bcrypt.genSaltSync(10);
|
||||
const hash = bcrypt.hashSync(config.adminPassword, salt);
|
||||
|
||||
db.prepare(
|
||||
'INSERT INTO admins (username, password_hash) VALUES (?, ?)'
|
||||
).run(config.adminUsername, hash);
|
||||
|
||||
console.log(`[DB] Admin user "${config.adminUsername}" created`);
|
||||
}
|
||||
|
||||
/**
 * Insert the default system_configs rows. Uses INSERT OR IGNORE so existing
 * values — including anything edited via the admin UI — are never overwritten.
 */
function seedSystemConfigs(db: Database.Database): void {
  const defaults: { key: string; value: string; description: string }[] = [
    // Upstream service endpoints
    { key: 'pansou_url', value: config.pansouUrl, description: 'PanSou 搜索引擎服务地址' },
    { key: 'video_parser_url', value: config.videoParserUrl, description: '视频解析服务地址' },
    // Link-validation tuning
    { key: 'validation_concurrency', value: String(config.validation.concurrency), description: '链接验证并发数' },
    { key: 'validation_timeout', value: String(config.validation.timeout), description: '链接验证超时(ms)' },
    { key: 'validation_cache_ttl_valid', value: String(config.validation.cacheTtlValid), description: '有效链接缓存时间(s)' },
    { key: 'validation_cache_ttl_invalid', value: String(config.validation.cacheTtlInvalid), description: '无效链接缓存时间(s)' },
    // Search behaviour
    { key: 'search_proxy_enabled', value: 'false', description: '搜索代理开关(true/false)' },
    { key: 'search_proxy_url', value: '', description: '搜索代理地址 (如 http://127.0.0.1:7890)' },
    { key: 'search_strategy', value: 'wait_all', description: '搜索结果展示方式: wait_all=等待全部后展示, stream_channel=频道逐步展示' },
    { key: 'link_validation_enabled', value: 'true', description: '资源链接有效性检测开关(true/false)' },
    // Per-cloud-type visibility toggles
    { key: 'cloud_enabled_quark', value: 'true', description: '夸克网盘' },
    { key: 'cloud_enabled_baidu', value: 'true', description: '百度网盘' },
    { key: 'cloud_enabled_aliyun', value: 'true', description: '阿里云盘' },
    { key: 'cloud_enabled_115', value: 'true', description: '115 网盘' },
    { key: 'cloud_enabled_tianyi', value: 'true', description: '天翼云盘' },
    { key: 'cloud_enabled_123pan', value: 'true', description: '123 云盘' },
    { key: 'cloud_enabled_uc', value: 'true', description: 'UC 网盘' },
    { key: 'cloud_enabled_xunlei', value: 'true', description: '迅雷网盘' },
    { key: 'cloud_enabled_pikpak', value: 'true', description: 'PikPak 网盘' },
    { key: 'cloud_enabled_magnet', value: 'true', description: '磁力链接' },
    { key: 'cloud_enabled_ed2k', value: 'true', description: '电驴链接' },
    { key: 'cloud_enabled_others', value: 'false', description: '其他类型(默认关闭)' },
    { key: 'search_result_limit', value: '10', description: '每类网盘最多展示的有效结果数' },
    // Site branding / UI text
    { key: 'search_fallback_image', value: '', description: '无图资源的兜底封面图 URL(留空使用渐变色)' },
    { key: 'site_logo', value: '', description: '网站 LOGO 图片 URL(留空使用默认图标/文字)' },
    { key: 'site_name', value: 'CloudSearch', description: '网站名称(显示在首页标题/页脚)' },
    { key: 'site_disclaimer', value: '本站为非盈利性个人站点,所有资源仅供学习、研究使用,版权归原作者所有。请于下载后24小时内删除,切勿用于商业或非法用途。若侵犯了您的权益,请联系我们(邮箱:3337598077@qq.com),我们将及时处理。', description: '网站底部免责声明' },
    { key: 'site_marquee', value: '📢 欢迎使用CloudSearch,所有资源仅供学习交流,请于下载后24小时内删除', description: '搜索栏下方滚动通知文字(从右往左滚动显示)' },
    // External integrations
    { key: 'tmdb_api_token', value: '', description: 'TMDB API 读取令牌(用于增强豆瓣内容信息)' },
    { key: 'ip_geo_api_url', value: 'https://cn.apihz.cn/api/ip/chaapi.php?id=10014356&key=ca7ccb3b9ca044dd993c8604bc9afd93&ip={ip}&td=0', description: 'IP 归属地查询接口({ip} 会被替换为实际IP)' },
    { key: 'ip_geo_api_key', value: '', description: 'IP 归属地备用 API Key(留空使用默认)' },
    { key: 'title_filter_rules', value: '', description: '搜索结果标题过滤规则(一行一条:纯文本直接移除 / 正则用/包围/)' },
    { key: 'timezone', value: 'Asia/Shanghai', description: '系统时区(如 Asia/Shanghai、America/New_York、UTC)' },
    { key: 'redis_url', value: 'redis://redis:6379', description: 'Redis 连接地址(用于缓存优化)' },
    { key: 'pansou_auth_token', value: '', description: 'PanSou API 认证令牌(用于私有搜索服务)' },
    { key: 'pansou_web_enabled', value: 'false', description: '启用 PanSou Web 端访问(在 /pansou 路径提供 PanSou 搜索引擎管理界面)' },
    // Automatic cleanup policy
    { key: 'cleanup_enabled', value: 'true', description: '启用自动清理(每天检查一次,移入回收站+清空日志+清空回收站)' },
    { key: 'cleanup_file_retention_days', value: '7', description: '云盘文件保留天数(超过此天数的日期文件夹将被移入回收站)' },
    { key: 'cleanup_log_retention_days', value: '30', description: '转存日志保留天数' },
    { key: 'cleanup_empty_trash', value: 'true', description: '清理时是否清空回收站(永久删除释放空间)' },
    { key: 'cleanup_space_threshold_enabled', value: 'false', description: '启用空间阈值自动清理(已用空间超过XX%时按比例删除最旧的转存文件)' },
    { key: 'cleanup_space_threshold_percent', value: '90', description: '空间使用阈值百分比(超过此值时触发强制清理)' },
    { key: 'cleanup_space_threshold_delete_percent', value: '10', description: '触发阈值清理时释放总空间的百分比(如 10 表示累计删除最旧文件直到达到总空间的 10%,6TB 总空间 → 释放 ~600GB)' },
    { key: 'save_reuse_enabled', value: 'true', description: '启用分享链接复用(相同原始链接不再重复转存,直接复用之前的分享链接)' },
    // Cleanup bookkeeping (written by the cleanup job itself)
    { key: 'cleanup_last_run', value: '', description: '上次自动清理时间' },
    { key: 'cleanup_last_stats', value: '', description: '上次清理结果统计(JSON)' },
  ];
  const insert = db.prepare(
    'INSERT OR IGNORE INTO system_configs (key, value, description) VALUES (?, ?, ?)'
  );
  for (const entry of defaults) {
    insert.run(entry.key, entry.value, entry.description);
  }
}
|
||||
|
||||
/** 清理 60 天前的转存记录 */
|
||||
function cleanupOldSaveRecords(db: Database.Database): void {
|
||||
const cutoff = formatLocalDateTime(new Date(Date.now() - 60 * 24 * 60 * 60 * 1000));
|
||||
const deleted = db.prepare('DELETE FROM save_records WHERE created_at < ?').run(cutoff);
|
||||
console.log(`[DB] Cleaned up ${deleted.changes} save records older than 60 days (before ${cutoff})`);
|
||||
}
|
||||
|
||||
export default getDb;
|
||||
623
packages/backend/src/cloud/drivers/CloudConfig.vue
Executable file
623
packages/backend/src/cloud/drivers/CloudConfig.vue
Executable file
@@ -0,0 +1,623 @@
|
||||
<template>
|
||||
<div class="cloud-config">
|
||||
<!-- 网盘类型开关 -->
|
||||
<el-card class="toggle-card" style="margin-bottom: 20px;">
|
||||
<template #header><span>📂 网盘设置及授权</span></template>
|
||||
<div class="cloud-toggle-grid">
|
||||
<div
|
||||
v-for="ct in cloudTypes"
|
||||
:key="ct.type"
|
||||
class="cloud-toggle-chip"
|
||||
>
|
||||
<img :src="ct.icon" class="cloud-icon-img" />
|
||||
<span class="cloud-label">{{ ct.label }}</span>
|
||||
<el-tag v-if="ct.type === 'others'" size="small" type="info">关</el-tag>
|
||||
<el-switch
|
||||
:model-value="ct.enabled"
|
||||
size="small"
|
||||
@change="(val: boolean) => handleCloudToggle(ct.type, val)"
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
<div class="form-tip" style="margin-top: 12px;">
|
||||
关闭的网盘类型在搜索结果中不会展示。修改后立即生效,无需点击保存。
|
||||
</div>
|
||||
</el-card>
|
||||
|
||||
<div class="toolbar">
|
||||
<el-button type="primary" @click="openDialog(null)">新增配置</el-button>
|
||||
<el-button @click="verifyAll">全部重新验证</el-button>
|
||||
</div>
|
||||
|
||||
<el-table :data="configs" stripe style="width: 100%">
|
||||
<el-table-column label="网盘类型" width="110">
|
||||
<template #default="{ row }">
|
||||
<CloudBadge :cloud_type="row.cloud_type" />
|
||||
</template>
|
||||
</el-table-column>
|
||||
<el-table-column prop="nickname" label="昵称" width="140">
|
||||
<template #default="{ row }">
|
||||
<span v-if="row.nickname" class="nickname-text">{{ row.nickname }}</span>
|
||||
<el-text v-else type="info" size="small">未设置</el-text>
|
||||
</template>
|
||||
</el-table-column>
|
||||
<el-table-column prop="cloud_type_uid" label="标识(__uid)" width="180">
|
||||
<template #default="{ row }">
|
||||
<span v-if="row.cloud_type_uid" class="uid-cell">{{ row.cloud_type_uid }}</span>
|
||||
<el-text v-else type="info" size="small">-</el-text>
|
||||
</template>
|
||||
</el-table-column>
|
||||
<el-table-column label="验证" width="100" align="center">
|
||||
<template #default="{ row }">
|
||||
<span v-if="row._verifying" class="verifying">
|
||||
<el-icon class="is-loading"><Loading /></el-icon>
|
||||
</span>
|
||||
<el-tag v-else-if="row.verification_status === 'valid'" type="success" size="small">有效</el-tag>
|
||||
<el-tag v-else-if="row.verification_status === 'invalid'" type="danger" size="small">无效</el-tag>
|
||||
<el-tag v-else type="info" size="small">未验证</el-tag>
|
||||
</template>
|
||||
</el-table-column>
|
||||
<el-table-column label="空间" width="200">
|
||||
<template #default="{ row }">
|
||||
<div v-if="row.storage_total" class="storage-cell">
|
||||
<div class="storage-bar-wrap">
|
||||
<div
|
||||
class="storage-bar-fill"
|
||||
:style="{ width: storagePercent(row) + '%' }"
|
||||
:class="storageBarClass(row)"
|
||||
></div>
|
||||
</div>
|
||||
<div class="storage-text">
|
||||
<span class="storage-used">{{ row.storage_used || '?' }}</span>
|
||||
<span class="storage-sep">/</span>
|
||||
<span class="storage-total">{{ row.storage_total }}</span>
|
||||
<span class="storage-free">(可用 {{ storageFree(row) }})</span>
|
||||
</div>
|
||||
</div>
|
||||
<el-text v-else type="info" size="small">—</el-text>
|
||||
</template>
|
||||
</el-table-column>
|
||||
<!-- 转存统计 -->
|
||||
<el-table-column label="转存" width="80" align="center">
|
||||
<template #default="{ row }">
|
||||
<span v-if="row.total_saves > 0" class="save-count">{{ row.total_saves }}次</span>
|
||||
<el-text v-else type="info" size="small">-</el-text>
|
||||
</template>
|
||||
</el-table-column>
|
||||
<el-table-column label="操作" width="390" align="center">
|
||||
<template #default="{ row }">
|
||||
<el-button text type="primary" @click="openDialog(row)">编辑</el-button>
|
||||
<el-button text type="primary" @click="verifyOne(row)">验证</el-button>
|
||||
<el-popconfirm title="确定删除该配置?" @confirm="handleDelete(row)">
|
||||
<template #reference>
|
||||
<el-button text type="danger">删除</el-button>
|
||||
</template>
|
||||
</el-popconfirm>
|
||||
</template>
|
||||
</el-table-column>
|
||||
</el-table>
|
||||
|
||||
<!-- 新增/编辑弹窗 -->
|
||||
<el-dialog v-model="dialogVisible" :title="editingId ? '编辑配置' : '新增配置'" width="560px">
|
||||
<el-form ref="formRef" :model="form" :rules="rules" label-width="100px">
|
||||
<el-form-item label="网盘类型" prop="cloud_type">
|
||||
<el-select v-model="form.cloud_type" style="width: 100%" :disabled="!!editingId" @change="onCloudTypeChange">
|
||||
<el-option
|
||||
v-for="[key, label] in cloudTypeOptions"
|
||||
:key="key"
|
||||
:label="label"
|
||||
:value="key"
|
||||
/>
|
||||
</el-select>
|
||||
</el-form-item>
|
||||
<el-form-item label="昵称" prop="nickname">
|
||||
<el-input v-model="form.nickname" placeholder="必填,用于区分多个同类型网盘">
|
||||
<template #append>
|
||||
<el-button :loading="form._verifying" @click="verifyAndFillNickname">自动获取</el-button>
|
||||
</template>
|
||||
</el-input>
|
||||
</el-form-item>
|
||||
<el-form-item label="Cookie" prop="cookie">
|
||||
<el-input
|
||||
v-model="form.cookie"
|
||||
type="textarea"
|
||||
:autosize="{ minRows: 2, maxRows: 4 }"
|
||||
:placeholder="cookiePlaceholder"
|
||||
input-style="font-family: monospace; font-size: 12px;"
|
||||
/>
|
||||
</el-form-item>
|
||||
<!-- Cookie 获取教程(根据网盘类型切换) -->
|
||||
<el-form-item label=" " v-if="form.cloud_type && form.cloud_type !== ''" class="cookie-tips-item">
|
||||
<div class="cookie-tips" :class="`cookie-tips-${form.cloud_type}`">
|
||||
<div class="cookie-tips-header">
|
||||
<span class="cookie-tips-title">📖 {{ cloudTypeLabel }} Cookie 获取教程</span>
|
||||
</div>
|
||||
<ol class="cookie-tips-steps" v-html="cookieTutorialHtml"></ol>
|
||||
</div>
|
||||
</el-form-item>
|
||||
</el-form>
|
||||
<template #footer>
|
||||
<el-button @click="dialogVisible = false">取消</el-button>
|
||||
<el-button type="primary" :loading="saving" @click="handleSave">保存</el-button>
|
||||
</template>
|
||||
</el-dialog>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<script setup lang="ts">
|
||||
import { ref, reactive, computed, onMounted, nextTick, onUnmounted } from 'vue'
|
||||
import { Loading } from '@element-plus/icons-vue'
|
||||
import { CLOUD_LABELS } from '../../types'
|
||||
import type { CloudType, CloudConfig } from '../../types'
|
||||
import { ElMessage } from 'element-plus'
|
||||
import { getCloudConfigs, saveCloudConfig, updateCloudConfig, deleteCloudConfig, testCloudConnection, getCloudTypes, toggleCloudType } from '../../api'
|
||||
import CloudBadge from '../../components/CloudBadge.vue'
|
||||
import type { ElForm } from 'element-plus'
|
||||
|
||||
// Cloud-type toggle entries loaded via getCloudTypes()
interface CloudTypeInfo { type: string; label: string; icon: string; enabled: boolean }
const cloudTypes = ref<CloudTypeInfo[]>([])

// Dialog form instance, used to trigger validation before save
const formRef = ref<InstanceType<typeof ElForm>>()
// Configured cloud accounts; _verifying is a transient per-row "checking" flag
const configs = ref<(CloudConfig & { _verifying?: boolean })[]>([])
const dialogVisible = ref(false) // add/edit dialog visibility
const saving = ref(false) // save-button loading state
const editingId = ref<number | null>(null) // non-null means edit mode
|
||||
|
||||
const defaultForm = () => ({
|
||||
cloud_type: '' as CloudType | '',
|
||||
nickname: '',
|
||||
cookie: '',
|
||||
_verifying: false,
|
||||
_storageUsed: '',
|
||||
_storageTotal: '',
|
||||
})
|
||||
|
||||
// Reactive state backing the add/edit dialog form.
// _storageUsed/_storageTotal cache the quota values returned by the
// "自动获取" verification so they can be submitted together on save.
const form = reactive<{
  cloud_type: CloudType | ''
  nickname: string
  cookie: string
  _verifying: boolean
  _storageUsed: string
  _storageTotal: string
}>(defaultForm())
|
||||
|
||||
const rules = computed(() => ({
|
||||
cloud_type: [{ required: true, message: '请选择网盘类型', trigger: 'change' }],
|
||||
nickname: [{ required: true, message: '请填写昵称(区分多个同类型网盘)', trigger: 'blur' }],
|
||||
}))
|
||||
|
||||
const cloudTypeOptions = computed(() => {
|
||||
return Object.entries(CLOUD_LABELS) as [CloudType, string][]
|
||||
})
|
||||
|
||||
const cookiePlaceholder = computed(() => {
|
||||
if (!form.cloud_type) return '请先选择网盘类型'
|
||||
const t = form.cloud_type
|
||||
if (t === 'quark' || t === 'baidu') return `请输入 ${CLOUD_LABELS[t] || t} 的完整 Cookie`
|
||||
return editingId.value ? '留空则保持原有' : '输入完整 Cookie'
|
||||
})
|
||||
|
||||
const cloudTypeLabel = computed(() => {
|
||||
return CLOUD_LABELS[form.cloud_type as CloudType] || form.cloud_type || ''
|
||||
})
|
||||
|
||||
/** Cookie 获取教程 HTML(根据不同网盘类型) */
|
||||
const cookieTutorialHtml = computed(() => {
|
||||
const t = form.cloud_type
|
||||
if (!t) return ''
|
||||
const tutorials: Record<string, string> = {
|
||||
quark: `<li>在电脑上打开 <a href="https://pan.quark.cn" target="_blank">pan.quark.cn</a> 并登录你的夸克账号</li>
|
||||
<li>按 <code>F12</code> 打开开发者工具 → 切换到 <strong>网络 (Network)</strong> 选项卡</li>
|
||||
<li>刷新页面,在请求列表中点击任意一个请求(如 <code>account/info</code>)</li>
|
||||
<li>在右侧 <strong>请求头 (Request Headers)</strong> 中找到 <code>Cookie</code> 字段</li>
|
||||
<li>复制整个 Cookie 值(<b>从开头到结束的完整内容</b>),粘贴到上方输入框</li>
|
||||
<li>点击「<b>自动获取</b>」按钮验证 Cookie 是否有效</li>
|
||||
.cookie-tips-note">⚠️ 必须包含 <code>__st=s%...</code> 字段!请复制浏览器请求头的 <b>整个 Cookie</b>(F12 → Network → 请求头 → Cookie 项),不要只复制部分。</div>`,
|
||||
|
||||
baidu: `<li>在电脑上打开 <a href="https://pan.baidu.com" target="_blank">pan.baidu.com</a> 并登录你的百度账号</li>
|
||||
<li>按 <code>F12</code> 打开开发者工具 → 切换到 <strong>网络 (Network)</strong> 选项卡</li>
|
||||
<li>刷新页面,在请求列表中点击任意一个请求</li>
|
||||
<li>在右侧 <strong>请求头 (Request Headers)</strong> 中找到 <code>Cookie</code> 字段</li>
|
||||
<li>复制整个 Cookie 值,粘贴到上方输入框</li>
|
||||
<li>点击「<b>自动获取</b>」按钮验证 Cookie 是否有效</li>
|
||||
<div class="cookie-tips-note">💡 需要包含 <code>BDUSS</code> 和 <code>STOKEN</code></div>`,
|
||||
|
||||
aliyun: `<li>在电脑上打开 <a href="https://www.aliyundrive.com" target="_blank">aliyundrive.com</a> 并登录</li>
|
||||
<li>按 <code>F12</code> 打开开发者工具 → <strong>网络 (Network)</strong></li>
|
||||
<li>刷新页面,找到任意请求 → 复制 <code>Cookie</code></li>
|
||||
<li>粘贴到上方输入框,点击「自动获取」验证</li>
|
||||
<div class="cookie-tips-note">💡 需包含 <code>token</code> 等有效字段</div>`,
|
||||
|
||||
'115': `<li>在电脑上打开 <a href="https://115.com" target="_blank">115.com</a> 并登录</li>
|
||||
<li>按 <code>F12</code> 打开开发者工具 → <strong>网络 (Network)</strong></li>
|
||||
<li>刷新页面,找到任意请求 → 复制 <code>Cookie</code></li>
|
||||
<li>粘贴到上方输入框,点击「自动获取」验证</li>
|
||||
<div class="cookie-tips-note">💡 需包含 <code>UID</code>、<code>CID</code>、<code>SEID</code> 等字段</div>`,
|
||||
|
||||
tianyi: `<li>在电脑上打开 <a href="https://cloud.189.cn" target="_blank">cloud.189.cn</a> 并登录</li>
|
||||
<li>按 <code>F12</code> 打开开发者工具 → <strong>网络 (Network)</strong></li>
|
||||
<li>刷新页面,找到任意请求 → 复制 <code>Cookie</code></li>
|
||||
<li>粘贴到上方输入框,点击「自动获取」验证</li>
|
||||
<div class="cookie-tips-note">💡 需包含 <code>COOKIE_LOGIN_USER</code>、<code>SESSION</code> 等字段</div>`,
|
||||
|
||||
'123pan': `<li>在电脑上打开 <a href="https://www.123pan.com" target="_blank">123pan.com</a> 并登录</li>
|
||||
<li>按 <code>F12</code> 打开开发者工具 → <strong>网络 (Network)</strong></li>
|
||||
<li>刷新页面,找到任意请求 → 复制 <code>Cookie</code></li>
|
||||
<li>粘贴到上方输入框,点击「自动获取」验证</li>`,
|
||||
|
||||
uc: `<li>在电脑上打开 <a href="https://drive.uc.cn" target="_blank">drive.uc.cn</a> 并登录</li>
|
||||
<li>按 <code>F12</code> 打开开发者工具 → <strong>网络 (Network)</strong></li>
|
||||
<li>刷新页面,找到任意请求 → 复制 <code>Cookie</code></li>
|
||||
<li>粘贴到上方输入框,点击「自动获取」验证</li>`,
|
||||
|
||||
xunlei: `<li>在电脑上打开 <a href="https://pan.xunlei.com" target="_blank">pan.xunlei.com</a> 并登录</li>
|
||||
<li>按 <code>F12</code> 打开开发者工具 → <strong>网络 (Network)</strong></li>
|
||||
<li>刷新页面,找到任意请求 → 复制 <code>Cookie</code></li>
|
||||
<li>粘贴到上方输入框,点击「自动获取」验证</li>`,
|
||||
|
||||
pikpak: `<li>在电脑上打开 <a href="https://www.mypikpak.com" target="_blank">mypikpak.com</a> 并登录</li>
|
||||
<li>按 <code>F12</code> 打开开发者工具 → <strong>网络 (Network)</strong></li>
|
||||
<li>刷新页面,找到任意请求 → 复制 <code>Cookie</code></li>
|
||||
<li>粘贴到上方输入框,点击「自动获取」验证</li>`,
|
||||
}
|
||||
return tutorials[t] || `<li>在电脑上打开该网盘网站并登录</li>
|
||||
<li>按 <code>F12</code> 打开开发者工具 → <strong>网络 (Network)</strong></li>
|
||||
<li>刷新页面,复制任意请求的 <code>Cookie</code></li>
|
||||
<li>粘贴到上方输入框,点击「自动获取」验证</li>`
|
||||
})
|
||||
|
||||
// Initial load: configs first, then the cloud-type toggles
onMounted(async () => {
  await loadConfigs()
  await loadCloudTypes()
})
|
||||
|
||||
// Re-verify all accounts automatically every 30 minutes
let verifyTimer: ReturnType<typeof setInterval> | null = null
onMounted(() => {
  verifyTimer = setInterval(() => {
    autoVerifyAll()
  }, 30 * 60 * 1000)
})
// Clear the interval on unmount so the timer does not leak
onUnmounted(() => {
  if (verifyTimer) clearInterval(verifyTimer)
})
|
||||
|
||||
async function loadCloudTypes() {
|
||||
try {
|
||||
const result = await getCloudTypes()
|
||||
cloudTypes.value = result.types
|
||||
} catch (e) { console.error('加载网盘类型失败', e) }
|
||||
}
|
||||
|
||||
async function handleCloudToggle(type: string, enabled: boolean) {
|
||||
const ct = cloudTypes.value.find(c => c.type === type)
|
||||
if (!ct) return
|
||||
try {
|
||||
await toggleCloudType(type, enabled)
|
||||
ct.enabled = enabled
|
||||
} catch (e: any) { ElMessage.error(e.message || '切换失败'); ct.enabled = !enabled }
|
||||
}
|
||||
|
||||
async function loadConfigs() {
|
||||
try {
|
||||
configs.value = await getCloudConfigs()
|
||||
} catch (e) {
|
||||
console.error('加载网盘配置失败', e)
|
||||
}
|
||||
}
|
||||
|
||||
async function autoVerifyAll() {
|
||||
for (const cfg of configs.value) {
|
||||
if (cfg.cookie_preview || cfg.nickname) {
|
||||
await verifyOne(cfg, true)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async function verifyAll() {
|
||||
for (const cfg of configs.value) {
|
||||
if ((cfg.cookie_preview || cfg.nickname) && !cfg._verifying) {
|
||||
await verifyOne(cfg, false)
|
||||
}
|
||||
}
|
||||
ElMessage.success('全部验证完成')
|
||||
}
|
||||
|
||||
async function verifyOne(row: CloudConfig & { _verifying?: boolean }, silent = false) {
|
||||
if (!row.cookie_preview && !row.nickname) {
|
||||
if (!silent) ElMessage.warning('该配置没有 Cookie,请先编辑保存后再验证')
|
||||
return
|
||||
}
|
||||
row._verifying = true
|
||||
try {
|
||||
const result = await testCloudConnection(row.cloud_type, undefined, row.id)
|
||||
row.verification_status = result.success ? 'valid' : 'invalid'
|
||||
if (result.success) {
|
||||
if (result.nickname && !row.nickname) row.nickname = result.nickname
|
||||
if (result.storage_used) row.storage_used = result.storage_used
|
||||
if (result.storage_total) row.storage_total = result.storage_total
|
||||
if (!silent) ElMessage.success(`${CLOUD_LABELS[row.cloud_type]}:${result.message}`)
|
||||
} else {
|
||||
if (!silent) ElMessage.error(`${CLOUD_LABELS[row.cloud_type]}:${result.message}`)
|
||||
}
|
||||
} catch (e: any) {
|
||||
row.verification_status = 'invalid'
|
||||
if (!silent) ElMessage.error(`${CLOUD_LABELS[row.cloud_type]}:验证失败`)
|
||||
} finally {
|
||||
row._verifying = false
|
||||
}
|
||||
}
|
||||
|
||||
async function verifyAndFillNickname() {
|
||||
if (!form.cookie) {
|
||||
ElMessage.warning('请先输入 Cookie')
|
||||
return
|
||||
}
|
||||
if (!form.cloud_type) {
|
||||
ElMessage.warning('请先选择网盘类型')
|
||||
return
|
||||
}
|
||||
form._verifying = true
|
||||
try {
|
||||
const result = await testCloudConnection(form.cloud_type as CloudType, form.cookie)
|
||||
if (result.success) {
|
||||
if (result.nickname) form.nickname = result.nickname
|
||||
if (result.storage_used) form._storageUsed = result.storage_used
|
||||
if (result.storage_total) form._storageTotal = result.storage_total
|
||||
ElMessage.success(`昵称:${result.nickname || '获取成功'}`)
|
||||
} else {
|
||||
ElMessage.warning(result.message || '验证失败,请检查 Cookie')
|
||||
}
|
||||
} catch (e: any) {
|
||||
ElMessage.error(e.response?.data?.error || '验证失败,请检查 Cookie')
|
||||
} finally {
|
||||
form._verifying = false
|
||||
}
|
||||
}
|
||||
|
||||
function openDialog(row: CloudConfig | null) {
|
||||
if (row) {
|
||||
editingId.value = row.id ?? null
|
||||
form.cloud_type = row.cloud_type
|
||||
form.nickname = row.nickname || ''
|
||||
form.cookie = row.cookie || ''
|
||||
form._verifying = false
|
||||
} else {
|
||||
editingId.value = null
|
||||
form.cloud_type = '' as CloudType | ''
|
||||
form.nickname = ''
|
||||
form.cookie = ''
|
||||
form._verifying = false
|
||||
}
|
||||
dialogVisible.value = true
|
||||
}
|
||||
|
||||
// Change handler for the cloud-type select. Intentionally empty: the
// placeholder and tutorial texts are computed properties and update on
// their own.
function onCloudTypeChange() {
  // Cookie 输入框提示会自动更新(computed)
}
|
||||
|
||||
/**
 * Persist the dialog form: edit mode calls updateCloudConfig, add mode
 * calls saveCloudConfig. After creating a new config whose quota was not
 * obtained via "自动获取", a connection test is run against the saved id.
 * On success the dialog closes and the table reloads.
 */
async function handleSave() {
  // Element Plus validate() rejects on failure; coerce to false
  const valid = await formRef.value?.validate().catch(() => false)
  if (!valid) return

  saving.value = true
  try {
    if (editingId.value) {
      await updateCloudConfig({
        id: editingId.value,
        cloud_type: form.cloud_type as CloudType,
        nickname: form.nickname,
        // In edit mode an empty cookie means "keep the stored one"
        cookie: form.cookie || undefined,
        is_active: true,
        storage_used: form._storageUsed || undefined,
        storage_total: form._storageTotal || undefined,
      })
      ElMessage.success('配置更新成功')
    } else {
      const saved = await saveCloudConfig({
        cloud_type: form.cloud_type as CloudType,
        nickname: form.nickname,
        cookie: form.cookie,
        is_active: true,
        storage_used: form._storageUsed || undefined,
        storage_total: form._storageTotal || undefined,
      })
      ElMessage.success('配置保存成功')
      // Quota unknown → verify the connection right after saving
      if (!form._storageTotal) {
        const result = await testCloudConnection(form.cloud_type as CloudType, undefined, saved.id)
        if (!result.success) {
          ElMessage.warning(`配置已保存,但连接验证失败:${result.message}`)
        }
      }
    }
    dialogVisible.value = false
    editingId.value = null
    await loadConfigs()
  } catch (e: any) {
    ElMessage.error(e.response?.data?.error || '保存失败')
  } finally {
    saving.value = false
  }
}
|
||||
|
||||
async function handleDelete(row: CloudConfig) {
|
||||
try {
|
||||
await deleteCloudConfig(row.id!)
|
||||
ElMessage.success('删除成功')
|
||||
await loadConfigs()
|
||||
} catch (e) {
|
||||
ElMessage.error('删除失败')
|
||||
}
|
||||
}
|
||||
|
||||
/** 解析字节数 → 数值 */
|
||||
function parseBytes(s: string): number {
|
||||
const m = s.match(/^([\d.]+)\s*(B|KB|MB|GB|TB)$/i)
|
||||
if (!m) return 0
|
||||
const n = parseFloat(m[1])
|
||||
const units: Record<string, number> = { B: 1, KB: 1024, MB: 1024**2, GB: 1024**3, TB: 1024**4 }
|
||||
return n * (units[m[2].toUpperCase()] || 1)
|
||||
}
|
||||
|
||||
function storagePercent(row: CloudConfig): number {
|
||||
if (!row.storage_total || !row.storage_used) return 0
|
||||
const total = parseBytes(row.storage_total)
|
||||
const used = parseBytes(row.storage_used)
|
||||
if (total === 0) return 0
|
||||
return Math.min(100, Math.round((used / total) * 100))
|
||||
}
|
||||
|
||||
function storageBarClass(row: CloudConfig): string {
|
||||
const pct = storagePercent(row)
|
||||
if (pct >= 90) return 'bar-danger'
|
||||
if (pct >= 70) return 'bar-warning'
|
||||
return 'bar-normal'
|
||||
}
|
||||
|
||||
function storageFree(row: CloudConfig): string {
|
||||
if (!row.storage_total || !row.storage_used) return '?'
|
||||
const total = parseBytes(row.storage_total)
|
||||
const used = parseBytes(row.storage_used)
|
||||
if (total === 0) return '?'
|
||||
const free = total - used
|
||||
if (free < 1024) return '小于 1 KB'
|
||||
if (free < 1024 * 1024) return (free / 1024).toFixed(1) + ' KB'
|
||||
if (free < 1024 * 1024 * 1024) return (free / (1024 * 1024)).toFixed(1) + ' MB'
|
||||
if (free < 1024 * 1024 * 1024 * 1024) return (free / (1024 * 1024 * 1024)).toFixed(1) + ' GB'
|
||||
return (free / (1024 * 1024 * 1024 * 1024)).toFixed(1) + ' TB'
|
||||
}
|
||||
</script>
|
||||
|
||||
<style scoped>
|
||||
.cloud-config {
|
||||
background: var(--bg-white);
|
||||
border-radius: var(--radius-card);
|
||||
padding: 24px;
|
||||
}
|
||||
.cloud-toggle-grid { display: flex; flex-wrap: wrap; gap: 12px; }
|
||||
.cloud-toggle-chip { display: flex; align-items: center; gap: 8px; padding: 8px 12px; border: 1px solid var(--el-border-color-light); border-radius: 8px; background: var(--el-bg-color); }
|
||||
.cloud-toggle-chip:hover { border-color: var(--el-color-primary-light-5); }
|
||||
.cloud-icon-img { width: 20px; height: 20px; object-fit: contain; }
|
||||
.cloud-label { font-size: 13px; font-weight: 500; }
|
||||
.form-tip { font-size: 12px; color: var(--el-text-color-secondary); }
|
||||
.toolbar {
|
||||
margin-bottom: 16px;
|
||||
display: flex;
|
||||
gap: 8px;
|
||||
align-items: center;
|
||||
flex-wrap: wrap;
|
||||
}
|
||||
.sign-summary-tag {
|
||||
margin-left: 4px;
|
||||
}
|
||||
.nickname-text {
|
||||
font-weight: 600;
|
||||
color: #303133;
|
||||
}
|
||||
.uid-cell {
|
||||
font-family: 'SF Mono', Monaco, 'Cascadia Code', monospace;
|
||||
font-size: 11px;
|
||||
color: #909399;
|
||||
letter-spacing: 0.3px;
|
||||
}
|
||||
/* 空间进度条 */
|
||||
.storage-cell {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 3px;
|
||||
padding: 2px 0;
|
||||
}
|
||||
.storage-bar-wrap {
|
||||
height: 4px;
|
||||
background: #f0f2f5;
|
||||
border-radius: 2px;
|
||||
overflow: hidden;
|
||||
}
|
||||
.storage-bar-fill {
|
||||
height: 100%;
|
||||
border-radius: 2px;
|
||||
transition: width 0.3s;
|
||||
}
|
||||
.storage-bar-fill.bar-normal { background: #67c23a; }
|
||||
.storage-bar-fill.bar-warning { background: #e6a23c; }
|
||||
.storage-bar-fill.bar-danger { background: #f56c6c; }
|
||||
.storage-text {
|
||||
font-size: 11px;
|
||||
color: #909399;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 3px;
|
||||
}
|
||||
.storage-used { color: #606266; font-weight: 600; }
|
||||
.storage-total { color: #303133; font-weight: 600; }
|
||||
.storage-free { color: #909399; }
|
||||
.save-count {
|
||||
font-size: 12px;
|
||||
color: #909399;
|
||||
}
|
||||
.verifying {
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
gap: 4px;
|
||||
font-size: 12px;
|
||||
color: #909399;
|
||||
}
|
||||
:deep(.el-input-group__append) {
|
||||
padding: 0;
|
||||
}
|
||||
:deep(.el-input-group__append .el-button) {
|
||||
border-radius: 0;
|
||||
}
|
||||
|
||||
/* Cookie 教程卡片 */
|
||||
.cookie-tips-item :deep(.el-form-item__content) {
|
||||
margin-left: 0 !important;
|
||||
}
|
||||
.cookie-tips {
|
||||
background: #f8faff;
|
||||
border: 1px solid #e8f0fe;
|
||||
border-radius: 8px;
|
||||
padding: 14px 16px;
|
||||
font-size: 12px;
|
||||
line-height: 1.8;
|
||||
color: #606266;
|
||||
width: 100%;
|
||||
box-sizing: border-box;
|
||||
}
|
||||
.cookie-tips-header {
|
||||
margin-bottom: 10px;
|
||||
}
|
||||
.cookie-tips-title {
|
||||
font-weight: 700;
|
||||
color: #409eff;
|
||||
font-size: 13px;
|
||||
}
|
||||
.cookie-tips-steps {
|
||||
margin: 0;
|
||||
padding-left: 20px;
|
||||
}
|
||||
.cookie-tips-steps li {
|
||||
margin-bottom: 4px;
|
||||
}
|
||||
.cookie-tips-steps code {
|
||||
background: #ecf5ff;
|
||||
padding: 1px 5px;
|
||||
border-radius: 3px;
|
||||
font-size: 11px;
|
||||
font-family: 'SF Mono', Monaco, 'Cascadia Code', monospace;
|
||||
}
|
||||
.cookie-tips-note {
|
||||
margin-top: 8px;
|
||||
padding: 6px 10px;
|
||||
background: #fffbe6;
|
||||
border: 1px solid #fff3c4;
|
||||
border-radius: 4px;
|
||||
color: #8a6d3b;
|
||||
font-size: 11px;
|
||||
line-height: 1.5;
|
||||
}
|
||||
.cookie-tips-note code {
|
||||
background: #f5f0e0;
|
||||
font-size: 11px;
|
||||
}
|
||||
</style>
|
||||
113
packages/backend/src/cloud/drivers/aliyun.driver.ts
Executable file
113
packages/backend/src/cloud/drivers/aliyun.driver.ts
Executable file
@@ -0,0 +1,113 @@
|
||||
// Native fetch available in Node 20+
|
||||
|
||||
export interface AliyunConfig {
|
||||
cookie?: string;
|
||||
nickname?: string;
|
||||
}
|
||||
|
||||
export class AliyunDriver {
|
||||
private config: AliyunConfig;
|
||||
private baseUrl = 'https://api.aliyundrive.com';
|
||||
|
||||
constructor(config: AliyunConfig = {}) {
|
||||
this.config = config;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract share_id from an Aliyun share URL.
|
||||
* Supports:
|
||||
* https://www.aliyundrive.com/s/XXXYYY
|
||||
* https://www.alipan.com/s/XXXYYY
|
||||
* https://api.aliyundrive.com/v2/share_link/XXXYYY
|
||||
*/
|
||||
private extractShareId(shareUrl: string): string | null {
|
||||
try {
|
||||
const url = new URL(shareUrl);
|
||||
const pathMatch = url.pathname.match(/\/s\/([a-zA-Z0-9]+)/);
|
||||
if (pathMatch) return pathMatch[1];
|
||||
|
||||
const shareMatch = url.pathname.match(/\/share_link\/([a-zA-Z0-9]+)/);
|
||||
if (shareMatch) return shareMatch[1];
|
||||
|
||||
return null;
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate a share link using Aliyun's public anonymous API.
|
||||
* No cookie or token required — this endpoint is open.
|
||||
*
|
||||
* API:
|
||||
* POST https://api.aliyundrive.com/v2/share_link/get_share_by_anonymous
|
||||
* Body: { "share_id": "XXXYYY", "share_pwd": "" }
|
||||
*
|
||||
* Success: returns share_name, file_infos, creator info
|
||||
* Failure: returns error code (ShareLinkExpired, ShareLinkCancelled, etc.)
|
||||
*/
|
||||
async validateShareLink(shareUrl: string): Promise<{
|
||||
valid: boolean;
|
||||
message: string;
|
||||
fileCount?: number;
|
||||
shareName?: string;
|
||||
}> {
|
||||
const shareId = this.extractShareId(shareUrl);
|
||||
if (!shareId) {
|
||||
return { valid: false, message: '无法解析阿里云盘链接格式' };
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await fetch(
|
||||
`${this.baseUrl}/v2/share_link/get_share_by_anonymous`,
|
||||
{
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
|
||||
'Referer': 'https://www.aliyundrive.com/',
|
||||
'Accept-Language': 'zh-CN,zh;q=0.9',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
share_id: shareId,
|
||||
share_pwd: '',
|
||||
}),
|
||||
signal: AbortSignal.timeout(10000),
|
||||
}
|
||||
);
|
||||
|
||||
if (!response.ok) {
|
||||
return { valid: false, message: `HTTP ${response.status}: API 请求失败` };
|
||||
}
|
||||
|
||||
const data = await response.json() as any;
|
||||
|
||||
// Check for error codes
|
||||
if (data.code) {
|
||||
switch (data.code) {
|
||||
case 'ShareLinkExpired':
|
||||
return { valid: false, message: '分享已失效(已过期)' };
|
||||
case 'ShareLinkCancelled':
|
||||
return { valid: false, message: '分享已被取消' };
|
||||
case 'NotFound.ShareLink':
|
||||
return { valid: false, message: '分享链接不存在' };
|
||||
case 'ShareLinkPasswordIncorrect':
|
||||
return { valid: true, message: '需要提取码(链接有效)' };
|
||||
default:
|
||||
return { valid: false, message: data.message || `未知错误 (${data.code})` };
|
||||
}
|
||||
}
|
||||
|
||||
// Success — valid share link
|
||||
const fileInfos = data.file_infos || [];
|
||||
return {
|
||||
valid: true,
|
||||
message: `有效链接(${fileInfos.length} 个文件)`,
|
||||
fileCount: fileInfos.length,
|
||||
shareName: data.share_name || '',
|
||||
};
|
||||
} catch (err: any) {
|
||||
return { valid: false, message: `网络错误: ${err.message || err}` };
|
||||
}
|
||||
}
|
||||
}
|
||||
1189
packages/backend/src/cloud/drivers/baidu.driver.ts
Normal file
1189
packages/backend/src/cloud/drivers/baidu.driver.ts
Normal file
File diff suppressed because it is too large
Load Diff
289
packages/backend/src/cloud/drivers/quark-ad-cleanup.ts
Normal file
289
packages/backend/src/cloud/drivers/quark-ad-cleanup.ts
Normal file
@@ -0,0 +1,289 @@
|
||||
import { getSystemConfig } from "../../admin/system-config.service";
|
||||
import { getHeaders, makeQuery } from "./quark-api";
|
||||
import { listDir, listDirAllPages } from "./quark-api";
|
||||
import { humanDelay } from "./quark-api";
|
||||
|
||||
/**
|
||||
* 广告关键词清理模块。
|
||||
* 在转存完成后执行:
|
||||
* 1. 遍历转存的目录,删除文件名/文件夹名含广告关键词的内容
|
||||
* 2. 在转存根目录下创建警示文件夹(置顶提醒)
|
||||
*/
|
||||
|
||||
// ==================== 配置读取 ====================
|
||||
|
||||
/** 从 DB 读取广告关键词列表 */
|
||||
export function getAdKeywords(): string[] {
|
||||
const raw = getSystemConfig("quark_ad_keywords") || "";
|
||||
return raw
|
||||
.split("\n")
|
||||
.map((s) => s.trim())
|
||||
.filter(Boolean);
|
||||
}
|
||||
|
||||
/** 从 DB 读取警示文件夹名称列表 */
|
||||
export function getWarningFolderNames(): string[] {
|
||||
const raw = getSystemConfig("quark_warning_folder_names") || "";
|
||||
return raw
|
||||
.split("\n")
|
||||
.map((s) => s.trim())
|
||||
.filter(Boolean);
|
||||
}
|
||||
|
||||
/** 从 DB 读取可疑文件后缀列表 */
|
||||
export function getSusExtensions(): string[] {
|
||||
const raw = getSystemConfig("quark_sus_extensions") || "";
|
||||
if (raw.trim()) {
|
||||
return raw
|
||||
.split("\n")
|
||||
.map((s) => s.trim().toLowerCase().replace(/^\./, ""))
|
||||
.filter(Boolean);
|
||||
}
|
||||
// 默认可疑后缀
|
||||
return ["bat", "exe", "vbs", "scr", "cmd", "com", "pif", "js", "jar", "msi", "reg", "inf", "ps1"];
|
||||
}
|
||||
|
||||
// ==================== 关键词检测 ====================
|
||||
|
||||
/** 检查文件名是否包含任意广告关键词 */
|
||||
export function containsAdKeyword(
|
||||
fileName: string,
|
||||
keywords: string[],
|
||||
): boolean {
|
||||
if (!keywords.length) return false;
|
||||
const lower = fileName.toLowerCase();
|
||||
return keywords.some((kw) => kw && lower.includes(kw.toLowerCase()));
|
||||
}
|
||||
|
||||
// ==================== 删除操作 ====================
|
||||
|
||||
/**
|
||||
* 遍历指定目录(含子目录),删除匹配广告关键词的文件和文件夹。
|
||||
* 返回删除的文件数。
|
||||
*/
|
||||
export async function deleteAdFiles(
|
||||
cookie: string,
|
||||
dirFid: string,
|
||||
keywords: string[],
|
||||
): Promise<number> {
|
||||
if (!keywords.length) return 0;
|
||||
|
||||
let deletedCount = 0;
|
||||
const stack: string[] = [dirFid];
|
||||
const visited = new Set<string>();
|
||||
|
||||
while (stack.length > 0) {
|
||||
const fid = stack.pop()!;
|
||||
if (visited.has(fid)) continue;
|
||||
visited.add(fid);
|
||||
|
||||
await humanDelay();
|
||||
const files = await listDir(cookie, fid);
|
||||
if (!files || files.length === 0) continue;
|
||||
|
||||
// 先收集所有需要删除的 fid
|
||||
const toDelete: string[] = [];
|
||||
const toKeep: string[] = [];
|
||||
|
||||
const extensions = getSusExtensions();
|
||||
for (const file of files) {
|
||||
const ext = file.file_name.split(".").pop()?.toLowerCase() || "";
|
||||
const isSusExt = extensions.includes(ext);
|
||||
if (containsAdKeyword(file.file_name, keywords) || isSusExt) {
|
||||
toDelete.push(file.fid);
|
||||
console.log(
|
||||
`[Quark-AdCleanup] 标记删除: "${file.file_name}" (fid: ${file.fid})${isSusExt ? " [可疑后缀]" : " [广告关键词]"}`,
|
||||
);
|
||||
} else {
|
||||
toKeep.push(file.fid);
|
||||
// 如果是目录且不删除,继续遍历子目录
|
||||
if (file.dir) {
|
||||
stack.push(file.fid);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// 批量删除
|
||||
if (toDelete.length > 0) {
|
||||
const deleteOk = await batchDeleteFiles(cookie, toDelete);
|
||||
if (deleteOk) {
|
||||
deletedCount += toDelete.length;
|
||||
console.log(
|
||||
`[Quark-AdCleanup] 已删除 ${toDelete.length} 个广告文件`,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return deletedCount;
|
||||
}
|
||||
|
||||
/**
|
||||
* 批量删除文件/文件夹(移入回收站)。
|
||||
*/
|
||||
async function batchDeleteFiles(
|
||||
cookie: string,
|
||||
fids: string[],
|
||||
): Promise<boolean> {
|
||||
try {
|
||||
const resp = await fetch(
|
||||
`https://drive-pc.quark.cn/1/clouddrive/file/trash?${makeQuery()}`,
|
||||
{
|
||||
method: "POST",
|
||||
headers: {
|
||||
...getHeaders(cookie),
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
body: JSON.stringify({
|
||||
action_type: 2, // 2 = 移入回收站
|
||||
file_list: fids.map((fid) => ({ fid })),
|
||||
exclude_fids: [],
|
||||
}),
|
||||
signal: AbortSignal.timeout(15000),
|
||||
},
|
||||
);
|
||||
const data = (await resp.json()) as any;
|
||||
if (data.status === 200) {
|
||||
return true;
|
||||
}
|
||||
console.log(
|
||||
`[Quark-AdCleanup] batchDelete 返回非200: status=${data.status} msg=${data.message}`,
|
||||
);
|
||||
return false;
|
||||
} catch (err: any) {
|
||||
console.log(`[Quark-AdCleanup] batchDelete 错误: ${err.message}`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// ==================== 警示文件夹创建 ====================
|
||||
|
||||
/**
|
||||
* 在转存根目录下创建警示文件夹。
|
||||
* 文件夹名前加 ⚠️ 和空格,让其按字母排序置顶。
|
||||
* 已存在的则跳过。
|
||||
*/
|
||||
export async function createWarningDirectories(
|
||||
cookie: string,
|
||||
dirNames: string[],
|
||||
): Promise<void> {
|
||||
if (!dirNames.length) return;
|
||||
|
||||
// 先获取根目录下所有文件夹,避免重复创建
|
||||
await humanDelay();
|
||||
const rootFiles = await listDirAllPages(cookie, "0");
|
||||
const existingDirs = new Set(
|
||||
rootFiles.filter((f) => f.dir).map((f) => f.file_name),
|
||||
);
|
||||
|
||||
for (const name of dirNames) {
|
||||
// 格式化名称:确保以 ⚠️ 开头
|
||||
let formattedName = name;
|
||||
if (!formattedName.startsWith("⚠️") && !formattedName.startsWith("⚠")) {
|
||||
formattedName = `⚠️ ${formattedName}`;
|
||||
}
|
||||
// 去掉多余空格
|
||||
formattedName = formattedName.replace(/\s+/g, " ").trim();
|
||||
|
||||
if (existingDirs.has(formattedName)) {
|
||||
console.log(
|
||||
`[Quark-AdCleanup] 警示文件夹已存在,跳过: "${formattedName}"`,
|
||||
);
|
||||
continue;
|
||||
}
|
||||
|
||||
await createSingleDir(cookie, formattedName);
|
||||
// 加入已存在集合,防止同名重试
|
||||
existingDirs.add(formattedName);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 创建单个文件夹。
|
||||
*/
|
||||
async function createSingleDir(
|
||||
cookie: string,
|
||||
dirName: string,
|
||||
): Promise<boolean> {
|
||||
try {
|
||||
const resp = await fetch(
|
||||
`https://drive-pc.quark.cn/1/clouddrive/file?${makeQuery()}`,
|
||||
{
|
||||
method: "POST",
|
||||
headers: {
|
||||
...getHeaders(cookie),
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
body: JSON.stringify({
|
||||
pdir_fid: "0",
|
||||
file_name: dirName,
|
||||
dir: true,
|
||||
dir_path: "",
|
||||
}),
|
||||
signal: AbortSignal.timeout(10000),
|
||||
},
|
||||
);
|
||||
const data = (await resp.json()) as any;
|
||||
if (data.status === 200 && data.data?.fid) {
|
||||
console.log(
|
||||
`[Quark-AdCleanup] 已创建警示文件夹: "${dirName}" (fid: ${data.data.fid})`,
|
||||
);
|
||||
return true;
|
||||
}
|
||||
console.log(
|
||||
`[Quark-AdCleanup] 创建文件夹失败: status=${data.status} msg=${data.message}`,
|
||||
);
|
||||
return false;
|
||||
} catch (err: any) {
|
||||
console.log(
|
||||
`[Quark-AdCleanup] 创建文件夹错误: "${dirName}" — ${err.message}`,
|
||||
);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// ==================== 主入口 ====================
|
||||
|
||||
/**
|
||||
* 执行广告清理 + 创建警示文件夹。
|
||||
* 在转存重命名后调用。
|
||||
*/
|
||||
export async function runAdCleanup(
|
||||
cookie: string,
|
||||
savedDirFid: string,
|
||||
): Promise<{ adDeleted: number; warningDirs: number }> {
|
||||
const keywords = getAdKeywords();
|
||||
const warningNames = getWarningFolderNames();
|
||||
|
||||
let adDeleted = 0;
|
||||
let warningDirs = 0;
|
||||
|
||||
// 1. 广告关键词清理
|
||||
if (keywords.length > 0) {
|
||||
console.log(
|
||||
`[Quark-AdCleanup] 开始广告关键词清理: ${keywords.length} 个关键词`,
|
||||
);
|
||||
adDeleted = await deleteAdFiles(cookie, savedDirFid, keywords);
|
||||
console.log(
|
||||
`[Quark-AdCleanup] 广告清理完成,共删除 ${adDeleted} 个文件/文件夹`,
|
||||
);
|
||||
} else {
|
||||
console.log("[Quark-AdCleanup] 无广告关键词配置,跳过清理");
|
||||
}
|
||||
|
||||
// 2. 创建警示文件夹
|
||||
if (warningNames.length > 0) {
|
||||
console.log(
|
||||
`[Quark-AdCleanup] 开始创建警示文件夹: ${warningNames.length} 个`,
|
||||
);
|
||||
await createWarningDirectories(cookie, warningNames);
|
||||
warningDirs = warningNames.length;
|
||||
console.log(
|
||||
`[Quark-AdCleanup] 警示文件夹创建完成(共 ${warningDirs} 个)`,
|
||||
);
|
||||
} else {
|
||||
console.log("[Quark-AdCleanup] 无警示文件夹配置,跳过创建");
|
||||
}
|
||||
|
||||
return { adDeleted, warningDirs };
|
||||
}
|
||||
237
packages/backend/src/cloud/drivers/quark-api.ts
Normal file
237
packages/backend/src/cloud/drivers/quark-api.ts
Normal file
@@ -0,0 +1,237 @@
|
||||
// Native fetch available in Node 20+
|
||||
import * as crypto from 'crypto';
|
||||
|
||||
/**
|
||||
* HTTP 封装层 — 统一处理夸克 API 的请求签名、headers、query params。
|
||||
* 所有模块共用此单例/函数集,不持有状态。
|
||||
*/
|
||||
|
||||
/** Quark drive credential configuration. */
export interface QuarkConfig {
  cookie: string; // full login cookie (required)
  nickname?: string; // display name, used to tell accounts apart
}

// ==================== Headers & Params ====================

// Base URL of the Quark PC web API
const BASE_URL = 'https://drive-pc.quark.cn';
|
||||
|
||||
export function getHeaders(cookie: string): Record<string, string> {
|
||||
return {
|
||||
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
|
||||
'Cookie': cookie,
|
||||
'Accept': 'application/json, text/plain, */*',
|
||||
'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',
|
||||
'Referer': 'https://pan.quark.cn/',
|
||||
'Origin': 'https://pan.quark.cn',
|
||||
};
|
||||
}
|
||||
|
||||
export function getCommonParams(): Record<string, string> {
|
||||
return { pr: 'ucpro', fr: 'pc' };
|
||||
}
|
||||
|
||||
/** Generate query string with common params + random timing to mimic browser */
|
||||
export function makeQuery(extra: Record<string, string> = {}): string {
|
||||
const __dt = Math.floor(Math.random() * 240000 + 60000);
|
||||
const __t = Date.now() / 1000;
|
||||
return new URLSearchParams({
|
||||
...getCommonParams(),
|
||||
uc_param_str: '',
|
||||
app: 'clouddrive',
|
||||
__dt: String(__dt),
|
||||
__t: String(__t),
|
||||
...extra,
|
||||
}).toString();
|
||||
}
|
||||
|
||||
/** Random delay to mimic human behavior (500-2000ms) */
|
||||
export async function humanDelay(): Promise<void> {
|
||||
const ms = Math.floor(Math.random() * 1500) + 500;
|
||||
await new Promise(r => setTimeout(r, ms));
|
||||
}
|
||||
|
||||
/** Generate a random password for share links */
|
||||
export function randomSharePwd(): string {
|
||||
return Math.floor(1000 + Math.random() * 9000).toString();
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract kps/sign/vcode from cookie for API signing (bare keys, no __ prefix).
|
||||
*/
|
||||
export function getMparam(cookie: string): { kps?: string; sign?: string; vcode?: string } {
|
||||
// Match both __kps and kps (with or without __ prefix)
|
||||
const kpsMatch = cookie.match(/__?kps=([a-zA-Z0-9%+/=]+)/);
|
||||
const signMatch = cookie.match(/__?sign=([a-zA-Z0-9%+/=]+)/);
|
||||
const vcodeMatch = cookie.match(/__?vcode=([a-zA-Z0-9%+/=]+)/);
|
||||
if (kpsMatch && signMatch && vcodeMatch) {
|
||||
return {
|
||||
kps: kpsMatch[1],
|
||||
sign: signMatch[1].replace(/%25/g, '%'),
|
||||
vcode: vcodeMatch[1],
|
||||
};
|
||||
}
|
||||
return {};
|
||||
}
|
||||
|
||||
// ==================== Shared fetch helpers ====================
|
||||
|
||||
/**
|
||||
* Raw fetch wrapper with JSON parse + status check.
|
||||
* Returns parsed JSON body on 2xx, null on network error.
|
||||
*/
|
||||
export async function apiFetch<T = any>(
|
||||
path: string,
|
||||
options: {
|
||||
method?: string;
|
||||
query?: Record<string, string>;
|
||||
body?: any;
|
||||
cookie: string;
|
||||
timeout?: number;
|
||||
},
|
||||
): Promise<T | null> {
|
||||
const { method = 'GET', query, body, cookie, timeout = 10000 } = options;
|
||||
let url = `${BASE_URL}${path}`;
|
||||
if (query) url += `?${new URLSearchParams(query).toString()}`;
|
||||
try {
|
||||
const resp = await fetch(url, {
|
||||
method,
|
||||
headers: {
|
||||
...getHeaders(cookie),
|
||||
...(body ? { 'Content-Type': 'application/json' } : {}),
|
||||
},
|
||||
body: body ? JSON.stringify(body) : undefined,
|
||||
signal: AbortSignal.timeout(timeout),
|
||||
});
|
||||
if (!resp.ok) return null;
|
||||
return (await resp.json()) as T;
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
// ==================== File listing (shared across modules) ====================
|
||||
|
||||
/**
 * Minimal file/folder descriptor shared by all listing helpers.
 */
export interface QuarkFile {
  fid: string;              // Quark file id
  file_name: string;        // display name
  share_fid_token?: string; // token used when saving from a share; '' for own files
  dir: boolean;             // true when the entry is a directory
  size?: number;            // bytes; per getStorageInfoQuick's note, a directory's size is its subtree total
}
|
||||
|
||||
/**
 * List files in a directory by FID.
 *
 * Calls the `/1/clouddrive/file/sort` endpoint for a single page of results.
 * Returns [] on HTTP failure, non-200 API status, or any network/timeout error.
 */
export async function listDir(cookie: string, pdirFid: string, page = 1, pageSize = 50): Promise<QuarkFile[]> {
  try {
    const params = new URLSearchParams({
      ...getCommonParams(),
      uc_param_str: '',
      pdir_fid: pdirFid,
      _page: String(page),
      _size: String(pageSize),
      _fetch_total: '1',
      _fetch_sub_dirs: '0',
      _sort: 'file_type:asc,updated_at:desc',
      fetch_all_file: '1',
      fetch_risk_file_name: '1',
    });
    const resp = await fetch(
      `${BASE_URL}/1/clouddrive/file/sort?${params.toString()}`,
      { headers: getHeaders(cookie), signal: AbortSignal.timeout(15000) },
    );
    if (!resp.ok) return [];
    const data = await resp.json() as any;
    if (data.status !== 200) return [];
    // Drop entries without a fid; share_fid_token does not apply to own files.
    return (data.data?.list || []).filter((f: any) => f.fid).map((f: any) => ({
      fid: f.fid,
      file_name: f.file_name,
      share_fid_token: '',
      dir: f.dir || false,
      size: f.size || 0,
    }));
  } catch {
    // Network/timeout errors degrade to an empty listing.
    return [];
  }
}
|
||||
|
||||
/**
|
||||
* List root directory (pdir_fid=0) — returns all top-level dirs/files.
|
||||
*/
|
||||
export async function listRootDir(cookie: string): Promise<QuarkFile[]> {
|
||||
try {
|
||||
const params = new URLSearchParams({
|
||||
pr: 'ucpro', fr: 'pc',
|
||||
pdir_fid: '0',
|
||||
_page: '1', _size: '200',
|
||||
_fetch_total: '1', _fetch_sub_dirs: '0',
|
||||
_sort: 'file_type:asc,updated_at:desc',
|
||||
fetch_all_file: '1',
|
||||
fetch_risk_file_name: '1',
|
||||
});
|
||||
const resp = await fetch(
|
||||
`${BASE_URL}/1/clouddrive/file/sort?${params.toString()}`,
|
||||
{ headers: getHeaders(cookie), signal: AbortSignal.timeout(15000) },
|
||||
);
|
||||
if (!resp.ok) return [];
|
||||
const data = await resp.json() as any;
|
||||
if (data.status !== 200 || !data.data?.list) return [];
|
||||
return (data.data.list || []).map((f: any) => ({
|
||||
fid: f.fid,
|
||||
file_name: f.file_name,
|
||||
dir: f.dir || false,
|
||||
size: f.size || 0,
|
||||
}));
|
||||
} catch {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* List all files in a directory, handling pagination.
|
||||
* Fetches all pages until no more results.
|
||||
*/
|
||||
export async function listDirAllPages(cookie: string, pdirFid: string): Promise<QuarkFile[]> {
|
||||
const allFiles: QuarkFile[] = [];
|
||||
let page = 1;
|
||||
const pageSize = 100;
|
||||
let total = -1;
|
||||
while (total === -1 || (page - 1) * pageSize < total) {
|
||||
const files = await listDir(cookie, pdirFid, page, pageSize);
|
||||
if (!files.length) break;
|
||||
allFiles.push(...files);
|
||||
if (total === -1) {
|
||||
total = files.length;
|
||||
}
|
||||
page++;
|
||||
}
|
||||
return allFiles;
|
||||
}
|
||||
|
||||
// ==================== Format utilities ====================
|
||||
|
||||
export function formatBytes(bytes: number): string {
|
||||
if (bytes === 0) return '0 B';
|
||||
const sizes = ['B', 'KB', 'MB', 'GB', 'TB'];
|
||||
const i = Math.floor(Math.log(bytes) / Math.log(1024));
|
||||
return parseFloat((bytes / Math.pow(1024, i)).toFixed(2)) + ' ' + sizes[i];
|
||||
}
|
||||
|
||||
/** Generate a daily folder name (e.g. "2026-05-03") for organizing saves */
|
||||
export function dailyFolderName(): string {
|
||||
const d = new Date();
|
||||
const y = d.getFullYear();
|
||||
const m = String(d.getMonth() + 1).padStart(2, '0');
|
||||
const day = String(d.getDate()).padStart(2, '0');
|
||||
return `${y}-${m}-${day}`;
|
||||
}
|
||||
|
||||
/** Generate a random folder name for saving (fallback) */
|
||||
export function randomFolderName(): string {
|
||||
const chars = 'abcdefghijklmnopqrstuvwxyz0123456789';
|
||||
let name = '';
|
||||
for (let i = 0; i < 12; i++) {
|
||||
name += chars[Math.floor(Math.random() * chars.length)];
|
||||
}
|
||||
return name;
|
||||
}
|
||||
60
packages/backend/src/cloud/drivers/quark-auth.ts
Normal file
60
packages/backend/src/cloud/drivers/quark-auth.ts
Normal file
@@ -0,0 +1,60 @@
|
||||
import { QuarkConfig } from './quark-api';
|
||||
import { getHeaders, getMparam, apiFetch, makeQuery } from './quark-api';
|
||||
|
||||
/**
|
||||
* 认证模块 — Cookie 验证、账号信息获取、QR 登录状态检查。
|
||||
* 所有方法以 cookie 字符串为参数,不持有驱动状态。
|
||||
*/
|
||||
|
||||
// ==================== Validate ====================
|
||||
|
||||
/**
 * Validate the cookie by fetching user info.
 *
 * Tries up to MAX_RETRIES + 1 times. Only thrown (network/timeout) errors are
 * retried, with a 2s pause; a non-OK HTTP response returns false immediately,
 * while an OK response without a nickname falls through to the next attempt
 * without delay. Returns true only when the account info contains a nickname.
 */
export async function validate(cookie: string): Promise<boolean> {
  const MAX_RETRIES = 2;
  for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
    try {
      // Use account/info API (same as quark-auto-save project)
      // Only needs __uid cookie, no mparam (kps/sign/vcode) required
      const url = 'https://pan.quark.cn/account/info?fr=pc&platform=pc';
      const response = await fetch(url, {
        headers: {
          ...getHeaders(cookie),
          // Desktop-client UA overrides the browser UA set by getHeaders.
          'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) quark-cloud-drive/3.14.2 Chrome/112.0.5615.165 Electron/24.1.3.8 Safari/537.36 Channel/pckk_other_ch',
        },
        signal: AbortSignal.timeout(15000),
      });
      if (!response.ok) return false;
      const data = await response.json() as any;
      if (data?.data?.nickname) return true;
    } catch (err: any) {
      if (attempt < MAX_RETRIES) {
        console.log(`[Quark] validate attempt ${attempt + 1} failed: ${err.message}, retrying...`);
        await new Promise(r => setTimeout(r, 2000));
        continue;
      }
      console.log(`[Quark] validate all ${MAX_RETRIES + 1} attempts failed: ${err.message}`);
    }
  }
  return false;
}
|
||||
|
||||
/** Fetch nickname from Quark account info (same API used by quark-auto-save) */
|
||||
export async function fetchNickname(cookie: string): Promise<string | null> {
|
||||
try {
|
||||
const url = 'https://pan.quark.cn/account/info?fr=pc&platform=pc';
|
||||
const response = await fetch(url, {
|
||||
headers: {
|
||||
...getHeaders(cookie),
|
||||
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) quark-cloud-drive/3.14.2 Chrome/112.0.5615.165 Electron/24.1.3.8 Safari/537.36 Channel/pckk_other_ch',
|
||||
},
|
||||
signal: AbortSignal.timeout(15000),
|
||||
});
|
||||
if (!response.ok) return null;
|
||||
const data = await response.json() as any;
|
||||
return data?.data?.nickname || null;
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
315
packages/backend/src/cloud/drivers/quark-cleanup.ts
Normal file
315
packages/backend/src/cloud/drivers/quark-cleanup.ts
Normal file
@@ -0,0 +1,315 @@
|
||||
import { getHeaders, getCommonParams, getMparam, listRootDir, listDirAllPages, formatBytes, humanDelay, makeQuery, listDir, QuarkFile } from './quark-api';
|
||||
|
||||
/**
|
||||
* 容量信息 & 空间清理模块。
|
||||
*/
|
||||
// Root of the Quark PC drive API (duplicated per module; quark-api does not export it).
const BASE_URL = 'https://drive-pc.quark.cn';
|
||||
|
||||
// ==================== Storage Info ====================
|
||||
|
||||
/** Cached used space, keyed by hour block (3h window) */
|
||||
const cachedUsedSpace: { bytes: number; hourBlock: number } | null = null;
|
||||
|
||||
// We use a function-scoped cache instead of instance field
|
||||
const storageCache: { bytes: number; hourBlock: number } = { bytes: 0, hourBlock: -1 };
|
||||
|
||||
/**
 * Get total capacity from /capacity/detail API.
 * Also does a quick used-space estimate by summing root-level file sizes +
 * subdir sizes (Quark reports a directory's `size` as the total of all files
 * inside it, so no recursion is needed).
 * If the API fails (e.g. missing sign params), falls back to fallbackTotal if provided.
 *
 * @param cookie        Quark session cookie (capacity API needs kps/sign/vcode in it)
 * @param fallbackTotal optional human-readable total such as "6 TB" used when the API fails
 * @returns formatted total/used strings plus raw byte counts; '-' and 0 when unknown
 */
export async function getStorageInfoQuick(cookie: string, fallbackTotal?: string): Promise<{ total: string; totalBytes: number; used: string; usedBytes: number }> {
  try {
    // Signing params extracted from the cookie; empty strings when absent.
    const mparam = getMparam(cookie);
    const params = new URLSearchParams({
      ...getCommonParams(),
      kps: mparam.kps || '',
      sign: mparam.sign || '',
      vcode: mparam.vcode || '',
    });
    const capResponse = await fetch(`${BASE_URL}/1/clouddrive/capacity/detail?${params.toString()}`, {
      headers: getHeaders(cookie),
      signal: AbortSignal.timeout(10000),
    });
    let totalBytes = 0;
    if (capResponse.ok) {
      const data = await capResponse.json() as any;
      if (data.status === 200 && data.data) {
        totalBytes = data.data.capacity_summary?.sum_capacity || 0;
        if (totalBytes === 0) {
          // No summary field: take the largest capacity across (possibly expired) memberships.
          const memberships = [...(data.data.effect || []), ...(data.data.expired || [])];
          totalBytes = memberships.reduce((max: number, m: any) => Math.max(max, m.capacity || 0), 0);
        }
      }
    }

    // Quick used-space estimate: sum root-level file sizes + subdir sizes
    let usedBytes = 0;
    try {
      const rootFiles = await listRootDir(cookie);
      for (const f of rootFiles) {
        usedBytes += f.size || 0;
      }
    } catch {}

    // Cache the result (3h window)
    const currentHourBlock = Math.floor(new Date().getHours() / 3);
    storageCache.bytes = usedBytes;
    storageCache.hourBlock = currentHourBlock;

    if (totalBytes > 0) {
      return {
        total: formatBytes(totalBytes),
        totalBytes,
        used: formatBytes(usedBytes),
        usedBytes,
      };
    }
  } catch {}

  // Fallback: try to parse from a human-readable string like "6 TB"
  if (fallbackTotal) {
    const match = fallbackTotal.match(/^([\d.]+)\s*([KMGT]B?)/i);
    if (match) {
      const num = parseFloat(match[1]);
      const unit = match[2].toUpperCase();
      const multipliers: Record<string, number> = { B: 1, KB: 1024, MB: 1024 ** 2, GB: 1024 ** 3, TB: 1024 ** 4, PB: 1024 ** 5 };
      // Accept bare units like "G" by normalising them to "GB".
      const multiplier = multipliers[unit] || multipliers[unit.replace('B', '') + 'B'] || 0;
      if (multiplier > 0) {
        return { total: fallbackTotal, totalBytes: Math.round(num * multiplier), used: '-', usedBytes: 0 };
      }
    }
  }
  return { total: '-', totalBytes: 0, used: '-', usedBytes: 0 };
}
|
||||
|
||||
/**
 * Get storage info with used space calculation.
 *
 * Total capacity comes from the capacity/detail API; used space is computed
 * by calculateUsedSpace (full recursive traversal, cached per 3h window).
 * Returns '-' placeholders when nothing could be determined; never throws.
 */
export async function getStorageInfo(cookie: string): Promise<{ used: string; total: string; usedBytes: number; totalBytes: number }> {
  try {
    const mparam = getMparam(cookie);
    let totalBytes = 0;
    const params = new URLSearchParams({
      ...getCommonParams(),
      kps: mparam.kps || '',
      sign: mparam.sign || '',
      vcode: mparam.vcode || '',
    });
    const response = await fetch(`${BASE_URL}/1/clouddrive/capacity/detail?${params.toString()}`, {
      headers: getHeaders(cookie),
      signal: AbortSignal.timeout(10000),
    });
    if (response.ok) {
      const data = await response.json() as any;
      if (data.status === 200 && data.data) {
        totalBytes = data.data.capacity_summary?.sum_capacity || 0;
        if (totalBytes === 0) {
          // No summary field: fall back to the largest membership capacity.
          const memberships = [...(data.data.effect || []), ...(data.data.expired || [])];
          totalBytes = memberships.reduce((max: number, m: any) => Math.max(max, m.capacity || 0), 0);
        }
      }
    }

    // Full traversal (3h-cached) — can be slow on large accounts.
    const usedBytes = await calculateUsedSpace(cookie);

    if (totalBytes > 0 || usedBytes > 0) {
      return {
        total: totalBytes > 0 ? formatBytes(totalBytes) : '-',
        used: formatBytes(usedBytes),
        usedBytes,
        totalBytes: totalBytes > 0 ? totalBytes : 0,
      };
    }
    return { used: '0 B', total: '-', usedBytes: 0, totalBytes: 0 };
  } catch {
    return { used: '-', total: '-', usedBytes: 0, totalBytes: 0 };
  }
}
|
||||
|
||||
/**
 * Calculate total used space by recursively traversing all files
 * and summing their sizes. Uses 3-hour time window cache.
 *
 * Iterative DFS starting at the root fid '0': directories are pushed onto a
 * stack and a visited set guards against revisiting the same fid. A 50ms
 * pause between directory listings keeps the request rate low.
 */
export async function calculateUsedSpace(cookie: string): Promise<number> {
  const currentHourBlock = Math.floor(new Date().getHours() / 3);
  // Serve the cached figure while still inside the same 3h window
  // (a cached value of 0 is treated as "unknown" and recomputed).
  if (storageCache.hourBlock === currentHourBlock && storageCache.bytes > 0) {
    return storageCache.bytes;
  }
  let totalUsed = 0;
  const stack: string[] = ['0'];
  const visited = new Set<string>();
  while (stack.length > 0) {
    const fid = stack.pop()!;
    if (visited.has(fid)) continue;
    visited.add(fid);
    const files = await listDirAllPages(cookie, fid);
    if (!files.length) continue;
    for (const f of files) {
      if (f.dir) {
        stack.push(f.fid);
      } else {
        totalUsed += f.size || 0;
      }
    }
    // Throttle between directory listings.
    await new Promise(r => setTimeout(r, 50));
  }
  storageCache.bytes = totalUsed;
  storageCache.hourBlock = currentHourBlock;
  return totalUsed;
}
|
||||
|
||||
// ==================== Cleanup ====================
|
||||
|
||||
/**
|
||||
* Trash specified files/folders (move to recycle bin).
|
||||
*/
|
||||
export async function trashFiles(cookie: string, fids: string[]): Promise<boolean> {
|
||||
if (!fids.length) return true;
|
||||
try {
|
||||
const response = await fetch(
|
||||
`${BASE_URL}/1/clouddrive/file/trash?${makeQuery()}`,
|
||||
{
|
||||
method: 'POST',
|
||||
headers: { ...getHeaders(cookie), 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
action_type: 1, // 1 = move to trash
|
||||
filelist: fids,
|
||||
exclude_filelist: [],
|
||||
}),
|
||||
signal: AbortSignal.timeout(30000),
|
||||
},
|
||||
);
|
||||
if (!response.ok) return false;
|
||||
const data = await response.json() as any;
|
||||
if (data.status === 200) return true;
|
||||
console.error(`[Quark] trashFiles failed: ${data.message || data.status}`);
|
||||
return false;
|
||||
} catch (err: any) {
|
||||
console.error(`[Quark] trashFiles error: ${err.message}`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Empty the recycle bin — permanently delete all files in trash.
|
||||
*/
|
||||
export async function emptyTrash(cookie: string): Promise<boolean> {
|
||||
try {
|
||||
const response = await fetch(
|
||||
`${BASE_URL}/1/clouddrive/file/trash/clear?${makeQuery()}`,
|
||||
{
|
||||
method: 'POST',
|
||||
headers: { ...getHeaders(cookie), 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({}),
|
||||
signal: AbortSignal.timeout(60000),
|
||||
},
|
||||
);
|
||||
if (!response.ok) return false;
|
||||
const data = await response.json() as any;
|
||||
if (data.status === 200) return true;
|
||||
console.error(`[Quark] emptyTrash failed: ${data.message || data.status}`);
|
||||
return false;
|
||||
} catch (err: any) {
|
||||
console.error(`[Quark] emptyTrash error: ${err.message}`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Cleanup: trash date-named folders (YYYY-MM-DD) older than `days`.
|
||||
*/
|
||||
export async function cleanupOldDateFolders(cookie: string, days: number): Promise<{ trashed: number; errors: string[] }> {
|
||||
const errors: string[] = [];
|
||||
const cutoff = new Date();
|
||||
cutoff.setDate(cutoff.getDate() - days);
|
||||
const cutoffStr = cutoff.toISOString().slice(0, 10);
|
||||
|
||||
try {
|
||||
const rootItems = await listRootDir(cookie);
|
||||
const oldFolders = rootItems.filter(item => {
|
||||
if (!item.dir) return false;
|
||||
if (!/^\d{4}-\d{2}-\d{2}$/.test(item.file_name)) return false;
|
||||
return item.file_name < cutoffStr;
|
||||
});
|
||||
|
||||
if (oldFolders.length === 0) {
|
||||
return { trashed: 0, errors: [] };
|
||||
}
|
||||
|
||||
const fids = oldFolders.map(f => f.fid);
|
||||
console.log(`[Quark] Trashing ${fids.length} old date folders (before ${cutoffStr}): ${oldFolders.map(f => f.file_name).join(', ')}`);
|
||||
const ok = await trashFiles(cookie, fids);
|
||||
if (ok) {
|
||||
return { trashed: fids.length, errors: [] };
|
||||
}
|
||||
return { trashed: 0, errors: [`Trash API returned failure for ${fids.length} folders`] };
|
||||
} catch (err: any) {
|
||||
return { trashed: 0, errors: [err.message] };
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Cleanup: if used space exceeds thresholdPercent% of total,
 * delete the oldest date folders until totalBytes * deletePercent/100
 * of total capacity is freed.
 *
 * @param cookie           Quark session cookie
 * @param thresholdPercent usage percentage that triggers cleanup
 * @param deletePercent    percentage of TOTAL capacity to free (capped at 100)
 * @returns number of folders trashed plus any error messages
 */
export async function cleanupBySpaceThreshold(
  cookie: string,
  thresholdPercent: number,
  deletePercent: number,
): Promise<{ trashed: number; errors: string[] }> {
  // NOTE(review): `errors` is never appended to or returned — candidate for removal.
  const errors: string[] = [];

  try {
    const storage = await getStorageInfo(cookie);
    // Unknown total capacity — nothing sensible to do.
    if (storage.totalBytes <= 0) return { trashed: 0, errors: [] };

    const usagePercent = (storage.usedBytes / storage.totalBytes) * 100;
    if (usagePercent < thresholdPercent) {
      console.log(`[Quark] Usage ${usagePercent.toFixed(1)}% below threshold ${thresholdPercent}%, skipping`);
      return { trashed: 0, errors: [] };
    }

    const targetBytesToFree = Math.floor(storage.totalBytes * Math.min(deletePercent, 100) / 100);

    // Oldest-first list of YYYY-MM-DD folders at the drive root
    // (lexicographic order == chronological order for this format).
    const rootItems = await listRootDir(cookie);
    const dateFolders = rootItems
      .filter(item => item.dir && /^\d{4}-\d{2}-\d{2}$/.test(item.file_name))
      .sort((a, b) => a.file_name.localeCompare(b.file_name));

    if (dateFolders.length === 0) return { trashed: 0, errors: [] };

    // If folder sizes are available, free exactly enough folders; otherwise
    // estimate a folder count from the average folder size.
    const hasSizes = dateFolders.some(f => f.size && f.size > 0);
    let cumulativeSize = 0;
    const foldersToTrash: typeof dateFolders = [];

    if (hasSizes) {
      for (const folder of dateFolders) {
        foldersToTrash.push(folder);
        cumulativeSize += folder.size || 0;
        if (cumulativeSize >= targetBytesToFree) break;
      }
    } else {
      const avgSizePerFolder = storage.usedBytes / dateFolders.length;
      const estCount = Math.max(1, Math.ceil(targetBytesToFree / avgSizePerFolder));
      foldersToTrash.push(...dateFolders.slice(0, estCount));
      cumulativeSize = estCount * avgSizePerFolder;
    }

    const freedMB = (cumulativeSize / 1024 / 1024).toFixed(0);
    const targetMB = (targetBytesToFree / 1024 / 1024).toFixed(0);
    const fidsToTrash = foldersToTrash.map(f => f.fid);
    console.log(`[Quark] Space threshold: trashing ${foldersToTrash.length}/${dateFolders.length} oldest folders (~${freedMB} MB) to free ${targetMB} MB (${deletePercent}% of ${(storage.totalBytes/1024/1024/1024).toFixed(0)} GB total)`);

    const ok = await trashFiles(cookie, fidsToTrash);
    if (ok) {
      console.log(`[Quark] ✅ Space-threshold trashed ${foldersToTrash.length} folders (~${freedMB} MB)`);
      return { trashed: foldersToTrash.length, errors: [] };
    }
    return { trashed: 0, errors: [`Space-threshold trash failed for ${foldersToTrash.length} folders`] };
  } catch (err: any) {
    return { trashed: 0, errors: [err.message] };
  }
}
|
||||
259
packages/backend/src/cloud/drivers/quark-rename.ts
Normal file
259
packages/backend/src/cloud/drivers/quark-rename.ts
Normal file
@@ -0,0 +1,259 @@
|
||||
import * as crypto from 'crypto';
|
||||
|
||||
/**
|
||||
* 防和谐重命名模块。
|
||||
* 对文件名/目录名执行谐音替换 + 可读标签保留(集数、画质、语言等)。
|
||||
*/
|
||||
|
||||
// ==================== Homophone Map ====================

/**
 * Character substitution table: each key is replaced by a character with the
 * same (or similar) pronunciation so renamed files evade keyword filters
 * while staying readable. Entries that map a character to itself look
 * redundant but are load-bearing: their PRESENCE makes pinyinLike() take the
 * pinyin-initial branch for that character instead of the code-point
 * fallback — do not remove them.
 */
const HOMOPHONE_MAP: Record<string, string> = {
  // Popular series titles on network drives — homophone swaps (same sound, different char)
  '斗':'陡','破':'坡','苍':'仓','穹':'穷',
  '完':'玩','美':'每','世':'士','界':'介',
  '凡':'烦','人':'仁','修':'休','罗':'络',
  '仙':'先','逆':'腻','遮':'折','天':'添',
  '吞':'屯','噬':'逝','大':'达','主':'嘱','宰':'崽',
  '星':'惺','辰':'晨','变':'便','一':'伊','念':'捻',
  '永':'泳','恒':'横','神':'申','墓':'暮','长':'尝','生':'甥',
  '剑':'箭','来':'莱','诡':'鬼','秘':'蜜',
  '全':'泉','职':'值','盘':'磐','龙':'笼',
  '雪':'血','鹰':'莺','莽':'蟒','荒':'慌','纪':'记',
  '珠':'株','王':'亡','座':'坐','牧':'木','记':'计',
  '沧':'舱','元':'圆','图':'涂','紫':'仔','川':'串',
  '百':'白','炼':'恋','成':'程','饶':'绕','命':'冥',
  // General-purpose homophone swaps
  '的':'得','了':'啦','是':'事','不':'布','我':'窝',
  '你':'尼','他':'她','有':'友','和':'合','与':'予',
  '上':'尚','下':'夏','中':'忠','第':'弟','集':'级',
  '话':'划','季':'际','年':'念','月':'阅','日':'曰',
  '新':'心','版':'板','高':'糕','清':'青','原':'源',
  '小':'晓','片':'篇','视':'市','频':'贫','道':'到',
  '动':'洞','画':'话','声':'升','音':'因','文':'闻',
  '明':'名','暗':'黯','光':'广','影':'映','色':'瑟',
  '风':'疯','雨':'语','花':'华','国':'果','家':'佳',
  '战':'站','争':'挣','士':'仕','兵':'宾',
  '皇':'惶','帝':'谛','魔':'磨','鬼':'诡','怪':'乖',
  '精':'经','灵':'铃','妖':'夭','武':'舞','侠':'狭',
  '杀':'刹','血':'雪','刀':'叨','枪':'呛','炮':'泡',
  '时':'石','空':'孔','前':'钱','后':'厚','东':'冬',
  '南':'难','西':'夕','北':'备','开':'凯','关':'官',
  '出':'初','进':'近','去':'趣',
  '短':'短','多':'多','少':'少','真':'贞','假':'价',
  '好':'郝','坏':'怀','对':'队','错':'措','以':'已',
  '从':'从','被':'被','把':'把','将':'将','在':'在',
  '但':'但','就':'就','才':'才','也':'也','很':'狠',
  '又':'又','再':'再','更':'更','最':'最','总':'总',
  '共':'共','只':'只','各':'各','每':'每','任':'任',
  '所':'所','该':'该','本':'本',
};
|
||||
|
||||
// Filler characters (function words, particles, interjections) randomly
// inserted into directory names by magicRenameDir as light "noise".
const NOISE_CJK = '的了在是不有会可对所之也同与及但或如且乃而岂乎焉兮哉亦犹尚乃其若故盖诸焉欤' +
  '么个着过把对为从以到说时要就这那和上人家下能出得发来年心开物力些长样吧啊哦嗯嚯哇咯呗哟嘿呵哈';
|
||||
|
||||
// ==================== Helpers ====================
|
||||
|
||||
/** Convert Chinese text to homophonic (substitute chars with same sound) */
|
||||
function homophonicText(text: string): string {
|
||||
let result = '';
|
||||
for (const ch of text) {
|
||||
if (/[\u4e00-\u9fff]/.test(ch)) {
|
||||
const homophone = HOMOPHONE_MAP[ch];
|
||||
result += homophone || ch;
|
||||
} else {
|
||||
result += ch;
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/** Convert Chinese text to pinyin-initial-like string (each char → first pinyin letter or fallback) */
|
||||
function pinyinLike(text: string): string {
|
||||
let result = '';
|
||||
for (const ch of text) {
|
||||
if (/[\u4e00-\u9fff]/.test(ch)) {
|
||||
const homophone = HOMOPHONE_MAP[ch];
|
||||
if (homophone) {
|
||||
result += pinyinInitial(homophone);
|
||||
} else {
|
||||
const code = ch.charCodeAt(0);
|
||||
result += String.fromCharCode(97 + (code % 26));
|
||||
}
|
||||
} else if (/[a-zA-Z0-9]/.test(ch)) {
|
||||
result += ch;
|
||||
} else if (/[\s._-]/.test(ch)) {
|
||||
result += '_';
|
||||
}
|
||||
}
|
||||
return result.replace(/_+/g, '_').replace(/^_|_$/g, '');
|
||||
}
|
||||
|
||||
/** Get pinyin initial (first letter of pinyin) for a Chinese character */
|
||||
function pinyinInitial(ch: string): string {
|
||||
const code = ch.charCodeAt(0);
|
||||
if (code >= 0x4E00 && code <= 0x9FFF) {
|
||||
const initials = ['b','p','m','f','d','t','n','l','g','k','h','j','q','x','zh','ch','sh','r','z','c','s','y','w'];
|
||||
const idx = Math.min(Math.floor((code - 0x4E00) / 700), initials.length - 1);
|
||||
return initials[idx];
|
||||
}
|
||||
return ch.toLowerCase();
|
||||
}
|
||||
|
||||
// ==================== Public API ====================
|
||||
|
||||
/**
 * Anti-harmony rename for directories.
 * 80%: light homophonic replacement, 20%: partial pinyin.
 *
 * Always appends a 4-hex-char md5 hash (of name + timestamp) for uniqueness.
 * Non-deterministic by design (Math.random / Date.now).
 */
export function magicRenameDir(dirName: string): string {
  const hash = crypto.createHash('md5').update(dirName + Date.now()).digest('hex').slice(0, 4);

  // Collapse whitespace; empty names fall back to "media_<hash>".
  let cleanName = dirName.trim().replace(/\s+/g, ' ');
  if (!cleanName) {
    return `media_${hash}`;
  }

  let baseName: string;

  if (Math.random() < 0.2) {
    // Partial pinyin: 30% of CJK chars → pinyin initial, 70% stay as-is
    const chars = [...cleanName];
    const result: string[] = [];
    for (const ch of chars) {
      if (/[\u4e00-\u9fff]/.test(ch) && Math.random() < 0.3) {
        result.push(pinyinInitial(ch));
      } else {
        result.push(ch);
      }
    }
    baseName = result.join('');
  } else {
    // Light homophonic: replace each CJK char, keep everything else as-is
    const chars = [...cleanName];
    const result: string[] = [];
    for (const ch of chars) {
      if (/[\u4e00-\u9fff]/.test(ch)) {
        result.push(HOMOPHONE_MAP[ch] || ch);
      } else {
        result.push(ch);
      }
    }
    baseName = result.join('');

    // Optional: insert 0-2 light noise chars (low probability)
    const noiseCount = Math.random() < 0.3 ? (Math.random() < 0.5 ? 1 : 2) : 0;
    for (let n = 0; n < noiseCount; n++) {
      const pos = Math.floor(Math.random() * (baseName.length + 1));
      const ink = NOISE_CJK[Math.floor(Math.random() * NOISE_CJK.length)];
      baseName = baseName.slice(0, pos) + ink + baseName.slice(pos);
    }
  }

  // Sanitise: non-CJK/non-word chars → '_', squeeze/trim underscores, cap at 30 chars.
  baseName = baseName.replace(/[^\u4e00-\u9fff\w]/g, '_');
  baseName = baseName.replace(/_+/g, '_').replace(/^_|_$/g, '');
  if (baseName.length > 30) baseName = baseName.slice(0, 30);

  return `${baseName}_${hash}`;
}
|
||||
|
||||
/**
 * Anti-harmony rename for files.
 * KEEPS: episode numbers, quality, language tags, original extension.
 * REPLACES: Chinese title with homophonic/pinyin.
 *
 * Pipeline: strip extension → peel off episode/quality/language/year/season
 * tags → mangle the remaining title → reassemble as
 * `<title>_<tags…>_<hash><ext>`. Non-deterministic by design; an
 * 8-hex-char md5 hash is always appended for uniqueness.
 */
export function magicRename(filename: string): string {
  const hash = crypto.createHash('md5').update(filename + Date.now()).digest('hex').slice(0, 8);

  // Split off the extension (restored at the end; '.bin' when missing).
  let ext = '';
  const extMatch = filename.match(/\.[a-zA-Z0-9]+$/);
  if (extMatch) {
    ext = extMatch[0];
    filename = filename.slice(0, -ext.length);
  }

  // Extract and REMEMBER: episode info, quality, language, year
  const episodePatterns = [
    { regex: /第\s*(\d+)\s*[集话話話話话回章期]/, format: (m: string) => 'Ep' + m.replace(/[^\d]/g, '') },
    { regex: /Ep\d+|ep\d+/i, format: (m: string) => m.toUpperCase() },
    { regex: /Part\s*\d+/i, format: (m: string) => m.replace(/\s+/g, '') },
    { regex: /E\d{2,}/i, format: (m: string) => m.toUpperCase() },
  ];
  let episodeTag = '';
  for (const { regex, format } of episodePatterns) {
    const m = filename.match(regex);
    if (m) {
      episodeTag = format(m[0]);
      filename = filename.replace(m[0], '');
      break; // first matching pattern wins
    }
  }

  // Extract and REMEMBER: quality tags
  const qualityPattern = /\b(4k|1080p|1080P|2160p|720p|HD|BluRay|Blu-ray|HDR|WEB-DL|WEBRip|BDRip|REMUX|DV|Dovi|HEVC|x264|x265|H\.264|H\.265)\b/;
  const qualityMatch = filename.match(qualityPattern);
  const qualityTag = qualityMatch ? qualityMatch[0] : '';
  if (qualityMatch) filename = filename.replace(qualityMatch[0], '');

  // Extract and REMEMBER: language tags
  const langPattern = /\b(CHS|CHT|JP|EN|BIG5|GB|粤语|国语|日语|英语|中字|日字|英字|繁体中字)\b/;
  const langMatch = filename.match(langPattern);
  const langTag = langMatch ? langMatch[0] : '';
  if (langMatch) filename = filename.replace(langMatch[0], '');

  // Extract and REMEMBER: year
  const yearMatch = filename.match(/\b(20\d{2})\b/);
  const yearTag = yearMatch ? yearMatch[0] : '';
  if (yearMatch) filename = filename.replace(yearMatch[0], '');

  // Extract and REMEMBER: season info
  const seasonMatch = filename.match(/第?\s*(\d+)\s*[季部期]/);
  const seasonTag = seasonMatch ? `${seasonMatch[1]}季` : '';
  if (seasonMatch) filename = filename.replace(seasonMatch[0], '');

  // Now process the remaining name (mostly Chinese title):
  // normalise separators/brackets to underscores first.
  filename = filename.replace(/[._\-【】\[\]()()\s]+/g, '_').trim();

  // 50/50 choice between a homophonic title and a pinyin-like title, capped at 15 chars.
  const useHomophonic = Math.random() > 0.5;
  let titlePart: string;
  if (useHomophonic) {
    titlePart = homophonicText(filename);
    titlePart = titlePart.replace(/[^\u4e00-\u9fff\wa-zA-Z0-9]/g, '_');
    titlePart = titlePart.replace(/_+/g, '_').replace(/^_|_$/g, '');
    if (titlePart.length > 15) titlePart = titlePart.slice(0, 15);
  } else {
    titlePart = pinyinLike(filename);
    titlePart = titlePart.replace(/[^a-zA-Z0-9]/g, '_');
    titlePart = titlePart.replace(/_+/g, '_').replace(/^_|_$/g, '');
    if (titlePart.length > 15) titlePart = titlePart.slice(0, 15);
  }

  // Remove sensitive keywords from title part
  const sensitiveWords = /斗破|完美|凡人|仙逆|遮天|吞噬|大主宰|绝世|武动|星辰变|一念永恒|修罗|神墓|长生|剑来|诡秘|全职|斗罗|盘龙|雪鹰|莽荒纪|天珠变|神印王座|牧神记|沧元图|紫川|百炼成神|大王饶命|全球高考/ig;
  titlePart = titlePart.replace(sensitiveWords, '');
  titlePart = titlePart.replace(/_+/g, '_').replace(/^_|_$/g, '');

  // Build preserved tags
  const tags: string[] = [];
  if (seasonTag) tags.push(seasonTag);
  if (episodeTag) tags.push(episodeTag);
  if (qualityTag) tags.push(qualityTag.toUpperCase());
  if (langTag) tags.push(langTag);
  if (yearTag) tags.push(yearTag);
  tags.push(hash); // Always add hash for uniqueness

  const newExt = ext || '.bin';

  // Assemble, cap at 80 chars, and left-pad very short results with random hex.
  const parts = [titlePart, ...tags].filter(Boolean);
  let result = parts.join('_');

  if (result.length > 80) {
    result = result.slice(0, 80);
  }

  if (result.length < 10) {
    const filler = crypto.randomBytes(4).toString('hex');
    result = `${filler}_${result}`;
  }

  return result + newExt;
}
|
||||
409
packages/backend/src/cloud/drivers/quark-share.ts
Normal file
409
packages/backend/src/cloud/drivers/quark-share.ts
Normal file
@@ -0,0 +1,409 @@
|
||||
import { getHeaders, getCommonParams, makeQuery, getMparam, humanDelay, randomSharePwd, apiFetch, QuarkFile } from './quark-api';
|
||||
|
||||
/**
|
||||
* 分享模块 — 分享链接解析、转存任务、创建分享链接。
|
||||
*/
|
||||
|
||||
// Root of the Quark PC drive API (duplicated per module; quark-api does not export it).
const BASE_URL = 'https://drive-pc.quark.cn';
|
||||
|
||||
// ==================== Acquire Stoken ====================
|
||||
|
||||
/**
 * Acquire stoken for a share link (needed for detail/save).
 *
 * Up to 3 attempts. HTTP-level failures retry immediately; thrown
 * (network/timeout) errors retry with linear backoff (500ms, 1000ms).
 * A well-formed response without status 200 / stoken returns null right away
 * (treated as a definitive rejection, not retried).
 */
export async function acquireStoken(cookie: string, pwdId: string): Promise<string | null> {
  for (let attempt = 0; attempt < 3; attempt++) {
    try {
      const params = new URLSearchParams(getCommonParams());
      const resp = await fetch(
        `${BASE_URL}/1/clouddrive/share/sharepage/token?${params.toString()}`,
        {
          method: 'POST',
          headers: { ...getHeaders(cookie), 'Content-Type': 'application/json' },
          body: JSON.stringify({ pwd_id: pwdId, passcode: '' }),
          signal: AbortSignal.timeout(10000),
        },
      );
      if (!resp.ok) {
        if (attempt < 2) continue;
        return null;
      }
      const data = await resp.json() as any;
      if (data.status === 200 && data.data?.stoken) {
        return data.data.stoken;
      }
      return null;
    } catch {
      if (attempt >= 2) return null;
      await new Promise(r => setTimeout(r, 500 * (attempt + 1)));
    }
  }
  return null;
}
|
||||
|
||||
// ==================== Get Share Files ====================
|
||||
|
||||
/**
|
||||
* Fetch detail at a given pdir_fid within a share.
|
||||
*/
|
||||
export async function getDetailAt(
|
||||
cookie: string,
|
||||
pwdId: string,
|
||||
stoken: string,
|
||||
pdirFid: string,
|
||||
): Promise<QuarkFile[]> {
|
||||
const params = new URLSearchParams({
|
||||
...getCommonParams(),
|
||||
pwd_id: pwdId,
|
||||
stoken,
|
||||
pdir_fid: pdirFid,
|
||||
force: '0',
|
||||
_page: '1',
|
||||
_size: '50',
|
||||
_fetch_banner: '0',
|
||||
_fetch_share: '1',
|
||||
_fetch_total: '1',
|
||||
_sort: 'file_type:asc,updated_at:desc',
|
||||
ver: '2',
|
||||
fetch_share_full_path: '0',
|
||||
});
|
||||
const resp = await fetch(
|
||||
`${BASE_URL}/1/clouddrive/share/sharepage/detail?${params.toString()}`,
|
||||
{ headers: getHeaders(cookie), signal: AbortSignal.timeout(15000) },
|
||||
);
|
||||
if (!resp.ok) return [];
|
||||
const data = await resp.json() as any;
|
||||
if (data.status !== 200) return [];
|
||||
return (data.data?.list || []).filter((f: any) => f.fid).map((f: any) => ({
|
||||
fid: f.fid,
|
||||
file_name: f.file_name,
|
||||
share_fid_token: f.share_fid_token || '',
|
||||
dir: f.dir || false,
|
||||
size: f.size || 0,
|
||||
}));
|
||||
}
|
||||
|
||||
/**
|
||||
* Recursively collect files from a share.
|
||||
* If the share contains a single directory, drill into it to list contents
|
||||
* but still save the directory itself.
|
||||
*/
|
||||
export async function getShareFiles(
|
||||
cookie: string,
|
||||
pwdId: string,
|
||||
stoken: string,
|
||||
): Promise<{ files: QuarkFile[]; topDir: boolean; childFiles?: QuarkFile[] } | null> {
|
||||
try {
|
||||
const topLevel = await getDetailAt(cookie, pwdId, stoken, '0');
|
||||
if (!topLevel || topLevel.length === 0) return null;
|
||||
|
||||
// If the share is a single directory, we save the directory itself
|
||||
// and fetch its contents for renaming later
|
||||
if (topLevel.length === 1 && topLevel[0].dir) {
|
||||
const innerFiles = await getDetailAt(cookie, pwdId, stoken, topLevel[0].fid);
|
||||
return {
|
||||
files: topLevel,
|
||||
topDir: true,
|
||||
childFiles: innerFiles || [],
|
||||
};
|
||||
}
|
||||
|
||||
// Multiple top-level items: save them directly
|
||||
return {
|
||||
files: topLevel,
|
||||
topDir: false,
|
||||
};
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
// ==================== Save Files (share → cloud) ====================
|
||||
|
||||
/**
 * Save shared files into the user's own cloud directory.
 *
 * Creates an asynchronous save task on Quark's side; the caller must poll
 * the returned taskId (see waitForTask) to learn when the copy completes.
 *
 * @param cookie    - Quark account cookie.
 * @param pwdId     - Share token from the share URL.
 * @param stoken    - Session token acquired via acquireStoken().
 * @param fids      - FIDs of the shared items to save.
 * @param fidTokens - share_fid_token values matching `fids` one-to-one.
 * @param toPdirFid - Destination directory FID ('0' = root).
 * @returns success flag, a human-readable message, and the taskId on success.
 */
export async function saveFiles(
  cookie: string,
  pwdId: string,
  stoken: string,
  fids: string[],
  fidTokens: string[],
  toPdirFid: string,
): Promise<{ success: boolean; message: string; taskId?: string }> {
  try {
    const resp = await fetch(
      `${BASE_URL}/1/clouddrive/share/sharepage/save?${makeQuery()}`,
      {
        method: 'POST',
        headers: { ...getHeaders(cookie), 'Content-Type': 'application/json' },
        body: JSON.stringify({
          fid_list: fids,
          fid_token_list: fidTokens,
          to_pdir_fid: toPdirFid,
          pwd_id: pwdId,
          stoken,
          pdir_fid: '0',
          scene: 'link',
        }),
        signal: AbortSignal.timeout(30000),
      },
    );
    const data = await resp.json() as any;
    if (data.status === 200 && data.data?.task_id) {
      return { success: true, message: 'Save task created', taskId: data.data.task_id };
    }
    // 'require login [guest]' is Quark's signal for an expired cookie;
    // surface a friendlier, actionable message for that specific case.
    return {
      success: false,
      message: data.message === 'require login [guest]'
        ? '夸克网盘 Cookie 已过期,请在后台重新配置 Cookie'
        : (data.message || `API 返回错误 (status=${data.status}, code=${data.code})`),
    };
  } catch (err: any) {
    return { success: false, message: err.message || 'Network error' };
  }
}
|
||||
|
||||
// ==================== Wait for Save Task ====================
|
||||
|
||||
/**
|
||||
* Poll task status until complete or timeout.
|
||||
* Returns the saved file FIDs (save_as_top_fids).
|
||||
*/
|
||||
export async function waitForTask(cookie: string, taskId: string, timeoutMs: number): Promise<string[] | null> {
|
||||
const start = Date.now();
|
||||
let retryIndex = 0;
|
||||
|
||||
while (Date.now() - start < timeoutMs) {
|
||||
try {
|
||||
const params = new URLSearchParams({
|
||||
...getCommonParams(),
|
||||
uc_param_str: '',
|
||||
task_id: taskId,
|
||||
retry_index: String(retryIndex),
|
||||
__dt: String(Math.floor(Math.random() * 240000 + 60000)),
|
||||
__t: String(Date.now() / 1000),
|
||||
});
|
||||
const resp = await fetch(
|
||||
`${BASE_URL}/1/clouddrive/task?${params.toString()}`,
|
||||
{ headers: getHeaders(cookie), signal: AbortSignal.timeout(10000) },
|
||||
);
|
||||
const data = await resp.json() as any;
|
||||
if (data.status === 200) {
|
||||
if (data.data?.status === 2) {
|
||||
// Task completed
|
||||
const savedFids: string[] = data.data?.save_as?.save_as_top_fids || [];
|
||||
return savedFids;
|
||||
}
|
||||
// Still in progress
|
||||
retryIndex++;
|
||||
}
|
||||
} catch {
|
||||
// Network error, retry
|
||||
}
|
||||
await new Promise(r => setTimeout(r, 1000));
|
||||
}
|
||||
return null; // Timeout
|
||||
}
|
||||
|
||||
// ==================== Rename File ====================
|
||||
|
||||
/**
|
||||
* Rename a file by its FID.
|
||||
*/
|
||||
export async function renameFile(cookie: string, fid: string, newName: string): Promise<boolean> {
|
||||
try {
|
||||
const resp = await fetch(
|
||||
`${BASE_URL}/1/clouddrive/file/rename?${makeQuery()}`,
|
||||
{
|
||||
method: 'POST',
|
||||
headers: { ...getHeaders(cookie), 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ fid, file_name: newName }),
|
||||
signal: AbortSignal.timeout(10000),
|
||||
},
|
||||
);
|
||||
const data = await resp.json() as any;
|
||||
return data.status === 200 || data.code === 0;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// ==================== Create Share Link ====================
|
||||
|
||||
/**
 * Create a share link for a file/folder.
 *
 * Flow: create share task → poll until the share_id appears → submit the
 * password via /password to obtain the short URL.
 *
 * Tries share_type '1' (7-day) first, then falls back to '0' (no expiry),
 * since accounts differ in which share types they may create.
 *
 * @param cookie - Quark account cookie.
 * @param fileId - FID of the file/folder to share.
 * @returns shareUrl + sharePwd on success; message always describes the outcome.
 */
export async function createShareLink(cookie: string, fileId: string): Promise<{ success: boolean; shareUrl?: string; sharePwd?: string; message: string }> {
  try {
    const sharePwd = randomSharePwd();

    // Try different share_type values (1 = 7 days, 0 = no expiry)
    const shareTypes = ['1', '0'];
    let lastError = '';

    for (const st of shareTypes) {
      // Random pause so the request pattern looks human.
      await humanDelay();
      // Step 1: Create share task - get task_id
      const response = await fetch(
        `${BASE_URL}/1/clouddrive/share?${makeQuery()}`,
        {
          method: 'POST',
          headers: { ...getHeaders(cookie), 'Content-Type': 'application/json' },
          body: JSON.stringify({
            fid_list: [fileId],
            share_type: st,
            url_type: '1',
            share_pwd: sharePwd,
          }),
          signal: AbortSignal.timeout(15000),
        },
      );
      const data = await response.json() as any;
      const taskId = data.data?.task_id;
      if (!taskId) {
        // No task id means this share_type was rejected — try the next one.
        lastError = data.message || `share_type=${st} 失败`;
        console.error('[Quark] Create share task failed (type=%s):', st, data.message || JSON.stringify(data).slice(0, 200));
        continue;
      }

      // Step 2: Poll task until complete (up to 20s)
      const result = await waitForShareTask(cookie, taskId, 20000);
      if (!result?.shareId) {
        lastError = result?.message || '任务超时';
        console.error('[Quark] Wait for share task failed (type=%s):', st, result?.message || 'unknown');
        continue;
      }

      // Step 3: Submit share via /password endpoint to get the short URL
      const shareUrl = await submitShare(cookie, result.shareId, sharePwd);
      if (shareUrl) {
        return {
          success: true,
          shareUrl,
          sharePwd,
          message: `分享链接已生成(密码:${sharePwd})`,
        };
      }
      lastError = '提交密码后未获取到短链接';
    }

    // All share types exhausted without producing a link.
    return { success: false, message: lastError || '🤷 各种姿势都试过了,就是分享不出来…' };
  } catch (err: any) {
    console.error('[Quark] createShareLink error:', err.message);
    return { success: false, message: err.message || '🌩️ 网络开小差了,再试试?' };
  }
}
|
||||
|
||||
/**
|
||||
* Submit share via /password endpoint to get the actual short URL.
|
||||
*/
|
||||
async function submitShare(cookie: string, shareId: string, sharePwd?: string): Promise<string | null> {
|
||||
try {
|
||||
const response = await fetch(
|
||||
`${BASE_URL}/1/clouddrive/share/password?${makeQuery()}`,
|
||||
{
|
||||
method: 'POST',
|
||||
headers: { ...getHeaders(cookie), 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ share_id: shareId, share_pwd: sharePwd || '' }),
|
||||
signal: AbortSignal.timeout(15000),
|
||||
},
|
||||
);
|
||||
const data = await response.json() as any;
|
||||
if (data.status === 200 && data.data?.share_url) {
|
||||
console.log('[Quark] Share short URL:', data.data.share_url);
|
||||
return data.data.share_url;
|
||||
}
|
||||
console.log('[Quark] /password response:', JSON.stringify(data).slice(0, 300));
|
||||
console.error('[Quark] /password FAIL status=%s msg=%s', data.status, data.message || '');
|
||||
return null;
|
||||
} catch (err) {
|
||||
console.log('[Quark] /password error:', err);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Poll a share-creation task until it completes, then extract a usable
 * share id / short code from the (inconsistent) response payload.
 *
 * @param cookie    - Quark account cookie.
 * @param taskId    - Task id returned by the share-creation request.
 * @param timeoutMs - Maximum time to keep polling.
 * @returns `{ shareId }` on success, `{ message }` on failure/timeout.
 */
async function waitForShareTask(cookie: string, taskId: string, timeoutMs: number): Promise<{ shareId?: string; message?: string } | null> {
  const start = Date.now();
  let retryIndex = 0;
  while (Date.now() - start < timeoutMs) {
    try {
      const params = new URLSearchParams({
        ...getCommonParams(),
        uc_param_str: '',
        task_id: taskId,
        retry_index: String(retryIndex),
        // Jittered parameters mimicking the web client's polling requests.
        __dt: String(Math.floor(Math.random() * 240000 + 60000)),
        __t: String(Date.now() / 1000),
      });
      const resp = await fetch(
        `${BASE_URL}/1/clouddrive/task?${params.toString()}`,
        { headers: getHeaders(cookie), signal: AbortSignal.timeout(10000) },
      );
      const data = await resp.json() as any;
      if (data.data?.status === 2) {
        // Task completed (status 2) — the response shape varies between
        // API versions, so try multiple extraction approaches in order.
        // 1. Direct share_url field
        if (data.data?.share_url) {
          const match = data.data.share_url.match(/\/s\/([a-zA-Z0-9]+)/);
          if (match) return { shareId: match[1] };
        }

        // 2. Nested share object
        if (data.data?.share?.url) {
          const match = data.data.share.url.match(/\/s\/([a-zA-Z0-9]+)/);
          if (match) return { shareId: match[1] };
        }
        if (data.data?.share?.short_url) {
          const match = data.data.share.short_url.match(/\/s\/([a-zA-Z0-9]+)/);
          if (match) return { shareId: match[1] };
        }

        // 3. share_id — validate it's a reasonable short code (8-20 chars, not UUID-like)
        const shareId = data.data?.share_id;
        if (shareId && shareId.length <= 20 && shareId.length >= 8) {
          return { shareId };
        }

        // 4. Regex search through the full response for a URL pattern
        const str = JSON.stringify(data);
        const urlMatch = str.match(/https?:\/\/pan\.quark\.cn\/s\/([a-zA-Z0-9]{6,16})/);
        if (urlMatch) {
          return { shareId: urlMatch[1] };
        }

        // 5. Extract from any URL field in the response
        const urlFields = ['url', 'link', 'share_url', 'short_url', 'share_link'];
        for (const field of urlFields) {
          const val = data.data?.[field] || data.data?.share?.[field];
          if (typeof val === 'string' && val.includes('pan.quark.cn/s/')) {
            const m = val.match(/\/s\/([a-zA-Z0-9]+)/);
            if (m) return { shareId: m[1] };
          }
        }

        // 6. Log the full share task response for debugging
        console.log('[Quark] Full share task response:', JSON.stringify(data, null, 2).slice(0, 2000));

        // 7. Even if shareId is UUID-like (32 hex chars), use it anyway as last resort
        if (shareId) {
          return { shareId };
        }

        return { message: 'Share task completed but no share URL found' };
      }
      if (data.data?.status === 3) {
        // status 3 == the task failed on Quark's side
        return { message: data.message || 'Share task failed' };
      }
      retryIndex++;
    } catch {
      // Transient network error — retry until the deadline.
    }
    await new Promise(r => setTimeout(r, 1000));
  }
  return { message: 'Share task timed out' };
}
|
||||
308
packages/backend/src/cloud/drivers/quark-storage.ts
Normal file
308
packages/backend/src/cloud/drivers/quark-storage.ts
Normal file
@@ -0,0 +1,308 @@
|
||||
import { getHeaders, getCommonParams, makeQuery, getMparam, humanDelay, dailyFolderName, formatBytes, apiFetch, listDir, listDirAllPages, listRootDir, QuarkFile } from './quark-api';
|
||||
import { acquireStoken, getShareFiles, saveFiles, waitForTask } from './quark-share';
|
||||
|
||||
/**
|
||||
* 转存 & 存储管理模块。
|
||||
* 处理分享链接解析 → 转存 → 查/创建目标文件夹 → 文件重命名 → 递归统计。
|
||||
*/
|
||||
|
||||
// ==================== saveFromShare — 核心转存流水线 ====================
|
||||
|
||||
/**
 * Save files from a share link → magic rename → create shared link.
 *
 * Full pipeline: acquire stoken → read share detail → save into a daily
 * folder → wait for the save task → create a new share link (BEFORE
 * renaming, so the link covers all files) → anti-censorship rename →
 * ad cleanup → recursive file/folder count.
 *
 * @param cookie      - Quark account cookie.
 * @param nickname    - Account nickname.
 *                      NOTE(review): currently unused in this function —
 *                      kept for interface compatibility? confirm with callers.
 * @param shareUrl    - Incoming Quark share URL to transfer.
 * @param sourceTitle - Optional display title used when renaming the inner folder.
 * @returns success flag, user-facing message, and (on success) the new
 *          share URL/password, folder path, rename log, and item counts.
 */
export async function saveFromShare(
  cookie: string,
  nickname: string | undefined,
  shareUrl: string,
  sourceTitle?: string,
): Promise<{
  success: boolean;
  message: string;
  shareUrl?: string;
  sharePwd?: string;
  folderName?: string;
  taskId?: string;
  renamed?: string[];
  fileCount?: number;
  folderCount?: number;
  originalFolderName?: string;
}> {
  try {
    // Parse the share token (pwd_id) from the last URL path segment.
    const urlObj = new URL(shareUrl);
    const pwdId = urlObj.pathname.split('/').filter(Boolean).pop();
    if (!pwdId) {
      return { success: false, message: 'Invalid share URL: could not extract share token' };
    }

    // Step 1: Acquire stoken
    const stoken = await acquireStoken(cookie, pwdId);
    if (!stoken) {
      return { success: false, message: '😅 Oops!资源好像偷偷溜走了,换个链接试试吧~' };
    }

    // Step 2: Get share detail
    const shareInfo = await getShareFiles(cookie, pwdId, stoken);
    if (!shareInfo || !shareInfo.files || shareInfo.files.length === 0) {
      return { success: false, message: '🌚 空的!这个分享里啥都没有…' };
    }

    const { files: topFiles, topDir, childFiles } = shareInfo;
    const originalFolderName = topFiles[0]?.file_name || '';
    const fids = topFiles.map(f => f.fid);
    const fidTokens = topFiles.map(f => f.share_fid_token);

    // Find/create a per-day folder: every transfer lands in today's folder.
    await humanDelay();
    const saveDirName = dailyFolderName();
    console.log(`[Quark] saveFromShare: looking for/create dir "${saveDirName}"`);
    const saveDirFid = await findOrCreateDir(cookie, saveDirName);
    const targetPdirFid = saveDirFid || '0';
    if (saveDirFid) {
      console.log(`[Quark] Using save directory: ${saveDirName} (fid: ${saveDirFid})`);
    } else {
      // Non-fatal: fall back to saving at the cloud root.
      console.log(`[Quark] WARNING: failed to create/find dir "${saveDirName}", saving to root`);
    }

    // Step 3: Save top-level item(s) to the target directory
    const saveResult = await saveFiles(cookie, pwdId, stoken, fids, fidTokens.filter(Boolean) as string[], targetPdirFid);
    if (!saveResult.success) {
      return saveResult;
    }

    // saveResult.success implies taskId is set (see saveFiles).
    const taskId = saveResult.taskId!;

    // Step 4: Wait for save task to complete (poll up to 30s)
    const savedFids = await waitForTask(cookie, taskId, 30000);
    if (!savedFids || savedFids.length === 0) {
      // Files may still have been saved; only the result lookup timed out.
      return { success: true, message: '文件已保存,但获取保存结果超时' };
    }

    // Step 5: Magic rename files — with random delay to avoid detection
    await humanDelay();
    const renamed: Array<{ original: string; renamed: string }> = [];
    let shareFid = '';
    let savedFolderName = '';
    let newInnerDirName = '';

    if (topDir && childFiles && childFiles.length > 0) {
      // ── Single folder share ──
      const savedDirFid = savedFids[0];
      shareFid = savedDirFid;
      savedFolderName = topFiles[0]?.file_name || '';
    } else {
      // ── Multiple files at top level ──
      shareFid = savedFids[0];
      savedFolderName = topFiles[0]?.file_name || '';
    }

    // Step 6: Create share link FIRST (before rename), so all files are guaranteed to be shared
    await humanDelay();
    let shareUrlResult = '';
    let sharePwdResult = '';
    let shareMsg = '';
    // NOTE(review): successCount is declared but never read or updated below — candidate for removal.
    let successCount = 0; // total items (files + folders) actually saved

    const { createShareLink } = await import('./quark-share');
    if (shareFid) {
      const shareResult = await createShareLink(cookie, shareFid);
      if (shareResult.success && shareResult.shareUrl) {
        shareUrlResult = shareResult.shareUrl;
        if (shareResult.sharePwd) sharePwdResult = shareResult.sharePwd;
      } else {
        // Sharing failure is reported in the message but does not abort the save.
        shareMsg = `(分享失败:${shareResult.message})`;
      }
    }

    const { magicRenameDir, magicRename } = await import('./quark-rename');
    const { renameFile } = await import('./quark-share');

    // Step 7: Rename files AFTER creating the share link (anti-harmony, won't affect the share)
    if (topDir && childFiles && childFiles.length > 0) {
      // ── Single folder share ──
      const savedDirFid = savedFids[0];

      // List files inside the saved directory
      const dirFiles = await listDir(cookie, savedDirFid);
      if (dirFiles && dirFiles.length > 0) {
        for (const file of dirFiles) {
          if (file.dir) continue; // only plain files are renamed here
          const newName = magicRename(file.file_name);
          const renameOk = await renameFile(cookie, file.fid, newName);
          if (renameOk) {
            renamed.push({ original: file.file_name, renamed: newName });
          }
        }
      }

      // Also rename the inner folder itself (the actual shared folder)
      const innerDirOriginalName = sourceTitle || topFiles[0]?.file_name || '';
      if (innerDirOriginalName) {
        newInnerDirName = magicRenameDir(innerDirOriginalName);
        const innerDirRenameOk = await renameFile(cookie, savedDirFid, newInnerDirName);
        if (innerDirRenameOk) {
          console.log(`[Quark] Renamed inner folder: ${innerDirOriginalName} → ${newInnerDirName}`);
        }
      }
    } else {
      // ── Multiple files at top level ──
      // savedFids and topFiles are assumed to correspond positionally —
      // TODO confirm the API preserves order.
      for (let i = 0; i < savedFids.length && i < topFiles.length; i++) {
        const originalName = topFiles[i].file_name;
        if (topFiles[i].dir) continue;
        const newName = magicRename(originalName);
        const renameOk = await renameFile(cookie, savedFids[i], newName);
        if (renameOk) {
          renamed.push({ original: originalName, renamed: newName });
        }
      }
    }

    // Step 7.5: Ad-keyword cleanup + create warning folders (best-effort)
    if (shareFid) {
      try {
        const { runAdCleanup } = await import('./quark-ad-cleanup');
        const adResult = await runAdCleanup(cookie, shareFid);
        if (adResult.adDeleted > 0) {
          console.log(`[Quark] 广告清理完成: 删除了 ${adResult.adDeleted} 个广告文件/文件夹`);
        }
        if (adResult.warningDirs > 0) {
          console.log(`[Quark] 已创建 ${adResult.warningDirs} 个警示文件夹`);
        }
      } catch (err: any) {
        // Non-fatal: cleanup failure never aborts the transfer.
        console.log(`[Quark] 广告清理/警示文件夹创建失败(非致命): ${err.message}`);
      }
    }

    // Step 8: DAY FOLDER STAYS AS-IS (e.g. "2026-05-03")
    // DO NOT rename the date folder — it serves as the organizational container.
    savedFolderName = newInnerDirName ? `${saveDirName}/${newInnerDirName}` : saveDirName;

    // Recursively count files and folders from saved cloud directory
    let fileCount = 0;
    let folderCount = 0;
    if (shareFid) {
      try {
        const counts = await countRecursive(cookie, shareFid);
        fileCount = counts.fileCount;
        folderCount = counts.folderCount;
      } catch {
        console.log('[Quark] Recursive count failed, using fallback');
      }
    }
    // If recursive count returned nothing, estimate from the share listing.
    if (fileCount === 0 && folderCount === 0) {
      if (topDir && childFiles) {
        folderCount = 1 + childFiles.filter(f => f.dir).length;
        fileCount = childFiles.filter(f => !f.dir).length;
      } else {
        folderCount = topFiles.filter(f => f.dir).length;
        fileCount = topFiles.filter(f => !f.dir).length;
      }
    }

    const renameMsg = renamed.length > 0
      ? `,已重命名 ${renamed.length} 个文件`
      : '';
    const folderMsg = savedFolderName ? `到文件夹「${savedFolderName}」` : '';

    return {
      success: true,
      message: `已保存${folderMsg}${renameMsg}${shareMsg}`,
      shareUrl: shareUrlResult || undefined,
      sharePwd: sharePwdResult || undefined,
      folderName: savedFolderName,
      taskId,
      renamed: renamed.map(r => `${r.original} → ${r.renamed}`),
      fileCount,
      folderCount,
      originalFolderName,
    };
  } catch (err: any) {
    return { success: false, message: err.message || 'Network error' };
  }
}
|
||||
|
||||
// ==================== Dir Management ====================
|
||||
|
||||
/**
|
||||
* Create a new directory at root.
|
||||
*/
|
||||
export async function createDir(cookie: string, dirName: string): Promise<string | null> {
|
||||
try {
|
||||
const resp = await fetch(
|
||||
`https://drive-pc.quark.cn/1/clouddrive/file?${makeQuery()}`,
|
||||
{
|
||||
method: 'POST',
|
||||
headers: { ...getHeaders(cookie), 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
pdir_fid: '0',
|
||||
file_name: dirName,
|
||||
dir: true,
|
||||
dir_path: '',
|
||||
}),
|
||||
signal: AbortSignal.timeout(10000),
|
||||
},
|
||||
);
|
||||
const data = await resp.json() as any;
|
||||
if (data.status === 200 && data.data?.fid) {
|
||||
console.log(`[Quark] Created dir "${dirName}" (fid: ${data.data.fid})`);
|
||||
return data.data.fid;
|
||||
}
|
||||
console.log(`[Quark] createDir API returned non-200: status=${data.status} msg=${data.message}`);
|
||||
return null;
|
||||
} catch (err: any) {
|
||||
console.log(`[Quark] createDir error: ${err.message}`);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Find an existing directory by name, or create it if not found.
|
||||
*/
|
||||
export async function findOrCreateDir(cookie: string, dirName: string): Promise<string | null> {
|
||||
try {
|
||||
const rootFiles = await listDirAllPages(cookie, '0');
|
||||
const existing = rootFiles.find(f => f.dir && f.file_name === dirName);
|
||||
if (existing?.fid) {
|
||||
console.log(`[Quark] Found existing daily folder: ${dirName} (fid: ${existing.fid})`);
|
||||
return existing.fid;
|
||||
}
|
||||
console.log(`[Quark] Daily folder "${dirName}" not found, creating...`);
|
||||
} catch (err: any) {
|
||||
console.log(`[Quark] findOrCreateDir list error: ${err.message}`);
|
||||
}
|
||||
const fid = await createDir(cookie, dirName);
|
||||
console.log(`[Quark] createDir result for "${dirName}": ${fid || 'null'}`);
|
||||
return fid;
|
||||
}
|
||||
|
||||
// ==================== Recursive Count ====================
|
||||
|
||||
/**
|
||||
* Recursively count files and folders for a saved cloud directory.
|
||||
*/
|
||||
export async function countRecursive(cookie: string, pdirFid: string): Promise<{ fileCount: number; folderCount: number }> {
|
||||
let fileCount = 0;
|
||||
let folderCount = 0;
|
||||
const stack = [pdirFid];
|
||||
const visited = new Set<string>();
|
||||
while (stack.length > 0) {
|
||||
const fid = stack.pop()!;
|
||||
if (visited.has(fid)) continue;
|
||||
visited.add(fid);
|
||||
const files = await listDir(cookie, fid);
|
||||
if (!files) continue;
|
||||
for (const f of files) {
|
||||
if (f.dir) {
|
||||
folderCount++;
|
||||
stack.push(f.fid);
|
||||
} else {
|
||||
fileCount++;
|
||||
}
|
||||
}
|
||||
}
|
||||
return { fileCount, folderCount };
|
||||
}
|
||||
122
packages/backend/src/cloud/drivers/quark.driver.ts
Executable file
122
packages/backend/src/cloud/drivers/quark.driver.ts
Executable file
@@ -0,0 +1,122 @@
|
||||
/**
|
||||
* QuarkDriver — 夸克网盘统一驱动
|
||||
*
|
||||
* 为保持向后兼容性,此类将所有方法委托到子模块。
|
||||
* 新代码应直接导入子模块函数。
|
||||
*
|
||||
* 模块结构:
|
||||
* quark-api.ts — HTTP 封装、headers、params、共享工具函数
|
||||
* quark-auth.ts — Cookie 验证
|
||||
* quark-storage.ts — 转存流水线、目录管理、递归统计
|
||||
* quark-share.ts — 分享链接解析、转存任务、创建分享链接
|
||||
* quark-rename.ts — 防和谐重命名(文件名/目录名)
|
||||
* quark-cleanup.ts — 容量信息、空间清理
|
||||
* quark-driver.ts — 统一导出类(兼容旧代码)
|
||||
*/
|
||||
|
||||
import { QuarkConfig } from './quark-api';
|
||||
import { validate } from './quark-auth';
|
||||
import { saveFromShare, createDir, findOrCreateDir, countRecursive } from './quark-storage';
|
||||
import { createShareLink, renameFile } from './quark-share';
|
||||
import {
|
||||
getStorageInfoQuick, getStorageInfo,
|
||||
calculateUsedSpace, trashFiles, emptyTrash,
|
||||
cleanupOldDateFolders, cleanupBySpaceThreshold,
|
||||
} from './quark-cleanup';
|
||||
|
||||
export type { QuarkConfig, QuarkFile } from './quark-api';
|
||||
export * from './quark-api';
|
||||
export * from './quark-auth';
|
||||
export * from './quark-storage';
|
||||
export * from './quark-share';
|
||||
export * from './quark-rename';
|
||||
export * from './quark-cleanup';
|
||||
|
||||
export { validate } from './quark-auth';
|
||||
|
||||
/**
|
||||
* QuarkDriver — 向后兼容的驱动类。
|
||||
* 所有方法委托到纯函数模块,不持有状态。
|
||||
*/
|
||||
export class QuarkDriver {
|
||||
private config: QuarkConfig;
|
||||
|
||||
constructor(config: QuarkConfig) {
|
||||
this.config = config;
|
||||
}
|
||||
|
||||
get cookie(): string {
|
||||
return this.config.cookie;
|
||||
}
|
||||
|
||||
// ==================== Auth ====================
|
||||
|
||||
async validate(): Promise<boolean> {
|
||||
return validate(this.config.cookie);
|
||||
}
|
||||
|
||||
// ==================== Storage (Save from Share) ====================
|
||||
|
||||
async saveFromShare(shareUrl: string, sourceTitle?: string) {
|
||||
return saveFromShare(this.config.cookie, this.config.nickname, shareUrl, sourceTitle);
|
||||
}
|
||||
|
||||
async createDir(dirName: string): Promise<string | null> {
|
||||
return createDir(this.config.cookie, dirName);
|
||||
}
|
||||
|
||||
async findOrCreateDir(dirName: string): Promise<string | null> {
|
||||
return findOrCreateDir(this.config.cookie, dirName);
|
||||
}
|
||||
|
||||
async countRecursive(pdirFid: string) {
|
||||
return countRecursive(this.config.cookie, pdirFid);
|
||||
}
|
||||
|
||||
// ==================== Share ====================
|
||||
|
||||
async createShareLink(fileId: string) {
|
||||
return createShareLink(this.config.cookie, fileId);
|
||||
}
|
||||
|
||||
async renameFile(fid: string, newName: string): Promise<boolean> {
|
||||
return renameFile(this.config.cookie, fid, newName);
|
||||
}
|
||||
|
||||
// ==================== Storage Info ====================
|
||||
|
||||
async getStorageInfoQuick() {
|
||||
return getStorageInfoQuick(this.config.cookie);
|
||||
}
|
||||
|
||||
async getStorageInfo() {
|
||||
return getStorageInfo(this.config.cookie);
|
||||
}
|
||||
|
||||
async calculateUsedSpace(): Promise<number> {
|
||||
return calculateUsedSpace(this.config.cookie);
|
||||
}
|
||||
|
||||
// ==================== Cleanup ====================
|
||||
|
||||
async listRootDir() {
|
||||
const { listRootDir } = await import('./quark-api');
|
||||
return listRootDir(this.config.cookie);
|
||||
}
|
||||
|
||||
async trashFiles(fids: string[]): Promise<boolean> {
|
||||
return trashFiles(this.config.cookie, fids);
|
||||
}
|
||||
|
||||
async emptyTrash(): Promise<boolean> {
|
||||
return emptyTrash(this.config.cookie);
|
||||
}
|
||||
|
||||
async cleanupOldDateFolders(days: number) {
|
||||
return cleanupOldDateFolders(this.config.cookie, days);
|
||||
}
|
||||
|
||||
async cleanupBySpaceThreshold(thresholdPercent: number, deletePercent: number) {
|
||||
return cleanupBySpaceThreshold(this.config.cookie, thresholdPercent, deletePercent);
|
||||
}
|
||||
}
|
||||
70
packages/backend/src/cloud/error-codes.ts
Normal file
70
packages/backend/src/cloud/error-codes.ts
Normal file
@@ -0,0 +1,70 @@
|
||||
// Standard error codes for all cloud drivers
|
||||
export const ErrCode = {
|
||||
COOKIE_EXPIRED: 'COOKIE_EXPIRED',
|
||||
COOKIE_INVALID: 'COOKIE_INVALID',
|
||||
TOKEN_EXPIRED: 'TOKEN_EXPIRED',
|
||||
SHARE_NOT_FOUND: 'SHARE_NOT_FOUND',
|
||||
SHARE_EXPIRED: 'SHARE_EXPIRED',
|
||||
PASSWORD_REQUIRED: 'PASSWORD_REQUIRED',
|
||||
PASSWORD_WRONG: 'PASSWORD_WRONG',
|
||||
CAPACITY_FULL: 'CAPACITY_FULL',
|
||||
FILE_EXISTS: 'FILE_EXISTS',
|
||||
RATE_LIMITED: 'RATE_LIMITED',
|
||||
TRANSFER_FAILED: 'TRANSFER_FAILED',
|
||||
NETWORK_ERROR: 'NETWORK_ERROR',
|
||||
UNSUPPORTED: 'UNSUPPORTED',
|
||||
UNKNOWN: 'UNKNOWN',
|
||||
} as const;
|
||||
|
||||
export type ErrorCode = typeof ErrCode[keyof typeof ErrCode];
|
||||
|
||||
const messages: Record<string, string> = {
|
||||
[ErrCode.COOKIE_EXPIRED]: 'Cookie已过期,请重新登录',
|
||||
[ErrCode.COOKIE_INVALID]: 'Cookie无效,请检查配置',
|
||||
[ErrCode.TOKEN_EXPIRED]: 'Token已过期,请刷新',
|
||||
[ErrCode.SHARE_NOT_FOUND]: '分享链接不存在或已被删除',
|
||||
[ErrCode.SHARE_EXPIRED]: '分享链接已过期',
|
||||
[ErrCode.PASSWORD_REQUIRED]: '需要提取码',
|
||||
[ErrCode.PASSWORD_WRONG]: '提取码错误',
|
||||
[ErrCode.CAPACITY_FULL]: '网盘容量不足',
|
||||
[ErrCode.RATE_LIMITED]: '请求过于频繁,请稍后重试',
|
||||
[ErrCode.TRANSFER_FAILED]: '转存失败',
|
||||
[ErrCode.NETWORK_ERROR]: '网络请求失败',
|
||||
[ErrCode.UNKNOWN]: '未知错误',
|
||||
};
|
||||
|
||||
export function errorResponse(code: ErrorCode, detail?: string) {
|
||||
return {
|
||||
success: false,
|
||||
code,
|
||||
message: messages[code] + (detail ? ': ' + detail : ''),
|
||||
};
|
||||
}
|
||||
|
||||
export class TransferError extends Error {
|
||||
code: ErrorCode;
|
||||
detail?: string;
|
||||
cookieExpired: boolean;
|
||||
|
||||
constructor(code: ErrorCode, detail?: string) {
|
||||
super(messages[code] + (detail ? ': ' + detail : ''));
|
||||
this.code = code;
|
||||
this.detail = detail;
|
||||
this.cookieExpired = (code === ErrCode.COOKIE_EXPIRED || code === ErrCode.COOKIE_INVALID);
|
||||
}
|
||||
}
|
||||
|
||||
/** Detect error code from driver result message (for untagged drivers) */
|
||||
export function detectErrorCode(result: { message?: string; cookieExpired?: boolean }): ErrorCode | null {
|
||||
if (!result || !result.message) return null;
|
||||
if (result.cookieExpired) return ErrCode.COOKIE_EXPIRED;
|
||||
const msg = result.message.toLowerCase();
|
||||
if (msg.includes('cookie') || msg.includes('登录') || msg.includes('bdstoken')) return ErrCode.COOKIE_EXPIRED;
|
||||
if (msg.includes('不存在') || msg.includes('not found') || msg.includes('已删除')) return ErrCode.SHARE_NOT_FOUND;
|
||||
if (msg.includes('过期') || msg.includes('expired')) return ErrCode.SHARE_EXPIRED;
|
||||
if (msg.includes('提取码') || msg.includes('密码') || msg.includes('password')) return ErrCode.PASSWORD_WRONG;
|
||||
if (msg.includes('容量') || msg.includes('空间') || msg.includes('capacity')) return ErrCode.CAPACITY_FULL;
|
||||
if (msg.includes('频繁') || msg.includes('稍后') || msg.includes('rate')) return ErrCode.RATE_LIMITED;
|
||||
if (msg.includes('网络') || msg.includes('fetch') || msg.includes('timeout')) return ErrCode.NETWORK_ERROR;
|
||||
return ErrCode.TRANSFER_FAILED;
|
||||
}
|
||||
31
packages/backend/src/cloud/ip-lookup.ts
Normal file
31
packages/backend/src/cloud/ip-lookup.ts
Normal file
@@ -0,0 +1,31 @@
|
||||
/**
|
||||
* IP 归属地查询工具
|
||||
* 通过系统配置中的 IP 地理接口查询
|
||||
*/
|
||||
|
||||
import { getSystemConfig } from '../admin/system-config.service';
|
||||
|
||||
export async function lookupIpLocation(ip: string): Promise<string | null> {
|
||||
if (!ip || ip === '127.0.0.1' || ip === '::1' || ip.startsWith('192.168.') || ip.startsWith('10.')) {
|
||||
return null;
|
||||
}
|
||||
try {
|
||||
const apiUrlTemplate = getSystemConfig('ip_geo_api_url');
|
||||
if (!apiUrlTemplate) return null;
|
||||
const url = apiUrlTemplate.replace('{ip}', encodeURIComponent(ip));
|
||||
|
||||
const res = await fetch(url, { signal: AbortSignal.timeout(5000) });
|
||||
if (!res.ok) return null;
|
||||
const data = await res.json() as {
|
||||
code: number; sheng?: string; shi?: string; qu?: string;
|
||||
isp?: string; msg?: string; guo?: string;
|
||||
};
|
||||
if (data.code !== 200) return null;
|
||||
// Format: "四川 绵阳 江油 中国联通" — strip 省/市/区/州 suffixes for compact display
|
||||
const stripSuffix = (s: string | undefined) => s?.replace(/[省市州区]$/, '');
|
||||
const parts = [stripSuffix(data.sheng), stripSuffix(data.shi), stripSuffix(data.qu), data.isp].filter(Boolean);
|
||||
return parts.join(' ');
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
95
packages/backend/src/cloud/notification.service.ts
Normal file
95
packages/backend/src/cloud/notification.service.ts
Normal file
@@ -0,0 +1,95 @@
|
||||
// Native fetch available in Node 20+
|
||||
import { getSystemConfig } from '../admin/system-config.service';
|
||||
|
||||
// Severity levels understood by all notification channels.
type NotifyLevel = 'info' | 'warn' | 'error';

// Contract for one outbound notification channel (e.g. a Feishu webhook).
interface NotifyChannel {
  // Deliver one message. Implementations are best-effort and must not throw.
  send(title: string, content: string, level: NotifyLevel): Promise<void>;
}
||||
|
||||
// ---- Feishu Webhook Channel ----
|
||||
class FeishuChannel implements NotifyChannel {
|
||||
private webhookUrl: string;
|
||||
|
||||
constructor(webhookUrl: string) {
|
||||
this.webhookUrl = webhookUrl;
|
||||
}
|
||||
|
||||
async send(title: string, content: string, _level: NotifyLevel): Promise<void> {
|
||||
try {
|
||||
const body = JSON.stringify({
|
||||
msg_type: 'interactive',
|
||||
card: {
|
||||
header: {
|
||||
title: { tag: 'plain_text', content: title },
|
||||
template: _level === 'error' ? 'red' : _level === 'warn' ? 'orange' : 'blue',
|
||||
},
|
||||
elements: [
|
||||
{ tag: 'div', text: { tag: 'lark_md', content } },
|
||||
{
|
||||
tag: 'note',
|
||||
elements: [
|
||||
{ tag: 'plain_text', content: `CloudSearch · ${new Date().toLocaleString('zh-CN', { timeZone: 'Asia/Shanghai' })}` },
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
});
|
||||
|
||||
const resp = await fetch(this.webhookUrl, {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body,
|
||||
});
|
||||
|
||||
if (!resp.ok) {
|
||||
console.error(`[Notify] Feishu send failed: ${resp.status}`);
|
||||
}
|
||||
} catch (err: any) {
|
||||
console.error('[Notify] Feishu send error:', err.message);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ---- Notification Manager ----
|
||||
let _channel: NotifyChannel | null = null;
|
||||
|
||||
function getChannel(): NotifyChannel | null {
|
||||
const feishuUrl = process.env.FEISHU_WEBHOOK || getSystemConfig('feishu_webhook_url');
|
||||
if (!feishuUrl) return null;
|
||||
|
||||
if (!_channel) {
|
||||
_channel = new FeishuChannel(feishuUrl);
|
||||
console.log('[Notify] Feishu webhook configured');
|
||||
}
|
||||
return _channel;
|
||||
}
|
||||
|
||||
/**
|
||||
* Send a notification through configured channels.
|
||||
* Returns immediately — failures are logged silently.
|
||||
*/
|
||||
export function notify(title: string, content: string, level: NotifyLevel = 'info'): void {
|
||||
const ch = getChannel();
|
||||
if (!ch) return;
|
||||
// Fire-and-forget — don't block the caller
|
||||
ch.send(title, content, level).catch(() => {});
|
||||
}
|
||||
|
||||
/**
 * Notify on critical events:
 * - Cookie expired / login failed
 * - Save/transfer failed repeatedly
 * - Storage below threshold
 */
export function notifyError(title: string, detail: string): void {
  notify(`⚠️ ${title}`, detail, 'error');
}

/** Warning-level convenience wrapper. */
export function notifyWarn(title: string, detail: string): void {
  notify(`🔔 ${title}`, detail, 'warn');
}

/** Info-level convenience wrapper. */
export function notifyInfo(title: string, detail: string): void {
  notify(`ℹ️ ${title}`, detail, 'info');
}
|
||||
537
packages/backend/src/cloud/qr-login.service.ts
Executable file
537
packages/backend/src/cloud/qr-login.service.ts
Executable file
@@ -0,0 +1,537 @@
|
||||
import { chromium, BrowserContext, Page } from 'playwright';
|
||||
import jsQR from 'jsqr';
|
||||
import { getDb } from '../database/database';
|
||||
import { escapeLike } from '../utils/time';
|
||||
|
||||
// One in-flight QR login attempt, backed by a live headless-browser page.
interface QrSession {
  id: string;
  browserContext: BrowserContext;
  page: Page;
  createdAt: number;       // epoch ms; used for SESSION_TTL expiry
  cookieSnapshot: string;  // latest "name=value; ..." cookie string captured
  lastPollAt: number;      // epoch ms of the last background poll tick
  qrUrl: string;           // decoded QR content handed to the client to render
  status: 'pending' | 'scanned' | 'logged_in' | 'expired' | 'error';
  error?: string;
}

// In-memory registry of active sessions, keyed by session id.
const SESSIONS = new Map<string, QrSession>();
const SESSION_TTL = 5 * 60 * 1000; // 5 minutes
const COOKIE_CHECK_INTERVAL = 1500; // 1.5s between cookie checks

// Browser binary for Playwright; overridable via env for other base images.
const CHROMIUM_PATH = process.env.CHROMIUM_PATH || '/usr/bin/chromium-browser';
|
||||
|
||||
// Clean up old sessions periodically
|
||||
setInterval(() => {
|
||||
const now = Date.now();
|
||||
for (const [id, session] of SESSIONS.entries()) {
|
||||
if (now - session.createdAt > SESSION_TTL) {
|
||||
cleanupSession(id);
|
||||
}
|
||||
}
|
||||
}, 60000);
|
||||
|
||||
function cleanupSession(id: string) {
|
||||
const session = SESSIONS.get(id);
|
||||
if (session) {
|
||||
try {
|
||||
session.browserContext.close().catch(() => {});
|
||||
} catch {}
|
||||
try {
|
||||
session.page.context().browser()?.close().catch(() => {});
|
||||
} catch {}
|
||||
SESSIONS.delete(id);
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Decode the login QR code from the Quark login page.
 *
 * Raw RGBA pixels are read out of candidate <canvas> elements via
 * page.evaluate (passed as a string so it runs verbatim in the page
 * context) and decoded in Node with jsQR. A decoded URL containing
 * "su.quark.cn" is preferred; as a last resort the first decodable QR
 * from any canvas on the page is returned.
 *
 * @throws Error when the page has no usable canvas or none decodes to a QR.
 */
async function extractQrUrl(page: Page): Promise<string> {
  // Most-specific selectors first; '#登录账号 canvas' targets the login tab pane.
  const selectors = [
    'canvas:not(#react-qrcode-logo)',
    '.qrcode-display canvas',
    '#登录账号 canvas',
  ];

  for (const selector of selectors) {
    // Stringified IIFE so the page runtime parses it, not the bundler.
    const raw = await page.evaluate(`(sel => {
      const canvas = document.querySelector(sel);
      if (!canvas || !canvas.getContext) return null;
      try {
        var ctx = canvas.getContext('2d');
        if (!ctx) return null;
        var imageData = ctx.getImageData(0, 0, canvas.width, canvas.height);
        return {
          w: canvas.width,
          h: canvas.height,
          data: Array.from(imageData.data)
        };
      } catch(e) { return null; }
    })('${selector}')`).catch(() => null) as { w: number; h: number; data: number[] } | null;

    if (raw && raw.data && raw.data.length > 0) {
      const code = jsQR(new Uint8ClampedArray(raw.data), raw.w, raw.h);
      if (code && code.data) {
        // Targeted selectors only accept the real login QR domain.
        if (code.data.includes('su.quark.cn')) {
          return code.data;
        }
      }
    }
  }

  // Fallback: scan all canvases
  const raw = await page.evaluate(`(() => {
    const canvases = document.querySelectorAll('canvas');
    var results = [];
    for (var i = 0; i < canvases.length; i++) {
      try {
        var c = canvases[i];
        var ctx = c.getContext('2d');
        if (!ctx) continue;
        var imageData = ctx.getImageData(0, 0, c.width, c.height);
        results.push({
          index: i,
          w: c.width,
          h: c.height,
          data: Array.from(imageData.data)
        });
      } catch(e) {}
    }
    return results;
  })()`) as unknown as { index: number; w: number; h: number; data: number[] }[];

  if (!raw || raw.length === 0) {
    throw new Error('页面没有可用的 canvas');
  }

  // Prefer a su.quark.cn QR; otherwise remember the first decodable one.
  let bestUrl = '';
  for (const canvas of raw) {
    const code = jsQR(new Uint8ClampedArray(canvas.data), canvas.w, canvas.h);
    if (code && code.data) {
      if (code.data.includes('su.quark.cn')) {
        return code.data;
      }
      if (!bestUrl) {
        bestUrl = code.data;
      }
    }
  }

  if (bestUrl) {
    return bestUrl;
  }

  throw new Error('无法解析二维码内容');
}
|
||||
|
||||
/**
|
||||
* Test if a cookie string can actually access Quark API.
|
||||
* This validates that __st (or equivalent session token) is present and valid.
|
||||
*/
|
||||
async function isCookieValid(cookieStr: string): Promise<boolean> {
|
||||
try {
|
||||
const response = await fetch('https://pan.quark.cn/account/info', {
|
||||
headers: {
|
||||
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
|
||||
'Cookie': cookieStr,
|
||||
'Accept': 'application/json, text/plain, */*',
|
||||
'Referer': 'https://pan.quark.cn/',
|
||||
'Origin': 'https://pan.quark.cn',
|
||||
},
|
||||
signal: AbortSignal.timeout(10000),
|
||||
});
|
||||
if (!response.ok) return false;
|
||||
const data = await response.json() as any;
|
||||
return data?.status === 200 && data?.data?.nickname ? true : false;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if cookies contain __st or equivalent session token.
|
||||
* __st is the critical token needed for API access.
|
||||
* Also accepts __pus, __ktd, pus as valid session indicators.
|
||||
*/
|
||||
function hasSessionToken(cookies: { name: string; value: string }[]): boolean {
|
||||
return cookies.some(
|
||||
c => (c.name === '__st' || c.name === 'pus' || c.name === '__pus' || c.name === '__ktd')
|
||||
);
|
||||
}
|
||||
|
||||
/**
 * Verify the session's cookies by calling the Quark account-info API from
 * inside the browser page itself (cookies attach automatically via
 * credentials: 'include', and the page has the full JS context for any
 * client-side signing).
 *
 * @returns true iff the API reports status 200 with a nickname;
 *          false on any navigation/network/JSON-parse failure.
 */
async function verifyCookieInBrowser(session: QrSession): Promise<boolean> {
  try {
    const resp = await session.page.evaluate(async () => {
      const r = await fetch('https://pan.quark.cn/account/info', {
        credentials: 'include',
      });
      return await r.text();
    });
    const data = JSON.parse(resp);
    return data?.status === 200 && !!data?.data?.nickname;
  } catch {
    // Page closed / navigated away / non-JSON body — treat as not verified.
    return false;
  }
}
|
||||
|
||||
/**
|
||||
* Wait for __st cookie to appear after login.
|
||||
* Keeps checking for up to `timeoutMs` milliseconds.
|
||||
*/
|
||||
async function waitForStCookie(session: QrSession, timeoutMs: number): Promise<boolean> {
|
||||
const start = Date.now();
|
||||
while (Date.now() - start < timeoutMs) {
|
||||
const cookies = await session.browserContext.cookies();
|
||||
if (hasSessionToken(cookies)) {
|
||||
const cookieStr = cookies.map(c => `${c.name}=${c.value}`).join('; ');
|
||||
session.cookieSnapshot = cookieStr;
|
||||
return true;
|
||||
}
|
||||
await new Promise(r => setTimeout(r, 500));
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
// ==================== Public API ====================
|
||||
|
||||
/**
 * Start a QR code login session.
 *
 * Launches a headless Chromium, opens pan.quark.cn, decodes the login QR
 * from the page canvas, and registers an in-memory session that a
 * background loop (pollLoginStatus) watches for the login cookies.
 * Track progress with getQrLoginStatus(sessionId); sessions expire after
 * SESSION_TTL and their browsers are reaped by the periodic sweeper.
 *
 * @returns sessionId, the decoded QR url for the client to render, and the
 *          session lifetime in seconds.
 * @throws when the browser cannot start, the page fails to load, or no QR
 *         could be decoded (browser resources are torn down first).
 */
export async function startQrLogin(): Promise<{
  sessionId: string;
  qrUrl: string;
  expiresIn: number;
}> {
  // Clean up any existing expired sessions
  for (const [id, session] of SESSIONS.entries()) {
    if (Date.now() - session.createdAt > SESSION_TTL) {
      cleanupSession(id);
    }
  }

  // Container-friendly launch flags (no sandbox, small /dev/shm).
  const browser = await chromium.launch({
    executablePath: CHROMIUM_PATH,
    headless: true,
    args: [
      '--no-sandbox',
      '--disable-setuid-sandbox',
      '--disable-dev-shm-usage',
      '--disable-gpu',
      '--no-first-run',
      '--no-zygote',
    ],
  });

  // Desktop-Chrome fingerprint so the site serves the PC login page.
  const browserContext = await browser.newContext({
    userAgent:
      'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
    viewport: { width: 1280, height: 800 },
    locale: 'zh-CN',
  });

  const page = await browserContext.newPage();
  // Compact unique id: base36 timestamp + 6 random base36 chars.
  const sessionId = Date.now().toString(36) + Math.random().toString(36).slice(2, 8);

  try {
    await page.goto('https://pan.quark.cn/', {
      waitUntil: 'commit',
      timeout: 30000,
    });

    // Give the QR canvas time to render before reading its pixels.
    await page.waitForSelector('canvas', { timeout: 15000 });
    await page.waitForTimeout(2000);

    const qrUrl = await extractQrUrl(page);

    const cookies = await browserContext.cookies();
    const cookieSnapshot = cookies.map(c => `${c.name}=${c.value}`).join('; ');

    const session: QrSession = {
      id: sessionId,
      browserContext,
      page,
      createdAt: Date.now(),
      cookieSnapshot,
      lastPollAt: Date.now(),
      qrUrl,
      status: 'pending',
    };

    SESSIONS.set(sessionId, session);

    // Start background polling for login detection
    pollLoginStatus(session);

    // Handle page navigation (like redirect after login)
    page.on('framenavigated', async (frame) => {
      if (frame === page.mainFrame()) {
        const url = frame.url();
        if (url === 'about:blank') {
          await checkAndCaptureCookies(session);
        }
      }
    });

    // Handle popups/dialogs
    page.on('popup', async (popup) => {
      try {
        await popup.waitForLoadState('networkidle', { timeout: 10000 });
        await checkAndCaptureCookies(session);
      } catch {}
    });

    return {
      sessionId,
      qrUrl,
      expiresIn: SESSION_TTL / 1000,
    };
  } catch (err: any) {
    // Failed before the session was usable — release browser resources.
    try { await browserContext.close(); } catch {}
    try { browser.close().catch(() => {}); } catch {}
    SESSIONS.delete(sessionId);
    throw new Error(`启动扫码登录失败: ${err.message}`);
  }
}
|
||||
|
||||
/**
 * Background poll loop for one QR session, ticking every
 * COOKIE_CHECK_INTERVAL ms until login, expiry, or page closure.
 *
 * Detection strategy:
 *  1. Once a session token cookie is present, try to verify it via the
 *     in-browser API call, then via a direct Node fetch; if both fail,
 *     accept optimistically (getQrLoginStatus re-validates later).
 *  2. A pus/__pus/__ktd cookie without __st means "QR scanned, login still
 *     completing" — keep polling instead of stopping early.
 *  3. A navigation away from the login page triggers an extra cookie
 *     capture attempt.
 * The loop also enforces SESSION_TTL and cleans the session up on expiry.
 */
async function pollLoginStatus(session: QrSession) {
  let foundLogin = false;

  const checkInterval = setInterval(async () => {
    try {
      const now = Date.now();

      // Check if expired
      if (now - session.createdAt > SESSION_TTL) {
        clearInterval(checkInterval);
        session.status = 'expired';
        cleanupSession(session.id);
        return;
      }

      session.lastPollAt = now;

      // Check cookies
      const cookies = await session.browserContext.cookies();
      const cookieStr = cookies.map(c => `${c.name}=${c.value}`).join('; ');

      // Phase 1: Look for __st specifically (the critical session token)
      const hasSt = hasSessionToken(cookies);

      if (hasSt) {
        session.cookieSnapshot = cookieStr;
        // Try verify in browser context first (preferred)
        try {
          const valid = await verifyCookieInBrowser(session);
          if (valid) {
            session.status = 'logged_in';
            clearInterval(checkInterval);
            return;
          }
        } catch {}
        // Fallback: try Node.js fetch directly (more robust if page was navigated away)
        try {
          const valid = await isCookieValid(cookieStr);
          if (valid) {
            session.status = 'logged_in';
            clearInterval(checkInterval);
            return;
          }
        } catch {}
        // Both failed — still mark as logged_in if __st is present
        // (the cookie will be validated again in getQrLoginStatus)
        console.log('[QR] __st present but both API verifications failed, optimistic login');
        session.status = 'logged_in';
        clearInterval(checkInterval);
        return;
      }

      // Phase 2: If we found __pus/__ktd but no __st yet, keep polling
      // (don't stop early like before)
      // NOTE(review): hasSessionToken() above already matches pus/__pus/__ktd,
      // so this branch appears unreachable — confirm intended semantics.
      const hasPus = cookies.some(
        c => (c.name === 'pus' || c.name === '__pus' || c.name === '__ktd')
      );

      if (hasPus && !foundLogin) {
        foundLogin = true;
        console.log('[QR] QR scanned, waiting for __st cookie...');
        session.cookieSnapshot = cookieStr;
        // Don't mark as logged_in — keep polling for __st
      }

      // Check URL change as alternative indicator
      const url = session.page.url();
      if (!url.includes('login') && !url.includes('qrcode') && url !== 'about:blank' && url !== 'https://pan.quark.cn/' && url.length > 10) {
        await checkAndCaptureCookies(session);
      }
    } catch (err: any) {
      // Page might have been closed
      clearInterval(checkInterval);
    }
  }, COOKIE_CHECK_INTERVAL);
}
|
||||
|
||||
/**
|
||||
* Check cookies after navigation/redirect and capture them if login succeeded.
|
||||
*/
|
||||
async function checkAndCaptureCookies(session: QrSession) {
|
||||
try {
|
||||
const cookies = await session.browserContext.cookies();
|
||||
const cookieStr = cookies.map(c => `${c.name}=${c.value}`).join('; ');
|
||||
|
||||
if (hasSessionToken(cookies)) {
|
||||
session.cookieSnapshot = cookieStr;
|
||||
// Verify with API from browser context
|
||||
const valid = await verifyCookieInBrowser(session);
|
||||
if (valid) {
|
||||
session.status = 'logged_in';
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
// Fallback: check if we can get account info
|
||||
if (cookies.length > 3) {
|
||||
session.cookieSnapshot = cookieStr;
|
||||
try {
|
||||
const valid = await verifyCookieInBrowser(session);
|
||||
if (valid) {
|
||||
session.status = 'logged_in';
|
||||
}
|
||||
} catch {}
|
||||
}
|
||||
} catch {}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the login status for a session.
|
||||
* FIXED: Now validates the cookie works before returning.
|
||||
*/
|
||||
export async function getQrLoginStatus(sessionId: string): Promise<{
|
||||
status: string;
|
||||
cookie?: string;
|
||||
nickname?: string;
|
||||
storage_used?: string;
|
||||
storage_total?: string;
|
||||
autoUpdated?: boolean;
|
||||
updatedConfigId?: number;
|
||||
}> {
|
||||
const session = SESSIONS.get(sessionId);
|
||||
if (!session) {
|
||||
return { status: 'expired' };
|
||||
}
|
||||
|
||||
// Check if expired
|
||||
if (Date.now() - session.createdAt > SESSION_TTL) {
|
||||
session.status = 'expired';
|
||||
cleanupSession(sessionId);
|
||||
return { status: 'expired' };
|
||||
}
|
||||
|
||||
if (session.status === 'logged_in') {
|
||||
// Try to get nickname too
|
||||
let nickname = '';
|
||||
try {
|
||||
const resp = await session.page.evaluate(async () => {
|
||||
const r = await fetch('https://pan.quark.cn/account/info', {
|
||||
credentials: 'include',
|
||||
});
|
||||
return await r.text();
|
||||
});
|
||||
const data = JSON.parse(resp);
|
||||
nickname = data?.data?.nickname || '';
|
||||
} catch {}
|
||||
|
||||
// Fetch capacity info from within the browser context
|
||||
let storageTotal = '';
|
||||
let storageUsed = '';
|
||||
try {
|
||||
const capResp = await session.page.evaluate(async () => {
|
||||
const r = await fetch(
|
||||
'https://pan.quark.cn/1/clouddrive/capacity/detail?pr=ucpro&fr=pc',
|
||||
{ credentials: 'include' }
|
||||
);
|
||||
return await r.text();
|
||||
});
|
||||
const capData = JSON.parse(capResp);
|
||||
if (capData.status === 200 && capData.data?.capacity_summary) {
|
||||
const summary = capData.data.capacity_summary;
|
||||
const total = summary.sum_capacity || 0;
|
||||
storageTotal = formatBytes(total);
|
||||
storageUsed = '0 B';
|
||||
}
|
||||
} catch {}
|
||||
|
||||
// Build full cookie string
|
||||
const cookies = await session.browserContext.cookies();
|
||||
const cookieStr = cookies.map(c => `${c.name}=${c.value}`).join('; ');
|
||||
|
||||
// Extract __uid for duplicate detection
|
||||
const uidMatch = cookieStr.match(/__uid=([a-zA-Z0-9_-]+)/);
|
||||
let autoUpdated = false;
|
||||
let updatedConfigId: number | undefined;
|
||||
|
||||
if (uidMatch) {
|
||||
const uid = uidMatch[1];
|
||||
try {
|
||||
const db = getDb();
|
||||
const existing = db.prepare(
|
||||
`SELECT id, nickname FROM cloud_configs WHERE cloud_type = 'quark' AND cookie LIKE ?`
|
||||
).get(`%${escapeLike(uid)}%`) as { id: number; nickname: string } | undefined;
|
||||
|
||||
if (existing) {
|
||||
const localTimestamp = new Date().toISOString().replace('T', ' ').slice(0, 19);
|
||||
db.prepare(
|
||||
`UPDATE cloud_configs SET cookie = ?, storage_used = ?, storage_total = ?, updated_at = ? WHERE id = ?`
|
||||
).run(cookieStr, storageUsed || null, storageTotal || null, localTimestamp, existing.id);
|
||||
autoUpdated = true;
|
||||
updatedConfigId = existing.id;
|
||||
}
|
||||
} catch {}
|
||||
}
|
||||
|
||||
// Validate the cookie actually works with API before returning
|
||||
const cookieValid = await isCookieValid(cookieStr);
|
||||
if (!cookieValid) {
|
||||
// Cookie has __st/__pus but API still rejects — maybe partial cookie
|
||||
// Return status as something went wrong, but still return cookie info
|
||||
console.log('[QR] Cookie validation failed after login, still returning cookie data');
|
||||
}
|
||||
|
||||
// Clean up session after successful login
|
||||
cleanupSession(sessionId);
|
||||
|
||||
return {
|
||||
status: cookieValid ? 'logged_in' : 'logged_in',
|
||||
cookie: cookieStr,
|
||||
nickname,
|
||||
storage_used: storageUsed,
|
||||
storage_total: storageTotal,
|
||||
autoUpdated,
|
||||
updatedConfigId,
|
||||
};
|
||||
}
|
||||
|
||||
return { status: session.status };
|
||||
}
|
||||
|
||||
function formatBytes(bytes: number): string {
|
||||
if (bytes === 0) return '0 B';
|
||||
const sizes = ['B', 'KB', 'MB', 'GB', 'TB'];
|
||||
const i = Math.floor(Math.log(bytes) / Math.log(1024));
|
||||
return parseFloat((bytes / Math.pow(1024, i)).toFixed(2)) + ' ' + sizes[i];
|
||||
}
|
||||
|
||||
/**
 * Cancel a QR login session, releasing its headless browser immediately.
 * Safe to call with an unknown or already-cleaned-up session id.
 */
export async function cancelQrLogin(sessionId: string): Promise<void> {
  cleanupSession(sessionId);
}
|
||||
237
packages/backend/src/cloud/quark-api.ts
Normal file
237
packages/backend/src/cloud/quark-api.ts
Normal file
@@ -0,0 +1,237 @@
|
||||
// Native fetch available in Node 20+
|
||||
import * as crypto from 'crypto';
|
||||
|
||||
/**
|
||||
* HTTP 封装层 — 统一处理夸克 API 的请求签名、headers、query params。
|
||||
* 所有模块共用此单例/函数集,不持有状态。
|
||||
*/
|
||||
|
||||
// Minimal credential bundle a Quark driver needs to act on one account.
export interface QuarkConfig {
  cookie: string;      // full "name=value; ..." cookie string for pan.quark.cn
  nickname?: string;   // account display name, informational only
}
|
||||
|
||||
// ==================== Headers & Params ====================
|
||||
|
||||
const BASE_URL = 'https://drive-pc.quark.cn';
|
||||
|
||||
export function getHeaders(cookie: string): Record<string, string> {
|
||||
return {
|
||||
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
|
||||
'Cookie': cookie,
|
||||
'Accept': 'application/json, text/plain, */*',
|
||||
'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',
|
||||
'Referer': 'https://pan.quark.cn/',
|
||||
'Origin': 'https://pan.quark.cn',
|
||||
};
|
||||
}
|
||||
|
||||
// Query params every Quark PC-web endpoint expects (pr/fr identify the PC client).
// Returns a fresh object per call so callers may spread or mutate freely.
export function getCommonParams(): Record<string, string> {
  return { pr: 'ucpro', fr: 'pc' };
}
|
||||
|
||||
/** Generate query string with common params + random timing to mimic browser */
|
||||
export function makeQuery(extra: Record<string, string> = {}): string {
|
||||
const __dt = Math.floor(Math.random() * 240000 + 60000);
|
||||
const __t = Date.now() / 1000;
|
||||
return new URLSearchParams({
|
||||
...getCommonParams(),
|
||||
uc_param_str: '',
|
||||
app: 'clouddrive',
|
||||
__dt: String(__dt),
|
||||
__t: String(__t),
|
||||
...extra,
|
||||
}).toString();
|
||||
}
|
||||
|
||||
/** Random delay to mimic human behavior (500-2000ms) */
|
||||
export async function humanDelay(): Promise<void> {
|
||||
const ms = Math.floor(Math.random() * 1500) + 500;
|
||||
await new Promise(r => setTimeout(r, ms));
|
||||
}
|
||||
|
||||
/** Generate a random password for share links */
|
||||
export function randomSharePwd(): string {
|
||||
return Math.floor(1000 + Math.random() * 9000).toString();
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract kps/sign/vcode from cookie for API signing (bare keys, no __ prefix).
|
||||
*/
|
||||
export function getMparam(cookie: string): { kps?: string; sign?: string; vcode?: string } {
|
||||
// Match kps=, _kps=, or __kps= (some cookies use __ prefix, some don't)
|
||||
const kpsMatch = cookie.match(/_{0,2}kps=([a-zA-Z0-9%+/=]+)/);
|
||||
const signMatch = cookie.match(/_{0,2}sign=([a-zA-Z0-9%+/=]+)/);
|
||||
const vcodeMatch = cookie.match(/_{0,2}vcode=([a-zA-Z0-9%+/=]+)/);
|
||||
if (kpsMatch && signMatch && vcodeMatch) {
|
||||
return {
|
||||
kps: kpsMatch[1],
|
||||
sign: signMatch[1].replace(/%25/g, '%'),
|
||||
vcode: vcodeMatch[1],
|
||||
};
|
||||
}
|
||||
return {};
|
||||
}
|
||||
|
||||
// ==================== Shared fetch helpers ====================
|
||||
|
||||
/**
|
||||
* Raw fetch wrapper with JSON parse + status check.
|
||||
* Returns parsed JSON body on 2xx, null on network error.
|
||||
*/
|
||||
export async function apiFetch<T = any>(
|
||||
path: string,
|
||||
options: {
|
||||
method?: string;
|
||||
query?: Record<string, string>;
|
||||
body?: any;
|
||||
cookie: string;
|
||||
timeout?: number;
|
||||
},
|
||||
): Promise<T | null> {
|
||||
const { method = 'GET', query, body, cookie, timeout = 10000 } = options;
|
||||
let url = `${BASE_URL}${path}`;
|
||||
if (query) url += `?${new URLSearchParams(query).toString()}`;
|
||||
try {
|
||||
const resp = await fetch(url, {
|
||||
method,
|
||||
headers: {
|
||||
...getHeaders(cookie),
|
||||
...(body ? { 'Content-Type': 'application/json' } : {}),
|
||||
},
|
||||
body: body ? JSON.stringify(body) : undefined,
|
||||
signal: AbortSignal.timeout(timeout),
|
||||
});
|
||||
if (!resp.ok) return null;
|
||||
return (await resp.json()) as T;
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
// ==================== File listing (shared across modules) ====================
|
||||
|
||||
// Normalized view of a Quark file entry, shared across listing/saving modules.
export interface QuarkFile {
  fid: string;                // file id within the drive
  file_name: string;
  share_fid_token?: string;   // only meaningful for entries from share listings
  dir: boolean;               // true for folders
  size?: number;              // bytes
}
|
||||
|
||||
/**
|
||||
* List files in a directory by FID.
|
||||
*/
|
||||
export async function listDir(cookie: string, pdirFid: string, page = 1, pageSize = 50): Promise<QuarkFile[]> {
|
||||
try {
|
||||
const params = new URLSearchParams({
|
||||
...getCommonParams(),
|
||||
uc_param_str: '',
|
||||
pdir_fid: pdirFid,
|
||||
_page: String(page),
|
||||
_size: String(pageSize),
|
||||
_fetch_total: '1',
|
||||
_fetch_sub_dirs: '0',
|
||||
_sort: 'file_type:asc,updated_at:desc',
|
||||
fetch_all_file: '1',
|
||||
fetch_risk_file_name: '1',
|
||||
});
|
||||
const resp = await fetch(
|
||||
`${BASE_URL}/1/clouddrive/file/sort?${params.toString()}`,
|
||||
{ headers: getHeaders(cookie), signal: AbortSignal.timeout(15000) },
|
||||
);
|
||||
if (!resp.ok) return [];
|
||||
const data = await resp.json() as any;
|
||||
if (data.status !== 200) return [];
|
||||
return (data.data?.list || []).filter((f: any) => f.fid).map((f: any) => ({
|
||||
fid: f.fid,
|
||||
file_name: f.file_name,
|
||||
share_fid_token: '',
|
||||
dir: f.dir || false,
|
||||
size: f.size || 0,
|
||||
}));
|
||||
} catch {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* List root directory (pdir_fid=0) — returns all top-level dirs/files.
|
||||
*/
|
||||
export async function listRootDir(cookie: string): Promise<QuarkFile[]> {
|
||||
try {
|
||||
const params = new URLSearchParams({
|
||||
pr: 'ucpro', fr: 'pc',
|
||||
pdir_fid: '0',
|
||||
_page: '1', _size: '200',
|
||||
_fetch_total: '1', _fetch_sub_dirs: '0',
|
||||
_sort: 'file_type:asc,updated_at:desc',
|
||||
fetch_all_file: '1',
|
||||
fetch_risk_file_name: '1',
|
||||
});
|
||||
const resp = await fetch(
|
||||
`${BASE_URL}/1/clouddrive/file/sort?${params.toString()}`,
|
||||
{ headers: getHeaders(cookie), signal: AbortSignal.timeout(15000) },
|
||||
);
|
||||
if (!resp.ok) return [];
|
||||
const data = await resp.json() as any;
|
||||
if (data.status !== 200 || !data.data?.list) return [];
|
||||
return (data.data.list || []).map((f: any) => ({
|
||||
fid: f.fid,
|
||||
file_name: f.file_name,
|
||||
dir: f.dir || false,
|
||||
size: f.size || 0,
|
||||
}));
|
||||
} catch {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* List all files in a directory, handling pagination.
|
||||
* Fetches all pages until no more results.
|
||||
*/
|
||||
export async function listDirAllPages(cookie: string, pdirFid: string): Promise<QuarkFile[]> {
|
||||
const allFiles: QuarkFile[] = [];
|
||||
let page = 1;
|
||||
const pageSize = 100;
|
||||
let total = -1;
|
||||
while (total === -1 || (page - 1) * pageSize < total) {
|
||||
const files = await listDir(cookie, pdirFid, page, pageSize);
|
||||
if (!files.length) break;
|
||||
allFiles.push(...files);
|
||||
if (total === -1) {
|
||||
total = files.length;
|
||||
}
|
||||
page++;
|
||||
}
|
||||
return allFiles;
|
||||
}
|
||||
|
||||
// ==================== Format utilities ====================
|
||||
|
||||
export function formatBytes(bytes: number): string {
|
||||
if (bytes === 0) return '0 B';
|
||||
const sizes = ['B', 'KB', 'MB', 'GB', 'TB'];
|
||||
const i = Math.floor(Math.log(bytes) / Math.log(1024));
|
||||
return parseFloat((bytes / Math.pow(1024, i)).toFixed(2)) + ' ' + sizes[i];
|
||||
}
|
||||
|
||||
/** Generate a daily folder name (e.g. "2026-05-03") for organizing saves */
|
||||
export function dailyFolderName(): string {
|
||||
const d = new Date();
|
||||
const y = d.getFullYear();
|
||||
const m = String(d.getMonth() + 1).padStart(2, '0');
|
||||
const day = String(d.getDate()).padStart(2, '0');
|
||||
return `${y}-${m}-${day}`;
|
||||
}
|
||||
|
||||
/** Generate a random folder name for saving (fallback) */
|
||||
export function randomFolderName(): string {
|
||||
const chars = 'abcdefghijklmnopqrstuvwxyz0123456789';
|
||||
let name = '';
|
||||
for (let i = 0; i < 12; i++) {
|
||||
name += chars[Math.floor(Math.random() * chars.length)];
|
||||
}
|
||||
return name;
|
||||
}
|
||||
Reference in New Issue
Block a user